4235
Comment:
|
6511
|
Deletions are marked like this. | Additions are marked like this. |
Line 21: | Line 21: |
---- /!\ '''Edit conflict - other version:''' ---- |
|
Line 101: | Line 103: |
---- /!\ '''Edit conflict - your version:''' ---- == Examples == {{{ #!java import org.neuroph.core.NeuralNetwork; import org.neuroph.nnet.MultiLayerPerceptron; import org.neuroph.core.learning.TrainingSet; import org.neuroph.core.learning.TrainingElement; import org.neuroph.core.learning.SupervisedTrainingElement; import java.util.Vector; import org.neuroph.util.TransferFunctionType; /** * This sample shows how to create, train, save and load simple Multi Layer Perceptron */ void setup() { // create training set (logical XOR function) TrainingSet trainingSet = new TrainingSet(); trainingSet.addElement(new SupervisedTrainingElement(new double[] { 0, 0 } , new double[] { 0 } )); trainingSet.addElement(new SupervisedTrainingElement(new double[] { 0, 1 } , new double[] { 1 } )); trainingSet.addElement(new SupervisedTrainingElement(new double[] { 1, 0 } , new double[] { 1 } )); trainingSet.addElement(new SupervisedTrainingElement(new double[] { 1, 1 } , new double[] { 0 } )); // create multi layer perceptron MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.TANH, 2, 3, 1); // learn the training set myMlPerceptron.learnInSameThread(trainingSet); // test perceptron System.out.println("Testing trained neural network"); testNeuralNetwork(myMlPerceptron, trainingSet); // save trained neural network myMlPerceptron.save("myMlPerceptron.nnet"); // load saved neural network NeuralNetwork loadedMlPerceptron = NeuralNetwork.load("myMlPerceptron.nnet"); // test loaded neural network System.out.println("Testing loaded neural network"); testNeuralNetwork(loadedMlPerceptron, trainingSet); noLoop(); } void testNeuralNetwork(NeuralNetwork nnet, TrainingSet tset) { nnet.setInput(new double[]{1, 0}); nnet.calculate(); double networkOutput = nnet.getOutput()[0]; System.out.println(" Output: " + networkOutput); } }}} ---- /!\ '''End of edit conflict''' ---- |
Contents
Preparation
Follow the instructions from the assignment Creative Programming to prepare the SoftwareEnvironment, including Processing, Arduino and the AdMoVeo Robot. If you are not planning to use the GUI components from the ControlP5 library, you can skip the ControlP5 part.
Install Neuroph, a lightweight Java neural network framework.
Download Neuroph Studio and install it.
Download neuroph-2.5b.zip. Unzip it into C:\Programs. In C:\Programs\neuroph-2.5b you will find neuroph-2.5b.jar. (You can unzip it anywhere you want, but remember its location for later reference.)
- In "Processing Sketchbook location"\libraries, create a sub-directory "neuroph".
- In "Processing Sketchbook location"\libraries\neuroph, create a sub-directory "library".
- From C:\Programs\neuroph-2.5b copy neuroph-2.5b.jar to "Processing Sketchbook location"\libraries\neuroph\library. Rename neuroph-2.5b.jar to neuroph.jar.
- From C:\Programs\neuroph-2.5b\lib copy the two jar files found there to "Processing Sketchbook location"\libraries\neuroph\library.
If you are confident enough, you can also try to use Eclipse to program in Java. Then try to follow the instructions from the assignment Processing2Java. Remember that you must include the aforementioned jar files in your projects if you want to use Neuroph.
References
Edit conflict - other version:
Examples
{{{#!java import org.neuroph.core.NeuralNetwork; import org.neuroph.nnet.MultiLayerPerceptron; import org.neuroph.core.learning.TrainingSet; import org.neuroph.core.learning.TrainingElement; import org.neuroph.core.learning.SupervisedTrainingElement; import java.util.Vector; import org.neuroph.util.TransferFunctionType;
/**
- This sample shows how to create, train, save and load simple Multi Layer Perceptron
*/
void setup() {
- // create training set (logical XOR function)
TrainingSet trainingSet = new TrainingSet(); trainingSet.addElement(new SupervisedTrainingElement(new double[] {
- 0, 0
- 0
trainingSet.addElement(new SupervisedTrainingElement(new double[] {
- 0, 1
- 1
trainingSet.addElement(new SupervisedTrainingElement(new double[] {
- 1, 0
- 1
trainingSet.addElement(new SupervisedTrainingElement(new double[] {
- 1, 1
- 0
MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.TANH, 2, 3, 1); // learn the training set myMlPerceptron.learnInSameThread(trainingSet); // test perceptron System.out.println("Testing trained neural network"); testNeuralNetwork(myMlPerceptron, trainingSet); // save trained neural network myMlPerceptron.save("myMlPerceptron.nnet"); // load saved neural network
NeuralNetwork loadedMlPerceptron = NeuralNetwork.load("myMlPerceptron.nnet"); // test loaded neural network System.out.println("Testing loaded neural network"); testNeuralNetwork(loadedMlPerceptron, trainingSet); noLoop();
- }
void testNeuralNetwork(NeuralNetwork nnet, TrainingSet tset) {
- nnet.setInput(new double[]{1, 0}); nnet.calculate(); double networkOutput = nnet.getOutput()[0]; System.out.println(" Output: " + networkOutput);
- }
}}}
Edit conflict - your version:
Examples
1 import org.neuroph.core.NeuralNetwork;
2 import org.neuroph.nnet.MultiLayerPerceptron;
3 import org.neuroph.core.learning.TrainingSet;
4 import org.neuroph.core.learning.TrainingElement;
5 import org.neuroph.core.learning.SupervisedTrainingElement;
6 import java.util.Vector;
7 import org.neuroph.util.TransferFunctionType;
8
9 /**
10 * This sample shows how to create, train, save and load simple Multi Layer Perceptron
11 */
12
13 void setup() {
14
15 // create training set (logical XOR function)
16 TrainingSet trainingSet = new TrainingSet();
17 trainingSet.addElement(new SupervisedTrainingElement(new double[] {
18 0, 0
19 }
20 , new double[] {
21 0
22 }
23 ));
24 trainingSet.addElement(new SupervisedTrainingElement(new double[] {
25 0, 1
26 }
27 , new double[] {
28 1
29 }
30 ));
31 trainingSet.addElement(new SupervisedTrainingElement(new double[] {
32 1, 0
33 }
34 , new double[] {
35 1
36 }
37 ));
38 trainingSet.addElement(new SupervisedTrainingElement(new double[] {
39 1, 1
40 }
41 , new double[] {
42 0
43 }
44 ));
45
46 // create multi layer perceptron
47 MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.TANH, 2, 3, 1);
48 // learn the training set
49 myMlPerceptron.learnInSameThread(trainingSet);
50
51 // test perceptron
52 System.out.println("Testing trained neural network");
53 testNeuralNetwork(myMlPerceptron, trainingSet);
54
55 // save trained neural network
56 myMlPerceptron.save("myMlPerceptron.nnet");
57
58 // load saved neural network
59 NeuralNetwork loadedMlPerceptron = NeuralNetwork.load("myMlPerceptron.nnet");
60
61 // test loaded neural network
62 System.out.println("Testing loaded neural network");
63 testNeuralNetwork(loadedMlPerceptron, trainingSet);
64
65 noLoop();
66 }
67
68 void testNeuralNetwork(NeuralNetwork nnet, TrainingSet tset) {
69
70 nnet.setInput(new double[]{1, 0});
71 nnet.calculate();
72 double networkOutput = nnet.getOutput()[0];
73 System.out.println(" Output: " + networkOutput);
74 }
End of edit conflict