Jannis (0.1preAlpha) | ||
Frames | No Frames |
/* TrainingCoordinator.java - Copyright (c) 2005 by Stefan Thesing
<p>This file is part of Jannis.</p>
<p>Jannis is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.</p>
<p>Jannis is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.</p>
<p>You should have received a copy of the GNU General Public License
along with Jannis; if not, write to the<br>
Free Software Foundation, Inc.,<br>
51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA<br>
*/
package de.webdings.jannis.neuralnet;

import java.io.IOException;
import de.webdings.jannis.exceptions.PatternCreateException;
import de.webdings.jannis.exceptions.PatternGiverReaderCommunicationException;
import de.webdings.jannis.exceptions.PatternLayerMismatchException;

/**
 * TrainingCoordinator is a subclass of {@link Coordinator}.
 * It does the same as its superclass yet it also features
 * a {@link Teacher} and calls its method
 * {@link Teacher#teach()} at the appropriate time.
 *
 * @author Copyright 2005 by Stefan Thesing
 * Website: <a href="http://www.webdings.de">http://www.webdings.de</a>
 * @version 0.1 11.08.2005
 */
public class TrainingCoordinator extends Coordinator {
    //ATTRIBUTES
    /**
     * See {@link Teacher}
     */
    public Teacher teacher;

    //CONSTRUCTORS
    /**
     * @param net The neural net to be coordinated
     * @param fileNameInputPattern Name of the file containing the input pattern used for
     * the training
     * @param teacher The teacher used for this training
     * @throws IOException if an error occurs while reading a file
     * @throws PatternCreateException See {@link PatternCreateException}
     * @throws PatternLayerMismatchException if the desired output pattern size does not
     * match the output layer size
     */
    public TrainingCoordinator(NeuralNet net, String fileNameInputPattern,
                               Teacher teacher) throws IOException, PatternCreateException,
            PatternLayerMismatchException {
        this(net.getLayers(), fileNameInputPattern, teacher);
    }

    /**
     * @param layers The layers of the neural net to be coordinated
     * @param fileNameInputPattern Name of the file containing the input pattern used for
     * the training
     * @param teacher The teacher used for this training
     * @throws IOException if an error occurs while reading a file
     * @throws PatternCreateException {@link PatternCreateException}
     * @throws PatternLayerMismatchException if the desired output pattern size does not
     * match the output layer size
     */
    public TrainingCoordinator(Neuron[][] layers, String fileNameInputPattern,
                               Teacher teacher) throws IOException, PatternCreateException,
            PatternLayerMismatchException {
        super(layers, fileNameInputPattern);
        this.teacher = teacher;
        requireOutputSizeMatch(layers, teacher.getDesiredOutput());
    }

    /**
     * @param net The neural net to be coordinated
     * @param fileNameInputPattern Name of the file containing the input pattern used for
     * the training
     * @param fileNameDesiredOutputPattern Name of the file containing the desired output
     * pattern; a {@link Teacher} is created from it
     * @throws IOException if an error occurs while reading a file
     * @throws PatternCreateException {@link PatternCreateException}
     * @throws PatternLayerMismatchException if the desired output pattern size does not
     * match the output layer size
     */
    public TrainingCoordinator(NeuralNet net, String fileNameInputPattern,
                               String fileNameDesiredOutputPattern) throws IOException, PatternCreateException,
            PatternLayerMismatchException {
        this(net.getLayers(), fileNameInputPattern, fileNameDesiredOutputPattern);
    }

    /**
     * @param layers The layers of the neural net to be coordinated
     * @param fileNameInputPattern Name of the file containing the input pattern used for
     * the training
     * @param fileNameDesiredOutputPattern Name of the file containing the desired output
     * pattern; a {@link Teacher} is created from it
     * @throws IOException if an error occurs while reading a file
     * @throws PatternCreateException {@link PatternCreateException}
     * @throws PatternLayerMismatchException if the desired output pattern size does not
     * match the output layer size
     */
    public TrainingCoordinator(Neuron[][] layers, String fileNameInputPattern,
                               String fileNameDesiredOutputPattern) throws IOException, PatternCreateException,
            PatternLayerMismatchException {
        super(layers, fileNameInputPattern);
        this.teacher = new Teacher(fileNameDesiredOutputPattern, layers);
        requireOutputSizeMatch(layers, teacher.getDesiredOutput());
    }

    /**
     * @param net The neural net to be coordinated
     * @param fileNameInputPattern Name of the file containing the input pattern used for
     * the training
     * @param desiredOutputPattern The desired output pattern; a {@link Teacher} is
     * created from it
     * @throws IOException if an error occurs while reading a file
     * @throws PatternCreateException {@link PatternCreateException}
     * @throws PatternLayerMismatchException if the desired output pattern size does not
     * match the output layer size
     */
    public TrainingCoordinator(NeuralNet net, String fileNameInputPattern,
                               Pattern[] desiredOutputPattern) throws IOException, PatternCreateException,
            PatternLayerMismatchException {
        this(net.getLayers(), fileNameInputPattern, desiredOutputPattern);
    }

    /**
     * @param layers The layers of the neural net to be coordinated
     * @param fileNameInputPattern Name of the file containing the input pattern used for
     * the training
     * @param desiredOutputPattern The desired output pattern; a {@link Teacher} is
     * created from it
     * @throws IOException if an error occurs while reading a file
     * @throws PatternCreateException {@link PatternCreateException}
     * @throws PatternLayerMismatchException if the desired output pattern size does not
     * match the output layer size
     */
    public TrainingCoordinator(Neuron[][] layers, String fileNameInputPattern,
                               Pattern[] desiredOutputPattern) throws IOException, PatternCreateException,
            PatternLayerMismatchException {
        super(layers, fileNameInputPattern);
        this.teacher = new Teacher(desiredOutputPattern, layers);
        requireOutputSizeMatch(layers, desiredOutputPattern);
    }

    /**
     * @param net The neural net to be coordinated
     * @param inputPattern the input pattern used for
     * the training
     * @param desiredOutputPattern The desired output pattern; a {@link Teacher} is
     * created from it
     * @throws PatternLayerMismatchException if the desired output pattern size does not
     * match the output layer size
     */
    public TrainingCoordinator(NeuralNet net, Pattern[] inputPattern,
                               Pattern[] desiredOutputPattern) throws PatternLayerMismatchException {
        this(net.getLayers(), inputPattern, desiredOutputPattern);
    }

    /**
     * @param layers The layers of the neural net to be coordinated
     * @param inputPattern the input pattern used for
     * the training
     * @param desiredOutputPattern The desired output pattern; a {@link Teacher} is
     * created from it
     * @throws PatternLayerMismatchException if the desired output pattern size does not
     * match the output layer size
     */
    public TrainingCoordinator(Neuron[][] layers, Pattern[] inputPattern,
                               Pattern[] desiredOutputPattern) throws PatternLayerMismatchException {
        super(layers, inputPattern);
        this.teacher = new Teacher(desiredOutputPattern, layers);
        requireOutputSizeMatch(layers, desiredOutputPattern);
    }

    /**
     * @param net The neural net to be coordinated
     * @param giver the {@link PatternGiver} feeding input patterns to the net
     * @param reader the {@link PatternReader} collecting output patterns from the net
     * @param desiredOutputPattern The desired output pattern; a {@link Teacher} is
     * created from it
     * @throws PatternLayerMismatchException if the desired output pattern size does not
     * match the output layer size
     */
    public TrainingCoordinator(NeuralNet net, PatternGiver giver, PatternReader reader,
                               Pattern[] desiredOutputPattern) throws PatternLayerMismatchException {
        this(net.getLayers(), giver, reader, desiredOutputPattern);
    }

    /**
     * @param layers The layers of the neural net to be coordinated
     * @param giver the {@link PatternGiver} feeding input patterns to the net
     * @param reader the {@link PatternReader} collecting output patterns from the net
     * @param desiredOutputPattern The desired output pattern; a {@link Teacher} is
     * created from it
     * @throws PatternLayerMismatchException if the desired output pattern size does not
     * match the output layer size
     */
    public TrainingCoordinator(Neuron[][] layers, PatternGiver giver, PatternReader reader,
                               Pattern[] desiredOutputPattern) throws PatternLayerMismatchException {
        super(layers, giver, reader);
        this.teacher = new Teacher(desiredOutputPattern, layers);
        requireOutputSizeMatch(layers, desiredOutputPattern);
    }

    /**
     * Verifies that the desired output pattern has exactly as many entries as the
     * output layer (the last layer) has neurons. Extracted here because the same
     * check was previously duplicated in five constructors.
     *
     * @param layers the layers of the coordinated net; the last entry is the output layer
     * @param desiredOutput the desired output patterns; only the first pattern's size
     * is checked, matching the original behavior
     * @throws PatternLayerMismatchException if the sizes differ
     */
    private static void requireOutputSizeMatch(Neuron[][] layers, Pattern[] desiredOutput)
            throws PatternLayerMismatchException {
        if (layers[layers.length - 1].length != desiredOutput[0].entries.length) {
            throw new PatternLayerMismatchException("The size of the desired output pattern " +
                    "doesn't match the size of the output layer!");
        }
    }

    //METHODS

    /**
     * This method overwrites the method of the superclass. It does the
     * same as {@link de.webdings.jannis.neuralnet.Coordinator#start()}, but
     * additionally calls the method {@link Teacher#teach()} at the appropriate
     * time.
     *
     * <p>Note: the original implementation recursed once per training pattern,
     * which could overflow the stack on long pattern sequences; this version
     * iterates instead, with an identical sequence of calls per pattern.</p>
     *
     * @throws PatternGiverReaderCommunicationException if the number of patterns
     * sent by the giver disagrees with the number read by the reader
     * @see de.webdings.jannis.neuralnet.Coordinator#start()
     */
    public void start() throws PatternGiverReaderCommunicationException {
        while (true) {
            giver.nextPattern();
            // Propagate activation through all layers after the input layer.
            for (int i = 1; i < layers.length; ++i) {
                for (int j = 0; j < layers[i].length; ++j) {
                    if (layers[i][j].tresholdReached()) {
                        layers[i][j].fire();
                    }
                }
            }
            reader.readPattern();
            // Start the teacher (adjusts the net toward the desired output).
            teacher.teach();
            // Continue with the next pattern, if any.
            if (giver.numberSent() != reader.numberOfPatternsRead()) {
                throw new PatternGiverReaderCommunicationException("There was a problem in communication " +
                        "between PatternGiver and PatternReader!");
            }
            clearAll();
            if (reader.numberOfPatternsRead() >= reader.getNumberOfPatternsToRead()) {
                break;
            }
        }
    }
}
Jannis (0.1preAlpha) |
© 2005 by Stefan Thesing;
Verbatim copying and redistribution of this entire page are permitted provided this notice is preserved.