1from __future__ import print_function
2
3#!/usr/bin/env python
4# A simple recurrent neural network that learns a simple sequential data set.
5
6__author__ = 'Tom Schaul, tom@idsia.ch and Daan Wierstra'
7
8from datasets import AnBnCnDataSet #@UnresolvedImport
9from pybrain.supervised import BackpropTrainer
10from pybrain.structure import FullConnection, RecurrentNetwork, TanhLayer, LinearLayer, BiasUnit
11
12
13def testTraining():
14    # the AnBnCn dataset (sequential)
15    d = AnBnCnDataSet()
16
17    # build a recurrent network to be trained
18    hsize = 2
19    n = RecurrentNetwork()
20    n.addModule(TanhLayer(hsize, name = 'h'))
21    n.addModule(BiasUnit(name = 'bias'))
22    n.addOutputModule(LinearLayer(1, name = 'out'))
23    n.addConnection(FullConnection(n['bias'], n['h']))
24    n.addConnection(FullConnection(n['h'], n['out']))
25    n.addRecurrentConnection(FullConnection(n['h'], n['h']))
26    n.sortModules()
27
28    # initialize the backprop trainer and train
29    t = BackpropTrainer(n, learningrate = 0.1, momentum = 0.0, verbose = True)
30    t.trainOnDataset(d, 200)
31
32    # the resulting weights are in the network:
33    print('Final weights:', n.params)
34
35if __name__ == '__main__':
36    testTraining()