In PyBrain, networks are composed of Modules which are connected by Connections. You can think of a network as a directed acyclic graph, where the nodes are Modules and the edges are Connections. This makes PyBrain very flexible, but that full flexibility is not necessary in every case.
The buildNetwork Shortcut
For such cases, there is a simple way to create networks: the buildNetwork shortcut:
from pybrain.tools.shortcuts import buildNetwork
net = buildNetwork(2, 3, 1)
This call returns a network that has two input neurons, three hidden neurons, and a single output neuron. In PyBrain, these layers are Module objects, and they are already connected with FullConnection objects.
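The returned network can be used right away. As a quick check (the input values here are arbitrary), activate performs a forward pass, and the individual layers can be looked up by the names the shortcut assigned:

net.activate([1, 2])                    # forward pass; returns the one-dimensional output
net['in'], net['hidden0'], net['out']   # modules are retrievable by name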
The Source Code
__author__ = 'Tom Schaul and Thomas Rueckstiess'

from itertools import chain
import logging
from sys import exit as errorexit

from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.networks.recurrent import RecurrentNetwork
from pybrain.structure.modules import BiasUnit, SigmoidLayer, LinearLayer, LSTMLayer
from pybrain.structure.connections import FullConnection, IdentityConnection

try:
    from arac.pybrainbridge import _RecurrentNetwork, _FeedForwardNetwork
except ImportError, e:
    logging.info("No fast networks available: %s" % e)


class NetworkError(Exception): pass


def buildNetwork(*layers, **options):
    """Build arbitrarily deep networks.

    `layers` should be a list or tuple of integers, that indicate how many
    neurons the layers should have. `bias` and `outputbias` are flags to
    indicate whether the network should have the corresponding biases; both
    default to True.

    To adjust the classes for the layers use the `hiddenclass` and `outclass`
    parameters, which expect a subclass of :class:`NeuronLayer`.

    If the `recurrent` flag is set, a :class:`RecurrentNetwork` will be created,
    otherwise a :class:`FeedForwardNetwork`.

    If the `fast` flag is set, faster arac networks will be used instead of the
    pybrain implementations."""
    # options
    opt = {'bias': True,
           'hiddenclass': SigmoidLayer,
           'outclass': LinearLayer,
           'outputbias': True,
           'peepholes': False,
           'recurrent': False,
           'fast': False,
    }
    for key in options:
        if key not in opt.keys():
            raise NetworkError('buildNetwork unknown option: %s' % key)
        opt[key] = options[key]

    if len(layers) < 2:
        raise NetworkError('buildNetwork needs 2 arguments for input and output layers at least.')

    # Bind the right class to the Network name
    network_map = {
        (False, False): FeedForwardNetwork,
        (True, False): RecurrentNetwork,
    }
    try:
        network_map[(False, True)] = _FeedForwardNetwork
        network_map[(True, True)] = _RecurrentNetwork
    except NameError:
        if opt['fast']:
            raise NetworkError("No fast networks available.")
    if opt['hiddenclass'].sequential or opt['outclass'].sequential:
        if not opt['recurrent']:
            # CHECKME: a warning here?
            opt['recurrent'] = True
    Network = network_map[opt['recurrent'], opt['fast']]
    n = Network()
    # linear input layer
    n.addInputModule(LinearLayer(layers[0], name='in'))
    # output layer of type 'outclass'
    n.addOutputModule(opt['outclass'](layers[-1], name='out'))
    if opt['bias']:
        # add bias module and connection to out module, if desired
        n.addModule(BiasUnit(name='bias'))
        if opt['outputbias']:
            n.addConnection(FullConnection(n['bias'], n['out']))
    # arbitrary number of hidden layers of type 'hiddenclass'
    for i, num in enumerate(layers[1:-1]):
        layername = 'hidden%i' % i
        n.addModule(opt['hiddenclass'](num, name=layername))
        if opt['bias']:
            # also connect all the layers with the bias
            n.addConnection(FullConnection(n['bias'], n[layername]))
    # connections between hidden layers
    for i in range(len(layers) - 3):
        n.addConnection(FullConnection(n['hidden%i' % i], n['hidden%i' % (i + 1)]))
    # other connections
    if len(layers) == 2:
        # flat network, connection from in to out
        n.addConnection(FullConnection(n['in'], n['out']))
    else:
        # network with hidden layer(s), connections from in to first hidden and last hidden to out
        n.addConnection(FullConnection(n['in'], n['hidden0']))
        n.addConnection(FullConnection(n['hidden%i' % (len(layers) - 3)], n['out']))
    # recurrent connections
    if issubclass(opt['hiddenclass'], LSTMLayer):
        if len(layers) > 3:
            errorexit("LSTM networks with > 1 hidden layers are not supported!")
        n.addRecurrentConnection(FullConnection(n['hidden0'], n['hidden0']))

    n.sortModules()
    return n


def _buildNetwork(*layers, **options):
    """This is a helper function to create different kinds of networks.

    `layers` is a list of tuples. Each tuple can contain an arbitrary number of
    layers, each being connected to the next one with IdentityConnections. Due
    to this, all layers have to have the same dimension. We call these tuples
    'parts.'

    Afterwards, the last layer of one tuple is connected to the first layer of
    the following tuple by a FullConnection.

    If the keyword argument bias is given, BiasUnits are added additionally with
    every FullConnection.

    Example:

        _buildNetwork(
            (LinearLayer(3),),
            (SigmoidLayer(4), GaussianLayer(4)),
            (SigmoidLayer(3),),
        )
    """
    bias = options['bias'] if 'bias' in options else False

    net = FeedForwardNetwork()
    layerParts = iter(layers)
    firstPart = iter(layerParts.next())
    firstLayer = firstPart.next()
    net.addInputModule(firstLayer)

    prevLayer = firstLayer

    for part in chain(firstPart, layerParts):
        new_part = True
        for layer in part:
            net.addModule(layer)
            # Pick class depending on whether we entered a new part
            if new_part:
                ConnectionClass = FullConnection
                if bias:
                    biasUnit = BiasUnit('BiasUnit for %s' % layer.name)
                    net.addModule(biasUnit)
                    net.addConnection(FullConnection(biasUnit, layer))
            else:
                ConnectionClass = IdentityConnection
            new_part = False
            conn = ConnectionClass(prevLayer, layer)
            net.addConnection(conn)
            prevLayer = layer
    net.addOutputModule(layer)
    net.sortModules()
    return net
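The options can be combined freely. A hedged example (the layer sizes are arbitrary; TanhLayer is one of PyBrain's standard layer classes):

from pybrain.structure import TanhLayer
from pybrain.tools.shortcuts import buildNetwork

# a 2-4-4-1 network with tanh hidden layers and no bias into the output layer
net = buildNetwork(2, 4, 4, 1, hiddenclass=TanhLayer, outputbias=False)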
Terminology
RecurrentNetwork: recurrent network
FeedForwardNetwork: feed-forward neural network
關(guān)鍵點(diǎn)解釋
1- def buildNetwork (layers, options)中“”與“”的意義徽诲。
def func(*args): print(args)
When the function is called as func(1, 2, 3), the parameter args is the tuple (1, 2, 3).
def func(**args): print(args)
When the function is called as func(a=1, b=2), the parameter args is the dict {'a': 1, 'b': 2}.
def func(*args1, **args2):
    print(args1)
    print(args2)
Calling func(1, 2, a=1, b=2) prints: (1, 2) {'a': 1, 'b': 2}
Calling func(a=1, b=2) prints: () {'a': 1, 'b': 2}
Calling func(1, 2) prints: (1, 2) {}
2 - The meaning of the "_" and "__" prefixes, as seen in _buildNetwork and __author__ above.
_單下劃線開(kāi)頭:弱“內(nèi)部使用”標(biāo)識(shí)昌讲,如:”from M import *”,將不導(dǎo)入所有以下劃線開(kāi)頭的對(duì)象减噪,包括包短绸、模塊、成員
單下劃線結(jié)尾_:只是為了避免與python關(guān)鍵字的命名沖突
__雙下劃線開(kāi)頭:模塊內(nèi)的成員筹裕,表示私有成員醋闭,外部無(wú)法直接調(diào)用
雙下劃線開(kāi)頭雙下劃線結(jié)尾:指那些包含在用戶無(wú)法控制的命名空間中的“魔術(shù)”對(duì)象或?qū)傩裕珙惓蓡T的name饶碘、doc目尖、init、import扎运、file瑟曲、等。推薦永遠(yuǎn)不要將這樣的命名方式應(yīng)用于自己的變量或函數(shù)豪治。
Steps
S-1 Bind the right class to the Network name
Network = network_map[opt['recurrent'], opt['fast']]
S-2 Init network
n = Network()
# linear input layer
n.addInputModule(LinearLayer(layers[0], name='in'))
# output layer of type 'outclass'
n.addOutputModule(opt['outclass'](layers[-1], name='out'))
# arbitrary number of hidden layers of type 'hiddenclass'
for i, num in enumerate(layers[1:-1]):
    layername = 'hidden%i' % i
    n.addModule(opt['hiddenclass'](num, name=layername))
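For instance, buildNetwork(2, 4, 5, 1) creates a LinearLayer named 'in' (2 neurons), hidden layers 'hidden0' (4 neurons) and 'hidden1' (5 neurons), and an output layer 'out' (1 neuron). Printing the finished network lists these modules and their connections:

net = buildNetwork(2, 4, 5, 1)
print(net)   # shows the modules (bias, in, hidden0, hidden1, out) and the connections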
S-3 Connections among layers
# connections between hidden layers
for i in range(len(layers) - 3):
    n.addConnection(FullConnection(n['hidden%i' % i], n['hidden%i' % (i + 1)]))
# other connections
if len(layers) == 2:
    # flat network, connection from in to out
    n.addConnection(FullConnection(n['in'], n['out']))
else:
    # network with hidden layer(s), connections from in to first hidden and last hidden to out
    n.addConnection(FullConnection(n['in'], n['hidden0']))
    n.addConnection(FullConnection(n['hidden%i' % (len(layers) - 3)], n['out']))
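The index arithmetic is easiest to check with a concrete call: for buildNetwork(2, 4, 5, 1), len(layers) is 4, so range(len(layers) - 3) is range(1) and adds exactly one hidden-to-hidden connection (hidden0 to hidden1), while 'hidden%i' % (len(layers) - 3) resolves to 'hidden1', the last hidden layer, which is then connected to 'out'.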
S-4 Recurrent connections
# recurrent connections
if issubclass(opt['hiddenclass'], LSTMLayer):
    if len(layers) > 3:
        errorexit("LSTM networks with > 1 hidden layers are not supported!")
    n.addRecurrentConnection(FullConnection(n['hidden0'], n['hidden0']))
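A usage example of this branch (the layer sizes are arbitrary): because LSTMLayer.sequential is True, the recurrent flag is forced on even if the caller did not set it, and the self-connection on hidden0 is added automatically.

from pybrain.structure.modules import LSTMLayer
net = buildNetwork(2, 3, 1, hiddenclass=LSTMLayer)   # recurrent is forced to True
# net is a RecurrentNetwork with a recurrent hidden0 -> hidden0 FullConnection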
結(jié)語(yǔ)
PyBrain是Python實(shí)現(xiàn)人工神經(jīng)網(wǎng)絡(luò)的一個(gè)第三方庫(kù)洞拨,可以利用其快速構(gòu)建神經(jīng)網(wǎng)絡(luò),本次只是展開(kāi)其構(gòu)建神經(jīng)網(wǎng)絡(luò)的大體步驟负拟,接下來(lái)會(huì)對(duì)具體實(shí)現(xiàn)細(xì)節(jié)進(jìn)行詳細(xì)描述烦衣。