summaryrefslogtreecommitdiff
path: root/NetGen
diff options
context:
space:
mode:
authorJoe Zhao <ztuowen@gmail.com>2014-04-14 08:14:45 +0800
committerJoe Zhao <ztuowen@gmail.com>2014-04-14 08:14:45 +0800
commitcccccbf6cca94a3eaf813b4468453160e91c332b (patch)
tree23418cb73a10ae3b0688681a7f0ba9b06424583e /NetGen
downloadtnet-cccccbf6cca94a3eaf813b4468453160e91c332b.tar.gz
tnet-cccccbf6cca94a3eaf813b4468453160e91c332b.tar.bz2
tnet-cccccbf6cca94a3eaf813b4468453160e91c332b.zip
First commit
Diffstat (limited to 'NetGen')
-rw-r--r--NetGen/layerobj.py160
-rw-r--r--NetGen/layerobj.pycbin0 -> 8454 bytes
-rw-r--r--NetGen/mathobj.py32
-rw-r--r--NetGen/mathobj.pycbin0 -> 1177 bytes
-rw-r--r--NetGen/testscript.py72
5 files changed, 264 insertions, 0 deletions
diff --git a/NetGen/layerobj.py b/NetGen/layerobj.py
new file mode 100644
index 0000000..3cfadf0
--- /dev/null
+++ b/NetGen/layerobj.py
@@ -0,0 +1,160 @@
+'''
+Created on Mar 16, 2014
+
+@author: joe
+'''
+
+import random
+import mathobj
+
+# Default random generators used to initialise network parameters.
+# Matrix weights: Gaussian with mean 0.0 and standard deviation 3.
+matRandFnc=lambda :3*random.gauss(0.0,1.0)
+# Bias values: uniform in [-4.1, -3.9).
+# NOTE(review): the -4.1 offset looks unusual for a bias initialiser --
+# confirm it is intentional and not a typo for -0.1.
+vecRandFnc=lambda :random.random()/5.0-4.1
+
+class SharedLinearity:
+    # Descriptor for a "<biasedlinearity>" component: a randomly
+    # initialised weight matrix followed by a bias vector.
+    def __init__(self,nIn=1,nOut=1,inS=True,
+            matRand=matRandFnc,vecRand=vecRandFnc):
+        # nIn/nOut: input/output sizes.  inS: scale matrix entries by
+        # 1/sqrt(fan-in) when printed (see mathobj.PrintMatrix).
+        # matRand/vecRand: per-entry generators for matrix and bias.
+        self.nInputs=nIn
+        self.nOutputs=nOut
+        self.inScale=inS
+        self.matRand=matRand
+        self.vecRand=vecRand
+
+    def flush(self):
+        # Emit the component to stdout.  The mathobj helpers read their
+        # configuration from module globals, so those are set first.
+        print "<biasedlinearity>", self.nOutputs, self.nInputs
+        mathobj.randomFnc=self.matRand
+        mathobj.inputScale=self.inScale
+        mathobj.PrintTransMatrix(self.nInputs, self.nOutputs)
+        mathobj.randomFnc=self.vecRand
+        mathobj.PrintVector(self.nOutputs)
+
+class Linearity:
+    # Descriptor for a "<linearity>" component: a weight matrix only
+    # (no bias vector, unlike SharedLinearity).
+    def __init__(self,nIn=1,nOut=1,inS=True,
+            matRand=matRandFnc):
+        # nIn/nOut: input/output sizes.  inS: 1/sqrt(fan-in) scaling
+        # flag.  matRand: per-entry generator for the matrix.
+        self.nInputs=nIn
+        self.nOutputs=nOut
+        self.inScale=inS
+        self.matRand=matRand
+
+    def flush(self):
+        # Emit header then the randomly initialised transposed matrix.
+        print "<linearity>", self.nOutputs, self.nInputs
+        mathobj.randomFnc=self.matRand
+        mathobj.inputScale=self.inScale
+        mathobj.PrintTransMatrix(self.nInputs, self.nOutputs)
+
+class UpdatableBias:
+    # Descriptor for an "<updatablebias>" component: a single bias
+    # vector of size nIn (inputs == outputs).
+    def __init__(self,nIn=1,vecRand=vecRandFnc):
+        self.nInputs=nIn
+        self.nOutputs=nIn
+        self.vecRand=vecRand
+
+    def flush(self):
+        # Emit header then the randomly initialised bias vector.
+        print "<updatablebias>", self.nOutputs, self.nInputs
+        mathobj.randomFnc=self.vecRand
+        mathobj.PrintVector(self.nInputs)
+
+class MiscPipe:
+    # Generic tag-only component (e.g. "<softmax>", "<sigmoid>",
+    # "<learnstop>"): prints a single header line; inputs == outputs.
+    def __init__(self,nIn=1,name="<pipe>"):
+        self.nInputs=nIn
+        self.nOutputs=nIn
+        self.name=name
+
+    def flush(self):
+        print self.name, self.nOutputs, self.nInputs
+
+class Distrib:
+    # "<distrib>" component: header line followed by its size parameter
+    # on its own line.
+    def __init__(self,nIn=1,size=1):
+        self.nInputs=nIn
+        self.nOutputs=nIn
+        self.size=size
+
+    def flush(self):
+        print "<distrib>", self.nOutputs, self.nInputs
+        print self.size
+
+class Combine:
+    # "<combine>" component: header line followed by its size parameter
+    # on its own line (mirror of Distrib).
+    def __init__(self,nIn=1,size=1):
+        self.nInputs=nIn
+        self.nOutputs=nIn
+        self.size=size
+
+    def flush(self):
+        print "<combine>", self.nOutputs, self.nInputs
+        print self.size
+
+class Divide:
+    # "<divide>" component carrying a list of division lengths.
+    # NOTE(review): divLen=[] is a mutable default argument -- the same
+    # list object is shared by every instance constructed without an
+    # explicit argument, so push() on one leaks into the others.  Also
+    # divLen[0] raises IndexError when the default (empty) list is used.
+    # NOTE(review): nOutputs is taken from the first length only --
+    # confirm it should not be the sum of divLen.
+    def __init__(self,nIn=1,divLen=[]):
+        self.nInputs=nIn
+        self.nOutputs=divLen[0]
+        self.divLen=divLen
+
+    def push(self,nxtLen):
+        # Append one more division length.
+        self.divLen+=[nxtLen];
+
+    def flush(self):
+        # Header, then the count followed by each length on one line.
+        print "<divide>", self.nOutputs, self.nInputs
+        print len(self.divLen),
+        for Len in self.divLen:
+            print Len,
+        print
+
+class Merge:
+    # "<merge>" component: inverse of Divide, carrying a list of lengths.
+    # NOTE(review): divLen=[] is a mutable default argument shared across
+    # instances (see Divide), and divLen[0] raises IndexError when the
+    # default empty list is used.
+    def __init__(self,nOut=1,divLen=[]):
+        self.nOutputs=nOut
+        self.nInputs=divLen[0]
+        self.divLen=divLen
+
+    def push(self,nxtLen):
+        # Append one more merge length.
+        self.divLen+=[nxtLen];
+
+    def flush(self):
+        # Header, then the count followed by each length on one line.
+        print "<merge>", self.nOutputs, self.nInputs
+        print len(self.divLen),
+        for Len in self.divLen:
+            print Len,
+        print
+
+class Reorder:
+    # "<reorder>" component carrying a list of positions (Order);
+    # inputs == outputs.
+    # NOTE(review): Order=[] is a mutable default argument shared across
+    # instances constructed without an explicit list.
+    def __init__(self,nIn=1,Order=[]):
+        self.nInputs=nIn
+        self.nOutputs=nIn
+        self.Order=Order
+
+    def push(self,nxtPos):
+        # Append one more position.
+        self.Order+=[nxtPos];
+
+    def flush(self):
+        # Header, then the count followed by each position on one line.
+        print "<reorder>", self.nOutputs, self.nInputs
+        print len(self.Order),
+        for Len in self.Order:
+            print Len,
+        print
+
+class Compound:
+    # "<compound>" container: prints its header, flushes each child
+    # component in order, then closes with "<endblock>".
+    # NOTE(review): Objs=[] is a mutable default argument shared across
+    # instances constructed without an explicit list.
+    def __init__(self,nIn=1,nOut=1,Objs=[]):
+        self.nInputs=nIn
+        self.nOutputs=nOut
+        self.Objs=Objs
+
+    def push(self,nxtObj):
+        # Append one more child component.
+        self.Objs+=[nxtObj];
+
+    def flush(self):
+        print "<compound>", self.nOutputs, self.nInputs
+        for Obj in self.Objs:
+            Obj.flush()
+        print "<endblock>"
+
+class Discrete:
+    # "<discrete>" container: identical structure to Compound but with a
+    # different header tag.
+    # NOTE(review): Objs=[] is a mutable default argument shared across
+    # instances constructed without an explicit list.
+    def __init__(self,nIn=1,nOut=1,Objs=[]):
+        self.nInputs=nIn
+        self.nOutputs=nOut
+        self.Objs=Objs
+
+    def push(self,nxtObj):
+        # Append one more child component.
+        self.Objs+=[nxtObj];
+
+    def flush(self):
+        print "<discrete>", self.nOutputs, self.nInputs
+        for Obj in self.Objs:
+            Obj.flush()
+        print "<endblock>" \ No newline at end of file
diff --git a/NetGen/layerobj.pyc b/NetGen/layerobj.pyc
new file mode 100644
index 0000000..1ad80a9
--- /dev/null
+++ b/NetGen/layerobj.pyc
Binary files differ
diff --git a/NetGen/mathobj.py b/NetGen/mathobj.py
new file mode 100644
index 0000000..77dad90
--- /dev/null
+++ b/NetGen/mathobj.py
@@ -0,0 +1,32 @@
+'''
+Created on Mar 16, 2014
+
+@author: joe
+'''
+
+import math, random
+
+
+# Module-level configuration read by the Print* helpers below; callers
+# (see layerobj) overwrite these globals before each call.
+randomFnc=lambda :3*random.gauss(0.0,1.0)
+inputScale=True
+
+def PrintTransMatrix(rows,cols):
+    # Print the transpose: a (cols x rows) matrix.
+    PrintMatrix(cols,rows)
+
+def PrintMatrix(rows,cols):
+    # Print an "m rows cols" header, then a rows x cols matrix of values
+    # drawn from the module-global randomFnc, one row per line.  When
+    # inputScale is set each entry is divided by sqrt(rows).
+    print 'm', rows, cols
+    for row in range(rows):
+        for col in range(cols):
+            if(inputScale):
+                print randomFnc()/math.sqrt(rows),
+            else:
+                print randomFnc(),
+        print
+
+def PrintVector(cols):
+    # Print a "v cols" header followed by cols values from the
+    # module-global randomFnc on a single line.
+    print 'v', cols
+    for col in range(cols):
+        print randomFnc(),
+    print
+
+ \ No newline at end of file
diff --git a/NetGen/mathobj.pyc b/NetGen/mathobj.pyc
new file mode 100644
index 0000000..4963b50
--- /dev/null
+++ b/NetGen/mathobj.pyc
Binary files differ
diff --git a/NetGen/testscript.py b/NetGen/testscript.py
new file mode 100644
index 0000000..70fa005
--- /dev/null
+++ b/NetGen/testscript.py
@@ -0,0 +1,72 @@
+'''
+Created on Mar 16, 2014
+
+@author: joe
+'''
+from layerobj import *
+
+if __name__ == '__main__':
+    # Emit a sequence of network-component blocks to stdout.  Each
+    # Discrete(...) prints a "<discrete>" header, flushes its children
+    # in order, and closes with "<endblock>"; the trailing calls emit
+    # three stand-alone components.  Output presumably forms a tnet
+    # network description -- TODO confirm against the consumer.
+    Discrete(1,1,[Distrib(1,3)]).flush();
+    Discrete(1,1,
+        [SharedLinearity(1,1),
+        MiscPipe(1,"<learnstop>"),
+        MiscPipe(1,"<learnstop>"),
+        ]).flush();
+    Discrete(1,1,
+        [MiscPipe(1,"<softmax>"),
+        MiscPipe(1),
+        MiscPipe(1),
+        ]).flush();
+    Discrete(1,1,
+        [SharedLinearity(1,1),
+        MiscPipe(1),
+        MiscPipe(1),
+        ]).flush();
+    Discrete(1,1,
+        [MiscPipe(1,"<softmax>"),
+        MiscPipe(1),
+        MiscPipe(1),
+        ]).flush();
+    Discrete(1,1,
+        [MiscPipe(1,"<sigmoid>"),
+        MiscPipe(1),
+        MiscPipe(1),
+        ]).flush();
+    Discrete(1,1,
+        [Distrib(1,2),
+        MiscPipe(1),
+        MiscPipe(1),
+        ]).flush();
+    Discrete(1,1,
+        [MiscPipe(1,"<learnstop>"),
+        SharedLinearity(1,1),
+        SharedLinearity(1,1),
+        MiscPipe(1),
+        ]).flush();
+    Discrete(1,1,
+        [MiscPipe(1),
+        Combine(2),
+        MiscPipe(1),
+        ]).flush();
+    Discrete(1,1,
+        [MiscPipe(1),
+        SharedLinearity(1,1),
+        MiscPipe(1),
+        ]).flush();
+    Discrete(1,1,
+        [MiscPipe(1),
+        MiscPipe(1,"<sigmoid>"),
+        MiscPipe(1),
+        ]).flush();
+    Discrete(1,1,
+        [SharedLinearity(1,1),
+        SharedLinearity(1,1),
+        SharedLinearity(1,1),
+        ]).flush();
+    Discrete(1,1,
+        [Combine(3)
+        ]).flush();
+    MiscPipe(1,"<sigmoid>").flush()
+    SharedLinearity(1,1).flush();
+    MiscPipe(1,"<softmax>").flush();
+ \ No newline at end of file