author     Alex Auvolat <alex@adnab.me>    2016-03-08 13:26:28 +0100
committer  Alex Auvolat <alex@adnab.me>    2016-03-08 13:26:28 +0100
commit     2f479926c16d2911d0dd878c21de082abfc5b237 (patch)
tree       b399e9ad9af04a9449334dff1a47449808b7ca13 /config
parent     23093608e0edc43477c3a2ed804ae1016790f7e4 (diff)
download   text-rnn-2f479926c16d2911d0dd878c21de082abfc5b237.tar.gz
           text-rnn-2f479926c16d2911d0dd878c21de082abfc5b237.zip
Revive project
Diffstat (limited to 'config')
-rw-r--r--  config/__init__.py        0
-rw-r--r--  config/hpc-lstm-1.py     37
-rw-r--r--  config/lstm-frigo-irc.py 39
3 files changed, 76 insertions(+), 0 deletions(-)
diff --git a/config/__init__.py b/config/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/config/__init__.py
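
Note: config/__init__.py is empty and only makes 'config' a Python package. The individual config files below use hyphens in their names (hpc-lstm-1.py, lstm-frigo-irc.py), so they cannot be imported with a plain import statement; the training code presumably loads them by file path. A minimal sketch of such a loader (hypothetical helper, not taken from this repository):

import importlib.util

def load_config(path):
    # Hypothetical helper: load a hyphenated config file such as
    # 'config/hpc-lstm-1.py' as a module object, by file path.
    spec = importlib.util.spec_from_file_location("config_module", path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module

cfg = load_config("config/hpc-lstm-1.py")  # executing it requires the blocks library
print(cfg.hidden_dims)                     # -> [128, 128, 256, 512]
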
diff --git a/config/hpc-lstm-1.py b/config/hpc-lstm-1.py
new file mode 100644
index 0000000..e4009d5
--- /dev/null
+++ b/config/hpc-lstm-1.py
@@ -0,0 +1,37 @@
+from blocks.algorithms import AdaDelta, Momentum
+from blocks.bricks import Tanh, Rectifier
+
+from model.hpc_lstm import Model
+
+dataset = 'data/logcompil-2016-03-07.txt'
+io_dim = 256
+
+# An epoch will be composed of 'num_seqs' sequences of length 'seq_len',
+# each divided into chunks of length 'seq_div_size'
+num_seqs = 100
+seq_len = 2000
+seq_div_size = 100
+
+hidden_dims = [128, 128, 256, 512]
+cost_factors = [1., 1., 1., 1.]
+hidden_q = [0.02, 0.02, 0.05, 0.05]
+activation_function = Tanh()
+
+out_hidden = [512]
+out_hidden_act = [Rectifier]
+
+step_rule = AdaDelta()
+#step_rule = Momentum(learning_rate=0.0001, momentum=0.99)
+
+# monitoring and parameter saving frequencies (in batches)
+monitor_freq = 10
+save_freq = 100
+
+# used for sample generation and IRC mode
+sample_temperature = 0.7 #0.5
+
+# periodic sample generation during training
+sample_len = 1000
+sample_freq = 100
+sample_init = '\nalex\ttu crois?\n'
+
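
For scale, with the values above an epoch covers num_seqs = 100 sequences of seq_len = 2000 characters, each cut into seq_len / seq_div_size = 20 chunks of 100 characters, i.e. about 200,000 characters of the training log per epoch. A minimal sketch of that arithmetic (illustrative only; the repository's actual data iterator is not part of this diff):

# Chunking implied by the 'num_seqs' / 'seq_len' / 'seq_div_size' comment above.
num_seqs = 100       # sequences per epoch
seq_len = 2000       # characters per sequence
seq_div_size = 100   # characters per chunk within a sequence

chunks_per_seq = seq_len // seq_div_size   # 20 chunks per sequence
chars_per_epoch = num_seqs * seq_len       # 200000 characters per epoch
print(chunks_per_seq, chars_per_epoch)     # -> 20 200000
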
diff --git a/config/lstm-frigo-irc.py b/config/lstm-frigo-irc.py
new file mode 100644
index 0000000..2d0bf3a
--- /dev/null
+++ b/config/lstm-frigo-irc.py
@@ -0,0 +1,39 @@
+from blocks.algorithms import AdaDelta
+from blocks.bricks import Tanh
+
+from model.lstm import Model
+
+dataset = 'data/logcompil-2016-03-07.txt'
+io_dim = 256
+
+# An epoch will be composed of 'num_seqs' sequences of length 'seq_len',
+# each divided into chunks of length 'seq_div_size'
+num_seqs = 100
+seq_len = 2000
+seq_div_size = 100
+
+hidden_dims = [1024, 1024, 1024]
+activation_function = Tanh()
+
+i2h_all = True # input to all hidden layers or only the first layer
+h2o_all = True # all hidden layers to the output or only the last layer
+
+w_noise_std = 0.02
+i_dropout = 0.5
+
+l1_reg = 0
+
+step_rule = AdaDelta()
+
+# monitoring and parameter saving frequencies (in batches)
+monitor_freq = 10
+save_freq = 100
+
+# used for sample generation and IRC mode
+sample_temperature = 0.7 #0.5
+
+# periodic sample generation during training
+sample_len = 1000
+sample_freq = 100
+sample_init = '\nalex\ttu crois?\n'
+
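
Both configs set sample_temperature = 0.7 (with 0.5 left as an alternative) for sample generation and IRC mode. The sampling code itself is not part of this diff; as a rough illustration of what the temperature does under the usual scheme (assumed here, not taken from the repository), the io_dim = 256 character logits are divided by the temperature before the softmax, so values below 1 sharpen the distribution:

import numpy as np

def sample_char(logits, temperature=0.7):
    # Illustrative temperature sampling over io_dim = 256 character logits.
    # Assumed scheme (not from this repository): scale logits by 1/temperature,
    # apply a softmax, then draw one character index from the result.
    scaled = logits / temperature
    scaled -= scaled.max()          # numerical stability before exponentiation
    probs = np.exp(scaled)
    probs /= probs.sum()
    return np.random.choice(len(probs), p=probs)

logits = np.random.randn(256)
print(sample_char(logits, temperature=0.7))  # lower temperature -> more conservative samples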