From 2f479926c16d2911d0dd878c21de082abfc5b237 Mon Sep 17 00:00:00 2001
From: Alex Auvolat
Date: Tue, 8 Mar 2016 13:26:28 +0100
Subject: Revive project

---
 config/hpc-lstm-1.py | 37 +++++++++++++++++++++++++++++++++++++
 1 file changed, 37 insertions(+)
 create mode 100644 config/hpc-lstm-1.py

diff --git a/config/hpc-lstm-1.py b/config/hpc-lstm-1.py
new file mode 100644
index 0000000..e4009d5
--- /dev/null
+++ b/config/hpc-lstm-1.py
@@ -0,0 +1,37 @@
+from blocks.algorithms import AdaDelta, Momentum
+from blocks.bricks import Tanh, Rectifier
+
+from model.hpc_lstm import Model
+
+dataset = 'data/logcompil-2016-03-07.txt'
+io_dim = 256
+
+# An epoch is composed of 'num_seqs' sequences of length 'seq_len',
+# each divided into chunks of length 'seq_div_size'
+num_seqs = 100
+seq_len = 2000
+seq_div_size = 100
+
+hidden_dims = [128, 128, 256, 512]
+cost_factors = [1., 1., 1., 1.]
+hidden_q = [0.02, 0.02, 0.05, 0.05]
+activation_function = Tanh()
+
+out_hidden = [512]
+out_hidden_act = [Rectifier]
+
+step_rule = AdaDelta()
+#step_rule = Momentum(learning_rate=0.0001, momentum=0.99)
+
+# monitoring and parameter saving frequencies (in number of batches)
+monitor_freq = 10
+save_freq = 100
+
+# used for sample generation and IRC mode
+sample_temperature = 0.7 #0.5
+
+# should we periodically generate samples during training?
+sample_len = 1000
+sample_freq = 100
+sample_init = '\nalex\ttu crois?\n'
+
--
cgit v1.2.3
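
Editor's note, not part of the patch above: the new file is a plain Python module whose
module-level names (hidden_dims, seq_len, step_rule, ...) act as hyperparameters. The
repository's actual loading code is not shown in this commit, so the sketch below is a
hedged illustration of one common way such a config could be consumed: loading it by file
path (the hyphens in 'hpc-lstm-1.py' rule out a plain import statement) and reading its
attributes. The load_config helper is hypothetical, and executing the file requires the
project's dependencies (Blocks, model.hpc_lstm) to be importable.

    # Hypothetical sketch: load a config module like config/hpc-lstm-1.py by path.
    import importlib.util

    def load_config(path):
        # Build a module object from an arbitrary .py file and execute it.
        spec = importlib.util.spec_from_file_location("config", path)
        config = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(config)
        return config

    if __name__ == '__main__':
        cfg = load_config('config/hpc-lstm-1.py')
        # The per-level lists are expected to stay aligned, one entry per level.
        assert len(cfg.hidden_dims) == len(cfg.cost_factors) == len(cfg.hidden_q)
        print(cfg.num_seqs, cfg.seq_len, cfg.seq_div_size)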