Diffstat (limited to 'config')
-rw-r--r-- | config/hpc-gru-0.py  | 52
-rw-r--r-- | config/hpc-gru-1.py  | 52
-rw-r--r-- | config/hpc-lstm-0.py | 43
-rw-r--r-- | config/hpc-lstm-2.py | 46
-rw-r--r-- | config/hpc-lstm-3.py | 53
5 files changed, 246 insertions, 0 deletions
diff --git a/config/hpc-gru-0.py b/config/hpc-gru-0.py
new file mode 100644
index 0000000..ab58a86
--- /dev/null
+++ b/config/hpc-gru-0.py
@@ -0,0 +1,52 @@
+import numpy
+from numpy.random import RandomState
+
+from blocks.algorithms import AdaDelta, Momentum, RMSProp, CompositeRule, BasicMomentum
+from blocks.bricks import Tanh, Rectifier
+from blocks.initialization import IsotropicGaussian, Constant
+
+from model.hpc_gru import Model
+
+dataset = 'data/logcompil-2016-03-07.txt'
+
+io_dim = 256
+repr_dim = 64
+embedding_matrix = (RandomState(42).binomial(1, 10./repr_dim, ((io_dim, repr_dim)))
+                    -RandomState(123).binomial(1, 10./repr_dim, ((io_dim, repr_dim))))
+
+# An epoch will be composed of 'num_seqs' sequences of length 'seq_len'
+# divided in chunks of length 'seq_div_size'
+num_seqs = 100
+seq_len = 2000
+seq_div_size = 100
+
+hidden_dims = [128, 384, 1024]
+cost_factors = [1., 1., 1.]
+hidden_q = [0.5, 0.5, 0.5]
+activation_function = Tanh()
+
+out_hidden = [512]
+out_hidden_act = [Tanh]
+
+weight_noise = 0
+
+step_rule = AdaDelta()
+#step_rule = CompositeRule([RMSProp(learning_rate=0.01),
+#                           BasicMomentum(momentum=0.9)])
+#step_rule = Momentum(learning_rate=.1, momentum=0.9)
+
+weights_init = IsotropicGaussian(0.1)
+biases_init = Constant(0.)
+
+# parameter saving freq (number of batches)
+monitor_freq = 100
+save_freq = 100
+
+# used for sample generation and IRC mode
+sample_temperature = 0.5 #0.7
+
+# do we want to generate samples at times during training?
+sample_len = 1000
+sample_freq = 100
+sample_init = '\nalex\ttu crois?\n'
+
diff --git a/config/hpc-gru-1.py b/config/hpc-gru-1.py
new file mode 100644
index 0000000..b59b025
--- /dev/null
+++ b/config/hpc-gru-1.py
@@ -0,0 +1,52 @@
+import numpy
+from numpy.random import RandomState
+
+from blocks.algorithms import AdaDelta, Momentum, RMSProp, CompositeRule, BasicMomentum, Adam
+from blocks.bricks import Tanh, Rectifier
+from blocks.initialization import IsotropicGaussian, Constant
+
+from model.hpc_gru import Model
+
+dataset = 'data/logcompil-2016-03-07.txt'
+
+io_dim = 256
+repr_dim = 128
+embedding_matrix = (RandomState(42).binomial(1, 0.1, ((io_dim, repr_dim)))
+                    -RandomState(123).binomial(1, 0.1, ((io_dim, repr_dim))))
+
+# An epoch will be composed of 'num_seqs' sequences of length 'seq_len'
+# divided in chunks of length 'seq_div_size'
+num_seqs = 20
+seq_len = 5000
+seq_div_size = 50
+
+hidden_dims = [128, 192, 256, 512]
+cost_factors = [1., 1., 1., 1.]
+hidden_q = [0.5, 0.5, 0.5, 0.5]
+activation_function = Tanh()
+
+out_hidden = [512]
+out_hidden_act = [Rectifier]
+
+weight_noise = 0.05
+
+step_rule = Adam()
+#step_rule = CompositeRule([RMSProp(learning_rate=0.01),
+#                           BasicMomentum(momentum=0.9)])
+#step_rule = Momentum(learning_rate=.1, momentum=0.9)
+
+weights_init = IsotropicGaussian(0.1)
+biases_init = Constant(0.01)
+
+# parameter saving freq (number of batches)
+monitor_freq = 500
+save_freq = monitor_freq
+
+# used for sample generation and IRC mode
+sample_temperature = 0.5 #0.7
+
+# do we want to generate samples at times during training?
+sample_len = 1000
+sample_freq = monitor_freq
+sample_init = '\nalex\ttu crois?\n'
+
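Every config in this patch that sets repr_dim < io_dim builds embedding_matrix the same way: two independent sparse binary masks drawn from fixed RandomState seeds are subtracted, giving a fixed (io_dim, repr_dim) matrix with entries in {-1, 0, +1}, so each of the 256 possible byte values is mapped to a sparse random ternary code rather than a one-hot vector (hpc-lstm-0.py below keeps the one-hot numpy.eye(io_dim) variant). The standalone sketch below is not part of the patch; it only illustrates what that expression produces, using the values from hpc-gru-0.py.

import numpy
from numpy.random import RandomState

io_dim, repr_dim = 256, 64   # values used in hpc-gru-0.py and hpc-lstm-2.py

# Same construction as in the configs: two independent sparse binary masks,
# subtracted, give a fixed random code with entries in {-1, 0, +1}.
embedding_matrix = (RandomState(42).binomial(1, 10. / repr_dim, (io_dim, repr_dim))
                    - RandomState(123).binomial(1, 10. / repr_dim, (io_dim, repr_dim)))

print(embedding_matrix.shape)                      # (256, 64): one row per byte value
print(sorted(numpy.unique(embedding_matrix)))      # [-1, 0, 1]
print((embedding_matrix != 0).sum(axis=1).mean())  # about 2*p*(1-p)*repr_dim, i.e. roughly 17 non-zeros per row here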
diff --git a/config/hpc-lstm-0.py b/config/hpc-lstm-0.py
new file mode 100644
index 0000000..afb6471
--- /dev/null
+++ b/config/hpc-lstm-0.py
@@ -0,0 +1,43 @@
+import numpy
+from numpy.random import RandomState
+
+from blocks.algorithms import AdaDelta, Momentum
+from blocks.bricks import Tanh, Rectifier
+
+from model.hpc_lstm import Model
+
+dataset = 'data/logcompil-2016-03-07.txt'
+
+io_dim = 256
+repr_dim = 256
+embedding_matrix = numpy.eye(io_dim)
+
+# An epoch will be composed of 'num_seqs' sequences of length 'seq_len'
+# divided in chunks of length 'seq_div_size'
+num_seqs = 100
+seq_len = 2000
+seq_div_size = 100
+
+hidden_dims = [128, 128, 256, 512]
+cost_factors = [1., 1., 1., 1.]
+hidden_q = [0.1, 0.15, 0.22, 0.33]
+activation_function = Tanh()
+
+out_hidden = [512]
+out_hidden_act = [Rectifier]
+
+step_rule = AdaDelta()
+#step_rule = Momentum(learning_rate=0.0001, momentum=0.99)
+
+# parameter saving freq (number of batches)
+monitor_freq = 10
+save_freq = 100
+
+# used for sample generation and IRC mode
+sample_temperature = 0.7 #0.5
+
+# do we want to generate samples at times during training?
+sample_len = 1000
+sample_freq = 100
+sample_init = '\nalex\ttu crois?\n'
+
diff --git a/config/hpc-lstm-2.py b/config/hpc-lstm-2.py
new file mode 100644
index 0000000..aaed80e
--- /dev/null
+++ b/config/hpc-lstm-2.py
@@ -0,0 +1,46 @@
+import numpy
+from numpy.random import RandomState
+
+from blocks.algorithms import AdaDelta, Momentum, RMSProp, CompositeRule, BasicMomentum
+from blocks.bricks import Tanh, Rectifier
+
+from model.hpc_lstm import Model
+
+dataset = 'data/logcompil-2016-03-07.txt'
+
+io_dim = 256
+repr_dim = 64
+embedding_matrix = (RandomState(42).binomial(1, 10./repr_dim, ((io_dim, repr_dim)))
+                    -RandomState(123).binomial(1, 10./repr_dim, ((io_dim, repr_dim))))
+
+# An epoch will be composed of 'num_seqs' sequences of length 'seq_len'
+# divided in chunks of length 'seq_div_size'
+num_seqs = 100
+seq_len = 2000
+seq_div_size = 100
+
+hidden_dims = [64, 256, 1024]
+cost_factors = [1., 1., 1.]
+hidden_q = [0.5, 0.5, 0.5]
+activation_function = Tanh()
+
+out_hidden = [512]
+out_hidden_act = [Rectifier]
+
+step_rule = AdaDelta()
+#step_rule = CompositeRule([RMSProp(learning_rate=0.01),
+#                           BasicMomentum(momentum=0.9)])
+#step_rule = Momentum(learning_rate=.1, momentum=0.9)
+
+# parameter saving freq (number of batches)
+monitor_freq = 100
+save_freq = 100
+
+# used for sample generation and IRC mode
+sample_temperature = 0.7 #0.5
+
+# do we want to generate samples at times during training?
+sample_len = 1000
+sample_freq = 100
+sample_init = '\nalex\ttu crois?\n'
+
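All five configs share the num_seqs / seq_len / seq_div_size triple described in their comments: an epoch visits num_seqs sequences of seq_len characters each, and every sequence is fed to the network in shorter chunks of seq_div_size characters so the recurrent state can be carried across chunks (truncated backpropagation through time). The data pipeline itself is not in this diff; the sketch below is only one plausible reading of those three parameters, with train_step standing in for the real update and a random array standing in for the log corpus.

import numpy

num_seqs, seq_len, seq_div_size = 100, 2000, 100   # values from hpc-lstm-0.py and hpc-lstm-2.py

text = numpy.random.randint(0, 256, size=1000000).astype(numpy.uint8)  # stand-in corpus

# Pick num_seqs starting offsets and read seq_len bytes from each.
starts = numpy.random.randint(0, len(text) - seq_len, size=num_seqs)
seqs = numpy.stack([text[s:s + seq_len] for s in starts])   # shape (num_seqs, seq_len)

# One epoch = seq_len / seq_div_size batches of shape (num_seqs, seq_div_size);
# the hidden state would be kept between consecutive chunks of the same sequences.
for i in range(0, seq_len, seq_div_size):
    chunk = seqs[:, i:i + seq_div_size]
    # train_step(chunk)   # hypothetical update on one chunk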
diff --git a/config/hpc-lstm-3.py b/config/hpc-lstm-3.py
new file mode 100644
index 0000000..fa0f77e
--- /dev/null
+++ b/config/hpc-lstm-3.py
@@ -0,0 +1,53 @@
+
+import numpy
+from numpy.random import RandomState
+
+from blocks.algorithms import AdaDelta, Momentum, RMSProp, CompositeRule, BasicMomentum, Adam
+from blocks.bricks import Tanh, Rectifier
+from blocks.initialization import IsotropicGaussian, Constant
+
+from model.hpc_lstm import Model
+
+dataset = 'data/logcompil-2016-03-07.txt'
+
+io_dim = 256
+repr_dim = 128
+embedding_matrix = (RandomState(42).binomial(1, 0.1, ((io_dim, repr_dim)))
+                    -RandomState(123).binomial(1, 0.1, ((io_dim, repr_dim))))
+
+# An epoch will be composed of 'num_seqs' sequences of length 'seq_len'
+# divided in chunks of length 'seq_div_size'
+num_seqs = 20
+seq_len = 5000
+seq_div_size = 50
+
+hidden_dims = [128, 192, 256, 512]
+cost_factors = [1., 1., 1., 1.]
+hidden_q = [0.5, 0.5, 0.5, 0.5]
+activation_function = Tanh()
+
+out_hidden = [512]
+out_hidden_act = [Rectifier]
+
+weight_noise = 0.05
+
+step_rule = Adam()
+#step_rule = CompositeRule([RMSProp(learning_rate=0.01),
+#                           BasicMomentum(momentum=0.9)])
+#step_rule = Momentum(learning_rate=.1, momentum=0.9)
+
+weights_init = IsotropicGaussian(0.1)
+biases_init = Constant(0.01)
+
+# parameter saving freq (number of batches)
+monitor_freq = 500
+save_freq = monitor_freq
+
+# used for sample generation and IRC mode
+sample_temperature = 0.5 #0.7
+
+# do we want to generate samples at times during training?
+sample_len = 1000
+sample_freq = monitor_freq
+sample_init = '\nalex\ttu crois?\n'
+
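Each config also sets sample_temperature, sample_len, sample_freq and sample_init for the periodic sample generation and IRC mode mentioned in the comments. The generation code is not part of this diff; the snippet below only shows the conventional way such a temperature knob is applied when drawing the next character from the model's output distribution (a lower value such as 0.5 gives more conservative samples than 0.7).

import numpy

def sample_next_char(logits, temperature=0.5):
    # Temperature-scaled sampling: divide the logits by the temperature,
    # renormalise with a softmax, then draw one character index.
    logits = numpy.asarray(logits, dtype='float64') / temperature
    logits -= logits.max()          # for numerical stability
    probs = numpy.exp(logits)
    probs /= probs.sum()
    return numpy.random.choice(len(probs), p=probs)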