author | Étienne Simon <esimon@esimon.eu> | 2015-07-27 13:07:19 -0400 |
---|---|---|
committer | Étienne Simon <esimon@esimon.eu> | 2015-07-27 13:07:19 -0400 |
commit | 9933cafdf3b218a509c2efd6e6e6ba91ae87aa9c (patch) | |
tree | d696681c2b5a73424bdd226b8c9e385b2580acd7 | |
parent | 0021c3fb99d1cd3f8792a8cf5c35548815536428 (diff) | |
download | taxi-9933cafdf3b218a509c2efd6e6e6ba91ae87aa9c.tar.gz taxi-9933cafdf3b218a509c2efd6e6e6ba91ae87aa9c.zip |
Add bidirectional with recurrent ReLU
-rw-r--r-- | config/bidirectional_tgtcls_relu_1.py | 36 |
-rw-r--r-- | model/bidirectional.py | 3 |
2 files changed, 38 insertions, 1 deletion
```diff
diff --git a/config/bidirectional_tgtcls_relu_1.py b/config/bidirectional_tgtcls_relu_1.py
new file mode 100644
index 0000000..918c8e2
--- /dev/null
+++ b/config/bidirectional_tgtcls_relu_1.py
@@ -0,0 +1,36 @@
+import os
+import cPickle
+
+from blocks.bricks import Rectifier
+from blocks.initialization import IsotropicGaussian, Constant
+
+import data
+from model.bidirectional_tgtcls import Model, Stream
+
+
+with open(os.path.join(data.path, 'arrival-clusters.pkl')) as f: tgtcls = cPickle.load(f)
+
+dim_embeddings = [
+    ('origin_call', data.origin_call_train_size, 10),
+    ('origin_stand', data.stands_size, 10),
+    ('week_of_year', 52, 10),
+    ('day_of_week', 7, 10),
+    ('qhour_of_day', 24 * 4, 10),
+    ('taxi_id', data.taxi_id_size, 10),
+]
+
+rec_activation = Rectifier
+
+hidden_state_dim = 100
+
+dim_hidden = [500, 500]
+
+embed_weights_init = IsotropicGaussian(0.01)
+weights_init = IsotropicGaussian(0.1)
+biases_init = Constant(0.01)
+
+batch_size = 100
+batch_sort_size = 20
+
+max_splits = 100
+
diff --git a/model/bidirectional.py b/model/bidirectional.py
index 4c4ffb0..5779752 100644
--- a/model/bidirectional.py
+++ b/model/bidirectional.py
@@ -37,7 +37,8 @@ class BidiRNN(Initializable):
 
         self.context_embedder = ContextEmbedder(config)
 
-        self.rec = SegregatedBidirectional(LSTM(dim=config.hidden_state_dim, name='recurrent'))
+        act = config.rec_activation() if hasattr(config, 'rec_activation') else None
+        self.rec = SegregatedBidirectional(LSTM(dim=config.hidden_state_dim, activation=act, name='recurrent'))
 
         self.fwd_fork = Fork([name for name in self.rec.prototype.apply.sequences if name!='mask'],
                              prototype=Linear(), name='fwd_fork')
```
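The change makes the recurrent activation configurable: the new config module sets `rec_activation = Rectifier`, and the patched `BidiRNN` constructor instantiates that brick and passes it to the LSTM when the attribute exists, otherwise it passes `None` so the LSTM keeps its default (tanh). Below is a minimal standalone sketch of that fallback pattern, under stated assumptions: `ExampleConfig` is a hypothetical stand-in for the project's config module, and the project-specific pieces (`ContextEmbedder`, `SegregatedBidirectional`, the `data` module) are omitted.

```python
# Minimal sketch of the activation-override pattern in this commit (not the
# project's actual code). Assumes only the Blocks library; ExampleConfig is a
# hypothetical stand-in for a config module such as
# config/bidirectional_tgtcls_relu_1.py.
from blocks.bricks import Rectifier
from blocks.bricks.recurrent import LSTM


class ExampleConfig(object):
    hidden_state_dim = 100
    # Optional attribute: delete it and the LSTM falls back to its default
    # activation (tanh), matching the pre-patch behaviour.
    rec_activation = Rectifier


config = ExampleConfig()

# Same fallback logic as the patched BidiRNN constructor: instantiate the
# configured activation brick if the config defines one, else pass None.
act = config.rec_activation() if hasattr(config, 'rec_activation') else None
rec = LSTM(dim=config.hidden_state_dim, activation=act, name='recurrent')
```

Reading the attribute through `hasattr` keeps older config files (which never define `rec_activation`) working unchanged, so only configs like this new ReLU one opt in to the different recurrent activation.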