aboutsummaryrefslogtreecommitdiff
path: root/model
diff options
context:
space:
mode:
authorAlex Auvolat <alex.auvolat@ens.fr>2015-07-24 10:27:15 -0400
committerAlex Auvolat <alex.auvolat@ens.fr>2015-07-24 10:27:15 -0400
commit1e64a442e78b5e471b2f573295bd9a747b7c6c3f (patch)
tree836d804e16bb19cb08eab7bce4c7c2b5a5e7489a /model
parent389bfd3637dfb523a3e4194c7281a0c538166546 (diff)
downloadtaxi-1e64a442e78b5e471b2f573295bd9a747b7c6c3f.tar.gz
taxi-1e64a442e78b5e471b2f573295bd9a747b7c6c3f.zip
Memory net refactoring
Diffstat (limited to 'model')
-rw-r--r--model/memory_network.py49
-rw-r--r--model/memory_network_bidir.py70
-rw-r--r--model/memory_network_mlp.py133
3 files changed, 107 insertions, 145 deletions
diff --git a/model/memory_network.py b/model/memory_network.py
index e7ba51c..84a8edf 100644
--- a/model/memory_network.py
+++ b/model/memory_network.py
@@ -14,11 +14,58 @@ import error
from model import ContextEmbedder
class MemoryNetworkBase(Initializable):
- def __init__(self, config, **kwargs):
+ def __init__(self, config, prefix_encoder, candidate_encoder, **kwargs):
super(MemoryNetworkBase, self).__init__(**kwargs)
+ self.prefix_encoder = prefix_encoder
+ self.candidate_encoder = candidate_encoder
self.config = config
+ self.softmax = Softmax()
+ self.children = [ self.softmax, prefix_encoder, candidate_encoder ]
+
+ self.inputs = self.prefix_encoder.apply.inputs \
+ + ['candidate_%s'%x for x in self.candidate_encoder.apply.inputs]
+
+ def candidate_destination(self, **kwargs):
+ return tensor.concatenate(
+ (tensor.shape_padright(kwargs['candidate_last_k_latitude'][:,-1]),
+ tensor.shape_padright(kwargs['candidate_last_k_longitude'][:,-1])),
+ axis=1)
+
+ @application(outputs=['cost'])
+ def cost(self, **kwargs):
+ y_hat = self.predict(**kwargs)
+ y = tensor.concatenate((kwargs['destination_latitude'][:, None],
+ kwargs['destination_longitude'][:, None]), axis=1)
+
+ return error.erdist(y_hat, y).mean()
+
+ @application(outputs=['destination'])
+ def predict(self, **kwargs):
+ prefix_representation = self.prefix_encoder.apply(
+ **{ x: kwargs[x] for x in self.prefix_encoder.apply.inputs })
+ candidate_representation = self.candidate_encoder.apply(
+ **{ x: kwargs['candidate_'+x] for x in self.candidate_encoder.apply.inputs })
+
+ if self.config.normalize_representation:
+ prefix_representation = prefix_representation \
+ / tensor.sqrt((prefix_representation ** 2).sum(axis=1, keepdims=True))
+ candidate_representation = candidate_representation \
+ / tensor.sqrt((candidate_representation ** 2).sum(axis=1, keepdims=True))
+
+ similarity_score = tensor.dot(prefix_representation, candidate_representation.T)
+ similarity = self.softmax.apply(similarity_score)
+
+ return tensor.dot(similarity, self.candidate_destination(**kwargs))
+
+ @predict.property('inputs')
+ def predict_inputs(self):
+ return self.inputs
+
+ @cost.property('inputs')
+ def cost_inputs(self):
+ return self.inputs + ['destination_latitude', 'destination_longitude']
class StreamBase(object):
def __init__(self, config):
diff --git a/model/memory_network_bidir.py b/model/memory_network_bidir.py
index 9dad091..cc99312 100644
--- a/model/memory_network_bidir.py
+++ b/model/memory_network_bidir.py
@@ -75,69 +75,19 @@ class RecurrentEncoder(Initializable):
class Model(MemoryNetworkBase):
def __init__(self, config, **kwargs):
- super(Model, self).__init__(config, **kwargs)
# Build prefix encoder : recurrent then MLP
- self.prefix_encoder = RecurrentEncoder(self.config.prefix_encoder,
- self.config.representation_size,
- self.config.representation_activation(),
- name='prefix_encoder')
+ prefix_encoder = RecurrentEncoder(config.prefix_encoder,
+ config.representation_size,
+ config.representation_activation(),
+ name='prefix_encoder')
# Build candidate encoder
- self.candidate_encoder = RecurrentEncoder(self.config.candidate_encoder,
- self.config.representation_size,
- self.config.representation_activation(),
- name='candidate_encoder')
+ candidate_encoder = RecurrentEncoder(config.candidate_encoder,
+ config.representation_size,
+ config.representation_activation(),
+ name='candidate_encoder')
- # Rest of the stuff
- self.softmax = Softmax()
+ # And... that's it!
+ super(Model, self).__init__(config, prefix_encoder, candidate_encoder, **kwargs)
- self.inputs = self.prefix_encoder.inputs \
- + ['candidate_'+k for k in self.candidate_encoder.inputs]
-
- self.children = [ self.prefix_encoder,
- self.candidate_encoder,
- self.softmax ]
-
-
- @application(outputs=['destination'])
- def predict(self, **kwargs):
- prefix_representation = self.prefix_encoder.apply(
- **{ name: kwargs[name] for name in self.prefix_encoder.inputs })
-
- candidate_representation = self.prefix_encoder.apply(
- **{ name: kwargs['candidate_'+name] for name in self.candidate_encoder.inputs })
-
- if self.config.normalize_representation:
- candidate_representation = candidate_representation \
- / tensor.sqrt((candidate_representation ** 2).sum(axis=1, keepdims=True))
-
- similarity_score = tensor.dot(prefix_representation, candidate_representation.T)
- similarity = self.softmax.apply(similarity_score)
-
- candidate_mask = kwargs['candidate_latitude_mask']
- candidate_last = tensor.cast(candidate_mask.sum(axis=1) - 1, 'int64')
- candidate_destination = tensor.concatenate(
- (kwargs['candidate_latitude'][tensor.arange(candidate_mask.shape[0]), candidate_last]
- [:, None],
- kwargs['candidate_longitude'][tensor.arange(candidate_mask.shape[0]), candidate_last]
- [:, None]),
- axis=1)
-
- return tensor.dot(similarity, candidate_destination)
-
- @predict.property('inputs')
- def predict_inputs(self):
- return self.inputs
-
- @application(outputs=['cost'])
- def cost(self, **kwargs):
- y_hat = self.predict(**kwargs)
- y = tensor.concatenate((kwargs['destination_latitude'][:, None],
- kwargs['destination_longitude'][:, None]), axis=1)
-
- return error.erdist(y_hat, y).mean()
-
- @cost.property('inputs')
- def cost_inputs(self):
- return self.inputs + ['destination_latitude', 'destination_longitude']
diff --git a/model/memory_network_mlp.py b/model/memory_network_mlp.py
index cb8de2a..de07e60 100644
--- a/model/memory_network_mlp.py
+++ b/model/memory_network_mlp.py
@@ -16,91 +16,56 @@ from model import ContextEmbedder
from memory_network import StreamSimple as Stream
from memory_network import MemoryNetworkBase
+class MLPEncoder(Initializable):
+ def __init__(self, config, output_dim, activation, **kwargs):
+ super(MLPEncoder, self).__init__(**kwargs)
+
+ self.config = config
+ self.context_embedder = ContextEmbedder(self.config)
+
+ self.encoder_mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden]
+ + [activation],
+ dims=[config.dim_input]
+ + config.dim_hidden
+ + [output_dim],
+ name='encoder_mlp')
+
+ self.extremities = {'%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis
+ for side in ['first', 'last'] for axis in [0, 1]}
-class Model(MemoryNetworkBase):
- def __init__(self, **kwargs):
- super(Model, self).__init__(**kwargs)
-
- self.prefix_encoder = MLP(activations=[Rectifier() for _ in config.prefix_encoder.dim_hidden]
- + [config.representation_activation()],
- dims=[config.prefix_encoder.dim_input]
- + config.prefix_encoder.dim_hidden
- + [config.representation_size],
- name='prefix_encoder')
-
- self.candidate_encoder = MLP(
- activations=[Rectifier() for _ in config.candidate_encoder.dim_hidden]
- + [config.representation_activation()],
- dims=[config.candidate_encoder.dim_input]
- + config.candidate_encoder.dim_hidden
- + [config.representation_size],
- name='candidate_encoder')
- self.softmax = Softmax()
-
- self.prefix_extremities = {'%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis
- for side in ['first', 'last'] for axis in [0, 1]}
- self.candidate_extremities = {'candidate_%s_k_%s' % (side, axname): axis
- for side in ['first', 'last']
- for axis, axname in enumerate(['latitude', 'longitude'])}
-
- self.inputs = self.context_embedder.inputs \
- + ['candidate_%s'%k for k in self.context_embedder.inputs] \
- + self.prefix_extremities.keys() + self.candidate_extremities.keys()
self.children = [ self.context_embedder,
- self.prefix_encoder,
- self.candidate_encoder,
- self.softmax ]
+ self.encoder_mlp ]
def _push_initialization_config(self):
- for (mlp, config) in [[self.prefix_encoder, self.config.prefix_encoder],
- [self.candidate_encoder, self.config.candidate_encoder]]:
- mlp.weights_init = config.weights_init
- mlp.biases_init = config.biases_init
-
- @application(outputs=['destination'])
- def predict(self, **kwargs):
- prefix_embeddings = tuple(self.context_embedder.apply(
- **{k: kwargs[k] for k in self.context_embedder.inputs }))
- prefix_extremities = tuple((kwargs[k] - data.train_gps_mean[v]) / data.train_gps_std[v]
- for k, v in self.prefix_extremities.items())
- prefix_inputs = tensor.concatenate(prefix_extremities + prefix_embeddings, axis=1)
- prefix_representation = self.prefix_encoder.apply(prefix_inputs)
- if self.config.normalize_representation:
- prefix_representation = prefix_representation \
- / tensor.sqrt((prefix_representation ** 2).sum(axis=1, keepdims=True))
-
- candidate_embeddings = tuple(self.context_embedder.apply(**{k: kwargs['candidate_%s'%k]
- for k in self.context_embedder.inputs }))
- candidate_extremities = tuple((kwargs[k] - data.train_gps_mean[v]) / data.train_gps_std[v]
- for k, v in self.candidate_extremities.items())
- candidate_inputs = tensor.concatenate(candidate_extremities + candidate_embeddings, axis=1)
- candidate_representation = self.candidate_encoder.apply(candidate_inputs)
- if self.config.normalize_representation:
- candidate_representation = candidate_representation \
- / tensor.sqrt((candidate_representation ** 2).sum(axis=1, keepdims=True))
-
- similarity_score = tensor.dot(prefix_representation, candidate_representation.T)
- similarity = self.softmax.apply(similarity_score)
-
- candidate_destination = tensor.concatenate(
- (tensor.shape_padright(kwargs['candidate_last_k_latitude'][:,-1]),
- tensor.shape_padright(kwargs['candidate_last_k_longitude'][:,-1])),
- axis=1)
-
- return tensor.dot(similarity, candidate_destination)
-
- @predict.property('inputs')
- def predict_inputs(self):
- return self.inputs
-
- @application(outputs=['cost'])
- def cost(self, **kwargs):
- y_hat = self.predict(**kwargs)
- y = tensor.concatenate((kwargs['destination_latitude'][:, None],
- kwargs['destination_longitude'][:, None]), axis=1)
-
- return error.erdist(y_hat, y).mean()
-
- @cost.property('inputs')
- def cost_inputs(self):
- return self.inputs + ['destination_latitude', 'destination_longitude']
+ for brick in [self.context_embedder, self.encoder_mlp]:
+ brick.weights_init = self.config.weights_init
+ brick.biases_init = self.config.biases_init
+
+ @application
+ def apply(self, **kwargs):
+ embeddings = tuple(self.context_embedder.apply(
+ **{k: kwargs[k] for k in self.context_embedder.inputs }))
+ extremities = tuple((kwargs[k] - data.train_gps_mean[v]) / data.train_gps_std[v]
+ for k, v in self.extremities.items())
+ inputs = tensor.concatenate(extremities + embeddings, axis=1)
+
+ return self.encoder_mlp.apply(inputs)
+
+ @apply.property('inputs')
+ def apply_inputs(self):
+ return self.context_embedder.inputs + self.extremities.keys()
+
+
+class Model(MemoryNetworkBase):
+ def __init__(self, config, **kwargs):
+ prefix_encoder = MLPEncoder(config.prefix_encoder,
+ config.representation_size,
+ config.representation_activation())
+
+ candidate_encoder = MLPEncoder(config.candidate_encoder,
+ config.representation_size,
+ config.representation_activation())
+
+ super(Model, self).__init__(config, prefix_encoder, candidate_encoder, **kwargs)
+
+