author | AdeB <adbrebs@gmail.com> | 2015-05-05 22:15:22 -0400
committer | AdeB <adbrebs@gmail.com> | 2015-05-05 22:15:22 -0400
commit | f4d3ee6449217535bdbe19ac9c5fdd825d71b0d3 (patch)
tree | b2dfd7f6f914f5f9e4521634b9ffc4a2b0171fdd /model/dest_simple_mlp_tgtcls_alexandre.py
parent | 54613c1f9cf510ca7a71d6619418f2247515aec6 (diff)
New hyperparameters. Training error is monitored.
Diffstat (limited to 'model/dest_simple_mlp_tgtcls_alexandre.py')
-rw-r--r-- | model/dest_simple_mlp_tgtcls_alexandre.py | 75
1 file changed, 75 insertions, 0 deletions
diff --git a/model/dest_simple_mlp_tgtcls_alexandre.py b/model/dest_simple_mlp_tgtcls_alexandre.py
new file mode 100644
index 0000000..87e20a3
--- /dev/null
+++ b/model/dest_simple_mlp_tgtcls_alexandre.py
@@ -0,0 +1,75 @@
+import numpy
+
+import theano
+from theano import tensor
+
+from blocks.bricks import MLP, Rectifier, Linear, Sigmoid, Identity, Softmax
+from blocks.bricks.lookup import LookupTable
+
+from blocks.initialization import IsotropicGaussian, Constant
+
+import data
+import error
+
+class Model(object):
+    def __init__(self, config):
+        # The input and the targets
+        x_firstk_latitude = (tensor.matrix('first_k_latitude') - data.porto_center[0]) / data.data_std[0]
+        x_firstk_longitude = (tensor.matrix('first_k_longitude') - data.porto_center[1]) / data.data_std[1]
+
+        x_lastk_latitude = (tensor.matrix('last_k_latitude') - data.porto_center[0]) / data.data_std[0]
+        x_lastk_longitude = (tensor.matrix('last_k_longitude') - data.porto_center[1]) / data.data_std[1]
+
+        input_list = [x_firstk_latitude, x_firstk_longitude, x_lastk_latitude, x_lastk_longitude]
+        embed_tables = []
+
+        self.require_inputs = ['first_k_latitude', 'first_k_longitude', 'last_k_latitude', 'last_k_longitude']
+
+        for (varname, num, dim) in config.dim_embeddings:
+            self.require_inputs.append(varname)
+            vardata = tensor.lvector(varname)
+            tbl = LookupTable(length=num, dim=dim, name='%s_lookup'%varname)
+            embed_tables.append(tbl)
+            input_list.append(tbl.apply(vardata))
+
+        y = tensor.concatenate((tensor.vector('destination_latitude')[:, None],
+                                tensor.vector('destination_longitude')[:, None]), axis=1)
+
+        # Define the model
+        mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden] + [Softmax()],
+                  dims=[config.dim_input] + config.dim_hidden + [config.dim_output])
+        classes = theano.shared(numpy.array(config.tgtcls, dtype=theano.config.floatX), name='classes')
+
+        # Create the Theano variables
+        inputs = tensor.concatenate(input_list, axis=1)
+
+        # inputs = theano.printing.Print("inputs")(inputs)
+        cls_probas = mlp.apply(inputs)
+        outputs = tensor.dot(cls_probas, classes)
+
+        # outputs = theano.printing.Print("outputs")(outputs)
+        # y = theano.printing.Print("y")(y)
+
+        outputs.name = 'outputs'
+
+        # Calculate the cost
+        cost = error.erdist(outputs, y).mean()
+        cost.name = 'cost'
+        hcost = error.hdist(outputs, y).mean()
+        hcost.name = 'hcost'
+
+        # Initialization
+        for tbl in embed_tables:
+            tbl.weights_init = IsotropicGaussian(0.01)
+        mlp.weights_init = IsotropicGaussian(0.1)
+        mlp.biases_init = Constant(0.01)
+
+        for tbl in embed_tables:
+            tbl.initialize()
+        mlp.initialize()
+
+        self.cost = cost
+        self.monitor = [cost, hcost]
+        self.outputs = outputs
+        self.pred_vars = ['destination_latitude', 'destination_longitude']
+
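For context, this model does not regress the destination coordinates directly: the MLP's softmax layer produces a probability for each target cluster in `config.tgtcls`, and the prediction is the probability-weighted average of the cluster centroids (`outputs = tensor.dot(cls_probas, classes)`). The snippet below is a minimal numpy sketch of that output step only; it is not code from the repository, and the centroid and probability values are made-up illustrations.

    import numpy

    # Hypothetical centroids playing the role of config.tgtcls:
    # 3 candidate destinations, one (latitude, longitude) row each.
    classes = numpy.array([[41.16, -8.62],
                           [41.15, -8.61],
                           [41.18, -8.66]])

    # Hypothetical softmax output of the MLP for a batch of 2 trips:
    # each row sums to 1 and weights the candidate destinations.
    cls_probas = numpy.array([[0.7, 0.2, 0.1],
                              [0.1, 0.1, 0.8]])

    # Same operation as `outputs = tensor.dot(cls_probas, classes)` above:
    # the prediction is a convex combination of the centroids.
    outputs = cls_probas.dot(classes)
    print(outputs)  # one (latitude, longitude) prediction per trip

Because `outputs` is already a coordinate pair, the cost can be the real-world distance to the true destination (`error.erdist`, with `error.hdist` monitored as `hcost`), even though the network itself only emits class probabilities.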