path: root/model/dest_simple_mlp_tgtcls_alexandre.py

import numpy 

import theano
from theano import tensor

from blocks.bricks import MLP, Rectifier, Softmax
from blocks.bricks.lookup import LookupTable

from blocks.initialization import IsotropicGaussian, Constant

import data
import error

class Model(object):
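    """MLP destination predictor over a fixed set of candidate points.

    The inputs are the first and last k points of the trajectory prefix
    (normalised GPS coordinates) plus embeddings of the discrete variables
    listed in config.dim_embeddings.  The MLP ends in a softmax over the
    candidate destinations in config.tgtcls, and the predicted destination
    is the probability-weighted mean of those points.
    """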
    def __init__(self, config):
        # The inputs and the targets
        x_firstk_latitude = (tensor.matrix('first_k_latitude') - data.porto_center[0]) / data.data_std[0]
        x_firstk_longitude = (tensor.matrix('first_k_longitude') - data.porto_center[1]) / data.data_std[1]

        x_lastk_latitude = (tensor.matrix('last_k_latitude') - data.porto_center[0]) / data.data_std[0]
        x_lastk_longitude = (tensor.matrix('last_k_longitude') - data.porto_center[1]) / data.data_std[1]

        input_list = [x_firstk_latitude, x_firstk_longitude, x_lastk_latitude, x_lastk_longitude]
        embed_tables = []

        self.require_inputs = ['first_k_latitude', 'first_k_longitude', 'last_k_latitude', 'last_k_longitude']

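        # Each (variable name, vocabulary size, embedding dim) triple in
        # config.dim_embeddings adds a LookupTable whose output is
        # concatenated with the coordinate inputs.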
        for (varname, num, dim) in config.dim_embeddings:
            self.require_inputs.append(varname)
            vardata = tensor.lvector(varname)
            tbl = LookupTable(length=num, dim=dim, name='%s_lookup'%varname)
            embed_tables.append(tbl)
            input_list.append(tbl.apply(vardata))

        y = tensor.concatenate((tensor.vector('destination_latitude')[:, None],
                                tensor.vector('destination_longitude')[:, None]), axis=1)

        # Define the model
        mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden] + [Softmax()],
                  dims=[config.dim_input] + config.dim_hidden + [config.dim_output])
        classes = theano.shared(numpy.array(config.tgtcls, dtype=theano.config.floatX), name='classes')

        # Concatenate all inputs into a single matrix
        inputs = tensor.concatenate(input_list, axis=1)

        # inputs = theano.printing.Print("inputs")(inputs)
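        # The MLP produces a probability for each candidate destination;
        # the prediction is the probability-weighted average of those points.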
        cls_probas = mlp.apply(inputs)
        outputs = tensor.dot(cls_probas, classes)

        # outputs = theano.printing.Print("outputs")(outputs)
        # y = theano.printing.Print("y")(y)

        outputs.name = 'outputs'

        # Calculate the cost
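        # cost (error.erdist) is the quantity exposed for training; hcost
        # (error.hdist, presumably a haversine distance) is only monitored.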
        cost = error.erdist(outputs, y).mean()
        cost.name = 'cost'
        hcost = error.hdist(outputs, y).mean()
        hcost.name = 'hcost'

        # Initialization
        for tbl in embed_tables:
            tbl.weights_init = IsotropicGaussian(0.01)
        mlp.weights_init = IsotropicGaussian(0.1)
        mlp.biases_init = Constant(0.01)

        for tbl in embed_tables:
            tbl.initialize()
        mlp.initialize()

        self.cost = cost
        self.monitor = [cost, hcost]
        self.outputs = outputs
        self.pred_vars = ['destination_latitude', 'destination_longitude']
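
# A minimal usage sketch (not part of the original pipeline): the attribute
# names on the config object below mirror those read in Model.__init__ above,
# while the concrete values (embedding specs, prefix length k = 5, hidden
# size, number of candidate destinations) are illustrative assumptions only.
if __name__ == '__main__':
    class ExampleConfig(object):
        # hypothetical discrete inputs: (variable name, vocabulary size, dim)
        dim_embeddings = [('day_of_week', 7, 10), ('hour_of_day', 24, 10)]
        dim_hidden = [500]
        # candidate destination points, e.g. cluster centres, shape (n, 2)
        tgtcls = numpy.zeros((1000, 2), dtype=theano.config.floatX)
        dim_output = 1000
        # 2 coordinates x (first k + last k points), plus the embeddings
        dim_input = 2 * 2 * 5 + sum(d for (_, _, d) in dim_embeddings)

    model = Model(ExampleConfig())
    print(model.require_inputs)  # variables the data stream must provide
    print(model.monitor)         # [cost, hcost] Theano expressions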