author     Alex Auvolat <alex.auvolat@ens.fr>  2015-05-07 13:16:23 -0400
committer  Alex Auvolat <alex.auvolat@ens.fr>  2015-05-07 13:16:23 -0400
commit     1ffd1fc355f6fddcb6cd3d93c0df58513d064472 (patch)
tree       8fc93f32c3f94644338093ac4a8a66d8d316d5a5
parent     1ff071800fc876eb6f2c25fe0eb1f7dc64efe0be (diff)
download   taxi-1ffd1fc355f6fddcb6cd3d93c0df58513d064472.tar.gz
           taxi-1ffd1fc355f6fddcb6cd3d93c0df58513d064472.zip

Add target class based model for time prediction (seems to work)

-rw-r--r--  config/time_simple_mlp_tgtcls_2_cswdtx.py  41
-rw-r--r--  model/time_simple_mlp_tgtcls.py            67
-rwxr-xr-x  train.py                                    6
3 files changed, 111 insertions, 3 deletions
diff --git a/config/time_simple_mlp_tgtcls_2_cswdtx.py b/config/time_simple_mlp_tgtcls_2_cswdtx.py
new file mode 100644
index 0000000..4579df3
--- /dev/null
+++ b/config/time_simple_mlp_tgtcls_2_cswdtx.py
@@ -0,0 +1,41 @@
+import model.time_simple_mlp_tgtcls as model
+
+from blocks.initialization import IsotropicGaussian, Constant
+
+import data
+
+n_begin_end_pts = 5 # how many points we consider at the beginning and end of the known trajectory
+n_end_pts = 5
+
+n_valid = 1000
+
+# generate target classes as a Fibonacci sequence
+tgtcls = [1, 2]
+for i in range(22):
+    tgtcls.append(tgtcls[-1] + tgtcls[-2])
+
+dim_embeddings = [
+    ('origin_call', data.origin_call_size+1, 10),
+    ('origin_stand', data.stands_size+1, 10),
+    ('week_of_year', 52, 10),
+    ('day_of_week', 7, 10),
+    ('qhour_of_day', 24 * 4, 10),
+    ('day_type', 3, 10),
+    ('taxi_id', 448, 10),
+]
+
+dim_input = n_begin_end_pts * 2 * 2 + sum(x for (_, _, x) in dim_embeddings)
+dim_hidden = [500, 100]
+dim_output = len(tgtcls)
+
+embed_weights_init = IsotropicGaussian(0.001)
+mlp_weights_init = IsotropicGaussian(0.01)
+mlp_biases_init = Constant(0.001)
+
+exp_base = 1.5
+
+learning_rate = 0.0001
+momentum = 0.99
+batch_size = 32
+
+valid_set = 'cuts/test_times_0'
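
The Fibonacci-style loop in the config above yields 24 roughly geometrically spaced candidate travel times (presumably seconds, matching the travel_time target), and the input width works out to 90: five points at each end of the known prefix, two coordinates each, plus seven embeddings of dimension 10. A quick plain-Python check of those numbers (everything here mirrors the config; nothing new is assumed):

    tgtcls = [1, 2]
    for i in range(22):
        tgtcls.append(tgtcls[-1] + tgtcls[-2])
    print(len(tgtcls))   # 24 -> dim_output
    print(tgtcls)        # [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610,
                         #  987, 1597, 2584, 4181, 6765, 10946, 17711, 28657, 46368, 75025]
    print(5 * 2 * 2 + 7 * 10)   # 90 = n_begin_end_pts * 2 * 2 + sum of embedding dims -> dim_input
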
diff --git a/model/time_simple_mlp_tgtcls.py b/model/time_simple_mlp_tgtcls.py
new file mode 100644
index 0000000..1f1eab7
--- /dev/null
+++ b/model/time_simple_mlp_tgtcls.py
@@ -0,0 +1,67 @@
+from blocks.bricks import MLP, Rectifier, Linear, Sigmoid, Identity, Softmax
+from blocks.bricks.lookup import LookupTable
+
+import numpy
+import theano
+from theano import tensor
+
+import data
+import error
+
+class Model(object):
+    def __init__(self, config):
+        # The input and the targets
+        x_firstk_latitude = (tensor.matrix('first_k_latitude') - data.train_gps_mean[0]) / data.train_gps_std[0]
+        x_firstk_longitude = (tensor.matrix('first_k_longitude') - data.train_gps_mean[1]) / data.train_gps_std[1]
+
+        x_lastk_latitude = (tensor.matrix('last_k_latitude') - data.train_gps_mean[0]) / data.train_gps_std[0]
+        x_lastk_longitude = (tensor.matrix('last_k_longitude') - data.train_gps_mean[1]) / data.train_gps_std[1]
+
+        input_list = [x_firstk_latitude, x_firstk_longitude, x_lastk_latitude, x_lastk_longitude]
+        embed_tables = []
+
+        self.require_inputs = ['first_k_latitude', 'first_k_longitude', 'last_k_latitude', 'last_k_longitude']
+
+        for (varname, num, dim) in config.dim_embeddings:
+            self.require_inputs.append(varname)
+            vardata = tensor.lvector(varname)
+            tbl = LookupTable(length=num, dim=dim, name='%s_lookup'%varname)
+            embed_tables.append(tbl)
+            input_list.append(tbl.apply(vardata))
+
+        y = tensor.lvector('travel_time')
+
+        # Define the model
+        mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden] + [Softmax()],
+                  dims=[config.dim_input] + config.dim_hidden + [config.dim_output])
+        classes = theano.shared(numpy.array(config.tgtcls, dtype=theano.config.floatX), name='classes')
+
+        # Create the Theano variables
+        inputs = tensor.concatenate(input_list, axis=1)
+        # inputs = theano.printing.Print("inputs")(inputs)
+        cls_probas = mlp.apply(inputs)
+        outputs = tensor.dot(cls_probas, classes)
+
+        # outputs = theano.printing.Print("outputs")(outputs)
+        # y = theano.printing.Print("y")(y)
+
+        outputs.name = 'outputs'
+
+        # Calculate the cost
+        cost = error.rmsle(outputs.flatten(), y.flatten())
+        cost.name = 'cost'
+
+        # Initialization
+        for tbl in embed_tables:
+            tbl.weights_init = config.embed_weights_init
+        mlp.weights_init = config.mlp_weights_init
+        mlp.biases_init = config.mlp_biases_init
+
+        for tbl in embed_tables:
+            tbl.initialize()
+        mlp.initialize()
+
+        self.cost = cost
+        self.monitor = [cost]
+        self.outputs = outputs
+        self.pred_vars = ['travel_time']
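
The heart of the model above is its readout: the MLP ends in a Softmax over the fixed target classes, the prediction is the expectation of the class values under that distribution (tensor.dot(cls_probas, classes)), and that scalar is scored with RMSLE against the true travel time. A minimal numpy sketch of the same computation, assuming error.rmsle is the usual root mean squared logarithmic error and using made-up probabilities and targets:

    import numpy as np

    # Fixed class values, as in config.tgtcls
    tgtcls = [1, 2]
    for i in range(22):
        tgtcls.append(tgtcls[-1] + tgtcls[-2])
    classes = np.asarray(tgtcls, dtype='float64')               # shape (24,)

    # Stand-in for cls_probas = mlp.apply(inputs): one softmax row per example
    rng = np.random.RandomState(0)
    cls_probas = rng.dirichlet(np.ones(len(classes)), size=32)  # shape (32, 24)

    # outputs = tensor.dot(cls_probas, classes): expected travel time per example
    outputs = cls_probas.dot(classes)                           # shape (32,)

    # Assumed RMSLE definition standing in for error.rmsle
    y = rng.randint(200, 3600, size=32).astype('float64')       # stand-in true travel times
    cost = np.sqrt(np.mean((np.log(outputs + 1) - np.log(y + 1)) ** 2))
    print(outputs[:3], cost)
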
diff --git a/train.py b/train.py
index 40449f0..4e2b983 100755
--- a/train.py
+++ b/train.py
@@ -108,7 +108,7 @@ def main():
         # Checkpoint('model.pkl', every_n_batches=100),
         Dump('model_data/' + model_name, every_n_batches=1000),
         LoadFromDump('model_data/' + model_name),
-        FinishAfter(after_epoch=42),
+        # FinishAfter(after_epoch=42),
     ]
     main_loop = MainLoop(
@@ -124,12 +124,12 @@ def main():
     outfile = open("output/test-output-%s.csv" % model_name, "w")
     outcsv = csv.writer(outfile)
-    if model.pred_vars == ['time']:
+    if model.pred_vars == ['travel_time']:
         outcsv.writerow(["TRIP_ID", "TRAVEL_TIME"])
         for out in apply_model.Apply(outputs=outputs, stream=test_stream, return_vars=['trip_id', 'outputs']):
             time = out['outputs']
             for i, trip in enumerate(out['trip_id']):
-                outcsv.writerow([trip, int(time[i, 0])])
+                outcsv.writerow([trip, int(time[i])])
     else:
         outcsv.writerow(["TRIP_ID", "LATITUDE", "LONGITUDE"])
         for out in apply_model.Apply(outputs=outputs, stream=test_stream, return_vars=['trip_id', 'outputs']):
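
With the target-class model, outputs is a single scalar per test trip, which is why the writer above indexes time[i] instead of time[i, 0] and why pred_vars is matched against ['travel_time']. A minimal sketch of the same submission loop with made-up trip ids and times (the csv usage mirrors train.py):

    import csv

    trip_ids = ["T1", "T2", "T3"]     # stand-in for out['trip_id']
    time = [612.3, 845.9, 1520.1]     # stand-in for out['outputs'], one value per trip

    with open("output/test-output-example.csv", "w") as outfile:
        outcsv = csv.writer(outfile)
        outcsv.writerow(["TRIP_ID", "TRAVEL_TIME"])
        for i, trip in enumerate(trip_ids):
            outcsv.writerow([trip, int(time[i])])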