From 27a0e0949c6ca3f7bd18569a23ddd0e1b3e9a64e Mon Sep 17 00:00:00 2001
From: Alex Auvolat
Date: Fri, 10 Jul 2015 17:16:20 -0400
Subject: Batch shuffling

---
 config/dest_mlp_tgtcls_1_cswdtx_batchshuffle.py |  4 ++--
 model/mlp.py                                    | 18 +++++++++++-------
 2 files changed, 13 insertions(+), 9 deletions(-)

diff --git a/config/dest_mlp_tgtcls_1_cswdtx_batchshuffle.py b/config/dest_mlp_tgtcls_1_cswdtx_batchshuffle.py
index a4db33c..b816930 100644
--- a/config/dest_mlp_tgtcls_1_cswdtx_batchshuffle.py
+++ b/config/dest_mlp_tgtcls_1_cswdtx_batchshuffle.py
@@ -23,14 +23,14 @@ dim_embeddings = [
 ]
 
 dim_input = n_begin_end_pts * 2 * 2 + sum(x for (_, _, x) in dim_embeddings)
-dim_hidden = [1000]
+dim_hidden = [500]
 dim_output = tgtcls.shape[0]
 
 embed_weights_init = IsotropicGaussian(0.01)
 mlp_weights_init = IsotropicGaussian(0.1)
 mlp_biases_init = Constant(0.01)
 
-step_rule = Momentum(learning_rate=0.01, momentum=0.9)
+step_rule = Momentum(learning_rate=0.001, momentum=0.99)
 
 batch_size = 200
 
diff --git a/model/mlp.py b/model/mlp.py
index 1f53e8c..7d04c82 100644
--- a/model/mlp.py
+++ b/model/mlp.py
@@ -52,6 +52,12 @@ class FFMLP(Initializable):
     def predict_inputs(self):
         return self.inputs
 
+class UniformGenerator(object):
+    def __init__(self):
+        self.rng = numpy.random.RandomState(123)
+    def __call__(self, *args):
+        return float(self.rng.uniform())
+
 class Stream(object):
     def __init__(self, config):
         self.config = config
@@ -69,17 +75,15 @@ class Stream(object):
         stream = transformers.TaxiExcludeTrips(stream, valid_trips_ids)
         stream = transformers.TaxiGenerateSplits(stream, max_splits=self.config.max_splits)
-        stream = transformers.add_destination(stream)
-
-        stream = transformers.taxi_add_datetime(stream)
-        stream = transformers.taxi_add_first_last_len(stream, self.config.n_begin_end_pts)
-        stream = transformers.Select(stream, tuple(req_vars))
 
         if hasattr(self.config, 'shuffle_batch_size'):
             stream = transformers.Batch(stream, iteration_scheme=ConstantScheme(self.config.shuffle_batch_size))
-            rng = numpy.random.RandomState(123)
-            stream = Mapping(stream, SortMapping(lambda x: float(rng.uniform())))
+            stream = Mapping(stream, SortMapping(key=UniformGenerator()))
             stream = Unpack(stream)
+
+        stream = transformers.taxi_add_datetime(stream)
+        stream = transformers.taxi_add_first_last_len(stream, self.config.n_begin_end_pts)
+        stream = transformers.Select(stream, tuple(req_vars))
 
         stream = Batch(stream, iteration_scheme=ConstantScheme(self.config.batch_size))
-- 
cgit v1.2.3


From 788747ec066234c16643595253ebe4d6bfeebe74 Mon Sep 17 00:00:00 2001
From: Alex Auvolat
Date: Fri, 10 Jul 2015 17:16:26 -0400
Subject: Adjust cluster_arrival.py to make it work again

---
 data_analysis/cluster_arrival.py | 23 +++++++++++++++++------
 1 file changed, 17 insertions(+), 6 deletions(-)
 mode change 100644 => 100755 data_analysis/cluster_arrival.py

diff --git a/data_analysis/cluster_arrival.py b/data_analysis/cluster_arrival.py
old mode 100644
new mode 100755
index fd4ea04..5e990cd
--- a/data_analysis/cluster_arrival.py
+++ b/data_analysis/cluster_arrival.py
@@ -1,20 +1,31 @@
-import matplotlib.pyplot as plt
+#!/usr/bin/env python
 import numpy
 import cPickle
 import scipy.misc
+import os
 
 from sklearn.cluster import MeanShift, estimate_bandwidth
 from sklearn.datasets.samples_generator import make_blobs
 from itertools import cycle
 
-print "Reading arrival point list"
-with open("arrivals.pkl") as f:
-    pts = cPickle.load(f)
+import data
+from data.hdf5 import taxi_it
+from data.transformers import add_destination
+
+print "Generating arrival point list"
point list" +dests = [] +for v in taxi_it("train"): + if len(v['latitude']) == 0: continue + dests.append([v['latitude'][-1], v['longitude'][-1]]) +pts = numpy.array(dests) + +with open(os.path.join(data.path, "arrivals.pkl"), "w") as f: + cPickle.dump(pts, f, protocol=cPickle.HIGHEST_PROTOCOL) print "Doing clustering" bw = estimate_bandwidth(pts, quantile=.1, n_samples=1000) print bw -bw = 0.001 +bw = 0.001 # ( ms = MeanShift(bandwidth=bw, bin_seeding=True, min_bin_freq=5) ms.fit(pts) @@ -22,6 +33,6 @@ cluster_centers = ms.cluster_centers_ print "Clusters shape: ", cluster_centers.shape -with open("arrival-cluters.pkl", "w") as f: +with open(os.path.join(data.path, "arrival-clusters.pkl"), "w") as f: cPickle.dump(cluster_centers, f, protocol=cPickle.HIGHEST_PROTOCOL) -- cgit v1.2.3 From 97b9672860b97a397ae48b24287741922e7fcb8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexandre=20de=20Br=C3=A9bisson?= Date: Fri, 10 Jul 2015 18:45:52 -0400 Subject: Create README.md --- README.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000..00eb60b --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +Winning entry to the Kaggle ECML/PKDD destination competition. + +https://www.kaggle.com/c/pkdd-15-predict-taxi-service-trajectory-i -- cgit v1.2.3