From e4d1c9c632158d7f40d26567d10c558c8815b84d Mon Sep 17 00:00:00 2001 From: Thomas Mesnard Date: Sun, 20 Dec 2015 10:58:07 +0100 Subject: Add some random code --- .gitignore | 3 + README.md | 2 + mohammad/LICENSE | 201 +++++++++++++++++++++++++++++++++++++++++++ mohammad/README.md | 33 ++++++++ mohammad/ctc_cost.py | 206 +++++++++++++++++++++++++++++++++++++++++++++ mohammad/ctc_test_data.pkl | Bin 0 -> 2080084 bytes mohammad/test_ctc.py | 135 +++++++++++++++++++++++++++++ 7 files changed, 580 insertions(+) create mode 100644 .gitignore create mode 100644 mohammad/LICENSE create mode 100644 mohammad/README.md create mode 100644 mohammad/ctc_cost.py create mode 100644 mohammad/ctc_test_data.pkl create mode 100644 mohammad/test_ctc.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5942bbc --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +__pycache__/ +*.pyc +*.swp diff --git a/README.md b/README.md index 7dd39a7..c4c9da8 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,4 @@ # pgm Projet PGM + +Thomas Mesnard, Alex Auvolat diff --git a/mohammad/LICENSE b/mohammad/LICENSE new file mode 100644 index 0000000..5c304d1 --- /dev/null +++ b/mohammad/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/mohammad/README.md b/mohammad/README.md new file mode 100644 index 0000000..8c553c3 --- /dev/null +++ b/mohammad/README.md @@ -0,0 +1,33 @@ +CTC-Connectionist Temporal Classification +========================================= + +-CTC cost is implemented in pure [Theano](https://github.com/Theano/Theano). + +-Supports mini-batch. + +-Supports both normal- and log-scale. + +-"apple" problem (two same consecutive labels) is solved. + +-Test file is implemented using [Blocks](https://github.com/bartvm/blocks). + + + + +Reference +========= +Graves, Alex, et al. "Connectionist temporal classification: labelling unsegmented sequence data with recurrent neural networks." Proceedings of the 23rd international conference on Machine learning. ACM, 2006. 
+
+
+Credits
+=======
+Theano implementation of CTC by [Shawn Tan](https://github.com/shawntan/rnn-experiment/)
+
+Theano implementation of CTC by [Rakesh Var](https://github.com/rakeshvar/rnn_ctc)
+
+
+Special thanks to
+=================
+[Kyle Kastner](https://github.com/kastnerkyle)
+
+Pascal Lamblin
diff --git a/mohammad/ctc_cost.py b/mohammad/ctc_cost.py
new file mode 100644
index 0000000..979ed93
--- /dev/null
+++ b/mohammad/ctc_cost.py
@@ -0,0 +1,206 @@
+"""
+CTC-Connectionist Temporal Classification
+
+Code provided by Mohammad Pezeshki - May. 2015 -
+Montreal Institute for Learning Algorithms
+
+Reference: Graves, Alex, et al. "Connectionist temporal classification:
+labelling unsegmented sequence data with recurrent neural networks."
+Proceedings of the 23rd international conference on Machine learning.
+ACM, 2006.
+
+Credits: Shawn Tan, Rakesh Var
+
+This code is distributed without any warranty, express or implied.
+"""
+
+import theano
+from theano import tensor
+
+floatX = theano.config.floatX
+
+
+# T: INPUT_SEQUENCE_LENGTH
+# B: BATCH_SIZE
+# L: OUTPUT_SEQUENCE_LENGTH
+# C: NUM_CLASSES
+class CTC(object):
+    """Connectionist Temporal Classification
+    y_hat : T x B x C+1
+    y : L x B
+    y_hat_mask : T x B
+    y_mask : L x B
+    """
+    @staticmethod
+    def add_blanks(y, blank_symbol, y_mask=None):
+        """Add blanks to a matrix and update its mask
+
+        Input shape: L x B
+        Output shape: 2L+1 x B
+
+        """
+        # for y
+        y_extended = y.T.dimshuffle(0, 1, 'x')
+        blanks = tensor.zeros_like(y_extended) + blank_symbol
+        concat = tensor.concatenate([y_extended, blanks], axis=2)
+        res = concat.reshape((concat.shape[0],
+                              concat.shape[1] * concat.shape[2])).T
+        begining_blanks = tensor.zeros((1, res.shape[1])) + blank_symbol
+        blanked_y = tensor.concatenate([begining_blanks, res], axis=0)
+        # for y_mask
+        if y_mask is not None:
+            y_mask_extended = y_mask.T.dimshuffle(0, 1, 'x')
+            concat = tensor.concatenate([y_mask_extended,
+                                         y_mask_extended], axis=2)
+            res = concat.reshape((concat.shape[0],
+                                  concat.shape[1] * concat.shape[2])).T
+            begining_blanks = tensor.ones((1, res.shape[1]), dtype=floatX)
+            blanked_y_mask = tensor.concatenate([begining_blanks, res], axis=0)
+        else:
+            blanked_y_mask = None
+        return blanked_y, blanked_y_mask
+
+    @staticmethod
+    def class_batch_to_labeling_batch(y, y_hat, y_hat_mask=None):
+        y_hat = y_hat * y_hat_mask.dimshuffle(0, 'x', 1)
+        batch_size = y_hat.shape[2]
+        res = y_hat[:, y.astype('int32'), tensor.arange(batch_size)]
+        return res
+
+    @staticmethod
+    def recurrence_relation(y, y_mask, blank_symbol):
+        n_y = y.shape[0]
+        blanks = tensor.zeros((2, y.shape[1])) + blank_symbol
+        ybb = tensor.concatenate((y, blanks), axis=0).T
+        sec_diag = (tensor.neq(ybb[:, :-2], ybb[:, 2:]) *
+                    tensor.eq(ybb[:, 1:-1], blank_symbol) *
+                    y_mask.T)
+
+        # r1: LxL
+        # r2: LxL
+        # r3: LxLxB
+        r2 = tensor.eye(n_y, k=1)
+        r3 = (tensor.eye(n_y, k=2).dimshuffle(0, 1, 'x') *
+              sec_diag.dimshuffle(1, 'x', 0))
+
+        return r2, r3
+
+    @classmethod
+    def path_probabs(cls, y, y_hat, y_mask, y_hat_mask, blank_symbol):
+        pred_y = cls.class_batch_to_labeling_batch(y, y_hat, y_hat_mask)
+
+        r2, r3 = cls.recurrence_relation(y, y_mask, blank_symbol)
+
+        def step(p_curr, p_prev):
+            # instead of dot product, we * first
+            # and then sum over one dimension.
+            # objective: T.dot((p_prev)BxL, LxLxB)
+            # solution: Lx1xB * LxLxB --> LxLxB --> (sumover)xLxB
+            dotproduct = (p_prev + tensor.dot(p_prev, r2) +
+                          (p_prev.dimshuffle(1, 'x', 0) * r3).sum(axis=0).T)
+            return p_curr.T * dotproduct * y_mask.T  # B x L
+
+        probabilities, _ = theano.scan(
+            step,
+            sequences=[pred_y],
+            outputs_info=[tensor.eye(y.shape[0])[0] * tensor.ones(y.T.shape)])
+        return probabilities, probabilities.shape
+
+    @classmethod
+    def cost(cls, y, y_hat, y_mask, y_hat_mask, blank_symbol):
+        y_hat_mask_len = tensor.sum(y_hat_mask, axis=0, dtype='int32')
+        y_mask_len = tensor.sum(y_mask, axis=0, dtype='int32')
+        probabilities, sth = cls.path_probabs(y, y_hat,
+                                              y_mask, y_hat_mask,
+                                              blank_symbol)
+        batch_size = probabilities.shape[1]
+        labels_probab = (probabilities[y_hat_mask_len - 1,
+                                       tensor.arange(batch_size),
+                                       y_mask_len - 1] +
+                         probabilities[y_hat_mask_len - 1,
+                                       tensor.arange(batch_size),
+                                       y_mask_len - 2])
+        avg_cost = tensor.mean(-tensor.log(labels_probab))
+        return avg_cost, sth
+
+    @staticmethod
+    def _epslog(x):
+        return tensor.cast(tensor.log(tensor.clip(x, 1E-12, 1E12)),
+                           theano.config.floatX)
+
+    @staticmethod
+    def log_add(a, b):
+        max_ = tensor.maximum(a, b)
+        return (max_ + tensor.log1p(tensor.exp(a + b - 2 * max_)))
+
+    @staticmethod
+    def log_dot_matrix(x, z):
+        inf = 1E12
+        log_dot = tensor.dot(x, z)
+        zeros_to_minus_inf = (z.max(axis=0) - 1) * inf
+        return log_dot + zeros_to_minus_inf
+
+    @staticmethod
+    def log_dot_tensor(x, z):
+        inf = 1E12
+        log_dot = (x.dimshuffle(1, 'x', 0) * z).sum(axis=0).T
+        zeros_to_minus_inf = (z.max(axis=0) - 1) * inf
+        return log_dot + zeros_to_minus_inf.T
+
+    @classmethod
+    def log_path_probabs(cls, y, y_hat, y_mask, y_hat_mask, blank_symbol):
+        pred_y = cls.class_batch_to_labeling_batch(y, y_hat, y_hat_mask)
+        r2, r3 = cls.recurrence_relation(y, y_mask, blank_symbol)
+
+        def step(log_p_curr, log_p_prev):
+            p1 = log_p_prev
+            p2 = cls.log_dot_matrix(p1, r2)
+            p3 = cls.log_dot_tensor(p1, r3)
+            p123 = cls.log_add(p3, cls.log_add(p1, p2))
+
+            return (log_p_curr.T +
+                    p123 +
+                    cls._epslog(y_mask.T))
+
+        log_probabilities, _ = theano.scan(
+            step,
+            sequences=[cls._epslog(pred_y)],
+            outputs_info=[cls._epslog(tensor.eye(y.shape[0])[0] *
+                                      tensor.ones(y.T.shape))])
+        return log_probabilities
+
+    @classmethod
+    def log_cost(cls, y, y_hat, y_mask, y_hat_mask, blank_symbol):
+        y_hat_mask_len = tensor.sum(y_hat_mask, axis=0, dtype='int32')
+        y_mask_len = tensor.sum(y_mask, axis=0, dtype='int32')
+        log_probabs = cls.log_path_probabs(y, y_hat,
+                                           y_mask, y_hat_mask,
+                                           blank_symbol)
+        batch_size = log_probabs.shape[1]
+        labels_probab = cls.log_add(
+            log_probabs[y_hat_mask_len - 1,
+                        tensor.arange(batch_size),
+                        y_mask_len - 1],
+            log_probabs[y_hat_mask_len - 1,
+                        tensor.arange(batch_size),
+                        y_mask_len - 2])
+        avg_cost = tensor.mean(-labels_probab)
+        return avg_cost
+
+    @classmethod
+    def apply(cls, y, y_hat, y_mask, y_hat_mask, scale='log_scale'):
+        y_hat = y_hat.dimshuffle(0, 2, 1)
+        num_classes = y_hat.shape[1] - 1
+        blanked_y, blanked_y_mask = cls.add_blanks(
+            y=y,
+            blank_symbol=num_classes.astype(floatX),
+            y_mask=y_mask)
+        if scale == 'log_scale':
+            final_cost = cls.log_cost(blanked_y, y_hat,
+                                      blanked_y_mask, y_hat_mask,
+                                      num_classes)
+        else:
+            final_cost, sth = cls.cost(blanked_y, y_hat,
+                                       blanked_y_mask, y_hat_mask,
+                                       num_classes)
+        return final_cost
diff --git a/mohammad/ctc_test_data.pkl b/mohammad/ctc_test_data.pkl
new file mode 100644
index 0000000..2833c1b
Binary files /dev/null and b/mohammad/ctc_test_data.pkl differ
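The module above fixes the shape conventions (y_hat: T x B x C+1 softmax outputs, y: L x B labels, with matching masks) and exposes everything through CTC.apply, which appends the blank as class index C and selects the log- or normal-scale recursion. The following usage sketch is not part of this commit; the toy sizes and variable names are illustrative assumptions, and it presumes Theano and the ctc_cost.py file above are importable.

import numpy
import theano
from theano import tensor
from ctc_cost import CTC

floatX = theano.config.floatX

# Symbolic inputs, following the module's conventions.
y_hat = tensor.tensor3('y_hat')           # T x B x C+1 (softmax outputs)
y_hat_mask = tensor.matrix('y_hat_mask')  # T x B
y = tensor.matrix('y')                    # L x B (labels stored as floatX)
y_mask = tensor.matrix('y_mask')          # L x B

cost = CTC().apply(y, y_hat, y_mask, y_hat_mask, 'log_scale')
f = theano.function([y, y_hat, y_mask, y_hat_mask], cost)

# Toy batch: T=5 frames, B=1 sequence, C=3 classes plus one blank, L=2 labels.
T, B, C, L = 5, 1, 3, 2
rng = numpy.random.RandomState(0)
y_hat_val = rng.uniform(size=(T, B, C + 1)).astype(floatX)
y_hat_val /= y_hat_val.sum(axis=-1, keepdims=True)  # normalize like a softmax
y_val = numpy.array([[0], [2]], dtype=floatX)        # target labelling (0, 2)
print(f(y_val, y_hat_val,
        numpy.ones((L, B), dtype=floatX),
        numpy.ones((T, B), dtype=floatX)))

With 'log_scale' the printed value is the mean, over the batch, of the negative log-probability that the network outputs assign to the target labelling.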
diff --git a/mohammad/test_ctc.py b/mohammad/test_ctc.py
new file mode 100644
index 0000000..a24d634
--- /dev/null
+++ b/mohammad/test_ctc.py
@@ -0,0 +1,135 @@
+import theano
+import numpy
+from theano import tensor
+from blocks.model import Model
+from blocks.bricks import Linear, Tanh
+from ctc_cost import CTC
+from blocks.initialization import IsotropicGaussian, Constant
+from fuel.datasets import IterableDataset
+from fuel.streams import DataStream
+from blocks.algorithms import (GradientDescent, Scale,
+                               StepClipping, CompositeRule)
+from blocks.extensions.monitoring import TrainingDataMonitoring
+from blocks.main_loop import MainLoop
+from blocks.extensions import FinishAfter, Printing
+from blocks.bricks.recurrent import SimpleRecurrent
+from blocks.graph import ComputationGraph
+try:
+    import cPickle as pickle
+except:
+    import pickle
+
+floatX = theano.config.floatX
+
+
+@theano.compile.ops.as_op(itypes=[tensor.lvector],
+                          otypes=[tensor.lvector])
+def print_pred(y_hat):
+    blank_symbol = 4
+    res = []
+    for i, s in enumerate(y_hat):
+        if (s != blank_symbol) and (i == 0 or s != y_hat[i - 1]):
+            res += [s]
+    return numpy.asarray(res)
+
+n_epochs = 200
+x_dim = 4
+h_dim = 9
+num_classes = 4
+
+with open("ctc_test_data.pkl", "rb") as pkl_file:
+    try:
+        data = pickle.load(pkl_file)
+        inputs = data['inputs']
+        labels = data['labels']
+        # from S x T x B x D to S x T x B
+        inputs_mask = numpy.max(data['mask_inputs'], axis=-1)
+        labels_mask = data['mask_labels']
+    except:
+        data = pickle.load(pkl_file, encoding='bytes')
+        inputs = data[b'inputs']
+        labels = data[b'labels']
+        # from S x T x B x D to S x T x B
+        inputs_mask = numpy.max(data[b'mask_inputs'], axis=-1)
+        labels_mask = data[b'mask_labels']
+
+
+
+print('Building model ...')
+# T x B x F
+x = tensor.tensor3('x', dtype=floatX)
+# T x B
+x_mask = tensor.matrix('x_mask', dtype=floatX)
+# L x B
+y = tensor.matrix('y', dtype=floatX)
+# L x B
+y_mask = tensor.matrix('y_mask', dtype=floatX)
+
+x_to_h = Linear(name='x_to_h',
+                input_dim=x_dim,
+                output_dim=h_dim)
+x_transform = x_to_h.apply(x)
+rnn = SimpleRecurrent(activation=Tanh(),
+                      dim=h_dim, name="rnn")
+h = rnn.apply(x_transform)
+h_to_o = Linear(name='h_to_o',
+                input_dim=h_dim,
+                output_dim=num_classes + 1)
+h_transform = h_to_o.apply(h)
+# T x B x C+1
+y_hat = tensor.nnet.softmax(
+    h_transform.reshape((-1, num_classes + 1))
+).reshape((h.shape[0], h.shape[1], -1))
+y_hat.name = 'y_hat'
+
+y_hat_mask = x_mask
+cost = CTC().apply(y, y_hat, y_mask, y_hat_mask, 'normal_scale')
+cost.name = 'CTC'
+# Initialization
+for brick in (rnn, x_to_h, h_to_o):
+    brick.weights_init = IsotropicGaussian(0.01)
+    brick.biases_init = Constant(0)
+    brick.initialize()
+
+print('Building DataStream ...')
+dataset = IterableDataset({'x': inputs,
+                           'x_mask': inputs_mask,
+                           'y': labels,
+                           'y_mask': labels_mask})
+stream = DataStream(dataset)
+
+print('Building training process...')
+algorithm = GradientDescent(cost=cost,
+                            parameters=ComputationGraph(cost).parameters,
+                            step_rule=CompositeRule([StepClipping(10.0),
+                                                     Scale(0.02)]))
+monitor_cost = TrainingDataMonitoring([cost],
+                                      prefix="train",
+                                      after_epoch=True)
+
+# sample number to monitor
+sample = 8
+
+y_hat_max_path = print_pred(tensor.argmax(y_hat[:, sample, :], axis=1))
+y_hat_max_path.name = 'Viterbi'
+monitor_output = TrainingDataMonitoring([y_hat_max_path],
+                                        prefix="y_hat",
+                                        every_n_epochs=1)
+
+length = tensor.sum(y_mask[:, sample]).astype('int32')
+tar = y[:length, sample].astype('int32')
+tar.name = '_Target_Seq'
+monitor_target = TrainingDataMonitoring([tar],
+                                        prefix="y",
+                                        every_n_epochs=1)
+
+model = Model(cost)
+main_loop = MainLoop(data_stream=stream, algorithm=algorithm,
+                     extensions=[monitor_cost, monitor_output,
+                                 monitor_target,
+                                 FinishAfter(after_n_epochs=n_epochs),
+                                 Printing()],
+                     model=model)
+
+print('Starting training ...')
+main_loop.run()
--
cgit v1.2.3
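Since the recursion in ctc_cost.py is the only non-trivial piece of the patch, it helps to have a reference that does not depend on Theano at all. On toy sizes, the CTC objective can be brute-forced: enumerate every length-T framewise labelling, collapse repeats and blanks exactly as print_pred does above, and sum the probabilities of the paths that collapse to the target. The sketch below is not part of this commit and only assumes NumPy; it uses the same blank-last convention (blank index = num_classes).

import itertools
import numpy


def collapse(path, blank):
    # Same rule as print_pred: drop blanks and merge immediate repeats.
    out = []
    for i, s in enumerate(path):
        if s != blank and (i == 0 or s != path[i - 1]):
            out.append(s)
    return out


def brute_force_ctc_cost(y_hat, target, blank):
    # y_hat: T x (C+1) per-frame probabilities for a single sequence.
    T, n_symbols = y_hat.shape
    total = 0.0
    for path in itertools.product(range(n_symbols), repeat=T):
        if collapse(path, blank) == list(target):
            total += numpy.prod([y_hat[t, k] for t, k in enumerate(path)])
    return -numpy.log(total)


rng = numpy.random.RandomState(0)
probs = rng.uniform(size=(5, 4))            # T=5 frames, 3 classes + blank
probs /= probs.sum(axis=1, keepdims=True)
print(brute_force_ctc_cost(probs, target=[0, 2], blank=3))

For inputs this small, the value should agree, up to numerical precision, with the cost returned by CTC.apply on the same probabilities with all-ones masks.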