memory_network_mlp_4_momentum.py (1710B)
from blocks.initialization import IsotropicGaussian, Constant
from blocks.algorithms import Momentum

from blocks.bricks import Tanh

import data
from model.memory_network_mlp import Model, Stream

n_begin_end_pts = 5

dim_embeddings = [
    ('origin_call', data.origin_call_train_size, 10),
    ('origin_stand', data.stands_size, 10),
    ('week_of_year', 52, 10),
    ('day_of_week', 7, 10),
    ('qhour_of_day', 24 * 4, 10),
    ('day_type', 3, 10),
]

embed_weights_init = IsotropicGaussian(0.001)

class MLPConfig(object):
    __slots__ = ('dim_input', 'dim_hidden', 'dim_output', 'weights_init',
                 'biases_init', 'embed_weights_init', 'dim_embeddings')

prefix_encoder = MLPConfig()
prefix_encoder.dim_input = n_begin_end_pts * 2 * 2 + sum(x for (_, _, x) in dim_embeddings)
prefix_encoder.dim_hidden = [500]
prefix_encoder.weights_init = IsotropicGaussian(0.01)
prefix_encoder.biases_init = Constant(0.001)
prefix_encoder.embed_weights_init = embed_weights_init
prefix_encoder.dim_embeddings = dim_embeddings

candidate_encoder = MLPConfig()
candidate_encoder.dim_input = n_begin_end_pts * 2 * 2 + sum(x for (_, _, x) in dim_embeddings)
candidate_encoder.dim_hidden = [500]
candidate_encoder.weights_init = IsotropicGaussian(0.01)
candidate_encoder.biases_init = Constant(0.001)
candidate_encoder.embed_weights_init = embed_weights_init
candidate_encoder.dim_embeddings = dim_embeddings

representation_size = 500
representation_activation = Tanh

normalize_representation = False

step_rule = Momentum(learning_rate=0.1, momentum=0.9)

batch_size = 10000
# batch_sort_size = 20

monitor_freq = 1000

max_splits = 200

train_candidate_size = 20000
valid_candidate_size = 20000
test_candidate_size = 20000
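
For reference, a minimal standalone sketch of how `dim_input` works out for both encoders under this config. It only restates the arithmetic already in the file; the interpretation that `n_begin_end_pts * 2 * 2` means "points taken from the start and end of the trip prefix, two coordinates each" is an assumption about the codebase, not something this file states.

# Illustration only, not part of the config module.
# Assumption: each input combines n_begin_end_pts points from the prefix start and
# n_begin_end_pts points from the prefix end, each with 2 coordinates, plus the
# concatenated embeddings listed in dim_embeddings.
n_begin_end_pts = 5
embedding_dims = [10, 10, 10, 10, 10, 10]     # third field of each dim_embeddings entry

coord_features = n_begin_end_pts * 2 * 2      # 5 points x 2 ends x 2 coords = 20
embed_features = sum(embedding_dims)          # 6 embeddings x 10 dims each  = 60

assert coord_features + embed_features == 80  # dim_input for prefix_encoder and candidate_encoder

Both encoders therefore map an 80-dimensional input through a single 500-unit hidden layer into the shared 500-dimensional representation space used by the memory network.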