Clean up bin mlf test
vmazalov committed Nov 12, 2018
Parent: 8d87948 · Commit: d7101a2
Showing 2 changed files with 0 additions and 48 deletions.
Source/SequenceTrainingLib/gammacalculation.h: 2 changes (0 additions, 2 deletions)
@@ -208,8 +208,6 @@ class GammaCalculation
 (const msra::math::ssematrixbase&) predstripe, (const msra::asr::simplesenonehmm&) m_hset,
 (msra::math::ssematrixbase&) dengammasstripe, (msra::math::ssematrixbase&) gammasbuffer /*empty, not used*/,
 lmf, wp, amf, boostmmifactor, seqsMBRmode, uidsstripe, boundariesstripe);
-if (denavlogp < LOGZERO / 2)
-    return;

 objectValue += (ElemType)((numavlogp - denavlogp) * numframes);

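Note on the hunk above: the deleted guard made the gamma calculation skip the objective update whenever the denominator average log-probability was effectively log-zero; after this commit the accumulation always runs. Below is a minimal Python-style sketch of that accumulation logic, with the pre-commit guard kept behind a flag for comparison. This is illustrative only: the names (the LOGZERO sentinel value, accumulate_objective) are assumptions, not the actual CNTK SequenceTrainingLib symbols.

LOGZERO = -1e30  # assumed sentinel for log(0); the real constant lives in the C++ lattice code

def accumulate_objective(object_value, num_avg_logp, den_avg_logp, num_frames,
                         skip_degenerate_denominator=False):
    # Sequence-training objective: frame-weighted (numerator - denominator) average log-prob.
    if skip_degenerate_denominator and den_avg_logp < LOGZERO / 2:
        # Pre-commit behavior: leave the objective untouched when the denominator
        # lattice scored the utterance as essentially impossible.
        return object_value
    return object_value + (num_avg_logp - den_avg_logp) * num_frames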
Tests/EndToEndTests/CNTKv2Python/Examples/htk_deserializer_test.py: 46 changes (0 additions, 46 deletions)
@@ -54,52 +54,6 @@ def test_htk_deserializers():
     assert True
     os.chdir(abs_path)

-def test_htk_binary_deserializers():
-    mbsize = 640
-    epoch_size = 1000 * mbsize
-    lr = [0.001]
-
-    feature_dim = 33
-    num_classes = 132
-    context = 2
-
-    os.chdir(data_path)
-
-    features_file = "glob_0000.scp"
-    labels_file = "mlf2.bin"
-
-    fd = HTKFeatureDeserializer(StreamDefs(
-        amazing_features = StreamDef(shape=feature_dim, context=(context,context), scp=features_file)))
-
-    ld = HTKMLFBinaryDeserializer(StreamDefs(awesome_labels = StreamDef(shape=num_classes, mlf=labels_file)))
-
-    reader = MinibatchSource([fd,ld])
-
-    features = C.sequence.input_variable(((2*context+1)*feature_dim))
-    labels = C.sequence.input_variable((num_classes))
-
-    model = Sequential([For(range(3), lambda : Recurrence(LSTM(256))),
-                        Dense(num_classes)])
-    z = model(features)
-    ce = C.cross_entropy_with_softmax(z, labels)
-    errs = C.classification_error (z, labels)
-
-    learner = C.fsadagrad(z.parameters,
-                          lr=C.learning_parameter_schedule_per_sample(lr, epoch_size=epoch_size),
-                          momentum=C.momentum_schedule_per_sample(0.9990913221888589),
-                          gradient_clipping_threshold_per_sample=15, gradient_clipping_with_truncation=True)
-    progress_printer = C.logging.ProgressPrinter(freq=0)
-    trainer = C.Trainer(z, (ce, errs), learner, progress_printer)
-
-    input_map={ features: reader.streams.amazing_features, labels: reader.streams.awesome_labels }
-
-    # just run and verify it doesn't crash
-    for i in range(3):
-        mb_data = reader.next_minibatch(mbsize, input_map=input_map)
-        trainer.train_minibatch(mb_data)
-    assert True
-    os.chdir(abs_path)
-
 def test_multiple_mlf_files():
     os.chdir(data_path)

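Note for anyone still exercising the binary MLF path by hand after this test removal: the reader wiring the deleted test used can be condensed as below. This is a sketch derived from the removed code, assuming HTKMLFBinaryDeserializer is still exposed by your CNTK build (this commit deletes its test, so it may not be) and that glob_0000.scp / mlf2.bin exist in the working directory; the stream names are the arbitrary ones from the test.

import cntk as C
from cntk.io import (HTKFeatureDeserializer, HTKMLFBinaryDeserializer,
                     MinibatchSource, StreamDef, StreamDefs)

feature_dim, num_classes, context = 33, 132, 2

# Features come from an HTK SCP list, labels from a binary MLF file.
fd = HTKFeatureDeserializer(StreamDefs(
    amazing_features=StreamDef(shape=feature_dim, context=(context, context),
                               scp="glob_0000.scp")))
ld = HTKMLFBinaryDeserializer(StreamDefs(
    awesome_labels=StreamDef(shape=num_classes, mlf="mlf2.bin")))
reader = MinibatchSource([fd, ld])

features = C.sequence.input_variable((2 * context + 1) * feature_dim)
labels = C.sequence.input_variable(num_classes)
input_map = {features: reader.streams.amazing_features,
             labels: reader.streams.awesome_labels}

# Pull one minibatch to check that both streams decode.
mb = reader.next_minibatch(640, input_map=input_map)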
