1  res = 0.75486237
input_keep_prob_ = 0.5
output_keep_prob_ = 0.5
lstmUnitNum = 256
attn_length = -1
attention_size = 128
layer_num = 1
forget_bias = 0.5
200 steps

2  res = 0.7599767
input_keep_prob_ = 0.5
output_keep_prob_ = 0.5
lstmUnitNum = 128
attn_length = -1
attention_size = 128
layer_num = 1
forget_bias = 0.5
200 steps

3  res = 0.75977309
input_keep_prob_ = 0.5
output_keep_prob_ = 0.5
lstmUnitNum = 64
attn_length = -1
attention_size = 128
layer_num = 1
forget_bias = 0.5
200 steps

4  res = 0.74973319
input_keep_prob_ = 0.25
output_keep_prob_ = 0.25
lstmUnitNum = 256
attn_length = -1
attention_size = 128
layer_num = 1
forget_bias = 0.5
200 steps

5  res = 0.75029755
input_keep_prob_ = 0.75
output_keep_prob_ = 0.75
lstmUnitNum = 256
attn_length = -1
attention_size = 128
layer_num = 1
forget_bias = 0.5
200 steps

9  res = 0.73949185
input_keep_prob_ = 0.25
output_keep_prob_ = 0.25
lstmUnitNum = 128
attn_length = -1
attention_size = 128
layer_num = 1
forget_bias = 0.5
200 steps

8  res = 0.7585081
input_keep_prob_ = 0.75
output_keep_prob_ = 0.75
lstmUnitNum = 128
attn_length = -1
attention_size = 128
layer_num = 1
forget_bias = 0.5
200 steps

6  res = 0.7596117
input_keep_prob_ = 0.5
output_keep_prob_ = 0.5
lstmUnitNum = 128
attn_length = -1
attention_size = 64
layer_num = 1
forget_bias = 0.5
200 steps

7  res = 0.761237
input_keep_prob_ = 0.5
output_keep_prob_ = 0.5
lstmUnitNum = 128
attn_length = -1
attention_size = 256
layer_num = 1
forget_bias = 0.5
200 steps

10  res = 
input_keep_prob_ = 0.5
output_keep_prob_ = 0.5
lstmUnitNum = 128
attn_length = 8
attention_size = 128
layer_num = 1
forget_bias = 0.5
200 steps