Commit 6353576f authored by R root

add an example of the NER task in fluid style

Parent 3b77123b
##
# Utility functions for NER assignment
# Assignment 2, part 1 for CS224D
##
from utils import invert_dict
from numpy import *
def load_wv(vocabfile, wvfile):
wv = loadtxt(wvfile, dtype=float)
with open(vocabfile) as fd:
words = [line.strip() for line in fd]
num_to_word = dict(enumerate(words))
word_to_num = invert_dict(num_to_word)
return wv, word_to_num, num_to_word
def save_predictions(y, filename):
"""Save predictions, one per line."""
with open(filename, 'w') as fd:
fd.write("\n".join(map(str, y)))
fd.write("\n")
import sys, os, re, json
import itertools
from collections import Counter
import time
from numpy import *
import pandas as pd
def invert_dict(d):
return {v:k for k,v in d.iteritems()}
def flatten1(lst):
return list(itertools.chain.from_iterable(lst))
def load_wv_pandas(fname):
return pd.read_hdf(fname, 'data')
def extract_wv(df):
num_to_word = dict(enumerate(df.index))
word_to_num = invert_dict(num_to_word)
wv = df.as_matrix()
return wv, word_to_num, num_to_word
def canonicalize_digits(word):
if any([c.isalpha() for c in word]): return word
word = re.sub("\d", "DG", word)
if word.startswith("DG"):
word = word.replace(",", "") # remove thousands separator
return word
def canonicalize_word(word, wordset=None, digits=True):
    word = word.lower()
    if digits:
        if (wordset is not None) and (word in wordset):
            return word
        word = canonicalize_digits(word)  # try to canonicalize numbers
    if (wordset is None) or (word in wordset):
        return word
    else:
        return "UUUNKKK"  # unknown token
##
# Utility functions used to create dataset
##
def augment_wv(df, extra=["UUUNKKK"]):
for e in extra:
df.loc[e] = zeros(len(df.columns))
def prune_wv(df, vocab, extra=["UUUNKKK"]):
"""Prune word vectors to vocabulary."""
items = set(vocab).union(set(extra))
return df.filter(items=items, axis='index')
def load_wv_raw(fname):
return pd.read_table(fname, sep=r"\s+",
header=None,
index_col=0,
quoting=3)
def load_dataset(fname):
docs = []
with open(fname) as fd:
cur = []
for line in fd:
# new sentence on -DOCSTART- or blank line
if re.match(r"-DOCSTART-.+", line) or (len(line.strip()) == 0):
if len(cur) > 0:
docs.append(cur)
cur = []
else: # read in tokens
cur.append(line.strip().split("\t",1))
# flush running buffer
docs.append(cur)
return docs
def extract_tag_set(docs):
tags = set(flatten1([[t[1].split("|")[0] for t in d] for d in docs]))
return tags
def extract_word_set(docs):
words = set(flatten1([[t[0] for t in d] for d in docs]))
return words
def pad_sequence(seq, left=1, right=1):
return left*[("<s>", "")] + seq + right*[("</s>", "")]
##
# For window models
def seq_to_windows(words, tags, word_to_num, tag_to_num, left=1, right=1):
ns = len(words)
X = []
y = []
for i in range(ns):
if words[i] == "<s>" or words[i] == "</s>":
continue # skip sentence delimiters
tagn = tag_to_num[tags[i]]
idxs = [word_to_num[words[ii]]
for ii in range(i - left, i + right + 1)]
X.append(idxs)
y.append(tagn)
return array(X), array(y)
def docs_to_windows(docs, word_to_num, tag_to_num, wsize=3):
pad = (wsize - 1) // 2
docs = flatten1([pad_sequence(seq, left=pad, right=pad) for seq in docs])
words, tags = zip(*docs)
words = [canonicalize_word(w, word_to_num) for w in words]
tags = [t.split("|")[0] for t in tags]
return seq_to_windows(words, tags, word_to_num, tag_to_num, pad, pad)
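
# A worked example of the window construction above, with a hypothetical toy
# vocabulary: after padding, every non-delimiter token yields one window of
# word indices centered on it, paired with its tag index.
def _windows_demo():
    word_to_num = {"<s>": 0, "paris": 1, "is": 2, "nice": 3, "</s>": 4}
    tag_to_num = {"O": 0, "LOC": 1}
    words = ["<s>", "paris", "is", "nice", "</s>"]
    tags = ["", "LOC", "O", "O", ""]
    X, y = seq_to_windows(words, tags, word_to_num, tag_to_num)
    assert X.tolist() == [[0, 1, 2], [1, 2, 3], [2, 3, 4]]
    assert y.tolist() == [1, 0, 0]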
def window_to_vec(window, L):
"""Concatenate word vectors for a given window."""
return concatenate([L[i] for i in window])
##
# For fixed-window LM:
# each row of X is a list of word indices
# each entry of y is the word index to predict
def seq_to_lm_windows(words, word_to_num, ngram=2):
ns = len(words)
X = []
y = []
for i in range(ns):
if words[i] == "<s>":
continue # skip sentence begin, but do predict end
idxs = [word_to_num[words[ii]]
for ii in range(i - ngram + 1, i + 1)]
X.append(idxs[:-1])
y.append(idxs[-1])
return array(X), array(y)
def docs_to_lm_windows(docs, word_to_num, ngram=2):
docs = flatten1([pad_sequence(seq, left=(ngram-1), right=1)
for seq in docs])
words = [canonicalize_word(wt[0], word_to_num) for wt in docs]
return seq_to_lm_windows(words, word_to_num, ngram)
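
# A worked example for the fixed-window LM, again with a hypothetical toy
# vocabulary: with ngram=2 each window holds the previous word and the target
# is the next word; <s> is never predicted, but </s> is.
def _lm_windows_demo():
    word_to_num = {"<s>": 0, "to": 1, "be": 2, "</s>": 3}
    words = ["<s>", "to", "be", "</s>"]
    X, y = seq_to_lm_windows(words, word_to_num, ngram=2)
    assert X.tolist() == [[0], [1], [2]]
    assert y.tolist() == [1, 2, 3]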
##
# For RNN LM
# just convert each sentence to a list of indices
# after padding each with <s> ... </s> tokens
def seq_to_indices(words, word_to_num):
return array([word_to_num[w] for w in words])
def docs_to_indices(docs, word_to_num):
docs = [pad_sequence(seq, left=1, right=1) for seq in docs]
ret = []
for seq in docs:
words = [canonicalize_word(wt[0], word_to_num) for wt in seq]
ret.append(seq_to_indices(words, word_to_num))
# return as numpy array for fancier slicing
return array(ret, dtype=object)
def offset_seq(seq):
return seq[:-1], seq[1:]
def seqs_to_lmXY(seqs):
X, Y = zip(*[offset_seq(s) for s in seqs])
return array(X, dtype=object), array(Y, dtype=object)
##
# For RNN tagger
# return X, Y as lists
# where X[i] is indices, Y[i] is tags for a sequence
# NOTE: this does not use padding tokens!
# (RNN should natively handle begin/end)
def docs_to_tag_sequence(docs, word_to_num, tag_to_num):
# docs = [pad_sequence(seq, left=1, right=1) for seq in docs]
X = []
Y = []
for seq in docs:
if len(seq) < 1: continue
words, tags = zip(*seq)
words = [canonicalize_word(w, word_to_num) for w in words]
x = seq_to_indices(words, word_to_num)
X.append(x)
tags = [t.split("|")[0] for t in tags]
y = seq_to_indices(tags, tag_to_num)
Y.append(y)
# return as numpy array for fancier slicing
return array(X, dtype=object), array(Y, dtype=object)
def idxs_to_matrix(idxs, L):
"""Return a matrix X with each row
as a word vector for the corresponding
index in idxs."""
return vstack([L[i] for i in idxs])
class Model(object):
"""Abstracts a Tensorflow graph for a learning task.
We use various Model classes as usual abstractions to encapsulate tensorflow
computational graphs. Each algorithm you will construct in this homework will
inherit from a Model object.
"""
def load_data(self):
"""Loads data from disk and stores it in memory.
Feel free to add instance variables to Model object that store loaded data.
"""
raise NotImplementedError("Each Model must re-implement this method.")
def add_placeholders(self):
"""Adds placeholder variables to tensorflow computational graph.
Tensorflow uses placeholder variables to represent locations in a
computational graph where data is inserted. These placeholders are used as
inputs by the rest of the model building code and will be fed data during
training.
See for more information:
https://www.tensorflow.org/versions/r0.7/api_docs/python/io_ops.html#placeholders
"""
raise NotImplementedError("Each Model must re-implement this method.")
def create_feed_dict(self, input_batch, label_batch):
"""Creates the feed_dict for training the given step.
A feed_dict takes the form of:
feed_dict = {
<placeholder>: <tensor of values to be passed for placeholder>,
....
}
If label_batch is None, then no labels are added to feed_dict.
Hint: The keys for the feed_dict should be a subset of the placeholder
tensors created in add_placeholders.
Args:
input_batch: A batch of input data.
label_batch: A batch of label data.
Returns:
feed_dict: The feed dictionary mapping from placeholders to values.
"""
raise NotImplementedError("Each Model must re-implement this method.")
def add_model(self, input_data):
"""Implements core of model that transforms input_data into predictions.
The core transformation for this model which transforms a batch of input
data into a batch of predictions.
Args:
input_data: A tensor of shape (batch_size, n_features).
Returns:
out: A tensor of shape (batch_size, n_classes)
"""
raise NotImplementedError("Each Model must re-implement this method.")
def add_loss_op(self, pred):
"""Adds ops for loss to the computational graph.
Args:
pred: A tensor of shape (batch_size, n_classes)
Returns:
loss: A 0-d tensor (scalar) output
"""
raise NotImplementedError("Each Model must re-implement this method.")
def run_epoch(self, sess, input_data, input_labels):
"""Runs an epoch of training.
Trains the model for one epoch.
Args:
sess: tf.Session() object
input_data: np.ndarray of shape (n_samples, n_features)
input_labels: np.ndarray of shape (n_samples, n_classes)
Returns:
average_loss: scalar. Average minibatch loss of model on epoch.
"""
raise NotImplementedError("Each Model must re-implement this method.")
def fit(self, sess, input_data, input_labels):
"""Fit model on provided data.
Args:
sess: tf.Session()
input_data: np.ndarray of shape (n_samples, n_features)
input_labels: np.ndarray of shape (n_samples, n_classes)
Returns:
losses: list of loss per epoch
"""
raise NotImplementedError("Each Model must re-implement this method.")
def predict(self, sess, input_data, input_labels=None):
"""Make predictions from the provided model.
Args:
sess: tf.Session()
input_data: np.ndarray of shape (n_samples, n_features)
input_labels: np.ndarray of shape (n_samples, n_classes)
Returns:
average_loss: Average loss of model.
predictions: Predictions of model on input_data
"""
raise NotImplementedError("Each Model must re-implement this method.")
class LanguageModel(Model):
"""Abstracts a Tensorflow graph for learning language models.
Adds ability to do embedding.
"""
def add_embedding(self):
"""Add embedding layer. that maps from vocabulary to vectors.
"""
raise NotImplementedError("Each Model must re-implement this method.")
import time
import math
import numpy as np
import tensorflow as tf
from q1_softmax import softmax
from q1_softmax import cross_entropy_loss
from model import Model
from utils import data_iterator
class Config(object):
"""Holds model hyperparams and data information.
The config class is used to store various hyperparameters and dataset
information parameters. Model objects are passed a Config() object at
instantiation.
"""
batch_size = 64
n_samples = 1024
n_features = 100
n_classes = 5
# You may adjust the max_epochs to ensure convergence.
max_epochs = 50
# You may adjust this learning rate to ensure convergence.
lr = 1e-4
class SoftmaxModel(Model):
"""Implements a Softmax classifier with cross-entropy loss."""
def load_data(self):
"""Creates a synthetic dataset and stores it in memory."""
np.random.seed(1234)
self.input_data = np.random.rand(
self.config.n_samples, self.config.n_features)
self.input_labels = np.ones((self.config.n_samples,), dtype=np.int32)
def add_placeholders(self):
"""Generate placeholder variables to represent the input tensors.
These placeholders are used as inputs by the rest of the model building
code and will be fed data during training.
Adds following nodes to the computational graph
input_placeholder: Input placeholder tensor of shape
(batch_size, n_features), type tf.float32
labels_placeholder: Labels placeholder tensor of shape
(batch_size, n_classes), type tf.int32
Add these placeholders to self as the instance variables
self.input_placeholder
self.labels_placeholder
(Don't change the variable names)
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
def create_feed_dict(self, input_batch, label_batch):
"""Creates the feed_dict for softmax classifier.
A feed_dict takes the form of:
feed_dict = {
<placeholder>: <tensor of values to be passed for placeholder>,
....
}
If label_batch is None, then no labels are added to feed_dict.
Hint: The keys for the feed_dict should match the placeholder tensors
created in add_placeholders.
Args:
input_batch: A batch of input data.
label_batch: A batch of label data.
Returns:
feed_dict: The feed dictionary mapping from placeholders to values.
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return feed_dict
def add_training_op(self, loss):
"""Sets up the training Ops.
Creates an optimizer and applies the gradients to all trainable variables.
The Op returned by this function is what must be passed to the
`sess.run()` call to cause the model to train. See
https://www.tensorflow.org/versions/r0.7/api_docs/python/train.html#Optimizer
for more information.
Hint: Use tf.train.GradientDescentOptimizer to get an optimizer object.
Calling optimizer.minimize() will return a train_op object.
Args:
loss: Loss tensor, from cross_entropy_loss.
Returns:
train_op: The Op for training.
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return train_op
def add_model(self, input_data):
"""Adds a linear-layer plus a softmax transformation
The core transformation for this model which transforms a batch of input
data into a batch of predictions. In this case, the mathematical
transformation effected is
y = softmax(xW + b)
Hint: Make sure to create tf.Variables as needed. Also, make sure to use
tf.name_scope to ensure that your name spaces are clean.
Hint: For this simple use-case, it's sufficient to initialize both weights W
and biases b with zeros.
Args:
input_data: A tensor of shape (batch_size, n_features).
Returns:
out: A tensor of shape (batch_size, n_classes)
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return out
def add_loss_op(self, pred):
"""Adds cross_entropy_loss ops to the computational graph.
Hint: Use the cross_entropy_loss function we defined. This should be a very
short function.
Args:
pred: A tensor of shape (batch_size, n_classes)
Returns:
loss: A 0-d tensor (scalar)
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return loss
def run_epoch(self, sess, input_data, input_labels):
"""Runs an epoch of training.
Trains the model for one epoch.
Args:
sess: tf.Session() object
input_data: np.ndarray of shape (n_samples, n_features)
input_labels: np.ndarray of shape (n_samples, n_classes)
Returns:
average_loss: scalar. Average minibatch loss of model on epoch.
"""
# And then after everything is built, start the training loop.
average_loss = 0
for step, (input_batch, label_batch) in enumerate(
data_iterator(input_data, input_labels,
batch_size=self.config.batch_size,
label_size=self.config.n_classes)):
# Fill a feed dictionary with the actual set of images and labels
# for this particular training step.
feed_dict = self.create_feed_dict(input_batch, label_batch)
# Run one step of the model. The return values are the activations
# from the `self.train_op` (which is discarded) and the `loss` Op. To
# inspect the values of your Ops or variables, you may include them
# in the list passed to sess.run() and the value tensors will be
# returned in the tuple from the call.
_, loss_value = sess.run([self.train_op, self.loss], feed_dict=feed_dict)
average_loss += loss_value
average_loss = average_loss / (step + 1)  # step is 0-indexed
return average_loss
def fit(self, sess, input_data, input_labels):
"""Fit model on provided data.
Args:
sess: tf.Session()
input_data: np.ndarray of shape (n_samples, n_features)
input_labels: np.ndarray of shape (n_samples, n_classes)
Returns:
losses: list of loss per epoch
"""
losses = []
for epoch in range(self.config.max_epochs):
start_time = time.time()
average_loss = self.run_epoch(sess, input_data, input_labels)
duration = time.time() - start_time
# Print status to stdout.
print('Epoch %d: loss = %.2f (%.3f sec)'
% (epoch, average_loss, duration))
losses.append(average_loss)
return losses
def __init__(self, config):
"""Initializes the model.
Args:
config: A model configuration object of type Config
"""
self.config = config
# Generate placeholders for the images and labels.
self.load_data()
self.add_placeholders()
self.pred = self.add_model(self.input_placeholder)
self.loss = self.add_loss_op(self.pred)
self.train_op = self.add_training_op(self.loss)
def test_SoftmaxModel():
"""Train softmax model for a number of steps."""
config = Config()
with tf.Graph().as_default():
model = SoftmaxModel(config)
# Create a session for running Ops on the Graph.
sess = tf.Session()
# Run the Op to initialize the variables.
init = tf.initialize_all_variables()
sess.run(init)
losses = model.fit(sess, model.input_data, model.input_labels)
# If ops are implemented correctly, the average loss should fall close to zero
# rapidly.
assert losses[-1] < .5
print "Basic (non-exhaustive) classifier tests pass\n"
if __name__ == "__main__":
test_SoftmaxModel()
import numpy as np
import tensorflow as tf
def softmax(x):
"""
Compute the softmax function in tensorflow.
You might find the tensorflow functions tf.exp, tf.reduce_max,
tf.reduce_sum, tf.expand_dims useful. (Many solutions are possible, so you may
not need to use all of these functions). Recall also that many common
tensorflow operations are sugared (e.g. x * y does a tensor multiplication
if x and y are both tensors). Make sure to implement the numerical stability
fixes as in the previous homework!
Args:
x: tf.Tensor with shape (n_samples, n_features). Note feature vectors are
represented by row-vectors. (For simplicity, no need to handle 1-d
input as in the previous homework)
Returns:
out: tf.Tensor with shape (n_samples, n_features). You need to construct this
tensor in this problem.
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return out
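
# For reference while debugging: a minimal numpy sketch of the numerically
# stable softmax the docstring describes (shift each row by its max before
# exponentiating). This is a checking aid, not the TensorFlow solution itself.
def softmax_numpy_reference(x):
    shifted = x - np.max(x, axis=1, keepdims=True)  # stability fix
    exps = np.exp(shifted)
    return exps / np.sum(exps, axis=1, keepdims=True)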
def cross_entropy_loss(y, yhat):
"""
Compute the cross entropy loss in tensorflow.
y is a one-hot tensor of shape (n_samples, n_classes) and yhat is a tensor
of shape (n_samples, n_classes). y should be of dtype tf.int32, and yhat should
be of dtype tf.float32.
The functions tf.to_float, tf.reduce_sum, and tf.log might prove useful. (Many
solutions are possible, so you may not need to use all of these functions).
Note: You are NOT allowed to use the tensorflow built-in cross-entropy
functions.
Args:
y: tf.Tensor with shape (n_samples, n_classes). One-hot encoded.
yhat: tf.Tensor with shape (n_samples, n_classes). Each row encodes a
probability distribution and should sum to 1.
Returns:
out: tf.Tensor with shape (1,) (Scalar output). You need to construct this
tensor in the problem.
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return out
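
# For reference while debugging: a numpy sketch of the same cross-entropy,
# CE(y, yhat) = -sum(y * log(yhat)) summed over all samples and classes.
# Again, a checking aid rather than the TensorFlow solution.
def cross_entropy_loss_numpy_reference(y, yhat):
    return -np.sum(y * np.log(yhat))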
def test_softmax_basic():
"""
Some simple tests to get you started.
Warning: these are not exhaustive.
"""
print "Running basic tests..."
test1 = softmax(tf.convert_to_tensor(
np.array([[1001,1002],[3,4]]), dtype=tf.float32))
with tf.Session():
test1 = test1.eval()
assert np.amax(np.fabs(test1 - np.array(
[0.26894142, 0.73105858]))) <= 1e-6
test2 = softmax(tf.convert_to_tensor(
np.array([[-1001,-1002]]), dtype=tf.float32))
with tf.Session():
test2 = test2.eval()
assert np.amax(np.fabs(test2 - np.array(
[0.73105858, 0.26894142]))) <= 1e-6
print "Basic (non-exhaustive) softmax tests pass\n"
def test_cross_entropy_loss_basic():
"""
Some simple tests to get you started.
Warning: these are not exhaustive.
"""
y = np.array([[0, 1], [1, 0], [1, 0]])
yhat = np.array([[.5, .5], [.5, .5], [.5, .5]])
test1 = cross_entropy_loss(
tf.convert_to_tensor(y, dtype=tf.int32),
tf.convert_to_tensor(yhat, dtype=tf.float32))
with tf.Session():
test1 = test1.eval()
result = -3 * np.log(.5)
assert np.amax(np.fabs(test1 - result)) <= 1e-6
print "Basic (non-exhaustive) cross-entropy tests pass\n"
if __name__ == "__main__":
test_softmax_basic()
test_cross_entropy_loss_basic()
import os
import getpass
import sys
import time
import numpy as np
import tensorflow as tf
from q2_initialization import xavier_weight_init
import data_utils.utils as du
import data_utils.ner as ner
from utils import data_iterator
from model import LanguageModel
class Config(object):
"""Holds model hyperparams and data information.
The config class is used to store various hyperparameters and dataset
information parameters. Model objects are passed a Config() object at
instantiation.
"""
embed_size = 50
batch_size = 64
label_size = 5
hidden_size = 100
max_epochs = 24
early_stopping = 2
dropout = 0.9
lr = 0.001
l2 = 0.001
window_size = 3
class NERModel(LanguageModel):
"""Implements a NER (Named Entity Recognition) model.
This class implements a deep network for named entity recognition. It
inherits from LanguageModel, which has an add_embedding method in addition to
the standard Model method.
"""
def load_data(self, debug=False):
"""Loads starter word-vectors and train/dev/test data."""
# Load the starter word vectors
self.wv, word_to_num, num_to_word = ner.load_wv(
'data/ner/vocab.txt', 'data/ner/wordVectors.txt')
tagnames = ['O', 'LOC', 'MISC', 'ORG', 'PER']
self.num_to_tag = dict(enumerate(tagnames))
tag_to_num = {v:k for k,v in self.num_to_tag.iteritems()}
# Load the training set
docs = du.load_dataset('data/ner/train')
self.X_train, self.y_train = du.docs_to_windows(
docs, word_to_num, tag_to_num, wsize=self.config.window_size)
if debug:
self.X_train = self.X_train[:1024]
self.y_train = self.y_train[:1024]
# Load the dev set (for tuning hyperparameters)
docs = du.load_dataset('data/ner/dev')
self.X_dev, self.y_dev = du.docs_to_windows(
docs, word_to_num, tag_to_num, wsize=self.config.window_size)
if debug:
self.X_dev = self.X_dev[:1024]
self.y_dev = self.y_dev[:1024]
# Load the test set (dummy labels only)
docs = du.load_dataset('data/ner/test.masked')
self.X_test, self.y_test = du.docs_to_windows(
docs, word_to_num, tag_to_num, wsize=self.config.window_size)
def add_placeholders(self):
"""Generate placeholder variables to represent the input tensors
These placeholders are used as inputs by the rest of the model building
code and will be fed data during training. Note that when "None" is in a
placeholder's shape, it's flexible.
Adds the following nodes to the computational graph:
input_placeholder: Input placeholder tensor of shape
(None, window_size), type tf.int32
labels_placeholder: Labels placeholder tensor of shape
(None, label_size), type tf.float32
dropout_placeholder: Dropout value placeholder (scalar),
type tf.float32
Add these placeholders to self as the instance variables
self.input_placeholder
self.labels_placeholder
self.dropout_placeholder
(Don't change the variable names)
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
def create_feed_dict(self, input_batch, dropout, label_batch=None):
"""Creates the feed_dict for softmax classifier.
A feed_dict takes the form of:
feed_dict = {
<placeholder>: <tensor of values to be passed for placeholder>,
....
}
Hint: The keys for the feed_dict should be a subset of the placeholder
tensors created in add_placeholders.
Hint: When label_batch is None, don't add a labels entry to the feed_dict.
Args:
input_batch: A batch of input data.
label_batch: A batch of label data.
Returns:
feed_dict: The feed dictionary mapping from placeholders to values.
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return feed_dict
def add_embedding(self):
"""Add embedding layer that maps from vocabulary to vectors.
Creates an embedding tensor of shape (len(self.wv), embed_size). Use the
input_placeholder to retrieve the embeddings for words in the current batch.
(Words are discrete entities. They need to be transformed into vectors for use
in deep-learning. Although we won't do so in this problem, in practice it's
useful to initialize the embedding with pre-trained word-vectors. For this
problem, using the default initializer is sufficient.)
Hint: This layer should use the input_placeholder to index into the
embedding.
Hint: You might find tf.nn.embedding_lookup useful.
Hint: See following link to understand what -1 in a shape means.
https://www.tensorflow.org/versions/r0.8/api_docs/python/array_ops.html#reshape
Hint: Check the last slide from the TensorFlow lecture.
Hint: Here are the dimensions of the variables you will need to create:
L: (len(self.wv), embed_size)
Returns:
window: tf.Tensor of shape (-1, window_size*embed_size)
"""
# The embedding lookup is currently only implemented for the CPU
with tf.device('/cpu:0'):
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return window
def add_model(self, window):
"""Adds the 1-hidden-layer NN.
Hint: Use a variable_scope (e.g. "Layer") for the first hidden layer, and
another variable_scope (e.g. "Softmax") for the linear transformation
preceding the softmax. Make sure to use the xavier_weight_init you
defined in the previous part to initialize weights.
Hint: Make sure to add in regularization and dropout to this network.
Regularization should be an addition to the cost function, while
dropout should be added after both variable scopes.
Hint: You might consider using a tensorflow Graph Collection (e.g
"total_loss") to collect the regularization and loss terms (which you
will add in add_loss_op below).
Hint: Here are the dimensions of the various variables you will need to
create
W: (window_size*embed_size, hidden_size)
b1: (hidden_size,)
U: (hidden_size, label_size)
b2: (label_size,)
https://www.tensorflow.org/versions/r0.7/api_docs/python/framework.html#graph-collections
Args:
window: tf.Tensor of shape (-1, window_size*embed_size)
Returns:
output: tf.Tensor of shape (batch_size, label_size)
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return output
def add_loss_op(self, y):
"""Adds cross_entropy_loss ops to the computational graph.
Hint: You can use tf.nn.softmax_cross_entropy_with_logits to simplify your
implementation. You might find tf.reduce_mean useful.
Args:
y: A tensor of shape (batch_size, n_classes)
Returns:
loss: A 0-d tensor (scalar)
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return loss
def add_training_op(self, loss):
"""Sets up the training Ops.
Creates an optimizer and applies the gradients to all trainable variables.
The Op returned by this function is what must be passed to the
`sess.run()` call to cause the model to train. See
https://www.tensorflow.org/versions/r0.7/api_docs/python/train.html#Optimizer
for more information.
Hint: Use tf.train.AdamOptimizer for this model.
Calling optimizer.minimize() will return a train_op object.
Args:
loss: Loss tensor, from cross_entropy_loss.
Returns:
train_op: The Op for training.
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return train_op
def __init__(self, config):
"""Constructs the network using the helper functions defined above."""
self.config = config
self.load_data(debug=False)
self.add_placeholders()
window = self.add_embedding()
y = self.add_model(window)
self.loss = self.add_loss_op(y)
self.predictions = tf.nn.softmax(y)
one_hot_prediction = tf.argmax(self.predictions, 1)
correct_prediction = tf.equal(
tf.argmax(self.labels_placeholder, 1), one_hot_prediction)
self.correct_predictions = tf.reduce_sum(tf.cast(correct_prediction, 'int32'))
self.train_op = self.add_training_op(self.loss)
def run_epoch(self, session, input_data, input_labels,
shuffle=True, verbose=True):
orig_X, orig_y = input_data, input_labels
dp = self.config.dropout
# We're interested in keeping track of the loss and accuracy during training
total_loss = []
total_correct_examples = 0
total_processed_examples = 0
total_steps = len(orig_X) / self.config.batch_size
for step, (x, y) in enumerate(
data_iterator(orig_X, orig_y, batch_size=self.config.batch_size,
label_size=self.config.label_size, shuffle=shuffle)):
feed = self.create_feed_dict(input_batch=x, dropout=dp, label_batch=y)
loss, total_correct, _ = session.run(
[self.loss, self.correct_predictions, self.train_op],
feed_dict=feed)
total_processed_examples += len(x)
total_correct_examples += total_correct
total_loss.append(loss)
##
if verbose and step % verbose == 0:
sys.stdout.write('\r{} / {} : loss = {}'.format(
step, total_steps, np.mean(total_loss)))
sys.stdout.flush()
if verbose:
sys.stdout.write('\r')
sys.stdout.flush()
return np.mean(total_loss), total_correct_examples / float(total_processed_examples)
def predict(self, session, X, y=None):
"""Make predictions from the provided model."""
# If y is given, the loss is also calculated
# We deactivate dropout by setting it to 1
dp = 1
losses = []
results = []
if np.any(y):
data = data_iterator(X, y, batch_size=self.config.batch_size,
label_size=self.config.label_size, shuffle=False)
else:
data = data_iterator(X, batch_size=self.config.batch_size,
label_size=self.config.label_size, shuffle=False)
for step, (x, y) in enumerate(data):
feed = self.create_feed_dict(input_batch=x, dropout=dp)
if np.any(y):
feed[self.labels_placeholder] = y
loss, preds = session.run(
[self.loss, self.predictions], feed_dict=feed)
losses.append(loss)
else:
preds = session.run(self.predictions, feed_dict=feed)
predicted_indices = preds.argmax(axis=1)
results.extend(predicted_indices)
return np.mean(losses), results
def print_confusion(confusion, num_to_tag):
"""Helper method that prints confusion matrix."""
# Summing top to bottom gets the total number of tags guessed as T
total_guessed_tags = confusion.sum(axis=0)
# Summing left to right gets the total number of true tags
total_true_tags = confusion.sum(axis=1)
print
print confusion
for i, tag in sorted(num_to_tag.items()):
prec = confusion[i, i] / float(total_guessed_tags[i])
recall = confusion[i, i] / float(total_true_tags[i])
print 'Tag: {} - P {:2.4f} / R {:2.4f}'.format(tag, prec, recall)
def calculate_confusion(config, predicted_indices, y_indices):
"""Helper method that calculates confusion matrix."""
confusion = np.zeros((config.label_size, config.label_size), dtype=np.int32)
for i in xrange(len(y_indices)):
correct_label = y_indices[i]
guessed_label = predicted_indices[i]
confusion[correct_label, guessed_label] += 1
return confusion
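
# A tiny worked example (illustrative only) of the conventions above: rows of
# the confusion matrix are true tags and columns are guessed tags, so
# precision for tag i divides confusion[i, i] by a column sum, and recall
# divides it by a row sum.
def _confusion_demo():
    class _Cfg(object):
        label_size = 2
    # predictions [0, 1, 1] against true labels [0, 0, 1]
    confusion = calculate_confusion(_Cfg(), [0, 1, 1], [0, 0, 1])
    assert confusion.tolist() == [[1, 1], [0, 1]]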
def save_predictions(predictions, filename):
"""Saves predictions to provided file."""
with open(filename, "wb") as f:
for prediction in predictions:
f.write(str(prediction) + "\n")
def test_NER():
"""Test NER model implementation.
You can use this function to test your implementation of the Named Entity
Recognition network. When debugging, set max_epochs in the Config object to 1
so you can rapidly iterate.
"""
config = Config()
with tf.Graph().as_default():
model = NERModel(config)
init = tf.initialize_all_variables()
saver = tf.train.Saver()
with tf.Session() as session:
best_val_loss = float('inf')
best_val_epoch = 0
session.run(init)
for epoch in xrange(config.max_epochs):
print 'Epoch {}'.format(epoch)
start = time.time()
###
train_loss, train_acc = model.run_epoch(session, model.X_train,
model.y_train)
val_loss, predictions = model.predict(session, model.X_dev, model.y_dev)
print 'Training loss: {}'.format(train_loss)
print 'Training acc: {}'.format(train_acc)
print 'Validation loss: {}'.format(val_loss)
if val_loss < best_val_loss:
best_val_loss = val_loss
best_val_epoch = epoch
if not os.path.exists("./weights"):
os.makedirs("./weights")
saver.save(session, './weights/ner.weights')
if epoch - best_val_epoch > config.early_stopping:
break
###
confusion = calculate_confusion(config, predictions, model.y_dev)
print_confusion(confusion, model.num_to_tag)
print 'Total time: {}'.format(time.time() - start)
saver.restore(session, './weights/ner.weights')
print 'Test'
print '=-=-='
print 'Writing predictions to q2_test.predicted'
_, predictions = model.predict(session, model.X_test, model.y_test)
save_predictions(predictions, "q2_test.predicted")
if __name__ == "__main__":
test_NER()
import numpy as np
import tensorflow as tf
def xavier_weight_init():
"""
Returns function that creates random tensor.
The specified function will take in a shape (tuple or 1-d array) and must
return a random tensor of the specified shape and must be drawn from the
Xavier initialization distribution.
Hint: You might find tf.random_uniform useful.
"""
def _xavier_initializer(shape, **kwargs):
"""Defines an initializer for the Xavier distribution.
This function will be used as a variable scope initializer.
https://www.tensorflow.org/versions/r0.7/how_tos/variable_scope/index.html#initializers-in-variable-scope
Args:
shape: Tuple or 1-d array that specifies dimensions of requested tensor.
Returns:
out: tf.Tensor of specified shape sampled from Xavier distribution.
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return out
# Returns defined initializer function.
return _xavier_initializer
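
# For reference: a numpy sketch of one common form of Xavier initialization,
# drawing uniformly from [-eps, eps] with eps = sqrt(6 / sum(shape)). The
# exact bound to use is part of the assignment, so treat this as a sanity
# check rather than the required answer.
def xavier_numpy_reference(shape):
    eps = np.sqrt(6.0 / np.sum(shape))
    return np.random.uniform(low=-eps, high=eps, size=shape)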
def test_initialization_basic():
"""
Some simple tests for the initialization.
"""
print "Running basic tests..."
xavier_initializer = xavier_weight_init()
shape = (1,)
xavier_mat = xavier_initializer(shape)
assert xavier_mat.get_shape() == shape
shape = (1, 2, 3)
xavier_mat = xavier_initializer(shape)
assert xavier_mat.get_shape() == shape
print "Basic (non-exhaustive) Xavier initialization tests pass\n"
def test_initialization():
"""
Use this space to test your Xavier initialization code by running:
python q1_initialization.py
This function will not be called by the autograder, nor will
your tests be graded.
"""
print "Running your tests..."
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
if __name__ == "__main__":
test_initialization_basic()
import getpass
import sys
import time
import numpy as np
from copy import deepcopy
from utils import calculate_perplexity, get_ptb_dataset, Vocab
from utils import ptb_iterator, sample
import tensorflow as tf
from tensorflow.python.ops.seq2seq import sequence_loss
from model import LanguageModel
# Let's set the parameters of our model
# http://arxiv.org/pdf/1409.2329v4.pdf shows parameters that would achieve near
# SotA numbers
class Config(object):
"""Holds model hyperparams and data information.
The config class is used to store various hyperparameters and dataset
information parameters. Model objects are passed a Config() object at
instantiation.
"""
batch_size = 64
embed_size = 50
hidden_size = 100
num_steps = 10
max_epochs = 16
early_stopping = 2
dropout = 0.9
lr = 0.001
class RNNLM_Model(LanguageModel):
def load_data(self, debug=False):
"""Loads starter word-vectors and train/dev/test data."""
self.vocab = Vocab()
self.vocab.construct(get_ptb_dataset('train'))
self.encoded_train = np.array(
[self.vocab.encode(word) for word in get_ptb_dataset('train')],
dtype=np.int32)
self.encoded_valid = np.array(
[self.vocab.encode(word) for word in get_ptb_dataset('valid')],
dtype=np.int32)
self.encoded_test = np.array(
[self.vocab.encode(word) for word in get_ptb_dataset('test')],
dtype=np.int32)
if debug:
num_debug = 1024
self.encoded_train = self.encoded_train[:num_debug]
self.encoded_valid = self.encoded_valid[:num_debug]
self.encoded_test = self.encoded_test[:num_debug]
def add_placeholders(self):
"""Generate placeholder variables to represent the input tensors
These placeholders are used as inputs by the rest of the model building
code and will be fed data during training. Note that when "None" is in a
placeholder's shape, it's flexible.
Adds the following nodes to the computational graph:
input_placeholder: Input placeholder tensor of shape
(None, num_steps), type tf.int32
labels_placeholder: Labels placeholder tensor of shape
(None, num_steps), type tf.float32
dropout_placeholder: Dropout value placeholder (scalar),
type tf.float32
Add these placeholders to self as the instance variables
self.input_placeholder
self.labels_placeholder
self.dropout_placeholder
(Don't change the variable names)
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
def add_embedding(self):
"""Add embedding layer.
Hint: This layer should use the input_placeholder to index into the
embedding.
Hint: You might find tf.nn.embedding_lookup useful.
Hint: You might find tf.split, tf.squeeze useful in constructing tensor inputs
Hint: Check the last slide from the TensorFlow lecture.
Hint: Here are the dimensions of the variables you will need to create:
L: (len(self.vocab), embed_size)
Returns:
inputs: List of length num_steps, each of whose elements should be
a tensor of shape (batch_size, embed_size).
"""
# The embedding lookup is currently only implemented for the CPU
with tf.device('/cpu:0'):
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return inputs
def add_projection(self, rnn_outputs):
"""Adds a projection layer.
The projection layer transforms the hidden representation to a distribution
over the vocabulary.
Hint: Here are the dimensions of the variables you will need to
create
U: (hidden_size, len(vocab))
b_2: (len(vocab),)
Args:
rnn_outputs: List of length num_steps, each of whose elements should be
a tensor of shape (batch_size, embed_size).
Returns:
outputs: List of length num_steps, each a tensor of shape
(batch_size, len(vocab))
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return outputs
def add_loss_op(self, output):
"""Adds loss ops to the computational graph.
Hint: Use tensorflow.python.ops.seq2seq.sequence_loss to implement sequence loss.
Args:
output: A tensor of shape (None, len(self.vocab))
Returns:
loss: A 0-d tensor (scalar)
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return loss
def add_training_op(self, loss):
"""Sets up the training Ops.
Creates an optimizer and applies the gradients to all trainable variables.
The Op returned by this function is what must be passed to the
`sess.run()` call to cause the model to train. See
https://www.tensorflow.org/versions/r0.7/api_docs/python/train.html#Optimizer
for more information.
Hint: Use tf.train.AdamOptimizer for this model.
Calling optimizer.minimize() will return a train_op object.
Args:
loss: Loss tensor, from cross_entropy_loss.
Returns:
train_op: The Op for training.
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return train_op
def __init__(self, config):
self.config = config
self.load_data(debug=False)
self.add_placeholders()
self.inputs = self.add_embedding()
self.rnn_outputs = self.add_model(self.inputs)
self.outputs = self.add_projection(self.rnn_outputs)
# We want to check how well we correctly predict the next word
# We cast o to float64 as there are numerical issues at hand
# (i.e. sum(output of softmax) = 1.00000298179 and not 1)
self.predictions = [tf.nn.softmax(tf.cast(o, 'float64')) for o in self.outputs]
# Reshape the output into len(vocab) sized chunks - the -1 says as many as
# needed to evenly divide
output = tf.reshape(tf.concat(1, self.outputs), [-1, len(self.vocab)])
self.calculate_loss = self.add_loss_op(output)
self.train_step = self.add_training_op(self.calculate_loss)
def add_model(self, inputs):
"""Creates the RNN LM model.
In the space provided below, you need to implement the equations for the
RNN LM model. Note that you may NOT use built in rnn_cell functions from
tensorflow.
Hint: Use a zeros tensor of shape (batch_size, hidden_size) as
initial state for the RNN. Add this to self as instance variable
self.initial_state
(Don't change variable name)
Hint: Add the last RNN output to self as instance variable
self.final_state
(Don't change variable name)
Hint: Make sure to apply dropout to the inputs and the outputs.
Hint: Use a variable scope (e.g. "RNN") to define RNN variables.
Hint: Perform an explicit for-loop over inputs. You can use
scope.reuse_variables() to ensure that the weights used at each
iteration (each time-step) are the same. (Make sure you don't call
this for iteration 0 though or nothing will be initialized!)
Hint: Here are the dimensions of the various variables you will need to
create:
H: (hidden_size, hidden_size)
I: (embed_size, hidden_size)
b_1: (hidden_size,)
Args:
inputs: List of length num_steps, each of whose elements should be
a tensor of shape (batch_size, embed_size).
Returns:
outputs: List of length num_steps, each of whose elements should be
a tensor of shape (batch_size, hidden_size)
"""
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
return rnn_outputs
def run_epoch(self, session, data, train_op=None, verbose=10):
config = self.config
dp = config.dropout
if not train_op:
train_op = tf.no_op()
dp = 1
total_steps = sum(1 for x in ptb_iterator(data, config.batch_size, config.num_steps))
total_loss = []
state = self.initial_state.eval()
for step, (x, y) in enumerate(
ptb_iterator(data, config.batch_size, config.num_steps)):
# We need to pass in the initial state and retrieve the final state to give
# the RNN proper history
feed = {self.input_placeholder: x,
self.labels_placeholder: y,
self.initial_state: state,
self.dropout_placeholder: dp}
loss, state, _ = session.run(
[self.calculate_loss, self.final_state, train_op], feed_dict=feed)
total_loss.append(loss)
if verbose and step % verbose == 0:
sys.stdout.write('\r{} / {} : pp = {}'.format(
step, total_steps, np.exp(np.mean(total_loss))))
sys.stdout.flush()
if verbose:
sys.stdout.write('\r')
return np.exp(np.mean(total_loss))
def generate_text(session, model, config, starting_text='<eos>',
stop_length=100, stop_tokens=None, temp=1.0):
"""Generate text from the model.
Hint: Create a feed-dictionary and use sess.run() to execute the model. Note
that you will need to use model.initial_state as a key to feed_dict
Hint: Fetch model.final_state and model.predictions[-1]. (You set
model.final_state in add_model() and model.predictions is set in
__init__)
Hint: Store the outputs of running the model in local variables state and
y_pred (used in the pre-implemented parts of this function.)
Args:
session: tf.Session() object
model: Object of type RNNLM_Model
config: A Config() object
starting_text: Initial text passed to model.
Returns:
output: List of word idxs
"""
state = model.initial_state.eval()
# Imagine tokens as a batch size of one, length of len(tokens[0])
tokens = [model.vocab.encode(word) for word in starting_text.split()]
for i in xrange(stop_length):
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
next_word_idx = sample(y_pred[0], temperature=temp)
tokens.append(next_word_idx)
if stop_tokens and model.vocab.decode(tokens[-1]) in stop_tokens:
break
output = [model.vocab.decode(word_idx) for word_idx in tokens]
return output
def generate_sentence(session, model, config, *args, **kwargs):
"""Convenice to generate a sentence from the model."""
return generate_text(session, model, config, *args, stop_tokens=['<eos>'], **kwargs)
def test_RNNLM():
config = Config()
gen_config = deepcopy(config)
gen_config.batch_size = gen_config.num_steps = 1
# We create the training model and generative model
with tf.variable_scope('RNNLM') as scope:
model = RNNLM_Model(config)
# This instructs gen_model to reuse the same variables as the model above
scope.reuse_variables()
gen_model = RNNLM_Model(gen_config)
init = tf.initialize_all_variables()
saver = tf.train.Saver()
with tf.Session() as session:
best_val_pp = float('inf')
best_val_epoch = 0
session.run(init)
for epoch in xrange(config.max_epochs):
print 'Epoch {}'.format(epoch)
start = time.time()
###
train_pp = model.run_epoch(
session, model.encoded_train,
train_op=model.train_step)
valid_pp = model.run_epoch(session, model.encoded_valid)
print 'Training perplexity: {}'.format(train_pp)
print 'Validation perplexity: {}'.format(valid_pp)
if valid_pp < best_val_pp:
best_val_pp = valid_pp
best_val_epoch = epoch
saver.save(session, './ptb_rnnlm.weights')
if epoch - best_val_epoch > config.early_stopping:
break
print 'Total time: {}'.format(time.time() - start)
saver.restore(session, 'ptb_rnnlm.weights')
test_pp = model.run_epoch(session, model.encoded_test)
print '=-=' * 5
print 'Test perplexity: {}'.format(test_pp)
print '=-=' * 5
starting_text = 'in palo alto'
while starting_text:
print ' '.join(generate_sentence(
session, gen_model, gen_config, starting_text=starting_text, temp=1.0))
starting_text = raw_input('> ')
if __name__ == "__main__":
test_RNNLM()
from collections import defaultdict
import numpy as np
class Vocab(object):
def __init__(self):
self.word_to_index = {}
self.index_to_word = {}
self.word_freq = defaultdict(int)
self.total_words = 0
self.unknown = '<unk>'
self.add_word(self.unknown, count=0)
def add_word(self, word, count=1):
if word not in self.word_to_index:
index = len(self.word_to_index)
self.word_to_index[word] = index
self.index_to_word[index] = word
self.word_freq[word] += count
def construct(self, words):
for word in words:
self.add_word(word)
self.total_words = float(sum(self.word_freq.values()))
print '{} total words with {} uniques'.format(self.total_words, len(self.word_freq))
def encode(self, word):
if word not in self.word_to_index:
word = self.unknown
return self.word_to_index[word]
def decode(self, index):
return self.index_to_word[index]
def __len__(self):
return len(self.word_freq)
def calculate_perplexity(log_probs):
# https://web.stanford.edu/class/cs124/lec/languagemodeling.pdf
perp = 0
for p in log_probs:
perp += -p
return np.exp(perp / len(log_probs))
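
# Worked example: a model that assigns probability 0.5 to every observed
# word has perplexity exp(mean(-log 0.5)) = 2.
def _perplexity_demo():
    log_probs = [np.log(0.5)] * 4
    assert abs(calculate_perplexity(log_probs) - 2.0) < 1e-6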
def get_ptb_dataset(dataset='train'):
fn = 'data/ptb/ptb.{}.txt'
for line in open(fn.format(dataset)):
for word in line.split():
yield word
# Add token to the end of the line
# Equivalent to <eos> in:
# https://github.com/wojzaremba/lstm/blob/master/data.lua#L32
# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/models/rnn/ptb/reader.py#L31
yield '<eos>'
def ptb_iterator(raw_data, batch_size, num_steps):
# Pulled from https://github.com/tensorflow/tensorflow/blob/master/tensorflow/models/rnn/ptb/reader.py#L82
raw_data = np.array(raw_data, dtype=np.int32)
data_len = len(raw_data)
batch_len = data_len // batch_size
data = np.zeros([batch_size, batch_len], dtype=np.int32)
for i in range(batch_size):
data[i] = raw_data[batch_len * i:batch_len * (i + 1)]
epoch_size = (batch_len - 1) // num_steps
if epoch_size == 0:
raise ValueError("epoch_size == 0, decrease batch_size or num_steps")
for i in range(epoch_size):
x = data[:, i * num_steps:(i + 1) * num_steps]
y = data[:, i * num_steps + 1:(i + 1) * num_steps + 1]
yield (x, y)
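
# Worked example of the batching scheme: the stream is split into batch_size
# contiguous rows, and each step yields aligned (x, y) slices where y is x
# shifted one position to the right.
def _ptb_iterator_demo():
    batches = list(ptb_iterator(range(10), batch_size=2, num_steps=3))
    x, y = batches[0]
    assert x.tolist() == [[0, 1, 2], [5, 6, 7]]
    assert y.tolist() == [[1, 2, 3], [6, 7, 8]]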
def sample(a, temperature=1.0):
# helper function to sample an index from a probability array
# from https://github.com/fchollet/keras/blob/master/examples/lstm_text_generation.py
a = np.log(a) / temperature
a = np.exp(a) / np.sum(np.exp(a))
return np.argmax(np.random.multinomial(1, a, 1))
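
# Worked example: lowering the temperature sharpens the distribution, so
# low-temperature samples almost always pick the highest-probability index.
def _sample_demo():
    np.random.seed(0)
    dist = np.array([0.1, 0.2, 0.7])
    draws = [sample(dist, temperature=0.1) for _ in xrange(100)]
    assert draws.count(2) > 90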
def data_iterator(orig_X, orig_y=None, batch_size=32, label_size=2, shuffle=False):
# Optionally shuffle the data before training
if shuffle:
indices = np.random.permutation(len(orig_X))
data_X = orig_X[indices]
data_y = orig_y[indices] if np.any(orig_y) else None
else:
data_X = orig_X
data_y = orig_y
###
total_processed_examples = 0
total_steps = int(np.ceil(len(data_X) / float(batch_size)))
for step in xrange(total_steps):
# Create the batch by selecting up to batch_size elements
batch_start = step * batch_size
x = data_X[batch_start:batch_start + batch_size]
# Convert our target from the class index to a one hot vector
y = None
if np.any(data_y):
y_indices = data_y[batch_start:batch_start + batch_size]
y = np.zeros((len(x), label_size), dtype=np.int32)
y[np.arange(len(y_indices)), y_indices] = 1
###
yield x, y
total_processed_examples += len(x)
# Sanity check to make sure we iterated over all the dataset as intended
assert total_processed_examples == len(data_X), 'Expected {} and processed {}'.format(len(data_X), total_processed_examples)
if [ -f assignment2.zip ]; then
    echo "data exist"
else
    wget http://cs224d.stanford.edu/assignment2/assignment2.zip
fi

if [ $? -eq 0 ]; then
    unzip assignment2.zip
    cp assignment2_release/data/ner/wordVectors.txt ./data
    cp assignment2_release/data/ner/vocab.txt ./data
    rm -rf assignment2.zip assignment2_release
else
    echo "download data error!" >&2
    exit 1
fi
B-LOC
I-LOC
B-MISC
I-MISC
B-ORG
I-ORG
B-PER
I-PER
O
CRICKET NNP I-NP O
- : O O
LEICESTERSHIRE NNP I-NP I-ORG
TAKE NNP I-NP O
OVER IN I-PP O
AT NNP I-NP O
TOP NNP I-NP O
AFTER NNP I-NP O
INNINGS NNP I-NP O
VICTORY NN I-NP O
. . O O
LONDON NNP I-NP I-LOC
1996-08-30 CD I-NP O
West NNP I-NP I-MISC
Indian NNP I-NP I-MISC
all-rounder NN I-NP O
Phil NNP I-NP I-PER
Simmons NNP I-NP I-PER
took VBD I-VP O
four CD I-NP O
for IN I-PP O
38 CD I-NP O
on IN I-PP O
Friday NNP I-NP O
as IN I-PP O
Leicestershire NNP I-NP I-ORG
beat VBD I-VP O
Somerset NNP I-NP I-ORG
by IN I-PP O
an DT I-NP O
innings NN I-NP O
and CC O O
39 CD I-NP O
runs NNS I-NP O
in IN I-PP O
two CD I-NP O
days NNS I-NP O
to TO I-VP O
take VB I-VP O
over IN I-PP O
at IN B-PP O
the DT I-NP O
head NN I-NP O
of IN I-PP O
the DT I-NP O
county NN I-NP O
championship NN I-NP O
. . O O
Their PRP$ I-NP O
stay NN I-NP O
on IN I-PP O
top NN I-NP O
, , O O
though RB I-ADVP O
, , O O
may MD I-VP O
be VB I-VP O
short-lived JJ I-ADJP O
as IN I-PP O
title NN I-NP O
rivals NNS I-NP O
Essex NNP I-NP I-ORG
, , O O
Derbyshire NNP I-NP I-ORG
and CC I-NP O
Surrey NNP I-NP I-ORG
all DT O O
closed VBD I-VP O
in RP I-PRT O
on IN I-PP O
victory NN I-NP O
while IN I-SBAR O
Kent NNP I-NP I-ORG
made VBD I-VP O
up RP I-PRT O
for IN I-PP O
lost VBN I-NP O
time NN I-NP O
in IN I-PP O
their PRP$ I-NP O
rain-affected JJ I-NP O
match NN I-NP O
against IN I-PP O
Nottinghamshire NNP I-NP I-ORG
. . O O
After IN I-PP O
bowling VBG I-NP O
Somerset NNP I-NP I-ORG
out RP I-PRT O
for IN I-PP O
83 CD I-NP O
on IN I-PP O
the DT I-NP O
opening NN I-NP O
morning NN I-NP O
at IN I-PP O
Grace NNP I-NP I-LOC
Road NNP I-NP I-LOC
, , O O
Leicestershire NNP I-NP I-ORG
extended VBD I-VP O
their PRP$ I-NP O
first JJ I-NP O
innings NN I-NP O
by IN I-PP O
94 CD I-NP O
runs VBZ I-VP O
before IN I-PP O
being VBG I-VP O
bowled VBD I-VP O
out RP I-PRT O
for IN I-PP O
296 CD I-NP O
with IN I-PP O
England NNP I-NP I-LOC
discard VBP I-VP O
Andy NNP I-NP I-PER
Caddick NNP I-NP I-PER
taking VBG I-VP O
three CD I-NP O
for IN I-PP O
83 CD I-NP O
. . O O
EU NNP I-NP I-ORG
rejects VBZ I-VP O
German JJ I-NP I-MISC
call NN I-NP O
to TO I-VP O
boycott VB I-VP O
British JJ I-NP I-MISC
lamb NN I-NP O
. . O O
Peter NNP I-NP I-PER
Blackburn NNP I-NP I-PER
BRUSSELS NNP I-NP I-LOC
1996-08-22 CD I-NP O
The DT I-NP O
European NNP I-NP I-ORG
Commission NNP I-NP I-ORG
said VBD I-VP O
on IN I-PP O
Thursday NNP I-NP O
it PRP B-NP O
disagreed VBD I-VP O
with IN I-PP O
German JJ I-NP I-MISC
advice NN I-NP O
to TO I-PP O
consumers NNS I-NP O
to TO I-VP O
shun VB I-VP O
British JJ I-NP I-MISC
lamb NN I-NP O
until IN I-SBAR O
scientists NNS I-NP O
determine VBP I-VP O
whether IN I-SBAR O
mad JJ I-NP O
cow NN I-NP O
disease NN I-NP O
can MD I-VP O
be VB I-VP O
transmitted VBN I-VP O
to TO I-PP O
sheep NN I-NP O
. . O O
Germany NNP I-NP I-LOC
's POS B-NP O
representative NN I-NP O
to TO I-PP O
the DT I-NP O
European NNP I-NP I-ORG
Union NNP I-NP I-ORG
's POS B-NP O
veterinary JJ I-NP O
committee NN I-NP O
Werner NNP I-NP I-PER
Zwingmann NNP I-NP I-PER
said VBD I-VP O
on IN I-PP O
Wednesday NNP I-NP O
consumers NNS I-NP O
should MD I-VP O
buy VB I-VP O
sheepmeat NN I-NP O
from IN I-PP O
countries NNS I-NP O
other JJ I-ADJP O
than IN I-PP O
Britain NNP I-NP I-LOC
until IN I-SBAR O
the DT I-NP O
scientific JJ I-NP O
advice NN I-NP O
was VBD I-VP O
clearer JJR I-ADJP O
. . O O
" " O O
We PRP I-NP O
do VBP I-VP O
n't RB I-VP O
support VB I-VP O
any DT I-NP O
such JJ I-NP O
recommendation NN I-NP O
because IN I-SBAR O
we PRP I-NP O
do VBP I-VP O
n't RB I-VP O
see VB I-VP O
any DT I-NP O
grounds NNS I-NP O
for IN I-PP O
it PRP I-NP O
, , O O
" " O O
the DT I-NP O
Commission NNP I-NP I-ORG
's POS B-NP O
chief JJ I-NP O
spokesman NN I-NP O
Nikolaus NNP I-NP I-PER
van NNP I-NP I-PER
der FW I-NP I-PER
Pas NNP I-NP I-PER
told VBD I-VP O
a DT I-NP O
news NN I-NP O
briefing NN I-NP O
. . O O
He PRP I-NP O
said VBD I-VP O
further JJ I-NP O
scientific JJ I-NP O
study NN I-NP O
was VBD I-VP O
required VBN I-VP O
and CC O O
if IN I-SBAR O
it PRP I-NP O
was VBD I-VP O
found VBN I-VP O
that IN I-SBAR O
action NN I-NP O
was VBD I-VP O
needed VBN I-VP O
it PRP I-NP O
should MD I-VP O
be VB I-VP O
taken VBN I-VP O
by IN I-PP O
the DT I-NP O
European NNP I-NP I-ORG
Union NNP I-NP I-ORG
. . O O
import paddle.v2.fluid as fluid
from paddle.v2.fluid.initializer import NormalInitializer
from utils import logger, load_dict, get_embedding
import math
def ner_net(word_dict_len, label_dict_len, stack_num=2, is_train=True):
    mark_dict_len = 2
    word_dim = 50
    mark_dim = 5
    hidden_dim = 300
    IS_SPARSE = True
    embedding_name = 'emb'
    word = fluid.layers.data(
        name='word', shape=[1], dtype='int64', lod_level=1)
    word_embedding = fluid.layers.embedding(
        input=word,
        size=[word_dict_len, word_dim],
        dtype='float32',
        is_sparse=IS_SPARSE,
        param_attr=fluid.ParamAttr(
            name=embedding_name, trainable=True))
    mark = fluid.layers.data(
        name='mark', shape=[1], dtype='int64', lod_level=1)
    mark_embedding = fluid.layers.embedding(
        input=mark,
        size=[mark_dict_len, mark_dim],
        dtype='float32',
        is_sparse=IS_SPARSE)
    # Project the word embedding up to the LSTM size first: dynamic_lstm
    # expects its input to already have dimension `size`.
    lstm_input = fluid.layers.fc(input=word_embedding, size=hidden_dim)
    lstm_h, _ = fluid.layers.dynamic_lstm(
        input=lstm_input, size=hidden_dim, is_reverse=False)
    # Per-token emission scores for the CRF layers below. Note: no sequence
    # pooling here, since the CRF needs one score vector per token, not one
    # per sentence.
    prediction = fluid.layers.fc(input=lstm_h, size=label_dict_len)
    # The full stacked bidirectional LSTM + CRF model below is kept
    # commented out (still work in progress in this commit):
    '''
cost = fluid.layers.cross_entropy(input=prediction, label=label)
avg_cost = fluid.layers.mean(x=cost)
sgd_optimizer = fluid.optimizer.SGD(learning_rate=0.001)
word_caps_vector = fluid.layers.concat(input=[word_embedding, mark_embedding], axis = 1)
mix_hidden_lr = 1e-3
rnn_para_attr = fluid.ParamAttr(
initializer=NormalInitializer(loc=0.0, scale=0.0, seed=0), learning_rate=0.1)
hidden_para_attr = fluid.ParamAttr(
initializer=NormalInitializer(loc=0.0, scale=(1. / math.sqrt(hidden_dim) / 3), seed=0),
learning_rate=mix_hidden_lr)
hidden = fluid.layers.fc(
input=word_caps_vector,
name="__hidden00__",
size=hidden_dim,
act="tanh",
bias_attr=fluid.ParamAttr(
initializer=NormalInitializer(loc=0.0, scale=(1. / math.sqrt(hidden_dim) / 3), seed=0)),
param_attr=fluid.ParamAttr(
initializer=NormalInitializer(loc=0.0, scale=(1. / math.sqrt(hidden_dim) / 3), seed=0)))
fea = []
for direction in ["fwd", "bwd"]:
for i in range(stack_num):
if i != 0:
hidden = fluid.layers.fc(
name="__hidden%02d_%s__" % (i, direction),
size=hidden_dim,
act="stanh",
bias_attr=fluid.ParamAttr(initializer=NormalInitializer(loc=0.0, scale=1.0, seed=0)),
input=[hidden, rnn[0], rnn[1]],
param_attr=[hidden_para_attr, rnn_para_attr, rnn_para_attr])
rnn = fluid.layers.dynamic_lstm(
name="__rnn%02d_%s__" % (i, direction),
input=hidden,
size=hidden_dim,
candidate_activation='relu',
gate_activation='sigmoid',
cell_activation='sigmoid',
bias_attr=fluid.ParamAttr(initializer=NormalInitializer(loc=0.0, scale=1.0, seed=0)),
is_reverse=(i % 2) if direction == "fwd" else not i % 2,
param_attr=rnn_para_attr)
fea += [hidden, rnn[0], rnn[1]]
rnn_fea = fluid.layers.fc(
size=hidden_dim,
bias_attr=fluid.ParamAttr(initializer=NormalInitializer(loc=0.0, scale=(1. / math.sqrt(hidden_dim) / 3), seed=0)),
act="stanh",
input=fea,
param_attr=[hidden_para_attr, rnn_para_attr, rnn_para_attr] * 2)
emission = fluid.layers.fc(size=label_dict_len,
input=rnn_fea,
param_attr=fluid.ParamAttr(initializer=NormalInitializer(loc=0.0, scale=(1. / math.sqrt(hidden_dim) / 3), seed=0)))
'''
    # mix_hidden_lr is used by the CRF below; it is defined again here
    # because its original definition sits inside the commented-out block.
    mix_hidden_lr = 1e-3
    if is_train:
        target = fluid.layers.data(
            name="target", shape=[1], dtype='int64', lod_level=1)
        crf_cost = fluid.layers.linear_chain_crf(
            input=prediction,
            label=target,
            param_attr=fluid.ParamAttr(
                name='crfw',
                initializer=NormalInitializer(
                    loc=0.0, scale=(1. / math.sqrt(hidden_dim) / 3), seed=0),
                learning_rate=mix_hidden_lr))
        # Decode with the same 'crfw' transition parameters. ('emission' only
        # exists in the commented-out model, so decode from 'prediction'.)
        crf_decode = fluid.layers.crf_decoding(
            input=prediction,
            label=target,
            param_attr=fluid.ParamAttr(name='crfw'))
        return crf_cost, crf_decode, word, mark, target
    else:
        predict = fluid.layers.crf_decoding(
            input=prediction,
            param_attr=fluid.ParamAttr(name='crfw'))
        return predict
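
# For intuition, here is a minimal numpy sketch of the Viterbi decoding that
# fluid.layers.crf_decoding performs with the learned 'crfw' parameters. It
# is illustrative only: the real 'crfw' also stores start/end transition
# weights, which are omitted here, and the names below are hypothetical.
import numpy as np

def viterbi_decode(emissions, transitions):
    """emissions:   (seq_len, n_tags) per-token scores.
    transitions: (n_tags, n_tags) score of moving from tag i to tag j.
    Returns the highest-scoring tag sequence as a list of tag indices."""
    seq_len, n_tags = emissions.shape
    score = emissions[0].copy()  # best score of any path ending in each tag
    backptr = np.zeros((seq_len, n_tags), dtype=np.int32)
    for t in range(1, seq_len):
        # cand[i, j]: best path ending in tag i at t-1, then tag j at t
        cand = score[:, None] + transitions + emissions[t][None, :]
        backptr[t] = np.argmax(cand, axis=0)
        score = np.max(cand, axis=0)
    best = [int(np.argmax(score))]  # best final tag
    for t in range(seq_len - 1, 0, -1):
        best.append(int(backptr[t, best[-1]]))
    return best[::-1]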
[Omitted: a committed results file of roughly 1,000 data lines, six floating-point columns per line — apparently two (precision, recall, F1) triples logged as training progresses, rising from 0.0 to about 0.73.]
0.7496241 0.67126745 0.7082852 0.7748551 0.68825805 0.7289939
0.61686367 0.58862144 0.6024117 0.6372838 0.61629146 0.6266118
0.710678 0.7057734 0.7082172 0.7244024 0.72745454 0.7259253
0.72947794 0.6985356 0.71367157 0.7546709 0.7237094 0.738866
0.71768767 0.7000505 0.70875937 0.7450061 0.73013574 0.73749596
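Read back into Python, the table above is straightforward to summarize. A minimal sketch, assuming the table is saved as metrics.tsv (hypothetical name) and that, per the print order of the parsing script below, columns 1-3 hold the test-set precision/recall/F1 and columns 4-6 the train-set values:

import numpy as np

# Hypothetical file name; six whitespace-separated columns, one row per pass.
m = np.loadtxt("metrics.tsv")

best = m[:, 2].argmax()  # pass with the highest test-set F1
print("pass %d: test P/R/F1 = %.4f/%.4f/%.4f, train F1 = %.4f"
      % (best, m[best, 0], m[best, 1], m[best, 2], m[best, 5]))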
[3 file diffs omitted by the viewer: too large to display; view the blobs instead]
import sys

# Parse the trainer log from stdin and collect the per-pass summary
# metrics printed for the train set and the test set.
precision_list = []
recall_list = []
f1_list = []
train_precision_list = []
train_recall_list = []
train_f1_list = []

for line in sys.stdin:
    line = line.strip()
    if line.startswith("[TestSet]"):
        for token in line.split(" "):
            field_value = token.split(":")
            if len(field_value) != 2:
                continue
            field = field_value[0].strip()
            value = float(field_value[1].strip("[] "))
            if field == "pass_precision":
                precision_list.append(value)
            elif field == "pass_recall":
                recall_list.append(value)
            elif field == "pass_f1_score":
                f1_list.append(value)
    elif line.startswith("[TrainSet]"):
        for token in line.split(" "):
            field_value = token.split(":")
            if len(field_value) != 2:
                continue
            field = field_value[0].strip()
            value = float(field_value[1].strip("[] "))
            if field == "pass_precision":
                train_precision_list.append(value)
            elif field == "pass_recall":
                train_recall_list.append(value)
            elif field == "pass_f1_score":
                train_f1_list.append(value)

# Each pass must have produced one value per metric, on both sets.
assert len(precision_list) == len(recall_list)
assert len(recall_list) == len(f1_list)
assert len(train_precision_list) == len(train_recall_list)
assert len(train_recall_list) == len(train_f1_list)
assert len(precision_list) == len(train_precision_list)

# One tab-separated row per pass:
# test_precision  test_recall  test_f1  train_precision  train_recall  train_f1
for i in xrange(len(precision_list)):
    print "\t".join(str(v) for v in (
        precision_list[i], recall_list[i], f1_list[i],
        train_precision_list[i], train_recall_list[i], train_f1_list[i]))
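A minimal usage sketch, assuming the parser above is saved as parse_log.py (hypothetical name): it reads the raw trainer log on stdin and writes one tab-separated row per pass on stdout, in the six-column layout shown earlier:

python parse_log.py < train.log > metrics.tsv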
[5 file diffs omitted by the viewer: too large to display; view the blobs instead]
abc shape:
(100232, 50)
<class 'paddle.v2.fluid.framework.Variable'>
(-1L, 1L)
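The trainer log below reports chunk-level Precision, Recall, and F1 every five batches, plus the per-pass [TrainSet]/[TestSet] summary lines that the parser above consumes. The three scores are tied by F1 = 2PR/(P+R); e.g. at Pass 0, Batch 0: 2 x 0.04150454 x 0.30769232 / (0.04150454 + 0.30769232) ≈ 0.07314, matching the logged F1_score.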
Pass 0, Batch 0, Cost [26.871212], Precision [0.04150454], Recall [0.30769232], F1_score[0.07314286]
Pass 0, Batch 5, Cost [18.386833], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 10, Cost [17.246365], Precision [0.00966184], Recall [0.01709402], F1_score[0.01234568]
Pass 0, Batch 15, Cost [20.179188], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 20, Cost [12.831928], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 25, Cost [9.926348], Precision [0.01388889], Recall [0.02739726], F1_score[0.01843318]
Pass 0, Batch 30, Cost [9.757711], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 35, Cost [12.412531], Precision [0.00454545], Recall [0.00884956], F1_score[0.00600601]
Pass 0, Batch 40, Cost [9.986742], Precision [0.01081081], Recall [0.01980198], F1_score[0.01398601]
Pass 0, Batch 45, Cost [10.460817], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 50, Cost [10.195407], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 55, Cost [13.31529], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 60, Cost [10.002723], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 65, Cost [8.355999], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 70, Cost [9.898893], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 75, Cost [9.887169], Precision [0.00574713], Recall [0.00970874], F1_score[0.00722022]
Pass 0, Batch 80, Cost [8.755182], Precision [0.00609756], Recall [0.01075269], F1_score[0.0077821]
Pass 0, Batch 85, Cost [10.045535], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 90, Cost [8.42337], Precision [0.01324503], Recall [0.02150538], F1_score[0.01639344]
Pass 0, Batch 95, Cost [7.5686626], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 100, Cost [8.956157], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 105, Cost [11.710886], Precision [0.00510204], Recall [0.00847458], F1_score[0.00636943]
Pass 0, Batch 110, Cost [14.177246], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 115, Cost [10.552631], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 120, Cost [9.670553], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 125, Cost [7.857345], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 130, Cost [7.83024], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 135, Cost [9.3462925], Precision [0.00617284], Recall [0.01176471], F1_score[0.00809717]
Pass 0, Batch 140, Cost [10.518989], Precision [0.01169591], Recall [0.02247191], F1_score[0.01538461]
Pass 0, Batch 145, Cost [11.205475], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 150, Cost [12.812588], Precision [0.00487805], Recall [0.00934579], F1_score[0.00641026]
Pass 0, Batch 155, Cost [10.272365], Precision [0.01796407], Recall [0.03125], F1_score[0.02281369]
Pass 0, Batch 160, Cost [13.389257], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 165, Cost [9.699731], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 170, Cost [10.965213], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 175, Cost [13.268848], Precision [0.00495049], Recall [0.00909091], F1_score[0.00641026]
Pass 0, Batch 180, Cost [14.692938], Precision [0.00434783], Recall [0.00757576], F1_score[0.00552486]
Pass 0, Batch 185, Cost [13.365563], Precision [0.00454545], Recall [0.00833333], F1_score[0.00588235]
Pass 0, Batch 190, Cost [10.744368], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 195, Cost [11.160604], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 200, Cost [8.575914], Precision [0.00666667], Recall [0.01333333], F1_score[0.00888889]
Pass 0, Batch 205, Cost [11.228489], Precision [0.02030457], Recall [0.03478261], F1_score[0.02564103]
Pass 0, Batch 210, Cost [12.056495], Precision [0.00990099], Recall [0.01818182], F1_score[0.01282051]
Pass 0, Batch 215, Cost [9.67844], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 220, Cost [10.080536], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 225, Cost [9.608576], Precision [0.], Recall [0.], F1_score[0.]
Pass 0, Batch 230, Cost [9.265404], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:0 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:0 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 1, Batch 235, Cost [13.509312], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 240, Cost [11.595556], Precision [0.00462963], Recall [0.00806452], F1_score[0.00588235]
Pass 1, Batch 245, Cost [11.816244], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 250, Cost [12.340549], Precision [0.0045045], Recall [0.00763359], F1_score[0.00566572]
Pass 1, Batch 255, Cost [9.812289], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 260, Cost [11.960119], Precision [0.00507614], Recall [0.00847458], F1_score[0.00634921]
Pass 1, Batch 265, Cost [11.166084], Precision [0.00546448], Recall [0.01086957], F1_score[0.00727273]
Pass 1, Batch 270, Cost [15.398319], Precision [0.00384615], Recall [0.0070922], F1_score[0.00498753]
Pass 1, Batch 275, Cost [12.50708], Precision [0.00429185], Recall [0.00793651], F1_score[0.00557103]
Pass 1, Batch 280, Cost [10.101504], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 285, Cost [10.254634], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 290, Cost [10.485569], Precision [0.00555556], Recall [0.01041667], F1_score[0.00724638]
Pass 1, Batch 295, Cost [9.3289385], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 300, Cost [10.417467], Precision [0.01578947], Recall [0.02941176], F1_score[0.02054795]
Pass 1, Batch 305, Cost [10.35739], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 310, Cost [11.068229], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 315, Cost [9.442175], Precision [0.00537634], Recall [0.01052632], F1_score[0.00711744]
Pass 1, Batch 320, Cost [7.9595413], Precision [0.00689655], Recall [0.01075269], F1_score[0.00840336]
Pass 1, Batch 325, Cost [9.30862], Precision [0.00568182], Recall [0.00980392], F1_score[0.00719424]
Pass 1, Batch 330, Cost [10.836834], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 335, Cost [9.750983], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 340, Cost [8.744383], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 345, Cost [10.904871], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 350, Cost [10.971662], Precision [0.00537634], Recall [0.01], F1_score[0.00699301]
Pass 1, Batch 355, Cost [10.342468], Precision [0.00543478], Recall [0.00934579], F1_score[0.00687285]
Pass 1, Batch 360, Cost [10.778999], Precision [0.00564972], Recall [0.01010101], F1_score[0.00724638]
Pass 1, Batch 365, Cost [9.83998], Precision [0.00591716], Recall [0.01123596], F1_score[0.00775194]
Pass 1, Batch 370, Cost [11.773645], Precision [0.00518135], Recall [0.00943396], F1_score[0.00668896]
Pass 1, Batch 375, Cost [10.300829], Precision [0.00564972], Recall [0.01052632], F1_score[0.00735294]
Pass 1, Batch 380, Cost [10.149308], Precision [0.01176471], Recall [0.02020202], F1_score[0.01486989]
Pass 1, Batch 385, Cost [9.288663], Precision [0.01212121], Recall [0.02325581], F1_score[0.01593626]
Pass 1, Batch 390, Cost [12.00997], Precision [0.01010101], Recall [0.01724138], F1_score[0.01273885]
Pass 1, Batch 395, Cost [11.053863], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 400, Cost [12.374212], Precision [0.00454545], Recall [0.00813008], F1_score[0.0058309]
Pass 1, Batch 405, Cost [10.118889], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 410, Cost [13.006912], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 415, Cost [11.744417], Precision [0.00502513], Recall [0.00917431], F1_score[0.00649351]
Pass 1, Batch 420, Cost [11.409122], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 425, Cost [10.597446], Precision [0.01630435], Recall [0.02830189], F1_score[0.02068966]
Pass 1, Batch 430, Cost [13.858181], Precision [0.00444444], Recall [0.00769231], F1_score[0.0056338]
Pass 1, Batch 435, Cost [8.84964], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 440, Cost [12.24767], Precision [0.00458716], Recall [0.00757576], F1_score[0.00571429]
Pass 1, Batch 445, Cost [11.055971], Precision [0.00520833], Recall [0.01], F1_score[0.00684932]
Pass 1, Batch 450, Cost [10.389307], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 455, Cost [9.007514], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 460, Cost [8.996408], Precision [0.], Recall [0.], F1_score[0.]
Pass 1, Batch 465, Cost [9.076282], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:1 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:1 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 2, Batch 470, Cost [11.9652], Precision [0.00925926], Recall [0.016], F1_score[0.01173021]
Pass 2, Batch 475, Cost [9.068219], Precision [0.01754386], Recall [0.02884615], F1_score[0.02181818]
Pass 2, Batch 480, Cost [10.960011], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 485, Cost [10.494853], Precision [0.00512821], Recall [0.00934579], F1_score[0.00662252]
Pass 2, Batch 490, Cost [9.99587], Precision [0.01123596], Recall [0.02061856], F1_score[0.01454545]
Pass 2, Batch 495, Cost [9.689588], Precision [0.00606061], Recall [0.01204819], F1_score[0.00806452]
Pass 2, Batch 500, Cost [9.258001], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 505, Cost [12.637484], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 510, Cost [10.6772], Precision [0.00471698], Recall [0.00869565], F1_score[0.00611621]
Pass 2, Batch 515, Cost [9.166649], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 520, Cost [9.239321], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 525, Cost [11.259853], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 530, Cost [10.142515], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 535, Cost [9.55883], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 540, Cost [11.704132], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 545, Cost [10.3053], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 550, Cost [9.167595], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 555, Cost [7.310463], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 560, Cost [9.655105], Precision [0.00555556], Recall [0.01052632], F1_score[0.00727273]
Pass 2, Batch 565, Cost [8.814632], Precision [0.00628931], Recall [0.01041667], F1_score[0.00784314]
Pass 2, Batch 570, Cost [9.91366], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 575, Cost [9.739231], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 580, Cost [12.302845], Precision [0.00480769], Recall [0.00869565], F1_score[0.00619195]
Pass 2, Batch 585, Cost [11.403333], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 590, Cost [9.692117], Precision [0.00609756], Recall [0.01086957], F1_score[0.0078125]
Pass 2, Batch 595, Cost [9.050299], Precision [0.00621118], Recall [0.01282051], F1_score[0.0083682]
Pass 2, Batch 600, Cost [8.974524], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 605, Cost [9.92695], Precision [0.00588235], Recall [0.01010101], F1_score[0.00743494]
Pass 2, Batch 610, Cost [10.165554], Precision [0.01744186], Recall [0.03191489], F1_score[0.02255639]
Pass 2, Batch 615, Cost [11.765322], Precision [0.00505051], Recall [0.00934579], F1_score[0.00655738]
Pass 2, Batch 620, Cost [9.99355], Precision [0.01176471], Recall [0.02222222], F1_score[0.01538462]
Pass 2, Batch 625, Cost [10.319143], Precision [0.00571429], Recall [0.00961538], F1_score[0.00716846]
Pass 2, Batch 630, Cost [12.13977], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 635, Cost [10.45984], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 640, Cost [9.859671], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 645, Cost [9.157812], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 650, Cost [13.211441], Precision [0.00471698], Recall [0.00769231], F1_score[0.00584795]
Pass 2, Batch 655, Cost [10.323876], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 660, Cost [9.598169], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 665, Cost [10.697342], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 670, Cost [9.003347], Precision [0.00636943], Recall [0.01190476], F1_score[0.00829876]
Pass 2, Batch 675, Cost [11.018316], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 680, Cost [12.877915], Precision [0.00904977], Recall [0.01492537], F1_score[0.01126761]
Pass 2, Batch 685, Cost [11.635298], Precision [0.0097561], Recall [0.01801802], F1_score[0.01265823]
Pass 2, Batch 690, Cost [9.0934515], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 695, Cost [10.783751], Precision [0.], Recall [0.], F1_score[0.]
Pass 2, Batch 700, Cost [8.2772455], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:2 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:2 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 3, Batch 705, Cost [13.28318], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 710, Cost [11.239175], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 715, Cost [14.216343], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 720, Cost [11.135891], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 725, Cost [9.092704], Precision [0.00609756], Recall [0.01204819], F1_score[0.00809717]
Pass 3, Batch 730, Cost [7.727823], Precision [0.02222222], Recall [0.04166667], F1_score[0.02898551]
Pass 3, Batch 735, Cost [12.597], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 740, Cost [10.033331], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 745, Cost [13.795351], Precision [0.00395257], Recall [0.00680272], F1_score[0.005]
Pass 3, Batch 750, Cost [12.67617], Precision [0.00434783], Recall [0.00793651], F1_score[0.00561798]
Pass 3, Batch 755, Cost [9.756472], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 760, Cost [9.883647], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 765, Cost [11.349789], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 770, Cost [8.660953], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 775, Cost [10.617964], Precision [0.01587302], Recall [0.02830189], F1_score[0.02033898]
Pass 3, Batch 780, Cost [9.488722], Precision [0.01092896], Recall [0.02], F1_score[0.01413427]
Pass 3, Batch 785, Cost [8.401545], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 790, Cost [6.886855], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 795, Cost [10.344155], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 800, Cost [8.938346], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 805, Cost [11.559656], Precision [0.00510204], Recall [0.00909091], F1_score[0.00653595]
Pass 3, Batch 810, Cost [10.438453], Precision [0.00549451], Recall [0.00952381], F1_score[0.00696864]
Pass 3, Batch 815, Cost [11.598852], Precision [0.005], Recall [0.00884956], F1_score[0.00638978]
Pass 3, Batch 820, Cost [10.204964], Precision [0.00571429], Recall [0.00980392], F1_score[0.00722022]
Pass 3, Batch 825, Cost [8.262454], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 830, Cost [8.768539], Precision [0.01282051], Recall [0.02325581], F1_score[0.01652892]
Pass 3, Batch 835, Cost [12.12817], Precision [0.01492537], Recall [0.02803738], F1_score[0.01948052]
Pass 3, Batch 840, Cost [6.9376574], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 845, Cost [10.941277], Precision [0.01015228], Recall [0.02], F1_score[0.01346801]
Pass 3, Batch 850, Cost [11.174824], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 855, Cost [9.846782], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 860, Cost [14.745783], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 865, Cost [10.849613], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 870, Cost [8.313696], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 875, Cost [10.128811], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 880, Cost [11.514126], Precision [0.00502513], Recall [0.00900901], F1_score[0.00645161]
Pass 3, Batch 885, Cost [10.229836], Precision [0.01086957], Recall [0.0212766], F1_score[0.01438849]
Pass 3, Batch 890, Cost [12.364712], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 895, Cost [8.9465275], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 900, Cost [11.518774], Precision [0.01041667], Recall [0.01818182], F1_score[0.01324503]
Pass 3, Batch 905, Cost [11.277731], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 910, Cost [9.82238], Precision [0.00571429], Recall [0.01020408], F1_score[0.00732601]
Pass 3, Batch 915, Cost [9.028438], Precision [0.01204819], Recall [0.02105263], F1_score[0.01532567]
Pass 3, Batch 920, Cost [13.874937], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 925, Cost [10.7453985], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 930, Cost [9.621041], Precision [0.], Recall [0.], F1_score[0.]
Pass 3, Batch 935, Cost [10.186861], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:3 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:3 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 4, Batch 940, Cost [13.846178], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 945, Cost [11.561779], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 950, Cost [11.39793], Precision [0.00921659], Recall [0.01666667], F1_score[0.01186944]
Pass 4, Batch 955, Cost [8.025307], Precision [0.00657895], Recall [0.0125], F1_score[0.00862069]
Pass 4, Batch 960, Cost [11.286711], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 965, Cost [10.771591], Precision [0.00568182], Recall [0.00970874], F1_score[0.00716846]
Pass 4, Batch 970, Cost [10.356476], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 975, Cost [9.6736965], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 980, Cost [10.419032], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 985, Cost [10.425215], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 990, Cost [10.604435], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 995, Cost [10.788269], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1000, Cost [10.5347805], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1005, Cost [10.040555], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1010, Cost [11.318422], Precision [0.00985222], Recall [0.01724138], F1_score[0.01253918]
Pass 4, Batch 1015, Cost [8.086926], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1020, Cost [7.0144615], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1025, Cost [8.985412], Precision [0.00613497], Recall [0.01052632], F1_score[0.00775194]
Pass 4, Batch 1030, Cost [8.147175], Precision [0.00621118], Recall [0.01149425], F1_score[0.00806452]
Pass 4, Batch 1035, Cost [10.751987], Precision [0.00558659], Recall [0.00970874], F1_score[0.0070922]
Pass 4, Batch 1040, Cost [9.147621], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1045, Cost [9.990934], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1050, Cost [13.060579], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1055, Cost [10.138929], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1060, Cost [10.577554], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1065, Cost [8.41081], Precision [0.01360544], Recall [0.02666667], F1_score[0.01801802]
Pass 4, Batch 1070, Cost [10.509108], Precision [0.00540541], Recall [0.01075269], F1_score[0.00719425]
Pass 4, Batch 1075, Cost [10.055386], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1080, Cost [9.944126], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1085, Cost [11.157241], Precision [0.01052632], Recall [0.01923077], F1_score[0.01360544]
Pass 4, Batch 1090, Cost [11.985209], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1095, Cost [10.315283], Precision [0.00546448], Recall [0.01052632], F1_score[0.00719424]
Pass 4, Batch 1100, Cost [12.898649], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1105, Cost [8.537144], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1110, Cost [12.262388], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1115, Cost [12.533052], Precision [0.00456621], Recall [0.00847458], F1_score[0.00593472]
Pass 4, Batch 1120, Cost [13.88779], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1125, Cost [10.996825], Precision [0.0104712], Recall [0.01801802], F1_score[0.01324503]
Pass 4, Batch 1130, Cost [9.966557], Precision [0.00564972], Recall [0.01086957], F1_score[0.00743495]
Pass 4, Batch 1135, Cost [9.346262], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1140, Cost [10.831074], Precision [0.00549451], Recall [0.00943396], F1_score[0.00694444]
Pass 4, Batch 1145, Cost [13.835433], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1150, Cost [11.91532], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1155, Cost [10.129173], Precision [0.00549451], Recall [0.00970874], F1_score[0.00701754]
Pass 4, Batch 1160, Cost [11.167259], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1165, Cost [9.737603], Precision [0.], Recall [0.], F1_score[0.]
Pass 4, Batch 1170, Cost [8.366184], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:4 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:4 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 5, Batch 1175, Cost [11.041561], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1180, Cost [12.241812], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1185, Cost [11.4049225], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1190, Cost [11.977769], Precision [0.00473934], Recall [0.00826446], F1_score[0.0060241]
Pass 5, Batch 1195, Cost [10.44385], Precision [0.00558659], Recall [0.01052632], F1_score[0.00729927]
Pass 5, Batch 1200, Cost [11.101124], Precision [0.01621622], Recall [0.03191489], F1_score[0.02150538]
Pass 5, Batch 1205, Cost [10.188527], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1210, Cost [10.966033], Precision [0.01442308], Recall [0.02586207], F1_score[0.01851852]
Pass 5, Batch 1215, Cost [13.67616], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1220, Cost [8.491711], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1225, Cost [10.490805], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1230, Cost [10.601091], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1235, Cost [11.14834], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1240, Cost [8.118796], Precision [0.00662252], Recall [0.01219512], F1_score[0.00858369]
Pass 5, Batch 1245, Cost [10.241171], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1250, Cost [8.454705], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1255, Cost [10.059076], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1260, Cost [7.942912], Precision [0.00662252], Recall [0.01190476], F1_score[0.00851064]
Pass 5, Batch 1265, Cost [5.9086776], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1270, Cost [8.383711], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1275, Cost [11.985157], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1280, Cost [9.21997], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1285, Cost [8.84175], Precision [0.01273885], Recall [0.02380952], F1_score[0.01659751]
Pass 5, Batch 1290, Cost [11.24242], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1295, Cost [8.247267], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1300, Cost [9.450726], Precision [0.00588235], Recall [0.01136364], F1_score[0.00775194]
Pass 5, Batch 1305, Cost [9.0633335], Precision [0.0060241], Recall [0.01234568], F1_score[0.00809717]
Pass 5, Batch 1310, Cost [9.53372], Precision [0.00591716], Recall [0.01204819], F1_score[0.00793651]
Pass 5, Batch 1315, Cost [12.099428], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1320, Cost [12.305136], Precision [0.00483092], Recall [0.00909091], F1_score[0.00630915]
Pass 5, Batch 1325, Cost [11.239608], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1330, Cost [9.607033], Precision [0.00581395], Recall [0.01149425], F1_score[0.00772201]
Pass 5, Batch 1335, Cost [11.170284], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1340, Cost [13.321592], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1345, Cost [10.548465], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1350, Cost [12.186888], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1355, Cost [11.600901], Precision [0.00515464], Recall [0.00892857], F1_score[0.00653595]
Pass 5, Batch 1360, Cost [12.380611], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1365, Cost [9.562592], Precision [0.00591716], Recall [0.01098901], F1_score[0.00769231]
Pass 5, Batch 1370, Cost [11.636732], Precision [0.00487805], Recall [0.00934579], F1_score[0.00641026]
Pass 5, Batch 1375, Cost [10.462498], Precision [0.00564972], Recall [0.01041667], F1_score[0.00732601]
Pass 5, Batch 1380, Cost [10.526], Precision [0.00555556], Recall [0.00925926], F1_score[0.00694444]
Pass 5, Batch 1385, Cost [11.847362], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1390, Cost [10.845037], Precision [0.01030928], Recall [0.02], F1_score[0.01360544]
Pass 5, Batch 1395, Cost [8.954849], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1400, Cost [8.722395], Precision [0.], Recall [0.], F1_score[0.]
Pass 5, Batch 1405, Cost [10.790264], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:5 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:5 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 6, Batch 1410, Cost [11.152528], Precision [0.00490196], Recall [0.00892857], F1_score[0.00632911]
Pass 6, Batch 1415, Cost [8.313165], Precision [0.01242236], Recall [0.02173913], F1_score[0.01581028]
Pass 6, Batch 1420, Cost [12.031639], Precision [0.00460829], Recall [0.00854701], F1_score[0.00598802]
Pass 6, Batch 1425, Cost [10.481368], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1430, Cost [10.663322], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1435, Cost [8.864544], Precision [0.01923077], Recall [0.03571429], F1_score[0.025]
Pass 6, Batch 1440, Cost [11.589552], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1445, Cost [11.640393], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1450, Cost [10.078947], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1455, Cost [11.224367], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1460, Cost [9.751972], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1465, Cost [9.933728], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1470, Cost [10.685818], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1475, Cost [9.773567], Precision [0.00578035], Recall [0.00943396], F1_score[0.00716846]
Pass 6, Batch 1480, Cost [9.881399], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1485, Cost [8.412672], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1490, Cost [7.7949753], Precision [0.00653595], Recall [0.01219512], F1_score[0.00851064]
Pass 6, Batch 1495, Cost [7.908819], Precision [0.00666667], Recall [0.01190476], F1_score[0.00854701]
Pass 6, Batch 1500, Cost [9.578828], Precision [0.00558659], Recall [0.01030928], F1_score[0.00724638]
Pass 6, Batch 1505, Cost [11.88393], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1510, Cost [10.177963], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1515, Cost [9.847863], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1520, Cost [10.159439], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1525, Cost [11.270657], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1530, Cost [9.884243], Precision [0.00571429], Recall [0.01052632], F1_score[0.00740741]
Pass 6, Batch 1535, Cost [9.389706], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1540, Cost [9.78606], Precision [0.00568182], Recall [0.01086957], F1_score[0.00746269]
Pass 6, Batch 1545, Cost [8.526768], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1550, Cost [9.257854], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1555, Cost [11.474022], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1560, Cost [11.636324], Precision [0.0201005], Recall [0.03508772], F1_score[0.0255591]
Pass 6, Batch 1565, Cost [13.024582], Precision [0.01395349], Recall [0.02459016], F1_score[0.01780415]
Pass 6, Batch 1570, Cost [9.32432], Precision [0.00543478], Recall [0.01020408], F1_score[0.0070922]
Pass 6, Batch 1575, Cost [10.273905], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1580, Cost [10.325066], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1585, Cost [14.160984], Precision [0.01666667], Recall [0.02877698], F1_score[0.02110818]
Pass 6, Batch 1590, Cost [10.653889], Precision [0.00534759], Recall [0.00909091], F1_score[0.00673401]
Pass 6, Batch 1595, Cost [10.397406], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1600, Cost [10.22167], Precision [0.00555556], Recall [0.00990099], F1_score[0.00711744]
Pass 6, Batch 1605, Cost [10.78546], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1610, Cost [10.930055], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1615, Cost [11.37484], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1620, Cost [11.881403], Precision [0.00961538], Recall [0.01769911], F1_score[0.01246106]
Pass 6, Batch 1625, Cost [14.390711], Precision [0.00409836], Recall [0.00775194], F1_score[0.00536193]
Pass 6, Batch 1630, Cost [10.2033415], Precision [0.], Recall [0.], F1_score[0.]
Pass 6, Batch 1635, Cost [9.278283], Precision [0.01226994], Recall [0.02380952], F1_score[0.01619433]
Pass 6, Batch 1640, Cost [11.190258], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:6 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:6 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 7, Batch 1645, Cost [12.335358], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1650, Cost [11.201028], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1655, Cost [10.255384], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1660, Cost [14.921765], Precision [0.00775194], Recall [0.01290323], F1_score[0.00968523]
Pass 7, Batch 1665, Cost [10.279418], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1670, Cost [8.946955], Precision [0.00645161], Recall [0.01234568], F1_score[0.00847458]
Pass 7, Batch 1675, Cost [10.893894], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1680, Cost [10.739268], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1685, Cost [10.63078], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1690, Cost [7.9769197], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1695, Cost [11.161233], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1700, Cost [8.82446], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1705, Cost [10.214317], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1710, Cost [9.719672], Precision [0.01685393], Recall [0.02777778], F1_score[0.02097902]
Pass 7, Batch 1715, Cost [8.548204], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1720, Cost [9.781002], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1725, Cost [8.79499], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1730, Cost [7.324523], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1735, Cost [9.966383], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1740, Cost [9.610244], Precision [0.0060241], Recall [0.01075269], F1_score[0.00772201]
Pass 7, Batch 1745, Cost [8.602386], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1750, Cost [11.389411], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1755, Cost [8.291462], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1760, Cost [11.912516], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1765, Cost [10.202487], Precision [0.01129944], Recall [0.02061856], F1_score[0.01459854]
Pass 7, Batch 1770, Cost [9.56878], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1775, Cost [11.057108], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1780, Cost [8.635144], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1785, Cost [10.090269], Precision [0.00568182], Recall [0.00990099], F1_score[0.00722022]
Pass 7, Batch 1790, Cost [11.730611], Precision [0.00502513], Recall [0.00869565], F1_score[0.00636943]
Pass 7, Batch 1795, Cost [10.484595], Precision [0.01685393], Recall [0.02941176], F1_score[0.02142857]
Pass 7, Batch 1800, Cost [9.790392], Precision [0.01176471], Recall [0.0212766], F1_score[0.01515151]
Pass 7, Batch 1805, Cost [13.801662], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1810, Cost [11.802573], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1815, Cost [12.051205], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1820, Cost [10.157972], Precision [0.00558659], Recall [0.00980392], F1_score[0.00711744]
Pass 7, Batch 1825, Cost [15.527349], Precision [0.00403226], Recall [0.00793651], F1_score[0.00534759]
Pass 7, Batch 1830, Cost [9.687021], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1835, Cost [9.954527], Precision [0.00578035], Recall [0.01075269], F1_score[0.0075188]
Pass 7, Batch 1840, Cost [11.048284], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1845, Cost [10.900036], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1850, Cost [10.082373], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1855, Cost [11.696243], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1860, Cost [11.695505], Precision [0.00980392], Recall [0.01904762], F1_score[0.01294498]
Pass 7, Batch 1865, Cost [10.855569], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1870, Cost [9.841898], Precision [0.], Recall [0.], F1_score[0.]
Pass 7, Batch 1875, Cost [11.140015], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:7 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:7 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 8, Batch 1880, Cost [13.099609], Precision [0.00429185], Recall [0.00757576], F1_score[0.00547945]
Pass 8, Batch 1885, Cost [11.392749], Precision [0.00469484], Recall [0.00854701], F1_score[0.00606061]
Pass 8, Batch 1890, Cost [11.346893], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1895, Cost [11.951733], Precision [0.00473934], Recall [0.00877193], F1_score[0.00615385]
Pass 8, Batch 1900, Cost [10.085866], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1905, Cost [10.4614725], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1910, Cost [10.081989], Precision [0.01129944], Recall [0.02197802], F1_score[0.01492537]
Pass 8, Batch 1915, Cost [9.24057], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1920, Cost [9.002874], Precision [0.00552486], Recall [0.01], F1_score[0.00711744]
Pass 8, Batch 1925, Cost [12.000004], Precision [0.00904977], Recall [0.01639344], F1_score[0.01166181]
Pass 8, Batch 1930, Cost [10.481496], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1935, Cost [11.787598], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1940, Cost [11.677275], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1945, Cost [10.839789], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1950, Cost [11.116947], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1955, Cost [7.454971], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1960, Cost [9.185146], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1965, Cost [6.863433], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1970, Cost [8.079624], Precision [0.00641026], Recall [0.01204819], F1_score[0.0083682]
Pass 8, Batch 1975, Cost [9.52857], Precision [0.00561798], Recall [0.01], F1_score[0.00719424]
Pass 8, Batch 1980, Cost [11.453621], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1985, Cost [9.294389], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1990, Cost [10.0477915], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 1995, Cost [9.571739], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2000, Cost [8.288403], Precision [0.00689655], Recall [0.01282051], F1_score[0.00896861]
Pass 8, Batch 2005, Cost [9.522011], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2010, Cost [8.288306], Precision [0.02564103], Recall [0.05194805], F1_score[0.03433476]
Pass 8, Batch 2015, Cost [7.825246], Precision [0.00735294], Recall [0.01234568], F1_score[0.00921659]
Pass 8, Batch 2020, Cost [8.571195], Precision [0.00645161], Recall [0.01136364], F1_score[0.00823045]
Pass 8, Batch 2025, Cost [11.381531], Precision [0.01015228], Recall [0.01886792], F1_score[0.01320132]
Pass 8, Batch 2030, Cost [9.611165], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2035, Cost [11.008965], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2040, Cost [14.492405], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2045, Cost [8.71206], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2050, Cost [8.991323], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2055, Cost [11.793884], Precision [0.00492611], Recall [0.00854701], F1_score[0.00625]
Pass 8, Batch 2060, Cost [12.2993965], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2065, Cost [9.755096], Precision [0.00595238], Recall [0.0106383], F1_score[0.00763359]
Pass 8, Batch 2070, Cost [11.366906], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2075, Cost [8.685125], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2080, Cost [10.692078], Precision [0.01117318], Recall [0.02040816], F1_score[0.01444043]
Pass 8, Batch 2085, Cost [9.966043], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2090, Cost [11.315393], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2095, Cost [8.561471], Precision [0.01910828], Recall [0.03658536], F1_score[0.0251046]
Pass 8, Batch 2100, Cost [13.485471], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2105, Cost [9.318536], Precision [0.], Recall [0.], F1_score[0.]
Pass 8, Batch 2110, Cost [10.611189], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:8 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:8 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 9, Batch 2115, Cost [13.742161], Precision [0.00413223], Recall [0.00724638], F1_score[0.00526316]
Pass 9, Batch 2120, Cost [10.771215], Precision [0.00487805], Recall [0.00862069], F1_score[0.00623053]
Pass 9, Batch 2125, Cost [10.388924], Precision [0.01010101], Recall [0.01652892], F1_score[0.01253918]
Pass 9, Batch 2130, Cost [11.493747], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2135, Cost [9.8966465], Precision [0.00606061], Recall [0.01162791], F1_score[0.00796813]
Pass 9, Batch 2140, Cost [9.0535145], Precision [0.01273885], Recall [0.02222222], F1_score[0.01619433]
Pass 9, Batch 2145, Cost [11.18591], Precision [0.00529101], Recall [0.00925926], F1_score[0.00673401]
Pass 9, Batch 2150, Cost [15.139807], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2155, Cost [10.113347], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2160, Cost [10.036617], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2165, Cost [11.523783], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2170, Cost [10.189686], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2175, Cost [10.802045], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2180, Cost [8.275984], Precision [0.00636943], Recall [0.01136364], F1_score[0.00816326]
Pass 9, Batch 2185, Cost [7.773933], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2190, Cost [9.6294365], Precision [0.00584795], Recall [0.01075269], F1_score[0.00757576]
Pass 9, Batch 2195, Cost [7.912956], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2200, Cost [8.065897], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2205, Cost [8.79447], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2210, Cost [9.903881], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2215, Cost [9.272915], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2220, Cost [10.356699], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2225, Cost [8.483042], Precision [0.01315789], Recall [0.02352941], F1_score[0.01687764]
Pass 9, Batch 2230, Cost [9.416149], Precision [0.00595238], Recall [0.01123596], F1_score[0.0077821]
Pass 9, Batch 2235, Cost [11.478395], Precision [0.01538462], Recall [0.02521008], F1_score[0.01910828]
Pass 9, Batch 2240, Cost [8.20331], Precision [0.01935484], Recall [0.03658536], F1_score[0.02531645]
Pass 9, Batch 2245, Cost [9.7143955], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2250, Cost [8.235173], Precision [0.00699301], Recall [0.01219512], F1_score[0.00888889]
Pass 9, Batch 2255, Cost [9.185036], Precision [0.02409638], Recall [0.03809524], F1_score[0.0295203]
Pass 9, Batch 2260, Cost [12.54808], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2265, Cost [14.670525], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2270, Cost [11.808105], Precision [0.01369863], Recall [0.02352941], F1_score[0.01731602]
Pass 9, Batch 2275, Cost [11.225808], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2280, Cost [9.862212], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2285, Cost [10.289928], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2290, Cost [11.498098], Precision [0.01036269], Recall [0.01869159], F1_score[0.01333333]
Pass 9, Batch 2295, Cost [12.697515], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2300, Cost [12.303686], Precision [0.01435407], Recall [0.02521008], F1_score[0.01829268]
Pass 9, Batch 2305, Cost [9.951998], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2310, Cost [11.601386], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2315, Cost [8.783621], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2320, Cost [12.622863], Precision [0.0046729], Recall [0.00813008], F1_score[0.00593472]
Pass 9, Batch 2325, Cost [11.959503], Precision [0.00990099], Recall [0.01694915], F1_score[0.0125]
Pass 9, Batch 2330, Cost [12.253855], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2335, Cost [9.4297085], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2340, Cost [8.387188], Precision [0.], Recall [0.], F1_score[0.]
Pass 9, Batch 2345, Cost [7.2221627], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:9 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:9 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 10, Batch 2350, Cost [10.899326], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2355, Cost [11.332231], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2360, Cost [12.644157], Precision [0.00440529], Recall [0.00775194], F1_score[0.00561798]
Pass 10, Batch 2365, Cost [11.791189], Precision [0.00943396], Recall [0.01666667], F1_score[0.01204819]
Pass 10, Batch 2370, Cost [9.490168], Precision [0.00625], Recall [0.01219512], F1_score[0.00826446]
Pass 10, Batch 2375, Cost [11.137857], Precision [0.00531915], Recall [0.01010101], F1_score[0.00696864]
Pass 10, Batch 2380, Cost [9.995443], Precision [0.00564972], Recall [0.01052632], F1_score[0.00735294]
Pass 10, Batch 2385, Cost [8.78326], Precision [0.00568182], Recall [0.0106383], F1_score[0.00740741]
Pass 10, Batch 2390, Cost [13.168674], Precision [0.00392157], Recall [0.00769231], F1_score[0.00519481]
Pass 10, Batch 2395, Cost [10.012368], Precision [0.00502513], Recall [0.00961538], F1_score[0.00660066]
Pass 10, Batch 2400, Cost [10.876614], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2405, Cost [11.0149975], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2410, Cost [9.604336], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2415, Cost [9.121879], Precision [0.00581395], Recall [0.01052632], F1_score[0.00749064]
Pass 10, Batch 2420, Cost [9.014683], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2425, Cost [9.550129], Precision [0.01129944], Recall [0.01923077], F1_score[0.01423488]
Pass 10, Batch 2430, Cost [7.3561687], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2435, Cost [10.011591], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2440, Cost [8.841093], Precision [0.00578035], Recall [0.0106383], F1_score[0.00749064]
Pass 10, Batch 2445, Cost [10.207508], Precision [0.01694915], Recall [0.02608696], F1_score[0.02054794]
Pass 10, Batch 2450, Cost [12.016129], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2455, Cost [7.154819], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2460, Cost [11.2059], Precision [0.01538462], Recall [0.02803738], F1_score[0.01986755]
Pass 10, Batch 2465, Cost [9.090263], Precision [0.00636943], Recall [0.01052632], F1_score[0.00793651]
Pass 10, Batch 2470, Cost [10.595709], Precision [0.00555556], Recall [0.01041667], F1_score[0.00724638]
Pass 10, Batch 2475, Cost [9.658165], Precision [0.01183432], Recall [0.02197802], F1_score[0.01538461]
Pass 10, Batch 2480, Cost [8.712784], Precision [0.01298701], Recall [0.02564103], F1_score[0.01724138]
Pass 10, Batch 2485, Cost [11.345134], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2490, Cost [9.980292], Precision [0.01149425], Recall [0.02150538], F1_score[0.01498127]
Pass 10, Batch 2495, Cost [11.714226], Precision [0.01456311], Recall [0.02752294], F1_score[0.01904762]
Pass 10, Batch 2500, Cost [10.32056], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2505, Cost [9.405268], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2510, Cost [8.284669], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2515, Cost [9.659515], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2520, Cost [14.3370075], Precision [0.00409836], Recall [0.00729927], F1_score[0.00524934]
Pass 10, Batch 2525, Cost [12.813625], Precision [0.00458716], Recall [0.00813008], F1_score[0.0058651]
Pass 10, Batch 2530, Cost [13.02096], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2535, Cost [12.829119], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2540, Cost [11.830466], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2545, Cost [9.000849], Precision [0.00606061], Recall [0.01190476], F1_score[0.00803213]
Pass 10, Batch 2550, Cost [9.932688], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2555, Cost [11.5330925], Precision [0.0097561], Recall [0.01754386], F1_score[0.01253919]
Pass 10, Batch 2560, Cost [11.207427], Precision [0.01075269], Recall [0.02061856], F1_score[0.01413428]
Pass 10, Batch 2565, Cost [12.660173], Precision [0.00456621], Recall [0.00826446], F1_score[0.00588235]
Pass 10, Batch 2570, Cost [8.68731], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2575, Cost [7.8668017], Precision [0.], Recall [0.], F1_score[0.]
Pass 10, Batch 2580, Cost [10.3436985], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:10 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:10 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 11, Batch 2585, Cost [10.058949], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2590, Cost [10.743125], Precision [0.00502513], Recall [0.00900901], F1_score[0.00645161]
Pass 11, Batch 2595, Cost [10.770067], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2600, Cost [11.263709], Precision [0.00485437], Recall [0.00826446], F1_score[0.00611621]
Pass 11, Batch 2605, Cost [9.162853], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2610, Cost [9.409701], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2615, Cost [10.68007], Precision [0.01648352], Recall [0.02912621], F1_score[0.02105263]
Pass 11, Batch 2620, Cost [16.155693], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2625, Cost [10.850554], Precision [0.00469484], Recall [0.00900901], F1_score[0.00617284]
Pass 11, Batch 2630, Cost [11.856119], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2635, Cost [9.831504], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2640, Cost [10.436369], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2645, Cost [8.234526], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2650, Cost [8.927931], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2655, Cost [9.643173], Precision [0.01162791], Recall [0.01923077], F1_score[0.01449275]
Pass 11, Batch 2660, Cost [9.809753], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2665, Cost [8.235248], Precision [0.00621118], Recall [0.01098901], F1_score[0.00793651]
Pass 11, Batch 2670, Cost [7.264312], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2675, Cost [8.300825], Precision [0.00625], Recall [0.01098901], F1_score[0.00796813]
Pass 11, Batch 2680, Cost [10.00795], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2685, Cost [8.708464], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2690, Cost [11.798079], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2695, Cost [11.675827], Precision [0.01010101], Recall [0.01834862], F1_score[0.01302932]
Pass 11, Batch 2700, Cost [10.318949], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2705, Cost [10.544199], Precision [0.00549451], Recall [0.00990099], F1_score[0.00706714]
Pass 11, Batch 2710, Cost [11.428768], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2715, Cost [8.050183], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2720, Cost [7.0680923], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2725, Cost [11.092285], Precision [0.0104712], Recall [0.01851852], F1_score[0.01337793]
Pass 11, Batch 2730, Cost [9.514395], Precision [0.01796407], Recall [0.03092784], F1_score[0.02272727]
Pass 11, Batch 2735, Cost [9.019239], Precision [0.01807229], Recall [0.0326087], F1_score[0.02325581]
Pass 11, Batch 2740, Cost [10.55159], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2745, Cost [10.375629], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2750, Cost [9.180852], Precision [0.00555556], Recall [0.01086957], F1_score[0.00735294]
Pass 11, Batch 2755, Cost [9.733942], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2760, Cost [13.749515], Precision [0.00446429], Recall [0.00793651], F1_score[0.00571429]
Pass 11, Batch 2765, Cost [11.722522], Precision [0.00492611], Recall [0.00909091], F1_score[0.00638978]
Pass 11, Batch 2770, Cost [14.263981], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2775, Cost [11.832529], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2780, Cost [11.283646], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2785, Cost [9.003784], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2790, Cost [14.586069], Precision [0.00809717], Recall [0.01459854], F1_score[0.01041667]
Pass 11, Batch 2795, Cost [11.607786], Precision [0.01442308], Recall [0.02586207], F1_score[0.01851852]
Pass 11, Batch 2800, Cost [7.3466644], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2805, Cost [10.588541], Precision [0.], Recall [0.], F1_score[0.]
Pass 11, Batch 2810, Cost [10.451939], Precision [0.00531915], Recall [0.01086957], F1_score[0.00714286]
Pass 11, Batch 2815, Cost [10.031531], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:11 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:11 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 12, Batch 2820, Cost [10.061847], Precision [0.00518135], Recall [0.00943396], F1_score[0.00668896]
Pass 12, Batch 2825, Cost [10.584077], Precision [0.00505051], Recall [0.00925926], F1_score[0.00653595]
Pass 12, Batch 2830, Cost [11.586185], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2835, Cost [11.472711], Precision [0.00497512], Recall [0.00854701], F1_score[0.00628931]
Pass 12, Batch 2840, Cost [9.91707], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2845, Cost [9.741366], Precision [0.00588235], Recall [0.01098901], F1_score[0.00766284]
Pass 12, Batch 2850, Cost [12.074188], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2855, Cost [10.268036], Precision [0.01507538], Recall [0.02803738], F1_score[0.01960784]
Pass 12, Batch 2860, Cost [10.900683], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2865, Cost [9.314466], Precision [0.00552486], Recall [0.0106383], F1_score[0.00727273]
Pass 12, Batch 2870, Cost [11.253712], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2875, Cost [11.767067], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2880, Cost [9.533684], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2885, Cost [9.367525], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2890, Cost [10.377123], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2895, Cost [8.00802], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2900, Cost [7.6655855], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2905, Cost [7.949275], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2910, Cost [8.494289], Precision [0.00606061], Recall [0.01162791], F1_score[0.00796813]
Pass 12, Batch 2915, Cost [8.668179], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2920, Cost [9.717567], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2925, Cost [11.054684], Precision [0.00531915], Recall [0.00943396], F1_score[0.00680272]
Pass 12, Batch 2930, Cost [10.231768], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2935, Cost [9.032816], Precision [0.00606061], Recall [0.01075269], F1_score[0.00775194]
Pass 12, Batch 2940, Cost [11.0587635], Precision [0.00512821], Recall [0.00934579], F1_score[0.00662252]
Pass 12, Batch 2945, Cost [10.763751], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2950, Cost [8.634244], Precision [0.00641026], Recall [0.01123596], F1_score[0.00816326]
Pass 12, Batch 2955, Cost [10.171453], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2960, Cost [9.797794], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2965, Cost [8.819607], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2970, Cost [11.659115], Precision [0.00980392], Recall [0.01834862], F1_score[0.01277955]
Pass 12, Batch 2975, Cost [9.759317], Precision [0.00595238], Recall [0.01111111], F1_score[0.00775194]
Pass 12, Batch 2980, Cost [11.737886], Precision [0.00465116], Recall [0.00840336], F1_score[0.00598802]
Pass 12, Batch 2985, Cost [11.356867], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2990, Cost [12.167956], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 2995, Cost [10.221794], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 3000, Cost [10.498883], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 3005, Cost [11.887238], Precision [0.00492611], Recall [0.00869565], F1_score[0.00628931]
Pass 12, Batch 3010, Cost [10.960609], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 3015, Cost [9.478689], Precision [0.01234568], Recall [0.02150538], F1_score[0.01568627]
Pass 12, Batch 3020, Cost [8.556099], Precision [0.00636943], Recall [0.01162791], F1_score[0.00823045]
Pass 12, Batch 3025, Cost [9.155393], Precision [0.00606061], Recall [0.01162791], F1_score[0.00796813]
Pass 12, Batch 3030, Cost [14.261413], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 3035, Cost [13.046713], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 3040, Cost [9.895108], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 3045, Cost [9.538864], Precision [0.], Recall [0.], F1_score[0.]
Pass 12, Batch 3050, Cost [7.9040675], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:12 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:12 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 13, Batch 3055, Cost [11.020069], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3060, Cost [15.0310135], Precision [0.00787402], Recall [0.01298701], F1_score[0.00980392]
Pass 13, Batch 3065, Cost [11.852951], Precision [0.00452489], Recall [0.00833333], F1_score[0.0058651]
Pass 13, Batch 3070, Cost [12.059658], Precision [0.00478469], Recall [0.00787402], F1_score[0.00595238]
Pass 13, Batch 3075, Cost [9.489209], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3080, Cost [10.673921], Precision [0.01630435], Recall [0.03333334], F1_score[0.02189781]
Pass 13, Batch 3085, Cost [11.008818], Precision [0.00555556], Recall [0.00943396], F1_score[0.00699301]
Pass 13, Batch 3090, Cost [12.280433], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3095, Cost [11.4957905], Precision [0.00456621], Recall [0.00869565], F1_score[0.00598802]
Pass 13, Batch 3100, Cost [9.567425], Precision [0.00555556], Recall [0.00970874], F1_score[0.00706714]
Pass 13, Batch 3105, Cost [10.722905], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3110, Cost [10.718784], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3115, Cost [10.004508], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3120, Cost [12.613298], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3125, Cost [10.249716], Precision [0.00526316], Recall [0.00925926], F1_score[0.00671141]
Pass 13, Batch 3130, Cost [7.5076275], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3135, Cost [7.1308374], Precision [0.00671141], Recall [0.01219512], F1_score[0.00865801]
Pass 13, Batch 3140, Cost [9.560035], Precision [0.00549451], Recall [0.00934579], F1_score[0.00692042]
Pass 13, Batch 3145, Cost [8.811554], Precision [0.00595238], Recall [0.01123596], F1_score[0.0077821]
Pass 13, Batch 3150, Cost [8.636089], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3155, Cost [7.9632807], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3160, Cost [9.597858], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3165, Cost [9.381271], Precision [0.00595238], Recall [0.01020408], F1_score[0.0075188]
Pass 13, Batch 3170, Cost [12.51087], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3175, Cost [8.72015], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3180, Cost [9.785373], Precision [0.00581395], Recall [0.01041667], F1_score[0.00746269]
Pass 13, Batch 3185, Cost [6.719347], Precision [0.008], Recall [0.01408451], F1_score[0.01020408]
Pass 13, Batch 3190, Cost [10.041073], Precision [0.00568182], Recall [0.01], F1_score[0.00724638]
Pass 13, Batch 3195, Cost [9.607887], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3200, Cost [10.4674225], Precision [0.00537634], Recall [0.01111111], F1_score[0.00724638]
Pass 13, Batch 3205, Cost [9.397077], Precision [0.01818182], Recall [0.03333334], F1_score[0.02352941]
Pass 13, Batch 3210, Cost [9.473061], Precision [0.01744186], Recall [0.03092784], F1_score[0.02230483]
Pass 13, Batch 3215, Cost [9.03155], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3220, Cost [10.787967], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3225, Cost [8.119965], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3230, Cost [12.113217], Precision [0.01010101], Recall [0.01587302], F1_score[0.01234568]
Pass 13, Batch 3235, Cost [11.489939], Precision [0.02051282], Recall [0.03636364], F1_score[0.02622951]
Pass 13, Batch 3240, Cost [10.385188], Precision [0.01086957], Recall [0.01851852], F1_score[0.01369863]
Pass 13, Batch 3245, Cost [10.587038], Precision [0.00543478], Recall [0.01136364], F1_score[0.00735294]
Pass 13, Batch 3250, Cost [10.38149], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3255, Cost [7.5001736], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3260, Cost [14.100683], Precision [0.00420168], Recall [0.00704225], F1_score[0.00526316]
Pass 13, Batch 3265, Cost [9.004372], Precision [0.01212121], Recall [0.02173913], F1_score[0.0155642]
Pass 13, Batch 3270, Cost [11.288897], Precision [0.01005025], Recall [0.01904762], F1_score[0.0131579]
Pass 13, Batch 3275, Cost [10.465256], Precision [0.00520833], Recall [0.01020408], F1_score[0.00689655]
Pass 13, Batch 3280, Cost [8.718261], Precision [0.], Recall [0.], F1_score[0.]
Pass 13, Batch 3285, Cost [9.529816], Precision [0.00588235], Recall [0.01086957], F1_score[0.00763359]
[TrainSet] pass_id:13 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:13 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 14, Batch 3290, Cost [10.573397], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3295, Cost [13.097755], Precision [0.00881057], Recall [0.01639344], F1_score[0.01146132]
Pass 14, Batch 3300, Cost [10.505492], Precision [0.00505051], Recall [0.00884956], F1_score[0.00643087]
Pass 14, Batch 3305, Cost [9.66617], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3310, Cost [12.597126], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3315, Cost [10.7767725], Precision [0.00537634], Recall [0.01], F1_score[0.00699301]
Pass 14, Batch 3320, Cost [8.584322], Precision [0.00657895], Recall [0.01190476], F1_score[0.00847458]
Pass 14, Batch 3325, Cost [12.667643], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3330, Cost [9.446481], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3335, Cost [10.861639], Precision [0.01449275], Recall [0.02479339], F1_score[0.01829268]
Pass 14, Batch 3340, Cost [10.505123], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3345, Cost [11.057043], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3350, Cost [10.065198], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3355, Cost [9.729118], Precision [0.00568182], Recall [0.00952381], F1_score[0.00711744]
Pass 14, Batch 3360, Cost [10.475078], Precision [0.00537634], Recall [0.00917431], F1_score[0.00677966]
Pass 14, Batch 3365, Cost [11.020594], Precision [0.00990099], Recall [0.01754386], F1_score[0.01265823]
Pass 14, Batch 3370, Cost [6.675162], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3375, Cost [8.813284], Precision [0.00598802], Recall [0.01030928], F1_score[0.00757576]
Pass 14, Batch 3380, Cost [7.220763], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3385, Cost [9.2121315], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3390, Cost [9.225784], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3395, Cost [11.559796], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3400, Cost [14.142446], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3405, Cost [8.815464], Precision [0.00636943], Recall [0.01075269], F1_score[0.008]
Pass 14, Batch 3410, Cost [9.775738], Precision [0.00574713], Recall [0.00970874], F1_score[0.00722022]
Pass 14, Batch 3415, Cost [8.158994], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3420, Cost [9.362569], Precision [0.01190476], Recall [0.02298851], F1_score[0.01568628]
Pass 14, Batch 3425, Cost [7.74984], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3430, Cost [11.850433], Precision [0.01507538], Recall [0.02631579], F1_score[0.01916933]
Pass 14, Batch 3435, Cost [12.874448], Precision [0.00471698], Recall [0.00793651], F1_score[0.00591716]
Pass 14, Batch 3440, Cost [9.533344], Precision [0.00595238], Recall [0.01052632], F1_score[0.00760456]
Pass 14, Batch 3445, Cost [11.668045], Precision [0.02061856], Recall [0.03539823], F1_score[0.02605863]
Pass 14, Batch 3450, Cost [11.531875], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3455, Cost [9.186091], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3460, Cost [10.857897], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3465, Cost [9.378108], Precision [0.00609756], Recall [0.01086957], F1_score[0.0078125]
Pass 14, Batch 3470, Cost [11.901318], Precision [0.00497512], Recall [0.00833333], F1_score[0.00623053]
Pass 14, Batch 3475, Cost [13.287247], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3480, Cost [10.217303], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3485, Cost [9.784209], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3490, Cost [11.213238], Precision [0.01041667], Recall [0.02020202], F1_score[0.01374571]
Pass 14, Batch 3495, Cost [10.6008625], Precision [0.00534759], Recall [0.00909091], F1_score[0.00673401]
Pass 14, Batch 3500, Cost [10.823048], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3505, Cost [12.864387], Precision [0.00909091], Recall [0.01694915], F1_score[0.01183432]
Pass 14, Batch 3510, Cost [9.371408], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3515, Cost [8.723206], Precision [0.], Recall [0.], F1_score[0.]
Pass 14, Batch 3520, Cost [11.623202], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:14 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:14 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 15, Batch 3525, Cost [11.504852], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3530, Cost [12.529678], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3535, Cost [9.286154], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3540, Cost [10.944069], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3545, Cost [11.610029], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3550, Cost [9.345307], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3555, Cost [9.183006], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3560, Cost [10.30882], Precision [0.00497512], Recall [0.00943396], F1_score[0.00651466]
Pass 15, Batch 3565, Cost [12.311661], Precision [0.00438596], Recall [0.00833333], F1_score[0.00574713]
Pass 15, Batch 3570, Cost [10.018339], Precision [0.00485437], Recall [0.00952381], F1_score[0.00643087]
Pass 15, Batch 3575, Cost [10.514396], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3580, Cost [10.067129], Precision [0.00531915], Recall [0.00990099], F1_score[0.00692041]
Pass 15, Batch 3585, Cost [10.103156], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3590, Cost [8.843077], Precision [0.00591716], Recall [0.01041667], F1_score[0.00754717]
Pass 15, Batch 3595, Cost [9.326626], Precision [0.00568182], Recall [0.01052632], F1_score[0.00738007]
Pass 15, Batch 3600, Cost [8.909363], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3605, Cost [8.420805], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3610, Cost [8.279583], Precision [0.00609756], Recall [0.01075269], F1_score[0.0077821]
Pass 15, Batch 3615, Cost [7.8798056], Precision [0.01197605], Recall [0.02272727], F1_score[0.01568628]
Pass 15, Batch 3620, Cost [8.492712], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3625, Cost [10.360659], Precision [0.01098901], Recall [0.01960784], F1_score[0.01408451]
Pass 15, Batch 3630, Cost [8.32242], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3635, Cost [9.282101], Precision [0.00613497], Recall [0.01075269], F1_score[0.0078125]
Pass 15, Batch 3640, Cost [8.166041], Precision [0.0125], Recall [0.02247191], F1_score[0.01606426]
Pass 15, Batch 3645, Cost [8.852354], Precision [0.00632911], Recall [0.0106383], F1_score[0.00793651]
Pass 15, Batch 3650, Cost [9.463493], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3655, Cost [10.603962], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3660, Cost [9.172234], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3665, Cost [9.350664], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3670, Cost [12.422087], Precision [0.01442308], Recall [0.02608696], F1_score[0.01857585]
Pass 15, Batch 3675, Cost [10.274181], Precision [0.00584795], Recall [0.01075269], F1_score[0.00757576]
Pass 15, Batch 3680, Cost [8.694584], Precision [0.00632911], Recall [0.01149425], F1_score[0.00816327]
Pass 15, Batch 3685, Cost [9.540613], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3690, Cost [12.59687], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3695, Cost [10.770499], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3700, Cost [13.087601], Precision [0.00438596], Recall [0.00757576], F1_score[0.00555556]
Pass 15, Batch 3705, Cost [12.685749], Precision [0.00471698], Recall [0.00840336], F1_score[0.0060423]
Pass 15, Batch 3710, Cost [10.866476], Precision [0.00540541], Recall [0.00980392], F1_score[0.00696864]
Pass 15, Batch 3715, Cost [10.848106], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3720, Cost [9.415638], Precision [0.00598802], Recall [0.01176471], F1_score[0.00793651]
Pass 15, Batch 3725, Cost [10.133743], Precision [0.00564972], Recall [0.01030928], F1_score[0.00729927]
Pass 15, Batch 3730, Cost [11.214727], Precision [0.00985222], Recall [0.01724138], F1_score[0.01253918]
Pass 15, Batch 3735, Cost [10.604059], Precision [0.01075269], Recall [0.01941748], F1_score[0.01384083]
Pass 15, Batch 3740, Cost [10.456459], Precision [0.0106383], Recall [0.01904762], F1_score[0.01365188]
Pass 15, Batch 3745, Cost [9.470479], Precision [0.00588235], Recall [0.01149425], F1_score[0.0077821]
Pass 15, Batch 3750, Cost [10.549093], Precision [0.], Recall [0.], F1_score[0.]
Pass 15, Batch 3755, Cost [10.058731], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:15 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:15 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 16, Batch 3760, Cost [11.653128], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3765, Cost [12.0695505], Precision [0.00452489], Recall [0.00806452], F1_score[0.0057971]
Pass 16, Batch 3770, Cost [12.07014], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3775, Cost [11.874621], Precision [0.00465116], Recall [0.00819672], F1_score[0.00593472]
Pass 16, Batch 3780, Cost [11.004807], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3785, Cost [10.583481], Precision [0.00581395], Recall [0.01123596], F1_score[0.00766284]
Pass 16, Batch 3790, Cost [9.516758], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3795, Cost [11.194078], Precision [0.00462963], Recall [0.00869565], F1_score[0.0060423]
Pass 16, Batch 3800, Cost [11.679523], Precision [0.00446429], Recall [0.00833333], F1_score[0.00581395]
Pass 16, Batch 3805, Cost [11.495737], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3810, Cost [9.758224], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3815, Cost [8.814749], Precision [0.00613497], Recall [0.01282051], F1_score[0.00829875]
Pass 16, Batch 3820, Cost [7.6568093], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3825, Cost [8.580587], Precision [0.01875], Recall [0.03296703], F1_score[0.02390439]
Pass 16, Batch 3830, Cost [9.60455], Precision [0.00555556], Recall [0.0106383], F1_score[0.00729927]
Pass 16, Batch 3835, Cost [10.561134], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3840, Cost [9.472197], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3845, Cost [7.265195], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3850, Cost [9.7596], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3855, Cost [8.352209], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3860, Cost [9.090162], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3865, Cost [8.049286], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3870, Cost [10.28208], Precision [0.01136364], Recall [0.0212766], F1_score[0.01481481]
Pass 16, Batch 3875, Cost [8.163708], Precision [0.00649351], Recall [0.01333333], F1_score[0.00873362]
Pass 16, Batch 3880, Cost [14.208414], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3885, Cost [7.569171], Precision [0.0137931], Recall [0.02597403], F1_score[0.01801802]
Pass 16, Batch 3890, Cost [8.984573], Precision [0.0060241], Recall [0.01052632], F1_score[0.00766284]
Pass 16, Batch 3895, Cost [8.830538], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3900, Cost [9.27946], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3905, Cost [10.394924], Precision [0.00543478], Recall [0.00943396], F1_score[0.00689655]
Pass 16, Batch 3910, Cost [11.105578], Precision [0.01648352], Recall [0.02803738], F1_score[0.02076125]
Pass 16, Batch 3915, Cost [12.148034], Precision [0.00970874], Recall [0.01818182], F1_score[0.01265823]
Pass 16, Batch 3920, Cost [10.6159935], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3925, Cost [12.672865], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3930, Cost [11.032558], Precision [0.00995025], Recall [0.01941748], F1_score[0.01315789]
Pass 16, Batch 3935, Cost [14.990631], Precision [0.00829876], Recall [0.01574803], F1_score[0.01086957]
Pass 16, Batch 3940, Cost [12.143546], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3945, Cost [11.195265], Precision [0.00515464], Recall [0.00869565], F1_score[0.00647249]
Pass 16, Batch 3950, Cost [10.484674], Precision [0.01075269], Recall [0.02020202], F1_score[0.01403509]
Pass 16, Batch 3955, Cost [12.270511], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3960, Cost [10.164974], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3965, Cost [12.121296], Precision [0.00469484], Recall [0.00819672], F1_score[0.00597015]
Pass 16, Batch 3970, Cost [12.758514], Precision [0.00869565], Recall [0.01492537], F1_score[0.01098901]
Pass 16, Batch 3975, Cost [8.768466], Precision [0.00584795], Recall [0.01111111], F1_score[0.00766284]
Pass 16, Batch 3980, Cost [13.954388], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3985, Cost [9.624707], Precision [0.], Recall [0.], F1_score[0.]
Pass 16, Batch 3990, Cost [9.530106], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:16 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:16 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 17, Batch 3995, Cost [14.175981], Precision [0.004], Recall [0.00719424], F1_score[0.00514139]
Pass 17, Batch 4000, Cost [12.407761], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4005, Cost [10.42429], Precision [0.00520833], Recall [0.00943396], F1_score[0.00671141]
Pass 17, Batch 4010, Cost [11.302923], Precision [0.0097561], Recall [0.01680672], F1_score[0.01234568]
Pass 17, Batch 4015, Cost [11.937355], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4020, Cost [9.620588], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4025, Cost [11.5090685], Precision [0.01515152], Recall [0.02970297], F1_score[0.02006689]
Pass 17, Batch 4030, Cost [9.181065], Precision [0.01538462], Recall [0.03191489], F1_score[0.02076125]
Pass 17, Batch 4035, Cost [9.439043], Precision [0.00502513], Recall [0.00925926], F1_score[0.00651466]
Pass 17, Batch 4040, Cost [13.143663], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4045, Cost [10.151308], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4050, Cost [9.739243], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4055, Cost [9.762352], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4060, Cost [9.933073], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4065, Cost [8.231075], Precision [0.00653595], Recall [0.01136364], F1_score[0.00829876]
Pass 17, Batch 4070, Cost [8.997246], Precision [0.0060241], Recall [0.01052632], F1_score[0.00766284]
Pass 17, Batch 4075, Cost [9.377623], Precision [0.00543478], Recall [0.01], F1_score[0.00704225]
Pass 17, Batch 4080, Cost [9.257598], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4085, Cost [8.023233], Precision [0.0125], Recall [0.02197802], F1_score[0.01593626]
Pass 17, Batch 4090, Cost [11.052841], Precision [0.00552486], Recall [0.00961538], F1_score[0.00701754]
Pass 17, Batch 4095, Cost [11.362783], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4100, Cost [10.292489], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4105, Cost [10.741009], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4110, Cost [11.1418085], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4115, Cost [9.805197], Precision [0.00581395], Recall [0.01041667], F1_score[0.00746269]
Pass 17, Batch 4120, Cost [7.8723793], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4125, Cost [8.338525], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4130, Cost [10.454357], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4135, Cost [10.017744], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4140, Cost [10.766396], Precision [0.01075269], Recall [0.02040816], F1_score[0.01408451]
Pass 17, Batch 4145, Cost [12.619641], Precision [0.00473934], Recall [0.00833333], F1_score[0.0060423]
Pass 17, Batch 4150, Cost [12.57659], Precision [0.01363636], Recall [0.02631579], F1_score[0.01796407]
Pass 17, Batch 4155, Cost [10.455875], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4160, Cost [8.71776], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4165, Cost [9.564246], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4170, Cost [11.689945], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4175, Cost [11.233116], Precision [0.00526316], Recall [0.00806452], F1_score[0.00636943]
Pass 17, Batch 4180, Cost [11.260239], Precision [0.01036269], Recall [0.01801802], F1_score[0.01315789]
Pass 17, Batch 4185, Cost [12.662321], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4190, Cost [11.469317], Precision [0.01030928], Recall [0.01980198], F1_score[0.01355932]
Pass 17, Batch 4195, Cost [10.163112], Precision [0.00558659], Recall [0.01111111], F1_score[0.00743494]
Pass 17, Batch 4200, Cost [8.69568], Precision [0.01226994], Recall [0.02272727], F1_score[0.01593626]
Pass 17, Batch 4205, Cost [12.287986], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4210, Cost [9.993685], Precision [0.00555556], Recall [0.01020408], F1_score[0.00719424]
Pass 17, Batch 4215, Cost [9.191472], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4220, Cost [10.328529], Precision [0.], Recall [0.], F1_score[0.]
Pass 17, Batch 4225, Cost [10.080654], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:17 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:17 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 18, Batch 4230, Cost [10.2036295], Precision [0.01025641], Recall [0.01818182], F1_score[0.01311475]
Pass 18, Batch 4235, Cost [13.490289], Precision [0.00406504], Recall [0.0075188], F1_score[0.00527704]
Pass 18, Batch 4240, Cost [11.476549], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4245, Cost [8.232422], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4250, Cost [8.943445], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4255, Cost [9.933172], Precision [0.00555556], Recall [0.01030928], F1_score[0.00722022]
Pass 18, Batch 4260, Cost [13.114703], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4265, Cost [14.549435], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4270, Cost [10.809857], Precision [0.00930233], Recall [0.01709402], F1_score[0.01204819]
Pass 18, Batch 4275, Cost [8.568727], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4280, Cost [10.963131], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4285, Cost [11.827974], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4290, Cost [10.139459], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4295, Cost [8.904815], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4300, Cost [8.429897], Precision [0.00645161], Recall [0.01136364], F1_score[0.00823045]
Pass 18, Batch 4305, Cost [8.96207], Precision [0.00584795], Recall [0.01020408], F1_score[0.00743494]
Pass 18, Batch 4310, Cost [7.2170305], Precision [0.01428571], Recall [0.02439024], F1_score[0.01801802]
Pass 18, Batch 4315, Cost [7.7233443], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4320, Cost [8.827929], Precision [0.00555556], Recall [0.00990099], F1_score[0.00711744]
Pass 18, Batch 4325, Cost [9.976166], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4330, Cost [9.302967], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4335, Cost [7.4948792], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4340, Cost [8.090046], Precision [0.00649351], Recall [0.0125], F1_score[0.00854701]
Pass 18, Batch 4345, Cost [9.751762], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4350, Cost [8.949407], Precision [0.01242236], Recall [0.02272727], F1_score[0.01606426]
Pass 18, Batch 4355, Cost [10.841426], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4360, Cost [9.486263], Precision [0.01204819], Recall [0.02380952], F1_score[0.016]
Pass 18, Batch 4365, Cost [9.490192], Precision [0.01183432], Recall [0.02061856], F1_score[0.0150376]
Pass 18, Batch 4370, Cost [9.29175], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4375, Cost [10.570848], Precision [0.00552486], Recall [0.01041667], F1_score[0.00722022]
Pass 18, Batch 4380, Cost [11.035627], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4385, Cost [11.264914], Precision [0.00507614], Recall [0.00961538], F1_score[0.00664452]
Pass 18, Batch 4390, Cost [11.77055], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4395, Cost [9.596596], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4400, Cost [8.389196], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4405, Cost [15.792195], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4410, Cost [11.514163], Precision [0.01005025], Recall [0.01851852], F1_score[0.01302932]
Pass 18, Batch 4415, Cost [12.762758], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4420, Cost [9.617205], Precision [0.01176471], Recall [0.02247191], F1_score[0.01544402]
Pass 18, Batch 4425, Cost [10.218763], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4430, Cost [10.232367], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4435, Cost [10.387309], Precision [0.00510204], Recall [0.00990099], F1_score[0.00673401]
Pass 18, Batch 4440, Cost [12.700005], Precision [0.00921659], Recall [0.01709402], F1_score[0.01197605]
Pass 18, Batch 4445, Cost [10.808641], Precision [0.00555556], Recall [0.00943396], F1_score[0.00699301]
Pass 18, Batch 4450, Cost [10.91637], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4455, Cost [9.360314], Precision [0.], Recall [0.], F1_score[0.]
Pass 18, Batch 4460, Cost [8.716309], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:18 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:18 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 19, Batch 4465, Cost [9.3706045], Precision [0.00543478], Recall [0.01010101], F1_score[0.00706714]
Pass 19, Batch 4470, Cost [13.243591], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4475, Cost [10.967856], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4480, Cost [13.203156], Precision [0.0041841], Recall [0.00740741], F1_score[0.00534759]
Pass 19, Batch 4485, Cost [9.181915], Precision [0.00625], Recall [0.01176471], F1_score[0.00816327]
Pass 19, Batch 4490, Cost [9.491117], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4495, Cost [9.491226], Precision [0.00625], Recall [0.01086957], F1_score[0.00793651]
Pass 19, Batch 4500, Cost [9.668534], Precision [0.00510204], Recall [0.01], F1_score[0.00675676]
Pass 19, Batch 4505, Cost [10.771124], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4510, Cost [11.144819], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4515, Cost [9.874359], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4520, Cost [9.734315], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4525, Cost [9.515791], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4530, Cost [7.884944], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4535, Cost [9.845589], Precision [0.00561798], Recall [0.00961538], F1_score[0.0070922]
Pass 19, Batch 4540, Cost [10.31185], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4545, Cost [8.370462], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4550, Cost [8.219039], Precision [0.01190476], Recall [0.02247191], F1_score[0.0155642]
Pass 19, Batch 4555, Cost [7.1026993], Precision [0.0137931], Recall [0.02439024], F1_score[0.01762114]
Pass 19, Batch 4560, Cost [9.224554], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4565, Cost [10.096042], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4570, Cost [8.885607], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4575, Cost [8.244446], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4580, Cost [8.462831], Precision [0.01282051], Recall [0.02150538], F1_score[0.01606426]
Pass 19, Batch 4585, Cost [11.3763075], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4590, Cost [7.669894], Precision [0.0078125], Recall [0.01694915], F1_score[0.01069519]
Pass 19, Batch 4595, Cost [10.19805], Precision [0.00595238], Recall [0.01075269], F1_score[0.00766284]
Pass 19, Batch 4600, Cost [10.630642], Precision [0.00581395], Recall [0.00980392], F1_score[0.00729927]
Pass 19, Batch 4605, Cost [10.876219], Precision [0.01015228], Recall [0.01980198], F1_score[0.01342282]
Pass 19, Batch 4610, Cost [7.773065], Precision [0.02189781], Recall [0.03896104], F1_score[0.02803738]
Pass 19, Batch 4615, Cost [11.441221], Precision [0.005], Recall [0.00900901], F1_score[0.00643087]
Pass 19, Batch 4620, Cost [10.356148], Precision [0.01142857], Recall [0.02040816], F1_score[0.01465201]
Pass 19, Batch 4625, Cost [11.190588], Precision [0.00502513], Recall [0.00877193], F1_score[0.00638978]
Pass 19, Batch 4630, Cost [10.319338], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4635, Cost [10.034076], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4640, Cost [11.058856], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4645, Cost [12.151912], Precision [0.0047619], Recall [0.00819672], F1_score[0.0060241]
Pass 19, Batch 4650, Cost [15.696235], Precision [0.00769231], Recall [0.01315789], F1_score[0.00970874]
Pass 19, Batch 4655, Cost [11.317002], Precision [0.0052356], Recall [0.00970874], F1_score[0.00680272]
Pass 19, Batch 4660, Cost [10.794617], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4665, Cost [10.333687], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4670, Cost [11.991669], Precision [0.00483092], Recall [0.00840336], F1_score[0.00613497]
Pass 19, Batch 4675, Cost [9.263531], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4680, Cost [10.4486885], Precision [0.00552486], Recall [0.01098901], F1_score[0.00735294]
Pass 19, Batch 4685, Cost [9.261803], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4690, Cost [7.9395185], Precision [0.], Recall [0.], F1_score[0.]
Pass 19, Batch 4695, Cost [10.2689705], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:19 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:19 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 20, Batch 4700, Cost [11.906847], Precision [0.00921659], Recall [0.01526718], F1_score[0.01149425]
Pass 20, Batch 4705, Cost [9.691736], Precision [0.00543478], Recall [0.00900901], F1_score[0.00677966]
Pass 20, Batch 4710, Cost [12.49835], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4715, Cost [10.248699], Precision [0.00543478], Recall [0.00892857], F1_score[0.00675676]
Pass 20, Batch 4720, Cost [9.513457], Precision [0.00591716], Recall [0.01075269], F1_score[0.00763359]
Pass 20, Batch 4725, Cost [8.901011], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4730, Cost [13.076223], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4735, Cost [10.069137], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4740, Cost [10.723964], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4745, Cost [9.343298], Precision [0.00529101], Recall [0.00943396], F1_score[0.00677966]
Pass 20, Batch 4750, Cost [10.020033], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4755, Cost [9.822742], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4760, Cost [10.130545], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4765, Cost [9.507109], Precision [0.00568182], Recall [0.00925926], F1_score[0.00704225]
Pass 20, Batch 4770, Cost [8.4232645], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4775, Cost [10.043663], Precision [0.00546448], Recall [0.00990099], F1_score[0.00704225]
Pass 20, Batch 4780, Cost [7.4437857], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4785, Cost [7.385412], Precision [0.00666667], Recall [0.0125], F1_score[0.00869565]
Pass 20, Batch 4790, Cost [10.354793], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4795, Cost [9.392419], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4800, Cost [9.346796], Precision [0.00606061], Recall [0.01098901], F1_score[0.0078125]
Pass 20, Batch 4805, Cost [8.893204], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4810, Cost [9.643322], Precision [0.00581395], Recall [0.01020408], F1_score[0.00740741]
Pass 20, Batch 4815, Cost [10.744718], Precision [0.00540541], Recall [0.01010101], F1_score[0.00704225]
Pass 20, Batch 4820, Cost [10.140386], Precision [0.00568182], Recall [0.01010101], F1_score[0.00727273]
Pass 20, Batch 4825, Cost [8.877733], Precision [0.00632911], Recall [0.01234568], F1_score[0.0083682]
Pass 20, Batch 4830, Cost [9.871626], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4835, Cost [9.601324], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4840, Cost [11.784317], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4845, Cost [9.241347], Precision [0.01234568], Recall [0.0212766], F1_score[0.015625]
Pass 20, Batch 4850, Cost [10.571937], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4855, Cost [7.664194], Precision [0.01428571], Recall [0.02597403], F1_score[0.01843318]
Pass 20, Batch 4860, Cost [10.487889], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4865, Cost [10.004383], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4870, Cost [13.292897], Precision [0.00423729], Recall [0.0075188], F1_score[0.00542005]
Pass 20, Batch 4875, Cost [15.668848], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4880, Cost [10.62402], Precision [0.01086957], Recall [0.01886792], F1_score[0.0137931]
Pass 20, Batch 4885, Cost [10.985612], Precision [0.00534759], Recall [0.00900901], F1_score[0.00671141]
Pass 20, Batch 4890, Cost [10.477564], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4895, Cost [11.829062], Precision [0.005], Recall [0.00925926], F1_score[0.00649351]
Pass 20, Batch 4900, Cost [11.120748], Precision [0.00529101], Recall [0.00943396], F1_score[0.00677966]
Pass 20, Batch 4905, Cost [10.980214], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4910, Cost [10.670832], Precision [0.01041667], Recall [0.01694915], F1_score[0.01290323]
Pass 20, Batch 4915, Cost [10.850258], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4920, Cost [11.030699], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4925, Cost [8.047036], Precision [0.], Recall [0.], F1_score[0.]
Pass 20, Batch 4930, Cost [8.465668], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:20 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:20 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 21, Batch 4935, Cost [13.508928], Precision [0.00409836], Recall [0.00714286], F1_score[0.00520833]
Pass 21, Batch 4940, Cost [10.282688], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 4945, Cost [10.88163], Precision [0.00483092], Recall [0.00840336], F1_score[0.00613497]
Pass 21, Batch 4950, Cost [11.792871], Precision [0.00961538], Recall [0.01550388], F1_score[0.01186944]
Pass 21, Batch 4955, Cost [8.016669], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 4960, Cost [9.759131], Precision [0.00588235], Recall [0.01041667], F1_score[0.0075188]
Pass 21, Batch 4965, Cost [11.425154], Precision [0.01025641], Recall [0.01694915], F1_score[0.01277955]
Pass 21, Batch 4970, Cost [11.76277], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 4975, Cost [12.334003], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 4980, Cost [11.92897], Precision [0.00454545], Recall [0.00833333], F1_score[0.00588235]
Pass 21, Batch 4985, Cost [8.563311], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 4990, Cost [8.601107], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 4995, Cost [9.138219], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5000, Cost [9.303415], Precision [0.00555556], Recall [0.00961538], F1_score[0.00704225]
Pass 21, Batch 5005, Cost [11.318327], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5010, Cost [8.065816], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5015, Cost [7.934637], Precision [0.00636943], Recall [0.01111111], F1_score[0.00809717]
Pass 21, Batch 5020, Cost [8.678658], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5025, Cost [7.424788], Precision [0.00680272], Recall [0.01075269], F1_score[0.00833333]
Pass 21, Batch 5030, Cost [10.776342], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5035, Cost [10.271563], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5040, Cost [6.262537], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5045, Cost [10.78269], Precision [0.00520833], Recall [0.00943396], F1_score[0.00671141]
Pass 21, Batch 5050, Cost [9.224244], Precision [0.00574713], Recall [0.01098901], F1_score[0.00754717]
Pass 21, Batch 5055, Cost [10.509424], Precision [0.01111111], Recall [0.01980198], F1_score[0.01423488]
Pass 21, Batch 5060, Cost [10.218799], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5065, Cost [10.146894], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5070, Cost [10.046247], Precision [0.00561798], Recall [0.01075269], F1_score[0.00738007]
Pass 21, Batch 5075, Cost [9.829651], Precision [0.00571429], Recall [0.00980392], F1_score[0.00722022]
Pass 21, Batch 5080, Cost [12.482622], Precision [0.00909091], Recall [0.01754386], F1_score[0.01197605]
Pass 21, Batch 5085, Cost [10.505443], Precision [0.00549451], Recall [0.01052632], F1_score[0.00722022]
Pass 21, Batch 5090, Cost [11.326334], Precision [0.01630435], Recall [0.02654867], F1_score[0.02020202]
Pass 21, Batch 5095, Cost [10.364721], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5100, Cost [10.494742], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5105, Cost [9.692358], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5110, Cost [14.538923], Precision [0.00401606], Recall [0.00735294], F1_score[0.00519481]
Pass 21, Batch 5115, Cost [8.866186], Precision [0.01863354], Recall [0.03157895], F1_score[0.0234375]
Pass 21, Batch 5120, Cost [12.750559], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5125, Cost [9.421843], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5130, Cost [10.47159], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5135, Cost [10.903791], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5140, Cost [11.393303], Precision [0.00497512], Recall [0.00925926], F1_score[0.00647249]
Pass 21, Batch 5145, Cost [12.38144], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5150, Cost [10.412168], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5155, Cost [10.643362], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5160, Cost [8.429443], Precision [0.], Recall [0.], F1_score[0.]
Pass 21, Batch 5165, Cost [9.490658], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:21 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:21 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 22, Batch 5170, Cost [12.454824], Precision [0.00446429], Recall [0.00740741], F1_score[0.00557103]
Pass 22, Batch 5175, Cost [10.766703], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5180, Cost [8.452844], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5185, Cost [11.643863], Precision [0.00947867], Recall [0.01587302], F1_score[0.01186944]
Pass 22, Batch 5190, Cost [9.892845], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5195, Cost [12.418636], Precision [0.00485437], Recall [0.00909091], F1_score[0.00632911]
Pass 22, Batch 5200, Cost [10.210617], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5205, Cost [10.6387005], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5210, Cost [11.947325], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5215, Cost [9.944531], Precision [0.01041667], Recall [0.01886792], F1_score[0.01342282]
Pass 22, Batch 5220, Cost [7.9395695], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5225, Cost [13.461051], Precision [0.0042735], Recall [0.00813008], F1_score[0.00560224]
Pass 22, Batch 5230, Cost [10.944891], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5235, Cost [9.542798], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5240, Cost [9.293055], Precision [0.02325581], Recall [0.03809524], F1_score[0.02888087]
Pass 22, Batch 5245, Cost [10.298752], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5250, Cost [9.560198], Precision [0.00537634], Recall [0.01010101], F1_score[0.00701754]
Pass 22, Batch 5255, Cost [7.2611513], Precision [0.00666667], Recall [0.01219512], F1_score[0.00862069]
Pass 22, Batch 5260, Cost [6.0942054], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5265, Cost [9.408922], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5270, Cost [9.075047], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5275, Cost [8.062337], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5280, Cost [12.101899], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5285, Cost [7.9590473], Precision [0.00675676], Recall [0.01190476], F1_score[0.00862069]
Pass 22, Batch 5290, Cost [8.854759], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5295, Cost [8.914049], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5300, Cost [9.340789], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5305, Cost [9.972567], Precision [0.00549451], Recall [0.01030928], F1_score[0.00716846]
Pass 22, Batch 5310, Cost [8.790008], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5315, Cost [11.532813], Precision [0.00505051], Recall [0.00943396], F1_score[0.00657895]
Pass 22, Batch 5320, Cost [9.678423], Precision [0.01156069], Recall [0.01980198], F1_score[0.01459854]
Pass 22, Batch 5325, Cost [11.538355], Precision [0.0195122], Recall [0.03571429], F1_score[0.02523659]
Pass 22, Batch 5330, Cost [10.081401], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5335, Cost [11.417673], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5340, Cost [9.837726], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5345, Cost [10.71051], Precision [0.00520833], Recall [0.01020408], F1_score[0.00689655]
Pass 22, Batch 5350, Cost [9.963877], Precision [0.00549451], Recall [0.00961538], F1_score[0.00699301]
Pass 22, Batch 5355, Cost [15.291688], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5360, Cost [9.7267065], Precision [0.01149425], Recall [0.02150538], F1_score[0.01498127]
Pass 22, Batch 5365, Cost [9.908829], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5370, Cost [9.054051], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5375, Cost [12.809456], Precision [0.00446429], Recall [0.00769231], F1_score[0.00564972]
Pass 22, Batch 5380, Cost [10.201268], Precision [0.00552486], Recall [0.00980392], F1_score[0.00706714]
Pass 22, Batch 5385, Cost [12.549453], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5390, Cost [10.154856], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5395, Cost [9.877331], Precision [0.], Recall [0.], F1_score[0.]
Pass 22, Batch 5400, Cost [11.0982895], Precision [0.00502513], Recall [0.01010101], F1_score[0.00671141]
[TrainSet] pass_id:22 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:22 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 23, Batch 5405, Cost [11.357712], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5410, Cost [11.406774], Precision [0.00947867], Recall [0.01724138], F1_score[0.01223242]
Pass 23, Batch 5415, Cost [10.90179], Precision [0.00490196], Recall [0.00847458], F1_score[0.00621118]
Pass 23, Batch 5420, Cost [10.668898], Precision [0.01036269], Recall [0.02083333], F1_score[0.01384083]
Pass 23, Batch 5425, Cost [12.859137], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5430, Cost [10.907053], Precision [0.00529101], Recall [0.00961538], F1_score[0.00682594]
Pass 23, Batch 5435, Cost [9.703768], Precision [0.00568182], Recall [0.01075269], F1_score[0.00743494]
Pass 23, Batch 5440, Cost [7.735612], Precision [0.00621118], Recall [0.01234568], F1_score[0.00826446]
Pass 23, Batch 5445, Cost [12.05965], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5450, Cost [11.75644], Precision [0.00909091], Recall [0.01626016], F1_score[0.01166181]
Pass 23, Batch 5455, Cost [11.331955], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5460, Cost [9.840137], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5465, Cost [9.62505], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5470, Cost [8.001614], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5475, Cost [9.394568], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5480, Cost [10.379816], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5485, Cost [7.2047205], Precision [0.00684932], Recall [0.01265823], F1_score[0.00888889]
Pass 23, Batch 5490, Cost [7.9990296], Precision [0.01257862], Recall [0.02298851], F1_score[0.01626016]
Pass 23, Batch 5495, Cost [10.040537], Precision [0.00526316], Recall [0.00990099], F1_score[0.00687285]
Pass 23, Batch 5500, Cost [10.103184], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5505, Cost [10.099358], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5510, Cost [8.962374], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5515, Cost [9.910889], Precision [0.01169591], Recall [0.01980198], F1_score[0.01470588]
Pass 23, Batch 5520, Cost [12.475153], Precision [0.00483092], Recall [0.00840336], F1_score[0.00613497]
Pass 23, Batch 5525, Cost [8.54096], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5530, Cost [8.825883], Precision [0.00636943], Recall [0.01162791], F1_score[0.00823045]
Pass 23, Batch 5535, Cost [9.588023], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5540, Cost [7.846487], Precision [0.0137931], Recall [0.02597403], F1_score[0.01801802]
Pass 23, Batch 5545, Cost [10.03542], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5550, Cost [10.520989], Precision [0.00555556], Recall [0.01020408], F1_score[0.00719424]
Pass 23, Batch 5555, Cost [10.155893], Precision [0.00574713], Recall [0.0106383], F1_score[0.00746269]
Pass 23, Batch 5560, Cost [10.381748], Precision [0.00574713], Recall [0.01052632], F1_score[0.00743494]
Pass 23, Batch 5565, Cost [9.679505], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5570, Cost [10.731282], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5575, Cost [8.972085], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5580, Cost [10.679718], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5585, Cost [12.094441], Precision [0.00970874], Recall [0.01515152], F1_score[0.01183432]
Pass 23, Batch 5590, Cost [12.032719], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5595, Cost [8.816779], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5600, Cost [9.059029], Precision [0.00613497], Recall [0.01162791], F1_score[0.00803213]
Pass 23, Batch 5605, Cost [10.307453], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5610, Cost [11.398836], Precision [0.01025641], Recall [0.01818182], F1_score[0.01311475]
Pass 23, Batch 5615, Cost [10.203054], Precision [0.01081081], Recall [0.01869159], F1_score[0.01369863]
Pass 23, Batch 5620, Cost [12.514309], Precision [0.00921659], Recall [0.01666667], F1_score[0.01186944]
Pass 23, Batch 5625, Cost [10.765806], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5630, Cost [9.851471], Precision [0.], Recall [0.], F1_score[0.]
Pass 23, Batch 5635, Cost [10.138845], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:23 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:23 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 24, Batch 5640, Cost [9.878065], Precision [0.00534759], Recall [0.00909091], F1_score[0.00673401]
Pass 24, Batch 5645, Cost [11.968573], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5650, Cost [10.214784], Precision [0.00512821], Recall [0.00934579], F1_score[0.00662252]
Pass 24, Batch 5655, Cost [10.369732], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5660, Cost [10.210654], Precision [0.01075269], Recall [0.02061856], F1_score[0.01413428]
Pass 24, Batch 5665, Cost [11.946977], Precision [0.00985222], Recall [0.01639344], F1_score[0.01230769]
Pass 24, Batch 5670, Cost [11.180485], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5675, Cost [9.594909], Precision [0.01020408], Recall [0.01923077], F1_score[0.01333333]
Pass 24, Batch 5680, Cost [12.506214], Precision [0.00423729], Recall [0.00793651], F1_score[0.00552486]
Pass 24, Batch 5685, Cost [10.65126], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5690, Cost [8.836818], Precision [0.00588235], Recall [0.01111111], F1_score[0.00769231]
Pass 24, Batch 5695, Cost [10.172222], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5700, Cost [11.167791], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5705, Cost [7.846969], Precision [0.00657895], Recall [0.01190476], F1_score[0.00847458]
Pass 24, Batch 5710, Cost [8.958845], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5715, Cost [10.6452675], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5720, Cost [7.9691663], Precision [0.00641026], Recall [0.01190476], F1_score[0.00833333]
Pass 24, Batch 5725, Cost [8.261925], Precision [0.00625], Recall [0.01111111], F1_score[0.008]
Pass 24, Batch 5730, Cost [9.022351], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5735, Cost [10.5535965], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5740, Cost [8.140873], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5745, Cost [10.102673], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5750, Cost [9.630999], Precision [0.01724138], Recall [0.02970297], F1_score[0.02181818]
Pass 24, Batch 5755, Cost [9.51049], Precision [0.01149425], Recall [0.02], F1_score[0.01459854]
Pass 24, Batch 5760, Cost [8.208288], Precision [0.01342282], Recall [0.02325581], F1_score[0.01702128]
Pass 24, Batch 5765, Cost [8.06745], Precision [0.00689655], Recall [0.01369863], F1_score[0.00917431]
Pass 24, Batch 5770, Cost [9.033711], Precision [0.00578035], Recall [0.01136364], F1_score[0.00766284]
Pass 24, Batch 5775, Cost [7.689221], Precision [0.00719424], Recall [0.01369863], F1_score[0.00943396]
Pass 24, Batch 5780, Cost [8.797689], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5785, Cost [10.845978], Precision [0.00520833], Recall [0.00892857], F1_score[0.00657895]
Pass 24, Batch 5790, Cost [9.0205765], Precision [0.01863354], Recall [0.0326087], F1_score[0.02371542]
Pass 24, Batch 5795, Cost [10.553206], Precision [0.01639344], Recall [0.02702703], F1_score[0.02040816]
Pass 24, Batch 5800, Cost [11.458601], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5805, Cost [8.2083], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5810, Cost [11.238704], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5815, Cost [11.629673], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5820, Cost [10.320608], Precision [0.00564972], Recall [0.00884956], F1_score[0.00689655]
Pass 24, Batch 5825, Cost [11.141766], Precision [0.0052356], Recall [0.00952381], F1_score[0.00675676]
Pass 24, Batch 5830, Cost [11.126573], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5835, Cost [10.284246], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5840, Cost [8.65607], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5845, Cost [10.434807], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5850, Cost [11.285149], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5855, Cost [11.819169], Precision [0.00483092], Recall [0.00826446], F1_score[0.00609756]
Pass 24, Batch 5860, Cost [9.514767], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5865, Cost [10.051443], Precision [0.], Recall [0.], F1_score[0.]
Pass 24, Batch 5870, Cost [11.972702], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:24 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:24 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 25, Batch 5875, Cost [13.86825], Precision [0.00784314], Recall [0.01369863], F1_score[0.00997506]
Pass 25, Batch 5880, Cost [12.366573], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5885, Cost [12.884173], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5890, Cost [10.295911], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5895, Cost [9.043591], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5900, Cost [11.142198], Precision [0.00505051], Recall [0.01030928], F1_score[0.00677966]
Pass 25, Batch 5905, Cost [8.722379], Precision [0.00609756], Recall [0.01190476], F1_score[0.00806452]
Pass 25, Batch 5910, Cost [12.995184], Precision [0.00408163], Recall [0.0075188], F1_score[0.00529101]
Pass 25, Batch 5915, Cost [10.468871], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5920, Cost [9.436047], Precision [0.0052356], Recall [0.01], F1_score[0.00687285]
Pass 25, Batch 5925, Cost [9.758318], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5930, Cost [7.829029], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5935, Cost [10.937328], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5940, Cost [9.576663], Precision [0.00555556], Recall [0.00980392], F1_score[0.0070922]
Pass 25, Batch 5945, Cost [8.50621], Precision [0.00613497], Recall [0.01149425], F1_score[0.008]
Pass 25, Batch 5950, Cost [8.325554], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5955, Cost [7.7837324], Precision [0.00636943], Recall [0.01136364], F1_score[0.00816326]
Pass 25, Batch 5960, Cost [7.3184347], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5965, Cost [7.4627686], Precision [0.00671141], Recall [0.01136364], F1_score[0.00843882]
Pass 25, Batch 5970, Cost [8.508066], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5975, Cost [9.492543], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5980, Cost [9.166506], Precision [0.00598802], Recall [0.01098901], F1_score[0.00775194]
Pass 25, Batch 5985, Cost [9.459396], Precision [0.01183432], Recall [0.02061856], F1_score[0.0150376]
Pass 25, Batch 5990, Cost [9.149316], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 5995, Cost [9.96967], Precision [0.01162791], Recall [0.02], F1_score[0.01470588]
Pass 25, Batch 6000, Cost [8.009729], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 6005, Cost [10.359705], Precision [0.00540541], Recall [0.00900901], F1_score[0.00675676]
Pass 25, Batch 6010, Cost [9.819614], Precision [0.01142857], Recall [0.01923077], F1_score[0.01433692]
Pass 25, Batch 6015, Cost [10.076805], Precision [0.01123596], Recall [0.02020202], F1_score[0.01444043]
Pass 25, Batch 6020, Cost [9.959442], Precision [0.01183432], Recall [0.0212766], F1_score[0.01520912]
Pass 25, Batch 6025, Cost [9.425135], Precision [0.01212121], Recall [0.02173913], F1_score[0.0155642]
Pass 25, Batch 6030, Cost [8.591894], Precision [0.01298701], Recall [0.02150538], F1_score[0.01619433]
Pass 25, Batch 6035, Cost [13.147665], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 6040, Cost [11.333418], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 6045, Cost [10.208599], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 6050, Cost [11.915257], Precision [0.005], Recall [0.00925926], F1_score[0.00649351]
Pass 25, Batch 6055, Cost [9.614059], Precision [0.01086957], Recall [0.01960784], F1_score[0.01398601]
Pass 25, Batch 6060, Cost [11.887585], Precision [0.00995025], Recall [0.01694915], F1_score[0.01253919]
Pass 25, Batch 6065, Cost [9.877087], Precision [0.01169591], Recall [0.02105263], F1_score[0.01503759]
Pass 25, Batch 6070, Cost [8.628233], Precision [0.00625], Recall [0.01351351], F1_score[0.00854701]
Pass 25, Batch 6075, Cost [9.884696], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 6080, Cost [9.177933], Precision [0.01204819], Recall [0.02222222], F1_score[0.015625]
Pass 25, Batch 6085, Cost [9.912902], Precision [0.00555556], Recall [0.01], F1_score[0.00714286]
Pass 25, Batch 6090, Cost [10.284209], Precision [0.01092896], Recall [0.01801802], F1_score[0.01360544]
Pass 25, Batch 6095, Cost [9.0083], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 6100, Cost [9.867699], Precision [0.], Recall [0.], F1_score[0.]
Pass 25, Batch 6105, Cost [8.66021], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:25 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:25 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 26, Batch 6110, Cost [12.154419], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6115, Cost [10.871042], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6120, Cost [9.055323], Precision [0.00561798], Recall [0.00980392], F1_score[0.00714286]
Pass 26, Batch 6125, Cost [12.155249], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6130, Cost [9.047779], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6135, Cost [11.051586], Precision [0.01098901], Recall [0.01923077], F1_score[0.01398601]
Pass 26, Batch 6140, Cost [10.74389], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6145, Cost [11.454596], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6150, Cost [9.787235], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6155, Cost [10.477074], Precision [0.00487805], Recall [0.00840336], F1_score[0.00617284]
Pass 26, Batch 6160, Cost [9.385117], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6165, Cost [11.578545], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6170, Cost [10.319859], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6175, Cost [8.647336], Precision [0.00621118], Recall [0.01052632], F1_score[0.0078125]
Pass 26, Batch 6180, Cost [10.555671], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6185, Cost [9.616474], Precision [0.00564972], Recall [0.00980392], F1_score[0.00716846]
Pass 26, Batch 6190, Cost [7.8228674], Precision [0.01234568], Recall [0.025], F1_score[0.01652893]
Pass 26, Batch 6195, Cost [6.887393], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6200, Cost [8.145204], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6205, Cost [9.489836], Precision [0.00606061], Recall [0.01075269], F1_score[0.00775194]
Pass 26, Batch 6210, Cost [9.144245], Precision [0.01212121], Recall [0.0212766], F1_score[0.01544401]
Pass 26, Batch 6215, Cost [7.978776], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6220, Cost [10.116989], Precision [0.00552486], Recall [0.00934579], F1_score[0.00694444]
Pass 26, Batch 6225, Cost [8.14974], Precision [0.00675676], Recall [0.0125], F1_score[0.00877193]
Pass 26, Batch 6230, Cost [12.7301655], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6235, Cost [9.700203], Precision [0.00595238], Recall [0.01162791], F1_score[0.00787402]
Pass 26, Batch 6240, Cost [10.342999], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6245, Cost [6.6050763], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6250, Cost [9.345211], Precision [0.00609756], Recall [0.01020408], F1_score[0.00763359]
Pass 26, Batch 6255, Cost [12.118663], Precision [0.00485437], Recall [0.00847458], F1_score[0.00617284]
Pass 26, Batch 6260, Cost [10.26339], Precision [0.01639344], Recall [0.02912621], F1_score[0.02097902]
Pass 26, Batch 6265, Cost [9.950605], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6270, Cost [10.145871], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6275, Cost [13.7252655], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6280, Cost [11.702715], Precision [0.00473934], Recall [0.00869565], F1_score[0.00613497]
Pass 26, Batch 6285, Cost [12.289627], Precision [0.0047619], Recall [0.00840336], F1_score[0.00607903]
Pass 26, Batch 6290, Cost [14.2485485], Precision [0.00421941], Recall [0.00769231], F1_score[0.00544959]
Pass 26, Batch 6295, Cost [9.485393], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6300, Cost [10.239818], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6305, Cost [10.977582], Precision [0.01052632], Recall [0.02222222], F1_score[0.01428571]
Pass 26, Batch 6310, Cost [11.339338], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6315, Cost [10.714277], Precision [0.01538462], Recall [0.02654867], F1_score[0.01948052]
Pass 26, Batch 6320, Cost [10.152097], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6325, Cost [14.748592], Precision [0.00790514], Recall [0.01369863], F1_score[0.01002506]
Pass 26, Batch 6330, Cost [11.404807], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6335, Cost [9.2397995], Precision [0.], Recall [0.], F1_score[0.]
Pass 26, Batch 6340, Cost [10.007149], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:26 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:26 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 27, Batch 6345, Cost [10.592789], Precision [0.00490196], Recall [0.00892857], F1_score[0.00632911]
Pass 27, Batch 6350, Cost [10.558359], Precision [0.00990099], Recall [0.01769911], F1_score[0.01269841]
Pass 27, Batch 6355, Cost [12.423477], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6360, Cost [12.695795], Precision [0.00873362], Recall [0.01680672], F1_score[0.01149425]
Pass 27, Batch 6365, Cost [11.798498], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6370, Cost [10.087709], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6375, Cost [10.324094], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6380, Cost [10.909735], Precision [0.0046729], Recall [0.00884956], F1_score[0.00611621]
Pass 27, Batch 6385, Cost [12.373958], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6390, Cost [11.108467], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6395, Cost [9.999344], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6400, Cost [9.477383], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6405, Cost [7.3956156], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6410, Cost [8.376656], Precision [0.00632911], Recall [0.01086957], F1_score[0.008]
Pass 27, Batch 6415, Cost [9.856452], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6420, Cost [10.400787], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6425, Cost [8.839093], Precision [0.01169591], Recall [0.02247191], F1_score[0.01538461]
Pass 27, Batch 6430, Cost [9.515354], Precision [0.00529101], Recall [0.00952381], F1_score[0.00680272]
Pass 27, Batch 6435, Cost [6.9929376], Precision [0.00671141], Recall [0.01136364], F1_score[0.00843882]
Pass 27, Batch 6440, Cost [10.25816], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6445, Cost [9.290871], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6450, Cost [12.011408], Precision [0.0046729], Recall [0.008], F1_score[0.0058997]
Pass 27, Batch 6455, Cost [10.506987], Precision [0.00537634], Recall [0.00943396], F1_score[0.00684932]
Pass 27, Batch 6460, Cost [12.089163], Precision [0.0046729], Recall [0.00847458], F1_score[0.0060241]
Pass 27, Batch 6465, Cost [11.257555], Precision [0.01041667], Recall [0.01754386], F1_score[0.01307189]
Pass 27, Batch 6470, Cost [6.4630747], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6475, Cost [8.410327], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6480, Cost [8.715706], Precision [0.00621118], Recall [0.01351351], F1_score[0.00851064]
Pass 27, Batch 6485, Cost [12.731436], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6490, Cost [12.422728], Precision [0.0046729], Recall [0.00862069], F1_score[0.00606061]
Pass 27, Batch 6495, Cost [9.477708], Precision [0.00584795], Recall [0.01030928], F1_score[0.00746269]
Pass 27, Batch 6500, Cost [9.536071], Precision [0.00591716], Recall [0.01030928], F1_score[0.0075188]
Pass 27, Batch 6505, Cost [9.914051], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6510, Cost [11.701169], Precision [0.00473934], Recall [0.00900901], F1_score[0.00621118]
Pass 27, Batch 6515, Cost [10.177331], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6520, Cost [11.2880335], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6525, Cost [14.768657], Precision [0.008], Recall [0.01398601], F1_score[0.01017812]
Pass 27, Batch 6530, Cost [12.16799], Precision [0.00471698], Recall [0.00826446], F1_score[0.00600601]
Pass 27, Batch 6535, Cost [11.122089], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6540, Cost [11.279747], Precision [0.01020408], Recall [0.02040816], F1_score[0.01360544]
Pass 27, Batch 6545, Cost [9.467992], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6550, Cost [10.248458], Precision [0.00537634], Recall [0.00990099], F1_score[0.00696864]
Pass 27, Batch 6555, Cost [11.692459], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6560, Cost [10.81679], Precision [0.01020408], Recall [0.01886792], F1_score[0.01324503]
Pass 27, Batch 6565, Cost [9.766318], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6570, Cost [8.337043], Precision [0.], Recall [0.], F1_score[0.]
Pass 27, Batch 6575, Cost [11.022783], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:27 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:27 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 28, Batch 6580, Cost [11.188498], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6585, Cost [12.884691], Precision [0.00884956], Recall [0.01503759], F1_score[0.01114206]
Pass 28, Batch 6590, Cost [10.450911], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6595, Cost [10.079138], Precision [0.00526316], Recall [0.00970874], F1_score[0.00682594]
Pass 28, Batch 6600, Cost [9.905763], Precision [0.00588235], Recall [0.01098901], F1_score[0.00766284]
Pass 28, Batch 6605, Cost [11.084796], Precision [0.0052356], Recall [0.00961538], F1_score[0.00677966]
Pass 28, Batch 6610, Cost [10.8040285], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6615, Cost [9.301986], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6620, Cost [7.8718195], Precision [0.00613497], Recall [0.01204819], F1_score[0.00813008]
Pass 28, Batch 6625, Cost [11.719291], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6630, Cost [9.803445], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6635, Cost [11.09815], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6640, Cost [10.164033], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6645, Cost [9.567748], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6650, Cost [7.7788796], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6655, Cost [8.817744], Precision [0.00581395], Recall [0.01149425], F1_score[0.00772201]
Pass 28, Batch 6660, Cost [8.785154], Precision [0.00595238], Recall [0.0106383], F1_score[0.00763359]
Pass 28, Batch 6665, Cost [8.497097], Precision [0.00591716], Recall [0.01020408], F1_score[0.00749064]
Pass 28, Batch 6670, Cost [7.7913127], Precision [0.00645161], Recall [0.01204819], F1_score[0.00840336]
Pass 28, Batch 6675, Cost [10.202337], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6680, Cost [9.989812], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6685, Cost [10.851473], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6690, Cost [11.088173], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6695, Cost [9.380249], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6700, Cost [10.093917], Precision [0.00543478], Recall [0.00934579], F1_score[0.00687285]
Pass 28, Batch 6705, Cost [7.9411874], Precision [0.01315789], Recall [0.02469136], F1_score[0.01716738]
Pass 28, Batch 6710, Cost [10.646474], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6715, Cost [12.2524605], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6720, Cost [8.095482], Precision [0.01369863], Recall [0.025], F1_score[0.01769912]
Pass 28, Batch 6725, Cost [12.061315], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6730, Cost [8.8350115], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6735, Cost [10.791975], Precision [0.01058201], Recall [0.01851852], F1_score[0.01346801]
Pass 28, Batch 6740, Cost [10.003477], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6745, Cost [8.725836], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6750, Cost [11.843391], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6755, Cost [11.404921], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6760, Cost [12.261627], Precision [0.00961538], Recall [0.01724138], F1_score[0.01234568]
Pass 28, Batch 6765, Cost [10.96036], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6770, Cost [11.665587], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6775, Cost [10.071966], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6780, Cost [10.380308], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6785, Cost [12.537342], Precision [0.00452489], Recall [0.00840336], F1_score[0.00588235]
Pass 28, Batch 6790, Cost [9.971534], Precision [0.01111111], Recall [0.01941748], F1_score[0.01413428]
Pass 28, Batch 6795, Cost [9.622124], Precision [0.01092896], Recall [0.02], F1_score[0.01413427]
Pass 28, Batch 6800, Cost [9.176138], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6805, Cost [8.861189], Precision [0.], Recall [0.], F1_score[0.]
Pass 28, Batch 6810, Cost [8.792244], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:28 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:28 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 29, Batch 6815, Cost [9.465477], Precision [0.01069519], Recall [0.02020202], F1_score[0.01398601]
Pass 29, Batch 6820, Cost [11.326765], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6825, Cost [10.153209], Precision [0.01546392], Recall [0.02678571], F1_score[0.01960784]
Pass 29, Batch 6830, Cost [9.888147], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6835, Cost [10.250214], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6840, Cost [11.147906], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6845, Cost [9.909126], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6850, Cost [10.666172], Precision [0.00471698], Recall [0.00869565], F1_score[0.00611621]
Pass 29, Batch 6855, Cost [11.759439], Precision [0.004329], Recall [0.00769231], F1_score[0.00554017]
Pass 29, Batch 6860, Cost [11.301088], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6865, Cost [11.406067], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6870, Cost [9.385276], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6875, Cost [9.923715], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6880, Cost [9.65728], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6885, Cost [7.103366], Precision [0.00724638], Recall [0.01282051], F1_score[0.00925926]
Pass 29, Batch 6890, Cost [10.379879], Precision [0.0052356], Recall [0.00943396], F1_score[0.00673401]
Pass 29, Batch 6895, Cost [10.045714], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6900, Cost [8.166465], Precision [0.00636943], Recall [0.0106383], F1_score[0.00796813]
Pass 29, Batch 6905, Cost [9.5604725], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6910, Cost [9.119827], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6915, Cost [9.405178], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6920, Cost [7.448065], Precision [0.00724638], Recall [0.01492537], F1_score[0.0097561]
Pass 29, Batch 6925, Cost [8.601687], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6930, Cost [9.205968], Precision [0.0060241], Recall [0.01098901], F1_score[0.0077821]
Pass 29, Batch 6935, Cost [6.381447], Precision [0.0078125], Recall [0.01428571], F1_score[0.01010101]
Pass 29, Batch 6940, Cost [8.983328], Precision [0.00628931], Recall [0.01149425], F1_score[0.00813008]
Pass 29, Batch 6945, Cost [8.498232], Precision [0.00657895], Recall [0.01234568], F1_score[0.00858369]
Pass 29, Batch 6950, Cost [7.5877576], Precision [0.00719424], Recall [0.01538462], F1_score[0.00980392]
Pass 29, Batch 6955, Cost [9.699612], Precision [0.00578035], Recall [0.01052632], F1_score[0.00746269]
Pass 29, Batch 6960, Cost [7.865878], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6965, Cost [11.540499], Precision [0.00990099], Recall [0.01834862], F1_score[0.01286174]
Pass 29, Batch 6970, Cost [10.193249], Precision [0.02285714], Recall [0.03773585], F1_score[0.02846975]
Pass 29, Batch 6975, Cost [9.775476], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6980, Cost [11.33156], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6985, Cost [11.352772], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 6990, Cost [11.036102], Precision [0.00507614], Recall [0.00925926], F1_score[0.00655738]
Pass 29, Batch 6995, Cost [13.617348], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 7000, Cost [9.986568], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 7005, Cost [10.613241], Precision [0.00537634], Recall [0.01020408], F1_score[0.00704225]
Pass 29, Batch 7010, Cost [8.370609], Precision [0.00641026], Recall [0.01282051], F1_score[0.00854701]
Pass 29, Batch 7015, Cost [11.251186], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 7020, Cost [10.129884], Precision [0.00529101], Recall [0.00917431], F1_score[0.00671141]
Pass 29, Batch 7025, Cost [15.475426], Precision [0.00393701], Recall [0.00740741], F1_score[0.00514139]
Pass 29, Batch 7030, Cost [9.169058], Precision [0.00609756], Recall [0.01086957], F1_score[0.0078125]
Pass 29, Batch 7035, Cost [8.356579], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 7040, Cost [7.976121], Precision [0.], Recall [0.], F1_score[0.]
Pass 29, Batch 7045, Cost [9.522611], Precision [0.00561798], Recall [0.01111111], F1_score[0.00746269]
[TrainSet] pass_id:29 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:29 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 30, Batch 7050, Cost [11.164955], Precision [0.00938967], Recall [0.01587302], F1_score[0.01179941]
Pass 30, Batch 7055, Cost [10.2934], Precision [0.01036269], Recall [0.01754386], F1_score[0.01302932]
Pass 30, Batch 7060, Cost [10.329876], Precision [0.00505051], Recall [0.00833333], F1_score[0.00628931]
Pass 30, Batch 7065, Cost [10.446394], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7070, Cost [12.129009], Precision [0.01435407], Recall [0.02727273], F1_score[0.01880878]
Pass 30, Batch 7075, Cost [8.928651], Precision [0.00613497], Recall [0.01162791], F1_score[0.00803213]
Pass 30, Batch 7080, Cost [9.906942], Precision [0.01156069], Recall [0.02247191], F1_score[0.01526717]
Pass 30, Batch 7085, Cost [10.740864], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7090, Cost [8.975547], Precision [0.01075269], Recall [0.02], F1_score[0.01398601]
Pass 30, Batch 7095, Cost [12.1104355], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7100, Cost [9.953862], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7105, Cost [11.266674], Precision [0.00487805], Recall [0.00990099], F1_score[0.00653595]
Pass 30, Batch 7110, Cost [11.199397], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7115, Cost [8.822605], Precision [0.01818182], Recall [0.03333334], F1_score[0.02352941]
Pass 30, Batch 7120, Cost [9.306763], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7125, Cost [8.099329], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7130, Cost [7.162778], Precision [0.01290323], Recall [0.02380952], F1_score[0.0167364]
Pass 30, Batch 7135, Cost [8.714218], Precision [0.00588235], Recall [0.01098901], F1_score[0.00766284]
Pass 30, Batch 7140, Cost [7.997218], Precision [0.00636943], Recall [0.01190476], F1_score[0.00829876]
Pass 30, Batch 7145, Cost [9.118676], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7150, Cost [7.481703], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7155, Cost [9.956062], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7160, Cost [8.797586], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7165, Cost [12.540921], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7170, Cost [8.0289755], Precision [0.00666667], Recall [0.01190476], F1_score[0.00854701]
Pass 30, Batch 7175, Cost [9.196941], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7180, Cost [10.338156], Precision [0.01081081], Recall [0.02020202], F1_score[0.01408451]
Pass 30, Batch 7185, Cost [8.901846], Precision [0.01724138], Recall [0.03225806], F1_score[0.02247191]
Pass 30, Batch 7190, Cost [10.662462], Precision [0.01092896], Recall [0.01980198], F1_score[0.01408451]
Pass 30, Batch 7195, Cost [9.54418], Precision [0.00584795], Recall [0.00925926], F1_score[0.00716846]
Pass 30, Batch 7200, Cost [10.460316], Precision [0.0106383], Recall [0.02040816], F1_score[0.01398601]
Pass 30, Batch 7205, Cost [9.049444], Precision [0.02325581], Recall [0.04347826], F1_score[0.03030303]
Pass 30, Batch 7210, Cost [9.637539], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7215, Cost [10.673107], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7220, Cost [11.707304], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7225, Cost [9.185085], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7230, Cost [10.660658], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7235, Cost [13.845804], Precision [0.00421941], Recall [0.00775194], F1_score[0.00546448]
Pass 30, Batch 7240, Cost [10.950247], Precision [0.00510204], Recall [0.00980392], F1_score[0.00671141]
Pass 30, Batch 7245, Cost [11.823246], Precision [0.00505051], Recall [0.00862069], F1_score[0.00636943]
Pass 30, Batch 7250, Cost [8.805225], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7255, Cost [10.973845], Precision [0.00490196], Recall [0.00900901], F1_score[0.00634921]
Pass 30, Batch 7260, Cost [10.472387], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7265, Cost [7.9585457], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7270, Cost [7.7604628], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7275, Cost [9.271067], Precision [0.], Recall [0.], F1_score[0.]
Pass 30, Batch 7280, Cost [8.704134], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:30 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:30 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 31, Batch 7285, Cost [14.343512], Precision [0.00408163], Recall [0.00657895], F1_score[0.00503778]
Pass 31, Batch 7290, Cost [12.007767], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7295, Cost [11.54355], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7300, Cost [11.8050995], Precision [0.00900901], Recall [0.01639344], F1_score[0.01162791]
Pass 31, Batch 7305, Cost [8.421207], Precision [0.00645161], Recall [0.01219512], F1_score[0.00843882]
Pass 31, Batch 7310, Cost [10.250608], Precision [0.00571429], Recall [0.01052632], F1_score[0.00740741]
Pass 31, Batch 7315, Cost [7.6505184], Precision [0.0137931], Recall [0.02777778], F1_score[0.01843318]
Pass 31, Batch 7320, Cost [11.7320175], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7325, Cost [11.200393], Precision [0.00448431], Recall [0.00826446], F1_score[0.00581395]
Pass 31, Batch 7330, Cost [10.848093], Precision [0.00938967], Recall [0.01680672], F1_score[0.01204819]
Pass 31, Batch 7335, Cost [9.144741], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7340, Cost [11.557615], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7345, Cost [8.987668], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7350, Cost [9.008173], Precision [0.00571429], Recall [0.01052632], F1_score[0.00740741]
Pass 31, Batch 7355, Cost [9.524716], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7360, Cost [9.345789], Precision [0.00552486], Recall [0.01041667], F1_score[0.00722022]
Pass 31, Batch 7365, Cost [8.105354], Precision [0.00609756], Recall [0.01136364], F1_score[0.00793651]
Pass 31, Batch 7370, Cost [7.9942274], Precision [0.00628931], Recall [0.01098901], F1_score[0.008]
Pass 31, Batch 7375, Cost [7.6894374], Precision [0.00636943], Recall [0.01136364], F1_score[0.00816326]
Pass 31, Batch 7380, Cost [8.837353], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7385, Cost [9.003389], Precision [0.00636943], Recall [0.01075269], F1_score[0.008]
Pass 31, Batch 7390, Cost [9.560823], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7395, Cost [8.31881], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7400, Cost [11.062017], Precision [0.00518135], Recall [0.00909091], F1_score[0.00660066]
Pass 31, Batch 7405, Cost [8.106443], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7410, Cost [9.415329], Precision [0.00564972], Recall [0.01098901], F1_score[0.00746269]
Pass 31, Batch 7415, Cost [8.226012], Precision [0.00625], Recall [0.01204819], F1_score[0.00823045]
Pass 31, Batch 7420, Cost [8.366016], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7425, Cost [9.967715], Precision [0.00591716], Recall [0.00970874], F1_score[0.00735294]
Pass 31, Batch 7430, Cost [9.861389], Precision [0.01785714], Recall [0.03061225], F1_score[0.02255639]
Pass 31, Batch 7435, Cost [11.331724], Precision [0.01025641], Recall [0.01960784], F1_score[0.01346801]
Pass 31, Batch 7440, Cost [10.404832], Precision [0.00555556], Recall [0.00961538], F1_score[0.00704225]
Pass 31, Batch 7445, Cost [9.455592], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7450, Cost [10.324664], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7455, Cost [9.705045], Precision [0.00537634], Recall [0.01], F1_score[0.00699301]
Pass 31, Batch 7460, Cost [9.99783], Precision [0.00568182], Recall [0.00952381], F1_score[0.00711744]
Pass 31, Batch 7465, Cost [11.111767], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7470, Cost [12.476212], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7475, Cost [9.720856], Precision [0.00584795], Recall [0.01041667], F1_score[0.00749064]
Pass 31, Batch 7480, Cost [10.264431], Precision [0.00558659], Recall [0.01098901], F1_score[0.00740741]
Pass 31, Batch 7485, Cost [9.171673], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7490, Cost [11.034743], Precision [0.01492537], Recall [0.02564103], F1_score[0.01886792]
Pass 31, Batch 7495, Cost [12.5687685], Precision [0.00442478], Recall [0.00735294], F1_score[0.00552486]
Pass 31, Batch 7500, Cost [10.702429], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7505, Cost [11.375374], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7510, Cost [9.153762], Precision [0.], Recall [0.], F1_score[0.]
Pass 31, Batch 7515, Cost [9.476838], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:31 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:31 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 32, Batch 7520, Cost [11.871084], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7525, Cost [11.779894], Precision [0.00456621], Recall [0.008], F1_score[0.00581395]
Pass 32, Batch 7530, Cost [12.125911], Precision [0.0045045], Recall [0.00787402], F1_score[0.00573066]
Pass 32, Batch 7535, Cost [8.993739], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7540, Cost [9.337709], Precision [0.00578035], Recall [0.01333333], F1_score[0.00806452]
Pass 32, Batch 7545, Cost [9.383477], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7550, Cost [10.799752], Precision [0.00534759], Recall [0.00970874], F1_score[0.00689655]
Pass 32, Batch 7555, Cost [11.971867], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7560, Cost [12.518456], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7565, Cost [12.93626], Precision [0.00414938], Recall [0.00775194], F1_score[0.00540541]
Pass 32, Batch 7570, Cost [8.255547], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7575, Cost [9.628411], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7580, Cost [9.819568], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7585, Cost [9.723139], Precision [0.00534759], Recall [0.01030928], F1_score[0.00704225]
Pass 32, Batch 7590, Cost [8.607389], Precision [0.00617284], Recall [0.01], F1_score[0.00763359]
Pass 32, Batch 7595, Cost [8.051716], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7600, Cost [8.050746], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7605, Cost [8.475374], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7610, Cost [10.198681], Precision [0.00518135], Recall [0.01010101], F1_score[0.00684932]
Pass 32, Batch 7615, Cost [11.753075], Precision [0.00502513], Recall [0.00909091], F1_score[0.00647249]
Pass 32, Batch 7620, Cost [9.422186], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7625, Cost [12.081198], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7630, Cost [9.840208], Precision [0.00549451], Recall [0.01], F1_score[0.0070922]
Pass 32, Batch 7635, Cost [11.743376], Precision [0.00985222], Recall [0.01666667], F1_score[0.0123839]
Pass 32, Batch 7640, Cost [8.48274], Precision [0.00628931], Recall [0.01111111], F1_score[0.00803213]
Pass 32, Batch 7645, Cost [10.231298], Precision [0.00561798], Recall [0.01020408], F1_score[0.00724638]
Pass 32, Batch 7650, Cost [8.162181], Precision [0.01360544], Recall [0.02325581], F1_score[0.01716738]
Pass 32, Batch 7655, Cost [10.439079], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7660, Cost [9.443786], Precision [0.01190476], Recall [0.02083333], F1_score[0.01515152]
Pass 32, Batch 7665, Cost [8.666943], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7670, Cost [10.995494], Precision [0.015625], Recall [0.02631579], F1_score[0.01960784]
Pass 32, Batch 7675, Cost [13.187059], Precision [0.01762114], Recall [0.03174603], F1_score[0.02266289]
Pass 32, Batch 7680, Cost [10.559694], Precision [0.00512821], Recall [0.00909091], F1_score[0.00655738]
Pass 32, Batch 7685, Cost [10.745365], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7690, Cost [9.803297], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7695, Cost [10.841876], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7700, Cost [13.45503], Precision [0.00440529], Recall [0.00763359], F1_score[0.00558659]
Pass 32, Batch 7705, Cost [10.630906], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7710, Cost [10.227037], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7715, Cost [10.172968], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7720, Cost [10.724422], Precision [0.01058201], Recall [0.01923077], F1_score[0.01365188]
Pass 32, Batch 7725, Cost [10.444822], Precision [0.01587302], Recall [0.02702703], F1_score[0.02]
Pass 32, Batch 7730, Cost [10.346994], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7735, Cost [11.895779], Precision [0.00473934], Recall [0.00833333], F1_score[0.0060423]
Pass 32, Batch 7740, Cost [8.908629], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7745, Cost [7.66072], Precision [0.], Recall [0.], F1_score[0.]
Pass 32, Batch 7750, Cost [8.517218], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:32 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:32 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 33, Batch 7755, Cost [12.022078], Precision [0.00478469], Recall [0.00833333], F1_score[0.00607903]
Pass 33, Batch 7760, Cost [8.419651], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7765, Cost [11.389287], Precision [0.00469484], Recall [0.00813008], F1_score[0.00595238]
Pass 33, Batch 7770, Cost [10.109339], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7775, Cost [10.675054], Precision [0.00555556], Recall [0.01041667], F1_score[0.00724638]
Pass 33, Batch 7780, Cost [7.5867033], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7785, Cost [9.648289], Precision [0.01818182], Recall [0.03529412], F1_score[0.024]
Pass 33, Batch 7790, Cost [13.480875], Precision [0.00793651], Recall [0.01388889], F1_score[0.01010101]
Pass 33, Batch 7795, Cost [8.516695], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7800, Cost [12.158996], Precision [0.00425532], Recall [0.00793651], F1_score[0.00554017]
Pass 33, Batch 7805, Cost [11.920969], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7810, Cost [10.089855], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7815, Cost [9.738419], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7820, Cost [9.183176], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7825, Cost [8.710679], Precision [0.00606061], Recall [0.01162791], F1_score[0.00796813]
Pass 33, Batch 7830, Cost [10.244925], Precision [0.01036269], Recall [0.01869159], F1_score[0.01333333]
Pass 33, Batch 7835, Cost [6.61323], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7840, Cost [9.022205], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7845, Cost [8.663233], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7850, Cost [9.65415], Precision [0.00574713], Recall [0.00970874], F1_score[0.00722022]
Pass 33, Batch 7855, Cost [6.8873153], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7860, Cost [9.593262], Precision [0.00578035], Recall [0.01030928], F1_score[0.00740741]
Pass 33, Batch 7865, Cost [8.92252], Precision [0.01863354], Recall [0.03409091], F1_score[0.02409638]
Pass 33, Batch 7870, Cost [10.433608], Precision [0.00526316], Recall [0.00925926], F1_score[0.00671141]
Pass 33, Batch 7875, Cost [10.689177], Precision [0.015625], Recall [0.02912621], F1_score[0.02033898]
Pass 33, Batch 7880, Cost [8.595703], Precision [0.0130719], Recall [0.02380952], F1_score[0.01687764]
Pass 33, Batch 7885, Cost [10.654375], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7890, Cost [8.694992], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7895, Cost [11.362308], Precision [0.00490196], Recall [0.00917431], F1_score[0.00638978]
Pass 33, Batch 7900, Cost [10.378613], Precision [0.00529101], Recall [0.00961538], F1_score[0.00682594]
Pass 33, Batch 7905, Cost [10.196287], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7910, Cost [10.762947], Precision [0.01058201], Recall [0.01869159], F1_score[0.01351351]
Pass 33, Batch 7915, Cost [10.060284], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7920, Cost [11.021558], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7925, Cost [10.224483], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7930, Cost [11.190447], Precision [0.00995025], Recall [0.01785714], F1_score[0.01277955]
Pass 33, Batch 7935, Cost [10.94722], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7940, Cost [10.720045], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7945, Cost [8.924372], Precision [0.00632911], Recall [0.01176471], F1_score[0.00823045]
Pass 33, Batch 7950, Cost [10.148806], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7955, Cost [11.073504], Precision [0.0104712], Recall [0.01869159], F1_score[0.01342282]
Pass 33, Batch 7960, Cost [11.491491], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7965, Cost [11.332865], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7970, Cost [11.024924], Precision [0.01015228], Recall [0.01851852], F1_score[0.01311475]
Pass 33, Batch 7975, Cost [10.143494], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7980, Cost [10.5667715], Precision [0.], Recall [0.], F1_score[0.]
Pass 33, Batch 7985, Cost [8.461039], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:33 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:33 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 34, Batch 7990, Cost [11.346206], Precision [0.01408451], Recall [0.0234375], F1_score[0.01759531]
Pass 34, Batch 7995, Cost [12.73192], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8000, Cost [9.578793], Precision [0.0104712], Recall [0.01769911], F1_score[0.01315789]
Pass 34, Batch 8005, Cost [11.498047], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8010, Cost [10.135348], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8015, Cost [9.286548], Precision [0.01219512], Recall [0.0212766], F1_score[0.01550387]
Pass 34, Batch 8020, Cost [9.46961], Precision [0.0060241], Recall [0.0106383], F1_score[0.00769231]
Pass 34, Batch 8025, Cost [11.762179], Precision [0.00442478], Recall [0.00793651], F1_score[0.00568182]
Pass 34, Batch 8030, Cost [10.712123], Precision [0.00966184], Recall [0.01626016], F1_score[0.01212121]
Pass 34, Batch 8035, Cost [12.645068], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8040, Cost [9.680311], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8045, Cost [11.424268], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8050, Cost [11.583554], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8055, Cost [7.824905], Precision [0.00662252], Recall [0.01176471], F1_score[0.00847458]
Pass 34, Batch 8060, Cost [9.007535], Precision [0.01204819], Recall [0.02150538], F1_score[0.01544401]
Pass 34, Batch 8065, Cost [9.550032], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8070, Cost [8.2974205], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8075, Cost [7.1636667], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8080, Cost [8.061922], Precision [0.00645161], Recall [0.0106383], F1_score[0.00803213]
Pass 34, Batch 8085, Cost [9.541559], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8090, Cost [10.86579], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8095, Cost [7.980295], Precision [0.00657895], Recall [0.01123596], F1_score[0.00829876]
Pass 34, Batch 8100, Cost [10.235645], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8105, Cost [9.295791], Precision [0.00588235], Recall [0.01052632], F1_score[0.00754717]
Pass 34, Batch 8110, Cost [10.022638], Precision [0.00552486], Recall [0.01030928], F1_score[0.00719424]
Pass 34, Batch 8115, Cost [8.927893], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8120, Cost [10.027104], Precision [0.00558659], Recall [0.01020408], F1_score[0.00722022]
Pass 34, Batch 8125, Cost [10.040471], Precision [0.00543478], Recall [0.01149425], F1_score[0.00738007]
Pass 34, Batch 8130, Cost [9.9137335], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8135, Cost [9.914259], Precision [0.00568182], Recall [0.01020408], F1_score[0.00729927]
Pass 34, Batch 8140, Cost [11.652477], Precision [0.00990099], Recall [0.01769911], F1_score[0.01269841]
Pass 34, Batch 8145, Cost [13.332791], Precision [0.01687764], Recall [0.03030303], F1_score[0.02168022]
Pass 34, Batch 8150, Cost [8.512022], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8155, Cost [10.815574], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8160, Cost [11.6008215], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8165, Cost [12.960293], Precision [0.00465116], Recall [0.00763359], F1_score[0.00578035]
Pass 34, Batch 8170, Cost [9.94938], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8175, Cost [10.388222], Precision [0.01069519], Recall [0.01785714], F1_score[0.01337793]
Pass 34, Batch 8180, Cost [9.699904], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8185, Cost [10.246437], Precision [0.00555556], Recall [0.01030928], F1_score[0.00722022]
Pass 34, Batch 8190, Cost [10.103003], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8195, Cost [11.241151], Precision [0.00485437], Recall [0.00952381], F1_score[0.00643087]
Pass 34, Batch 8200, Cost [13.636444], Precision [0.00413223], Recall [0.00724638], F1_score[0.00526316]
Pass 34, Batch 8205, Cost [10.653208], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8210, Cost [10.147905], Precision [0.], Recall [0.], F1_score[0.]
Pass 34, Batch 8215, Cost [8.203213], Precision [0.00657895], Recall [0.01369863], F1_score[0.00888889]
Pass 34, Batch 8220, Cost [11.76532], Precision [0.00492611], Recall [0.00952381], F1_score[0.00649351]
[TrainSet] pass_id:34 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:34 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 35, Batch 8225, Cost [10.691582], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8230, Cost [12.066714], Precision [0.01382488], Recall [0.02380952], F1_score[0.01749271]
Pass 35, Batch 8235, Cost [9.713464], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8240, Cost [11.139975], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8245, Cost [11.083999], Precision [0.00518135], Recall [0.00934579], F1_score[0.00666667]
Pass 35, Batch 8250, Cost [10.537837], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8255, Cost [9.870794], Precision [0.01190476], Recall [0.02247191], F1_score[0.0155642]
Pass 35, Batch 8260, Cost [11.87742], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8265, Cost [12.530151], Precision [0.00423729], Recall [0.00714286], F1_score[0.00531915]
Pass 35, Batch 8270, Cost [11.597499], Precision [0.00913242], Recall [0.01680672], F1_score[0.01183432]
Pass 35, Batch 8275, Cost [10.357828], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8280, Cost [10.715088], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8285, Cost [8.434219], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8290, Cost [8.933306], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8295, Cost [7.0671444], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8300, Cost [12.185617], Precision [0.00904977], Recall [0.01538462], F1_score[0.01139601]
Pass 35, Batch 8305, Cost [8.28839], Precision [0.00581395], Recall [0.01030928], F1_score[0.00743494]
Pass 35, Batch 8310, Cost [8.886784], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8315, Cost [7.7186804], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8320, Cost [9.722135], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8325, Cost [12.137931], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8330, Cost [10.108744], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8335, Cost [10.015373], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8340, Cost [9.736944], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8345, Cost [10.255575], Precision [0.02150538], Recall [0.03669725], F1_score[0.02711864]
Pass 35, Batch 8350, Cost [10.52849], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8355, Cost [9.228635], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8360, Cost [9.322329], Precision [0.01851852], Recall [0.03030303], F1_score[0.02298851]
Pass 35, Batch 8365, Cost [10.324329], Precision [0.00526316], Recall [0.00980392], F1_score[0.00684932]
Pass 35, Batch 8370, Cost [9.003004], Precision [0.01226994], Recall [0.02197802], F1_score[0.01574803]
Pass 35, Batch 8375, Cost [10.671748], Precision [0.01020408], Recall [0.01754386], F1_score[0.01290323]
Pass 35, Batch 8380, Cost [11.125166], Precision [0.00515464], Recall [0.00952381], F1_score[0.00668896]
Pass 35, Batch 8385, Cost [9.987614], Precision [0.00546448], Recall [0.01030928], F1_score[0.00714286]
Pass 35, Batch 8390, Cost [10.210777], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8395, Cost [10.74815], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8400, Cost [11.046183], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8405, Cost [9.953642], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8410, Cost [12.040481], Precision [0.00947867], Recall [0.01869159], F1_score[0.01257862]
Pass 35, Batch 8415, Cost [10.297085], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8420, Cost [11.5624485], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8425, Cost [11.134285], Precision [0.01538462], Recall [0.02803738], F1_score[0.01986755]
Pass 35, Batch 8430, Cost [10.580433], Precision [0.01069519], Recall [0.01851852], F1_score[0.01355932]
Pass 35, Batch 8435, Cost [11.068138], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8440, Cost [11.074158], Precision [0.00507614], Recall [0.00917431], F1_score[0.00653595]
Pass 35, Batch 8445, Cost [8.553237], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8450, Cost [9.289418], Precision [0.], Recall [0.], F1_score[0.]
Pass 35, Batch 8455, Cost [8.326842], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:35 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:35 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 36, Batch 8460, Cost [12.713146], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8465, Cost [11.353874], Precision [0.00462963], Recall [0.00862069], F1_score[0.0060241]
Pass 36, Batch 8470, Cost [11.508529], Precision [0.00917431], Recall [0.01754386], F1_score[0.01204819]
Pass 36, Batch 8475, Cost [8.957657], Precision [0.00584795], Recall [0.01010101], F1_score[0.00740741]
Pass 36, Batch 8480, Cost [9.174497], Precision [0.00606061], Recall [0.01190476], F1_score[0.00803213]
Pass 36, Batch 8485, Cost [9.569128], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8490, Cost [11.283101], Precision [0.015625], Recall [0.02912621], F1_score[0.02033898]
Pass 36, Batch 8495, Cost [9.172403], Precision [0.00520833], Recall [0.01075269], F1_score[0.00701754]
Pass 36, Batch 8500, Cost [13.291297], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8505, Cost [10.677995], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8510, Cost [10.405481], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8515, Cost [8.536837], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8520, Cost [9.578932], Precision [0.00578035], Recall [0.01190476], F1_score[0.0077821]
Pass 36, Batch 8525, Cost [8.532006], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8530, Cost [11.112854], Precision [0.00480769], Recall [0.00892857], F1_score[0.00625]
Pass 36, Batch 8535, Cost [9.040071], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8540, Cost [8.763332], Precision [0.00574713], Recall [0.01], F1_score[0.00729927]
Pass 36, Batch 8545, Cost [6.4738626], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8550, Cost [6.8747635], Precision [0.00704225], Recall [0.0125], F1_score[0.00900901]
Pass 36, Batch 8555, Cost [12.632999], Precision [0.005], Recall [0.00943396], F1_score[0.00653595]
Pass 36, Batch 8560, Cost [13.566788], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8565, Cost [8.340655], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8570, Cost [10.620762], Precision [0.00526316], Recall [0.00934579], F1_score[0.00673401]
Pass 36, Batch 8575, Cost [10.789306], Precision [0.0052356], Recall [0.00934579], F1_score[0.00671141]
Pass 36, Batch 8580, Cost [8.340836], Precision [0.01265823], Recall [0.02439024], F1_score[0.01666667]
Pass 36, Batch 8585, Cost [9.085927], Precision [0.01162791], Recall [0.0212766], F1_score[0.01503759]
Pass 36, Batch 8590, Cost [8.936362], Precision [0.00595238], Recall [0.01176471], F1_score[0.00790514]
Pass 36, Batch 8595, Cost [9.316723], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8600, Cost [9.269349], Precision [0.00613497], Recall [0.01149425], F1_score[0.008]
Pass 36, Batch 8605, Cost [12.02646], Precision [0.00956938], Recall [0.016], F1_score[0.01197605]
Pass 36, Batch 8610, Cost [11.264121], Precision [0.00512821], Recall [0.00970874], F1_score[0.00671141]
Pass 36, Batch 8615, Cost [11.170315], Precision [0.00990099], Recall [0.01851852], F1_score[0.01290323]
Pass 36, Batch 8620, Cost [12.341532], Precision [0.00442478], Recall [0.00826446], F1_score[0.00576369]
Pass 36, Batch 8625, Cost [9.024019], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8630, Cost [11.479906], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8635, Cost [12.158021], Precision [0.00471698], Recall [0.00840336], F1_score[0.0060423]
Pass 36, Batch 8640, Cost [8.24185], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8645, Cost [15.526175], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8650, Cost [9.59834], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8655, Cost [9.017546], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8660, Cost [11.820395], Precision [0.00966184], Recall [0.01694915], F1_score[0.01230769]
Pass 36, Batch 8665, Cost [12.0216875], Precision [0.00480769], Recall [0.00840336], F1_score[0.00611621]
Pass 36, Batch 8670, Cost [11.839872], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8675, Cost [11.549032], Precision [0.01578947], Recall [0.02727273], F1_score[0.02]
Pass 36, Batch 8680, Cost [11.089418], Precision [0.], Recall [0.], F1_score[0.]
Pass 36, Batch 8685, Cost [10.720251], Precision [0.00526316], Recall [0.01162791], F1_score[0.00724638]
Pass 36, Batch 8690, Cost [10.510138], Precision [0.], Recall [0.], F1_score[0.]
[TrainSet] pass_id:36 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:36 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 37, Batch 8695, Cost [10.801603], Precision [0.00492611], Recall [0.00840336], F1_score[0.00621118]
Pass 37, Batch 8700, Cost [12.860445], Precision [0.00413223], Recall [0.00694444], F1_score[0.00518135]
Pass 37, Batch 8705, Cost [10.8268585], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8710, Cost [11.932376], Precision [0.00458716], Recall [0.00833333], F1_score[0.00591716]
Pass 37, Batch 8715, Cost [10.589172], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8720, Cost [11.313174], Precision [0.00507614], Recall [0.00961538], F1_score[0.00664452]
Pass 37, Batch 8725, Cost [11.578827], Precision [0.01005025], Recall [0.01785714], F1_score[0.01286174]
Pass 37, Batch 8730, Cost [10.636302], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8735, Cost [8.991772], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8740, Cost [10.743559], Precision [0.00448431], Recall [0.00847458], F1_score[0.0058651]
Pass 37, Batch 8745, Cost [8.247953], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8750, Cost [11.207138], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8755, Cost [10.045738], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8760, Cost [8.468711], Precision [0.0060241], Recall [0.01075269], F1_score[0.00772201]
Pass 37, Batch 8765, Cost [10.761187], Precision [0.00512821], Recall [0.00900901], F1_score[0.00653595]
Pass 37, Batch 8770, Cost [9.334493], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8775, Cost [6.8745003], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8780, Cost [8.996687], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8785, Cost [8.925602], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8790, Cost [9.658725], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8795, Cost [9.075985], Precision [0.00628931], Recall [0.01162791], F1_score[0.00816326]
Pass 37, Batch 8800, Cost [10.801643], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8805, Cost [11.186865], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8810, Cost [10.755106], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8815, Cost [9.5485325], Precision [0.01183432], Recall [0.0212766], F1_score[0.01520912]
Pass 37, Batch 8820, Cost [9.128103], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8825, Cost [9.178497], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8830, Cost [11.649369], Precision [0.00956938], Recall [0.01666667], F1_score[0.01215805]
Pass 37, Batch 8835, Cost [10.432762], Precision [0.00543478], Recall [0.01], F1_score[0.00704225]
Pass 37, Batch 8840, Cost [12.640588], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8845, Cost [10.833303], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8850, Cost [9.927198], Precision [0.02209945], Recall [0.03883495], F1_score[0.02816902]
Pass 37, Batch 8855, Cost [11.773336], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8860, Cost [13.094889], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8865, Cost [9.446289], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8870, Cost [12.653233], Precision [0.00458716], Recall [0.00813008], F1_score[0.0058651]
Pass 37, Batch 8875, Cost [11.37929], Precision [0.00485437], Recall [0.00833333], F1_score[0.00613497]
Pass 37, Batch 8880, Cost [11.563458], Precision [0.00970874], Recall [0.01680672], F1_score[0.01230769]
Pass 37, Batch 8885, Cost [10.4638405], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8890, Cost [9.367451], Precision [0.01204819], Recall [0.02197802], F1_score[0.0155642]
Pass 37, Batch 8895, Cost [13.164686], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8900, Cost [11.516703], Precision [0.00980392], Recall [0.0173913], F1_score[0.01253918]
Pass 37, Batch 8905, Cost [11.60386], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8910, Cost [10.786224], Precision [0.0052356], Recall [0.01030928], F1_score[0.00694444]
Pass 37, Batch 8915, Cost [10.981152], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8920, Cost [7.9957957], Precision [0.], Recall [0.], F1_score[0.]
Pass 37, Batch 8925, Cost [7.13707], Precision [0.00714286], Recall [0.01428571], F1_score[0.00952381]
[TrainSet] pass_id:37 pass_precision:[0.00361613] pass_recall:[0.00663915] pass_f1_score:[0.00468208]
[TestSet] pass_id:37 pass_precision:[0.00428864] pass_recall:[0.0077428] pass_f1_score:[0.00551989]
Pass 38, Batch 8930, Cost [13.073988], Precision [0.00434783], Recall [0.00769231], F1_score[0.00555556]
Pass 38, Batch 8935, Cost [12.384411], Precision [0.00865801], Recall [0.01449275], F1_score[0.01084011]
Pass 38, Batch 8940, Cost [10.795504], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 8945, Cost [13.636452], Precision [0.00406504], Recall [0.00714286], F1_score[0.00518135]
Pass 38, Batch 8950, Cost [9.286617], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 8955, Cost [10.261583], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 8960, Cost [8.987589], Precision [0.00613497], Recall [0.01111111], F1_score[0.00790514]
Pass 38, Batch 8965, Cost [11.626898], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 8970, Cost [9.823739], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 8975, Cost [11.3287525], Precision [0.0045045], Recall [0.00877193], F1_score[0.00595238]
Pass 38, Batch 8980, Cost [9.156315], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 8985, Cost [9.795839], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 8990, Cost [8.7880535], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 8995, Cost [9.1270075], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 9000, Cost [7.325172], Precision [0.01315789], Recall [0.02531646], F1_score[0.01731602]
Pass 38, Batch 9005, Cost [10.402266], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 9010, Cost [7.7545424], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 9015, Cost [8.500661], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 9020, Cost [6.8877554], Precision [0.00645161], Recall [0.01190476], F1_score[0.0083682]
Pass 38, Batch 9025, Cost [9.084515], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 9030, Cost [8.96231], Precision [0.01204819], Recall [0.02061856], F1_score[0.01520912]
Pass 38, Batch 9035, Cost [10.426039], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 9040, Cost [9.37664], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 9045, Cost [10.531598], Precision [0.01621622], Recall [0.02803738], F1_score[0.02054795]
Pass 38, Batch 9050, Cost [8.180587], Precision [0.00680272], Recall [0.01204819], F1_score[0.00869565]
Pass 38, Batch 9055, Cost [9.105602], Precision [0.00606061], Recall [0.01086957], F1_score[0.0077821]
Pass 38, Batch 9060, Cost [8.504763], Precision [0.], Recall [0.], F1_score[0.]
Pass 38, Batch 9065, Cost [9.32242], Precision [0.00595238], Recall [0.01149425], F1_score[0.00784314]
Pass 38, Batch 9070,
\ No newline at end of file
#!/bin/bash
# Launch 10 training runs in parallel. Each run's stdout and stderr are
# redirected to its own logfile (logfile_wending_1 ... logfile_wending_10)
# and the process is sent to the background with '&'.
for i in 1 2 3 4 5 6 7 8 9 10
do
    echo $i
    python train.py >logfile_wending_$i 2>&1 &
done
-import paddle.v2.fluid as fluid
-from paddle.v2.fluid.initializer import NormalInitializer
+import paddle.fluid as fluid
+from paddle.fluid.initializer import NormalInitializer
from utils import logger, load_dict, get_embedding
import math
@@ -31,19 +31,14 @@ def ner_net(word_dict_len, label_dict_len, stack_num=2, is_train=True):
dtype='float32',
is_sparse=IS_SPARSE)
-    #print dir(word_embedding)
-    #print word_embedding.shape
-    #print word_embedding.to_string
-    # print mark_embedding.to_string("")
word_caps_vector = fluid.layers.concat(input=[word_embedding, mark_embedding], axis = 1)
mix_hidden_lr = 1
rnn_para_attr = fluid.ParamAttr(
-        initializer=NormalInitializer(loc=0.0, scale=0.0, seed=0), learning_rate=0.1)
+        initializer=NormalInitializer(loc=0.0, scale=0.0, seed=0), learning_rate=mix_hidden_lr)
hidden_para_attr = fluid.ParamAttr(
initializer=NormalInitializer(loc=0.0, scale=(1. / math.sqrt(hidden_dim) / 3), seed=0),
-        learning_rate=0.1)
+        learning_rate=mix_hidden_lr)
hidden = fluid.layers.fc(
input=word_caps_vector,
@@ -58,16 +53,6 @@ def ner_net(word_dict_len, label_dict_len, stack_num=2, is_train=True):
for direction in ["fwd", "bwd"]:
for i in range(stack_num):
if i != 0:
-                #print i
-                #print rnn.shape
-                #print hidden.shape
-                #print isinstance(hidden, fluid.framework.Variable)
-                #print isinstance(rnn, fluid.framework.Variable)
-                #print dir(rnn)
-                #print type(rnn)
-                #print rnn[0].shape
-                #print rnn[1].shape
-                #print rnn[2].shape
hidden = fluid.layers.fc(
name="__hidden%02d_%s__" % (i, direction),
size=hidden_dim,
@@ -110,12 +95,6 @@ def ner_net(word_dict_len, label_dict_len, stack_num=2, is_train=True):
name='crfw',
initializer=NormalInitializer(loc=0.0, scale=(1. / math.sqrt(hidden_dim) / 3), seed=0),
learning_rate=mix_hidden_lr))
-        '''
-        crf_decode = fluid.layers.crf_decoding(
-            input=emission,
-            label=target,
-            param_attr=fluid.ParamAttr(name='crfw'))
-        '''
return crf_cost, emission, word, mark, target
else:
......
"""
Conll03 dataset.
"""
from utils import *
__all__ = ["data_reader"]
def canonicalize_digits(word):
    if any([c.isalpha() for c in word]): return word
    word = re.sub(r"\d", "DG", word)
    if word.startswith("DG"):
        word = word.replace(",", "")  # remove thousands separator
    return word

def canonicalize_word(word, wordset=None, digits=True):
    word = word.lower()
    if digits:
        if (wordset is not None) and (word in wordset): return word
        word = canonicalize_digits(word)  # try to canonicalize numbers
    if (wordset is None) or (word in wordset): return word
    else: return "UUUNKKK"  # unknown token
def data_reader(data_file, word_dict, label_dict):
    """
    The CoNLL-2003 dataset can be obtained from
    http://www.clips.uantwerpen.be/conll2003/ner/.
    This returns a reader creator; each sample yielded by the reader is a
    tuple of (word id sequence, capitalization mark sequence, label id
    sequence).
    :return: reader creator
    :rtype: callable
    """
def reader():
UNK_IDX = word_dict["UUUNKKK"]
sentence = []
labels = []
with open(data_file, "r") as f:
for line in f:
if len(line.strip()) == 0:
if len(sentence) > 0:
word_idx = [
word_dict.get(
canonicalize_word(w, word_dict), UNK_IDX)
for w in sentence
]
mark = [1 if w[0].isupper() else 0 for w in sentence]
label_idx = [label_dict[l] for l in labels]
yield word_idx, mark, label_idx
sentence = []
labels = []
else:
segs = line.strip().split()
sentence.append(segs[0])
                    # transform I-TYPE tags into the BIO scheme: the first
                    # token of every chunk is relabeled from I-TYPE to B-TYPE
if segs[-1] != "O" and (len(labels) == 0 or
labels[-1][1:] != segs[-1][1:]):
labels.append("B" + segs[-1][1:])
else:
labels.append(segs[-1])
return reader
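
# Usage sketch (illustrative; the file names below assume the data layout
# used by train.py, e.g. data/vocab.txt and data/target.txt):
#
#   from utils import load_dict
#   word_dict = load_dict("data/vocab.txt")
#   label_dict = load_dict("data/target.txt")
#   for word_idx, mark, label_idx in data_reader("data/train",
#                                                word_dict, label_dict)():
#       # one sentence per iteration; mark[i] is 1 when the i-th raw token
#       # was capitalized, and labels have been rewritten to the BIO scheme
#       # before being mapped through label_dict
#       pass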
import paddle.v2 as paddle
-import paddle.v2.fluid as fluid
+import paddle.fluid as fluid
from network_conf import ner_net
from utils import logger, load_dict, get_embedding
import reader
@@ -52,32 +52,23 @@ def main(train_data_file,
word_dict_len = len(word_dict)
label_dict_len = len(label_dict)
-    crf_cost, crf_decode, word, mark, target = ner_net(word_dict_len, label_dict_len)
+    crf_cost, feature_out, word, mark, target = ner_net(word_dict_len, label_dict_len)
avg_cost = fluid.layers.mean(x=crf_cost)
+    sgd_optimizer = fluid.optimizer.Momentum(
+        momentum=0.0,
+        learning_rate=1e-3)
+    sgd_optimizer.minimize(avg_cost)
+    crf_decode = fluid.layers.crf_decoding(
+        input=feature_out, param_attr=fluid.ParamAttr(name='crfw'))
chunk_evaluator = fluid.evaluator.ChunkEvaluator(
#name="ner_chunk",
input=crf_decode,
label=target,
chunk_scheme="IOB",
num_chunk_types=int(math.ceil((label_dict_len - 1) / 2.0)))
-    #global_step = fluid.layers.create_global_var(
-    # shape=[1], value=0, dtype='float32', force_cpu=True, persistable=True)
-    sgd_optimizer = fluid.optimizer.SGD(learning_rate=0.001)
-    #sgd_optimizer = fluid.optimizer.Momentum(
-    # momentum=0.0,
-    # learning_rate=2e-4,
-    #regularization=fluid.regularizer.L2DecayRegularizer(regularization_coeff=0.1)
-    #)
-    sgd_optimizer.minimize(avg_cost)
-    #avg_cost = fluid.layers.mean(x=crf_cost)
-    print type(crf_cost)
-    #print type(avg_cost)
-    print crf_cost.shape
-    #print avg_cost.shape
inference_program = fluid.default_main_program().clone()
with fluid.program_guard(inference_program):
test_target = chunk_evaluator.metrics + chunk_evaluator.states
@@ -119,7 +110,6 @@ def main(train_data_file,
if batch_id % 5 == 0:
print("Pass " + str(pass_id) + ", Batch " + str(batch_id) + ", Cost " + str(cost) + ", Precision " + str(precision) + ", Recall " + str(recall) + ", F1_score" + str(f1_score))
batch_id = batch_id + 1
-        #pass_precision, pass_recall, pass_f1_score = chunk_evaluator.eval(exe)
pass_precision, pass_recall, pass_f1_score = test(exe, chunk_evaluator, inference_program, train_reader, place)
print("[TrainSet] pass_id:" + str(pass_id) + " pass_precision:" + str(pass_precision) + " pass_recall:" + str(pass_recall) + " pass_f1_score:" + str(pass_f1_score))
@@ -129,8 +119,8 @@ def main(train_data_file,
if __name__ == "__main__":
main(
train_data_file="full_conll03_dataset/train.txt",
test_data_file="full_conll03_dataset/dev.txt",
train_data_file="data/train",
test_data_file="data/test",
vocab_file="data/vocab.txt",
target_file="data/target.txt",
emb_file="data/wordVectors.txt",
......
import paddle.v2 as paddle
import paddle.v2.fluid as fluid
from network_conf_base import ner_net
from utils import logger, load_dict, get_embedding
import reader
import os
import math
import numpy as np
def to_lodtensor(data, place):
seq_lens = [len(seq) for seq in data]
cur_len = 0
lod = [cur_len]
for l in seq_lens:
cur_len += l
lod.append(cur_len)
flattened_data = np.concatenate(data, axis=0).astype("int64")
flattened_data = flattened_data.reshape([len(flattened_data), 1])
res = fluid.LoDTensor()
res.set(flattened_data, place)
res.set_lod([lod])
return res
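
# Worked example (a sketch, not part of the original file): for a batch of
# three sequences with lengths [2, 3, 1], the loop above builds
# lod = [0, 2, 5, 6], so sequence i occupies rows lod[i]:lod[i+1] of the
# flattened [6, 1] int64 tensor; this is the level-of-detail encoding fluid
# uses for variable-length sequence batches.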
def test(exe, chunk_evaluator, inference_program, test_data, place):
chunk_evaluator.reset(exe)
for data in test_data():
word = to_lodtensor(map(lambda x:x[0], data), place)
mark = to_lodtensor(map(lambda x:x[1], data), place)
target = to_lodtensor(map(lambda x:x[2], data), place)
acc = exe.run(inference_program,
feed={"word": word,
"mark": mark,
"target": target})
return chunk_evaluator.eval(exe)
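
# Note (added for clarity): inside test() the exe.run call is executed for
# its side effects only; each batch updates the ChunkEvaluator's internal
# state variables, and chunk_evaluator.eval(exe) then returns precision,
# recall and F1 aggregated over every batch since the last reset.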
def main(train_data_file,
test_data_file,
vocab_file,
target_file,
emb_file,
model_save_dir,
num_passes=100,
batch_size=64):
if not os.path.exists(model_save_dir):
os.mkdir(model_save_dir)
word_dict = load_dict(vocab_file)
label_dict = load_dict(target_file)
word_vector_values = get_embedding(emb_file)
word_dict_len = len(word_dict)
label_dict_len = len(label_dict)
crf_cost, feature_out, word, mark, target = ner_net(word_dict_len, label_dict_len)
avg_cost = fluid.layers.mean(x=crf_cost)
'''
chunk_evaluator = fluid.evaluator.ChunkEvaluator(
#name="ner_chunk",
input=crf_decode,
label=target,
chunk_scheme="IOB",
num_chunk_types=int(math.ceil((label_dict_len - 1) / 2.0)))
'''
#global_step = fluid.layers.create_global_var(
# shape=[1], value=0, dtype='float32', force_cpu=True, persistable=True)
sgd_optimizer = fluid.optimizer.Momentum(
momentum=0.0,
learning_rate=2e-4,
#regularization=fluid.regularizer.L2DecayRegularizer(regularization_coeff=0.1)
)
sgd_optimizer.minimize(avg_cost)
crf_decode = fluid.layers.crf_decoding(
input=feature_out, param_attr=fluid.ParamAttr(name='crfw'))
chunk_evaluator = fluid.evaluator.ChunkEvaluator(
#name="ner_chunk",
input=crf_decode,
label=target,
chunk_scheme="IOB",
num_chunk_types=int(math.ceil((label_dict_len - 1) / 2.0)))
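    # Note (added for clarity; assumes the standard CoNLL-2003 NER label set):
    # with 9 labels, the B-/I- variants of PER, LOC, ORG and MISC plus "O",
    # num_chunk_types = ceil((9 - 1) / 2.0) = 4, one chunk type per entity.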
#avg_cost = fluid.layers.mean(x=crf_cost)
print type(crf_cost)
#print type(avg_cost)
print crf_cost.shape
#print avg_cost.shape
inference_program = fluid.default_main_program().clone()
with fluid.program_guard(inference_program):
test_target = chunk_evaluator.metrics + chunk_evaluator.states
inference_program = fluid.io.get_inference_program(test_target)
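    # Note (added for clarity): the cloned program is pruned down to just the
    # ops needed to compute the chunk evaluator's metrics and states, so
    # evaluation reuses the trained parameters without running the
    # optimizer's update ops.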
train_reader = paddle.batch(
paddle.reader.shuffle(
reader.data_reader(train_data_file, word_dict, label_dict),
buf_size=1000),
batch_size=batch_size)
test_reader = paddle.batch(
paddle.reader.shuffle(
reader.data_reader(test_data_file, word_dict, label_dict),
buf_size=1000),
batch_size=batch_size)
place = fluid.CPUPlace()
feeder = fluid.DataFeeder(
feed_list=[
word, mark, target
],
place=place)
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
embedding_name = 'emb'
embedding_param = fluid.global_scope().find_var(embedding_name).get_tensor()
embedding_param.set(word_vector_values, place)
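    # Copy the pretrained vectors into the tensor backing the variable named
    # 'emb', so training starts from the word vectors loaded from emb_file
    # rather than from the random initialization of the startup program.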
batch_id = 0
for pass_id in xrange(num_passes):
chunk_evaluator.reset(exe)
for data in train_reader():
cost, precision, recall, f1_score = exe.run(
fluid.default_main_program(),
feed=feeder.feed(data),
fetch_list=[avg_cost] + chunk_evaluator.metrics)
if batch_id % 5 == 0:
print("Pass " + str(pass_id) + ", Batch " + str(batch_id) + ", Cost " + str(cost) + ", Precision " + str(precision) + ", Recall " + str(recall) + ", F1_score" + str(f1_score))
batch_id = batch_id + 1
#pass_precision, pass_recall, pass_f1_score = chunk_evaluator.eval(exe)
pass_precision, pass_recall, pass_f1_score = test(exe, chunk_evaluator, inference_program, train_reader, place)
print("[TrainSet] pass_id:" + str(pass_id) + " pass_precision:" + str(pass_precision) + " pass_recall:" + str(pass_recall) + " pass_f1_score:" + str(pass_f1_score))
pass_precision, pass_recall, pass_f1_score = test(exe, chunk_evaluator, inference_program, test_reader, place)
print("[TestSet] pass_id:" + str(pass_id) + " pass_precision:" + str(pass_precision) + " pass_recall:" + str(pass_recall) + " pass_f1_score:" + str(pass_f1_score))
if __name__ == "__main__":
main(
train_data_file="full_conll03_dataset/train.txt",
test_data_file="full_conll03_dataset/dev.txt",
vocab_file="data/vocab.txt",
target_file="data/target.txt",
emb_file="data/wordVectors.txt",
model_save_dir="models/",
num_passes=1000)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
import re
import argparse
import numpy as np
from collections import defaultdict
logger = logging.getLogger("paddle")
logger.setLevel(logging.INFO)
def get_embedding(emb_file='data/wordVectors.txt'):
    """
    Load the pretrained word vectors as a float32 numpy array, one row per
    vocabulary word.
    """
    return np.loadtxt(emb_file, dtype='float32')
def load_dict(dict_path):
    """
    Load the word dictionary from the given file.
    Each line of the given file is a word, which can include multiple
    columns separated by tabs.
    This function takes the first column of a line as the key and the line
    number (the index of the word in the dictionary) as the value.
    """
    return dict((line.strip().split("\t")[0], idx)
                for idx, line in enumerate(open(dict_path, "r").readlines()))
def load_reverse_dict(dict_path):
    """
    Load the word dictionary from the given file.
    Each line of the given file is a word, which can include multiple
    columns separated by tabs.
    This function takes the line number (the index of the word in the
    dictionary) as the key and the first column of a line as the value.
    """
    return dict((idx, line.strip().split("\t")[0])
                for idx, line in enumerate(open(dict_path, "r").readlines()))
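
# Worked example (illustrative; the file contents are hypothetical): if
# data/target.txt contains the three lines "O", "B-PER" and "I-PER", then
# load_dict returns {"O": 0, "B-PER": 1, "I-PER": 2} and load_reverse_dict
# returns {0: "O", 1: "B-PER", 2: "I-PER"}; the two are inverses of each
# other as long as the first columns are unique.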