Commit 590b8bca  Author: Zhu Xuekai

add attentiondta

Parent a9e031a9
# Auto detect text files and perform LF normalization
* text=auto
import tensorflow as tf

# Vocabulary sizes for the two token embeddings (65 SMILES symbols,
# 21 amino-acid symbols including padding).
SMI_DIM = 65
PRO_DIM = 21
# Base number of 1-D convolution filters; later layers use 2x and 3x.
FILTERNUM = 32
# Kernel widths for the three stacked conv layers of each branch.
SMI_FILTER_SIZE = [4,6,8]
PRO_FILTER_SIZE = [4,8,12]
# Width of the token-embedding vectors.
EMBEDDING_DIM = 128
# Single regression output (binding affinity).
OUTPUT_NODE = 1
# Sizes of the three fully-connected layers after feature concatenation.
FC_SIZE = [1024, 1024, 512]
def variable_summaries(var, name):
    """Attach TensorBoard summaries (histogram, mean, stddev) to a tensor.

    Args:
        var: the tensor to summarize.
        name: label used for the histogram and the scalar summary tags.
    """
    with tf.name_scope("summaries"):
        tf.summary.histogram(name, var)
        avg = tf.reduce_mean(var)
        tf.summary.scalar("mean/" + name, avg)
        # Population standard deviation around the mean computed above.
        spread = tf.sqrt(tf.reduce_mean(tf.square(var - avg)))
        tf.summary.scalar("stddev/" + name, spread)
def inference(smi_tensor, pro_tensor, regularizer=None,
              keep_prob=1, trainlabel=False):
    """Build the AttentionDTA graph and return its output tensors.

    Pipeline: embed drug (SMILES) and protein tokens, run three stacked 1-D
    convolutions over each branch, weight both feature maps with a bilinear
    attention block, max-pool over the sequence axis, concatenate, and
    regress a single affinity value through three FC layers.

    Args:
        smi_tensor: int tensor of encoded SMILES tokens; presumably
            (batch, smi_len) -- TODO confirm against the input pipeline.
        pro_tensor: int tensor of encoded protein tokens; presumably
            (batch, seq_len) -- TODO confirm.
        regularizer: optional callable applied to selected weight matrices;
            results are accumulated into the 'losses' graph collection.
        keep_prob: dropout keep probability for the FC layers (1 = no drop).
        trainlabel: currently unused (the batch-norm calls that would use it
            are commented out below).

    Returns:
        (drug_feature, pretein_feature, logit): pooled attention-weighted
        feature vectors for each input and the scalar affinity prediction.

    NOTE(review): this file's indentation was lost; the scope nesting below
    was reconstructed and should be verified against saved checkpoints,
    since tf.get_variable names depend on the enclosing variable_scope.
    """
    # Both embedding tables live in one AUTO_REUSE scope so repeated graph
    # builds (train/test) share the same variables.
    with tf.variable_scope('smi_embedding', reuse=tf.AUTO_REUSE):
        smi_wordembedding = tf.get_variable(
            "smi_word_embedding", [SMI_DIM, EMBEDDING_DIM])
        smi_embedding = tf.nn.embedding_lookup(smi_wordembedding, smi_tensor)
        pro_wordembedding = tf.get_variable(
            "pro_word_embedding", [PRO_DIM, EMBEDDING_DIM])
        pro_embedding = tf.nn.embedding_lookup(pro_wordembedding, pro_tensor)
    # Drug branch: three conv1d layers with VALID padding, so the sequence
    # length shrinks by (filter_size - 1) at each layer.
    with tf.variable_scope('drug_conv'):
        conv1_weights = tf.get_variable(
            "weight1", [SMI_FILTER_SIZE[0], EMBEDDING_DIM, FILTERNUM],
            initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv2_weights = tf.get_variable(
            "weight2", [SMI_FILTER_SIZE[1], FILTERNUM, FILTERNUM * 2],
            initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv3_weights = tf.get_variable(
            "weight3", [SMI_FILTER_SIZE[2], FILTERNUM * 2, FILTERNUM * 3],
            initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv1_biases = tf.get_variable(
            "bias1", [FILTERNUM], initializer=tf.constant_initializer(0.1))
        conv2_biases = tf.get_variable(
            "bias2", [FILTERNUM * 2], initializer=tf.constant_initializer(0.1))
        conv3_biases = tf.get_variable(
            "bias3", [FILTERNUM * 3], initializer=tf.constant_initializer(0.1))
        variable_summaries(conv1_weights, 'W1')
        variable_summaries(conv2_weights, 'W2')
        variable_summaries(conv3_weights, 'W3')
        variable_summaries(conv1_biases, 'b1')
        variable_summaries(conv2_biases, 'b2')
        variable_summaries(conv3_biases, 'b3')
        smi_conv1 = tf.nn.relu(
            tf.nn.bias_add(
                tf.nn.conv1d(
                    smi_embedding,
                    conv1_weights,
                    stride=1,
                    padding='VALID'),
                conv1_biases))
        smi_conv1 = tf.nn.relu(
            tf.nn.bias_add(
                tf.nn.conv1d(
                    smi_conv1,
                    conv2_weights,
                    stride=1,
                    padding='VALID'),
                conv2_biases))
        smi_conv1 = tf.nn.relu(
            tf.nn.bias_add(
                tf.nn.conv1d(
                    smi_conv1,
                    conv3_weights,
                    stride=1,
                    padding='VALID'),
                conv3_biases))
        # drug_feature = tf.squeeze(
        #     tf.nn.pool(
        #         smi_conv1,
        #         window_shape=[85],
        #         pooling_type="MAX",
        #         padding='VALID'),
        #     1)
    # Protein branch: same three-layer structure, but SAME padding so the
    # sequence length is preserved.
    with tf.variable_scope('protein_conv'):
        conv1_weights = tf.get_variable(
            "weight", [PRO_FILTER_SIZE[0], EMBEDDING_DIM, FILTERNUM],
            initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv2_weights = tf.get_variable(
            "weight1", [PRO_FILTER_SIZE[1], FILTERNUM, FILTERNUM * 2],
            initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv3_weights = tf.get_variable(
            "weight2", [PRO_FILTER_SIZE[2], FILTERNUM * 2, FILTERNUM * 3],
            initializer=tf.truncated_normal_initializer(stddev=0.1))
        conv1_biases = tf.get_variable("bias", [FILTERNUM], initializer=tf.constant_initializer(0.1))
        conv2_biases = tf.get_variable("bias1", [FILTERNUM * 2], initializer=tf.constant_initializer(0.1))
        conv3_biases = tf.get_variable("bias2", [FILTERNUM * 3], initializer=tf.constant_initializer(0.1))
        pro_conv1 = tf.nn.relu(
            tf.nn.bias_add(tf.nn.conv1d(pro_embedding, conv1_weights, stride=1, padding='SAME'), conv1_biases))
        pro_conv1 = tf.nn.relu(
            tf.nn.bias_add(tf.nn.conv1d(pro_conv1, conv2_weights, stride=1, padding='SAME'), conv2_biases))
        pro_conv1 = tf.nn.relu(
            tf.nn.bias_add(tf.nn.conv1d(pro_conv1, conv3_weights, stride=1, padding='SAME'), conv3_biases))
        # pro_pool = tf.nn.pool(pro_conv3, window_shape=[1179], pooling_type="MAX", padding='VALID')
        # NOTE(review): only the protein-branch conv weights are regularized;
        # the drug branch above is not -- confirm this asymmetry is intended.
        if regularizer is not None:
            tf.add_to_collection('losses', regularizer(conv1_weights))
            tf.add_to_collection('losses', regularizer(conv2_weights))
            tf.add_to_collection('losses', regularizer(conv3_weights))
        variable_summaries(conv1_weights, 'W1')
        variable_summaries(conv2_weights, 'W2')
        variable_summaries(conv3_weights, 'W3')
        variable_summaries(conv1_biases, 'b1')
        variable_summaries(conv2_biases, 'b2')
        variable_summaries(conv3_biases, 'b3')
    # Bilinear attention: project both branches through one shared weight
    # matrix, score every drug-position/protein-position pair, and reweight
    # each feature map by its summed attention.
    with tf.variable_scope("attention_layer", reuse=tf.AUTO_REUSE):
        weights = tf.get_variable("weight", [smi_conv1.get_shape()[2], pro_conv1.get_shape()[2]],
                                  initializer=tf.truncated_normal_initializer(stddev=0.1))
        if regularizer is not None:
            tf.add_to_collection('losses', regularizer(weights))
        biases = tf.get_variable("bias",
                                 [pro_conv1.get_shape()[2]],
                                 initializer=tf.constant_initializer(0.1))
        # NOTE(review): bias is added AFTER the ReLU here (relu(xW) + b),
        # not the usual relu(xW + b) -- confirm this matches the paper.
        atten1 = tf.nn.relu(tf.einsum('ajk,ki->aji', smi_conv1, weights))+biases
        atten2 = tf.nn.relu(tf.einsum('ajk,ki->aji', pro_conv1, weights))+biases
        variable_summaries(weights, "DT_att_weight")
        variable_summaries(biases, "DT_att_biases")
        # Pairwise scores: (batch, smi_len, pro_len).
        alph = tf.nn.tanh(
            tf.einsum('aji,aik->ajk', atten1, tf.transpose(atten2, [0, 2, 1])))
        # Per-position attention: sum scores over the opposite sequence.
        alphdrug = tf.nn.tanh(tf.reduce_sum(alph, 2))
        alphprotein = tf.nn.tanh(tf.reduce_sum(alph, 1))
        # Broadcast the per-position weight across the channel dimension.
        alphdrug = tf.tile(tf.expand_dims(alphdrug, 2), [1, 1, smi_conv1.get_shape()[2]])
        alphprotein = tf.tile(tf.expand_dims(alphprotein, 2), [1, 1, pro_conv1.get_shape()[2]])
        drug_feature = tf.multiply(alphdrug, smi_conv1)
        pretein_feature = tf.multiply(alphprotein, pro_conv1)
        # Global max-pool over the full sequence axis of each branch.
        drug_feature = tf.squeeze(
            tf.nn.pool(drug_feature, window_shape=[drug_feature.get_shape()[1]], pooling_type="MAX",
                       padding='VALID'), 1)
        pretein_feature = tf.squeeze(tf.nn.pool(pretein_feature, window_shape=[pretein_feature.get_shape()[1]], pooling_type="MAX", padding='VALID'),1)
    with tf.name_scope("concat_layer"):
        # Joint drug-protein representation fed to the FC head.
        pair_feature = tf.concat([drug_feature, pretein_feature], 1)
    # Three-layer FC regression head with leaky-ReLU and dropout.
    with tf.variable_scope('deep-fc-layer', reuse=tf.AUTO_REUSE):
        fc1_weights = tf.get_variable("weight1", [int(pair_feature.get_shape()[1]), FC_SIZE[0]],
                                      initializer=tf.contrib.layers.xavier_initializer(), dtype=tf.float32)
        fc1_biases = tf.get_variable("bias1",
                                     [FC_SIZE[0]],
                                     initializer=tf.constant_initializer(0.1),
                                     dtype=tf.float32)
        fc2_weights = tf.get_variable("weight2", [FC_SIZE[0], FC_SIZE[1]],
                                      initializer=tf.contrib.layers.xavier_initializer(), dtype=tf.float32)
        fc2_biases = tf.get_variable("bias2",
                                     [FC_SIZE[1]],
                                     initializer=tf.constant_initializer(0.1),
                                     dtype=tf.float32)
        fc3_weights = tf.get_variable("weight3", [FC_SIZE[1], FC_SIZE[2]],
                                      initializer=tf.contrib.layers.xavier_initializer(), dtype=tf.float32)
        fc3_biases = tf.get_variable("bias3",
                                     [FC_SIZE[2]],
                                     initializer=tf.constant_initializer(0.1),
                                     dtype=tf.float32)
        if regularizer is not None:
            tf.add_to_collection('losses', regularizer(fc1_weights))
            tf.add_to_collection('losses', regularizer(fc2_weights))
            tf.add_to_collection('losses', regularizer(fc3_weights))
        variable_summaries(fc1_weights, 'W1')
        variable_summaries(fc2_weights, 'W2')
        variable_summaries(fc3_weights, 'W3')
        variable_summaries(fc1_biases, 'b1')
        variable_summaries(fc2_biases, 'b2')
        variable_summaries(fc3_biases, 'b3')
        fc = tf.nn.leaky_relu(
            tf.matmul(
                tf.cast(
                    pair_feature,
                    tf.float32),
                fc1_weights) +
            fc1_biases)
        variable_summaries(fc, 'fc1')
        # fc = tf.layers.batch_normalization(fc, training=trainlabel)
        fc = tf.nn.dropout(fc, keep_prob)
        fc = tf.nn.leaky_relu(
            tf.matmul(fc, fc2_weights) + fc2_biases)
        variable_summaries(fc, 'fc2')
        # fc = tf.layers.batch_normalization(fc, training=trainlabel)
        fc = tf.nn.dropout(fc, keep_prob)
        fc = tf.nn.leaky_relu(
            tf.matmul(fc, fc3_weights) + fc3_biases)
        variable_summaries(fc, 'fc3')
        # fc = tf.layers.batch_normalization(fc, training=trainlabel)
    # Linear output layer; bias initialized at 5 (roughly the mean affinity).
    with tf.variable_scope('y-layer', reuse=tf.AUTO_REUSE):
        y_weights = tf.get_variable("weight", [FC_SIZE[2], 1],
                                    initializer=tf.truncated_normal_initializer(stddev=0.1), dtype=tf.float32)
        y_biases = tf.get_variable(
            "bias",
            1,
            initializer=tf.constant_initializer(5),
            dtype=tf.float32)
        variable_summaries(y_weights, 'W')
        variable_summaries(y_biases, 'b')
        if regularizer is not None:
            tf.add_to_collection('losses', regularizer(y_weights))
        logit = tf.matmul(fc, y_weights) + y_biases
    return drug_feature, pretein_feature, logit
import tensorflow as tf
import pandas as pd
import numpy as np
import DTA_model as model
import os

# Fixed input lengths the TFRecord features were padded/truncated to.
MAX_SEQ_LEN = 1200
MAX_SMI_LEN = 100
from sklearn.metrics import mean_squared_error
from sklearn.metrics import mean_absolute_error
from sklearn.metrics import r2_score  # R square
# Pin evaluation to a single GPU.
os.environ["CUDA_VISIBLE_DEVICES"] = "6"
def calculateMSE(X, Y):
    """Return the mean squared error between two paired sequences.

    Args:
        X: sequence of values.
        Y: sequence paired element-wise with X; must have at least len(X)
           elements (indexed by position, as in the original).

    Returns:
        sum((Y[i] - X[i]) ** 2 for all i) / len(X).

    Raises:
        ZeroDivisionError: if X is empty (original behavior preserved).
    """
    # Idiom fix: a generator expression replaces the original
    # accumulate-into-a-list loop; indexing (not zip) keeps the original
    # IndexError behavior when Y is shorter than X.
    return sum((Y[i] - X[i]) ** 2 for i in range(len(X))) / len(X)
def parser(record):
    """Decode one serialized tf.Example into (drug, protein, affinity).

    Args:
        record: a scalar string tensor holding a serialized Example.

    Returns:
        Tuple of (drug int32 tensor, protein int32 tensor,
        affinity float32 tensor of shape [1]).
    """
    feature_spec = {
        'drug': tf.FixedLenFeature([MAX_SMI_LEN], dtype=tf.int64),
        'protein': tf.FixedLenFeature([MAX_SEQ_LEN], dtype=tf.int64),
        'affinity': tf.FixedLenFeature([1], dtype=tf.float32)
    }
    example = tf.parse_single_example(
        serialized=record, features=feature_spec)
    # Token ids are stored as int64; downstream embedding lookups use int32.
    return (tf.cast(example['drug'], tf.int32),
            tf.cast(example['protein'], tf.int32),
            example['affinity'])
def test(file, test_path):
    """Run the saved model over one TFRecord test set and dump predictions.

    Predictions are written one-per-line to ./results/<file>/test.txt.

    Args:
        file: dataset name; names the output directory. (Bug fix: the
            original ignored this parameter and read the module-level
            global ``dataname`` instead, which breaks when imported.)
        test_path: path to the TFRecord file of (drug, protein, affinity).
    """
    with tf.Graph().as_default() as g:
        dataset = tf.data.TFRecordDataset(test_path)
        dataset = dataset.map(parser)
        dataset = dataset.batch(
            batch_size=3)
        iterator = dataset.make_initializable_iterator()
        drug_to_embeding, proteins_to_embeding, labels_batch \
            = iterator.get_next()
        _, _, test_label = \
            model.inference(
                drug_to_embeding,
                proteins_to_embeding,
                regularizer=None,
                keep_prob=1,
                trainlabel=0)
        mean_squared_eror = tf.losses.mean_squared_error(
            test_label, labels_batch)
        saver = tf.train.Saver()
        with tf.Session() as sess:
            sess.run(iterator.initializer)
            # Restore the latest checkpoint produced by DTA_train.py.
            ckpt = tf.train.get_checkpoint_state(
                "./" + "train" + "/model0/")
            if ckpt and ckpt.model_checkpoint_path:
                saver.restore(sess, ckpt.model_checkpoint_path)
            predictions_eval = []
            labels_eval = []
            # Drain the iterator; OutOfRangeError marks end of the dataset.
            try:
                while True:
                    df, pf, p, l, MSE = sess.run(
                        [drug_to_embeding, proteins_to_embeding, test_label, labels_batch, mean_squared_eror])
                    predictions_eval.append(p)
                    labels_eval.append(l)
            except tf.errors.OutOfRangeError:
                pass
            predictions_eval = np.concatenate(predictions_eval)
            labels_eval = np.concatenate(labels_eval)
            labels_eval.resize([labels_eval.shape[0], 1])
            # Fix: derive the results directory from the ``file`` argument
            # rather than the global ``dataname``.
            RESULT_PATH = "./results/" + file + "/"
            if not os.path.exists(RESULT_PATH):
                os.makedirs(RESULT_PATH)
            with open(RESULT_PATH + "test.txt", "w") as f:
                for i in predictions_eval:
                    f.write(str(i[0]) + '\n')
            print("----------------test over-----------------")
if __name__ == '__main__':
    # Evaluate the trained model on every held-out dataset in turn.
    # The loop variable is module-global, preserving the original's
    # assignment of ``dataname`` before each call.
    for dataname in ("test", "kinase", "GPCR", "ER", "channel"):
        test_path = "./tfrecord/" + dataname + "/file.tfrecord"
        test(dataname, test_path)
import tensorflow as tf
import numpy as np
import DTA_model as model
# from tensorflow.python.client import timeline
import os

# Pin training to a single GPU.
os.environ["CUDA_VISIBLE_DEVICES"] = "5"
# dataname = "davis"
# dataname = "deepaffinity"
# 5-fold cross-validation
# Number of folds/models to train (1 = single model).
cross_num = 1
LEARNING_RATE_BASE = 0.0001
# REGULARIZATION_RATE = 0.00001
EPOCH = 75
#
# if dataname == "kiba":
# batch_size = 100
# TESTNUM = (118256/5)*4/100
dataname = "train"
batch_size = 64
# Steps per reporting interval; derived from dataset size (unused below,
# the loop reports every 10 steps instead).
TESTNUM = (30056/5)*4/100
# Fixed input lengths the TFRecord features were padded/truncated to.
MAX_SEQ_LEN = 1200
MAX_SMI_LEN = 100
Train_path = "./tfrecord/" + dataname + "/file.tfrecord"
# %d is filled with the fold index.
MODEL_SAVE_PATH = "./" + dataname + "/model%d/"
MODEL_NAME = "model.ckpt"
def parser(record):
    """Decode one serialized tf.Example into (drug, protein, affinity).

    Duplicates the parser in DTA_test.py; token ids are stored as int64
    and cast to int32 for the embedding lookups.
    """
    read_features = {
        'drug': tf.FixedLenFeature([MAX_SMI_LEN], dtype=tf.int64),
        'protein': tf.FixedLenFeature([MAX_SEQ_LEN], dtype=tf.int64),
        'affinity': tf.FixedLenFeature([1], dtype=tf.float32)
    }
    read_data = tf.parse_single_example(
        serialized=record, features=read_features)
    drug = tf.cast(read_data['drug'], tf.int32)
    protein = tf.cast(read_data['protein'], tf.int32)
    affinit_y = read_data['affinity']
    return drug, protein, affinit_y
def train(num, train_path):
    """Train one AttentionDTA model and checkpoint it when loss improves.

    Logs per-step MSE to ./<dataname>/path/to/log<num>/log.txt, writes
    TensorBoard summaries every 10 steps, and saves a checkpoint whenever
    the 10-step average MSE beats the best seen so far.

    Args:
        num: fold/model index; selects checkpoint and log directories.
        train_path: path to the training TFRecord file.
    """
    with tf.variable_scope("input"):
        dataset = tf.data.TFRecordDataset(train_path)
        dataset = dataset.map(parser)
        dataset = dataset.repeat(EPOCH).shuffle(500).batch(
            batch_size=batch_size)
        train_iterator = dataset.make_initializable_iterator()
        train_drug, train_proteins_to_embeding, train_labels_batch\
            = train_iterator.get_next()
    # regularizer = tf.contrib.layers.l2_regularizer(REGULARIZATION_RATE)
    _, _, train_label = \
        model.inference(
            train_drug,
            train_proteins_to_embeding,
            regularizer=None, keep_prob=0.9, trainlabel=1
        )
    global_step = tf.Variable(0, trainable=False)
    with tf.name_scope("train_loss_function"):
        mean_squared_eror = tf.losses.mean_squared_error(
            train_label, train_labels_batch)
        tf.summary.scalar("mean_squared_eror", mean_squared_eror)
    with tf.name_scope("train_step"):
        learning_rate = LEARNING_RATE_BASE
        # Batch-norm update ops (if re-enabled in the model) must run
        # before each optimizer step.
        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        with tf.control_dependencies(update_ops):
            train_step = tf.train.AdamOptimizer(learning_rate).minimize(
                mean_squared_eror, global_step=global_step)
        with tf.control_dependencies([train_step]):
            train_op = tf.no_op(name='train')
    merged = tf.summary.merge_all()
    summary_write = tf.summary.FileWriter(
        "./" + dataname + "/path/to/log%d" %
        num, tf.get_default_graph())
    # Save trainable variables plus any moving-average variables.
    var_list = [var for var in tf.global_variables() if "moving" in var.name]
    var_list += tf.trainable_variables()
    saver = tf.train.Saver(var_list=var_list, max_to_keep=20)
    config = tf.ConfigProto(
        log_device_placement=False,
        allow_soft_placement=True)
    config.gpu_options.allow_growth = True
    with tf.Session(config=config) as sess,\
            open("./" + dataname + "/path/to/log%d/log.txt" % num, "w") as f:
        print("beginning training")
        sess.run(
            tf.group(
                tf.global_variables_initializer(),
                tf.local_variables_initializer()))
        sess.run(train_iterator.initializer)
        step = 0
        maxloss = 100  # best (lowest) interval-average MSE seen so far
        trainMSElist = []
        # Hoisted out of the loop: identical on every iteration.
        run_options = tf.RunOptions(
            trace_level=tf.RunOptions.FULL_TRACE)
        try:
            while True:
                step += 1
                run_metadata = tf.RunMetadata()
                summary, _, MSE, now_step = sess.run(
                    [merged, train_op, mean_squared_eror, global_step],
                    options=run_options, run_metadata=run_metadata)
                # Fix: the original bound this string to the name ``str``,
                # shadowing the builtin.
                log_line = "%s-model:%d-step:%d;train_MSE:%g;" % (
                    dataname, num, now_step, MSE)
                f.write(log_line + "\n")
                trainMSElist.append(MSE)
                if step % 10 == 0:
                    summary_write.add_summary(summary, now_step)
                    summary_write.add_run_metadata(
                        run_metadata, tag=(
                            "step%d" %
                            step), global_step=step)
                    # Average MSE over the last reporting interval.
                    trainMSE = sum(trainMSElist) / len(trainMSElist)
                    print(
                        "%s-model:%d-epoch:%d;train_MSE:%g;" %
                        (dataname, num, now_step, trainMSE))
                    trainMSElist = []
                    if trainMSE < maxloss:
                        saver.save(
                            sess,
                            os.path.join(
                                MODEL_SAVE_PATH %
                                num,
                                MODEL_NAME),
                            global_step=global_step)
                        maxloss = trainMSE
                        print("save model")
        except tf.errors.OutOfRangeError:
            # Raised when the repeated dataset is exhausted: training done.
            pass
        summary_write.close()
def main(argv=None):
    """Train ``cross_num`` models, one per cross-validation fold."""
    for fold in range(cross_num):
        # Each fold builds a fresh graph.
        tf.reset_default_graph()
        save_dir = MODEL_SAVE_PATH % fold
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)
        print("The No.%d model" % fold)
        train(fold, Train_path)


if __name__ == '__main__':
    tf.app.run()
# AttentionDTA_BIBM
AttentionDTA: prediction of drug–target binding affinity using an attention model. Paper: https://ieeexplore.ieee.org/abstract/document/8983125
This repository contains the source code and the data.
## AttentionDTA
<div align="center">
<p><img src="model.jpg" width="600" /></p>
</div>
## Setup and dependencies
Dependencies:
- python 3.6
- tensorflow >=1.9
- numpy
## Resources:
+ README.md: this file.
+ tfrecord: The original data set and data set processing code are saved in this folder.
+ davis_div.txt: Under the 5-fold cross-validation setting, there is a division of the training set and the test set of the davis dataset.
+ kiba_div.txt: Under the 5-fold cross-validation setting, there is a division of the training set and the test set of the kiba dataset.
+ davis_str_all.txt
+ kiba_str_all.txt
+ dataset.py: create data in tfrecord format according to (kiba/davis)_div.txt
+ DTA_train.py: train a AttentionDTA model.
+ DTA_model.py: AttentionDTA model architecture
+ DTA_test.py: test trained models
## Step-by-step running
## 1. Create data in tfrecord format
python dataset.py
## 2. Train a prediction model
python DTA_train.py
To train a model using training data.
## 3. Predict affinity with trained models
python DTA_test.py
\ No newline at end of file
RMSE : 1.977229018807965 ; Pearson Correlation Coefficient : 0.13056623322435212
\ No newline at end of file
5.357305
5.0675197
5.7569113
5.778124
5.6330957
5.7011733
5.74707
5.745891
5.7677283
5.6298275
5.834708
5.626807
5.747061
5.7795863
5.5337734
5.0578804
5.044533
5.116545
5.116936
5.1977515
5.1571093
5.175327
5.306932
5.1647
5.260111
5.3910584
4.749715
5.062382
5.22511
4.8893523
4.896854
4.9255004
5.0114884
4.9694557
4.945339
4.958314
4.979654
5.110323
4.939724
4.9064345
5.7481256
5.2178473
5.2963023
6.1322412
5.7981763
5.466777
5.4514165
4.841126
4.903579
5.809992
5.1097155
4.9852066
5.2520943
5.0755916
4.950823
4.86141
4.8641877
5.0124044
4.8961926
4.9459543
4.91149
5.053131
6.021241
4.8511534
4.8457537
4.7418513
5.0014133
5.0835266
5.032491
6.302972
6.518325
6.3595266
4.914291
5.0036993
5.3551555
5.2690964
4.8253613
5.093188
4.830887
5.290556
5.946444
5.7190695
5.0897026
5.1990857
4.929069
4.704165
4.772109
5.496005
5.521178
5.0173416
5.2495856
4.4665365
4.7818794
4.8992696
4.7708325
4.8471565
4.823306
4.91406
4.8816433
5.3732452
4.527399
4.5776706
4.446348
5.251399
4.636929
5.6276937
4.975101
5.0767593
5.735553
5.126744
5.134978
5.0244555
5.055627
4.575353
5.2955413
5.1968083
5.1110206
5.6910067
4.8353057
5.0080857
4.4142675
4.874983
4.983857
5.6008735
4.791833
4.884792
4.812028
4.7919993
4.7193837
4.744278
4.6973314
4.8015313
4.8324127
5.175117
5.243275
5.804392
6.007774
5.699701
5.0968604
4.889248
5.0366817
4.9114537
4.9910545
5.0381174
4.9201684
4.887292
4.8366556
4.8436594
5.123289
5.338315
5.8515954
6.140915
5.425441
5.238149
5.071503
5.192066
5.0144906
5.0144906
5.030442
4.9967704
4.980114
4.6933146
5.0426955
5.199746
5.0789814
5.0012975
5.0012975
4.9905815
5.098765
5.2263274
5.2759256
4.8380218
5.189737
5.2369046
5.1759844
5.170539
5.2618093
5.336355
5.378271
5.399162
5.128778
4.838815
4.7445164
5.137044
5.2947497
5.1830482
5.147432
4.9133625
4.996951
5.081598
4.5956807
5.1507587
4.827338
5.1054516
4.985319
5.3200197
5.195572
4.9644246
4.9687443
4.9013233
4.844561
4.878078
4.748005
4.8569527
5.5065827
5.156744
4.844618
5.3499384
4.9465966
4.6444616
4.9237466
4.540477
4.6846247
4.73847
4.746336
4.469534
5.6803823
5.215504
4.786839
4.844027
5.409299
4.5180173
4.925541
4.643955
4.937389
4.624559
5.189074
5.2937784
5.4621754
5.076482
5.275565
5.528797
4.921669
5.120386
5.9429355
4.9079695
4.7076616
5.2107987
5.0616627
4.69842
5.047799
4.712799
5.097641
4.711834
5.0224633
4.8407087
4.529427
4.862288
5.1725855
5.3517947
5.1611404
4.635884
4.731134
5.3034205
5.0492144
4.997364
5.313191
4.4625072
4.7290006
4.821125
4.788735
4.4387016
5.093784
4.772038
4.631194
4.64534
5.0781384
5.89443
4.8444605
5.1788993
5.7209883
4.8452005
5.1006536
4.781905
4.832019
4.8224254
4.953525
5.263251
4.8443427
5.106769
5.0599456
4.6958423
4.748782
5.098088
4.6980896
5.14616
4.4576254
5.030764
5.4351816
5.777982
5.237271
4.859377
5.143222
4.78371
5.1183586
4.7561307
5.01542
4.6256194
5.05943
5.080904
4.674723
4.6549163
4.896685
5.044359
5.039941
5.245988
5.3771772
5.5287185
4.6695123
5.6203957
4.6132884
5.0858016
4.582807
4.774371
4.902148
4.874796
4.6017103
4.6258225
4.9981174
4.8282323
5.04591
4.7728443
4.9439964
5.1177793
4.4528203
5.152982
4.7319202
4.6913424
5.1637826
5.4318995
4.9285398
4.8441596
5.1180663
5.035219
4.75043
4.6530585
5.123577
4.6844897
4.262711
4.6546373
5.0622215
4.965161
4.528703
4.6698546
4.908407
4.694425
4.4755344
5.1125364
4.674826
5.1242604
4.9212217
4.79734
5.544669
4.9019465
4.9798265
5.4148474
5.1389437
4.6826615
4.8011518
5.4023075
4.8581305
5.1077404
4.710073
4.857573
4.724973
4.7488093
4.8201447
4.747558
5.371869
4.882798
5.3572464
4.88637
4.7636304
4.7053895
5.718543
5.268621
5.2161875
4.881092
4.989822
4.5273046
4.9493637
4.4523606
5.091004
5.441701
5.345089
4.6962442
5.1342387
4.757936
4.7664037
5.4665003
4.916652
4.7357106
5.039078
4.6948214
5.4868574
4.8300543
4.7780633
4.8276334
5.0108633
4.9439764
4.9070415
4.6443667
4.8453727
4.7546773
4.8396435
4.8089104
4.8981857
5.3992653
5.592015
4.587182
4.81303
4.608478
4.7390904
4.535001
4.688124
5.345116
4.1557107
4.7772923
4.851177
4.976498
5.129426
4.702694
4.6988873
4.7741356
4.8914604
5.3048754
4.8752613
4.814432
4.6048098
4.903971
5.6147504
4.6333055
4.812833
5.4201727
5.199598
5.064108
5.445004
4.579017
4.8226113
4.762179
4.951549
4.742565
4.874222
4.906759
5.207998
4.9874096
4.8607764
4.982864
5.068338
5.4563537
5.1368003
4.9224515
5.860383
5.2574887
5.310538
5.0173383
4.701745
5.2354226
5.3711777
4.988057
5.003744
5.1594143
5.5019183
4.788782
5.2198486
4.8489404
5.0465937
5.4681783
5.3145404
4.747811
4.9031215
5.138067
6.23401
5.1033335
4.4821324
5.6936865
4.552008
4.6059933
6.039877
4.8200164
4.8918185
4.962608
4.796988
4.761211
4.9569077
5.630522
5.307389
4.879303
4.733266
5.0667915
4.66823
5.4253535
4.9671273
5.897704
4.567419
4.711712
5.58209
5.245099
4.6207037
5.0640345
4.707643
4.470788
4.699478
5.4766703
4.670993
4.5095124
5.037878
4.783192
5.5431623
5.2715616
5.4215255
4.4920855
4.752925
4.8837376
5.356306
5.457403
6.6539884
4.530016
4.326106
5.4827433
6.125723
5.2178497
4.953113
4.5416017
5.0918875
4.6940117
4.314331
4.9038224
6.7930365
4.857589
5.5144567
5.3470583
4.8767843
4.9888163
5.2041698
4.7068543
4.5610785
4.564743
5.11994
5.1847878
4.3952317
4.9833264
5.2829475
4.974173
4.8211823
5.335746
5.58351
4.544687
4.8937597
4.869544
4.9868436
4.689224
4.824614
4.9787445
6.1591644
4.992215
4.4013114
5.1125045
5.1620235
4.8949547
5.242205
4.673289
4.778784
4.7044396
4.8307467
4.9368267
4.7138247
5.015654
5.374763
4.7389827
5.415403
5.112286
4.5609727
4.7640486
4.735529
5.0414934
4.841802
4.2508063
4.4558897
4.729084
5.4343214
4.341022
4.672431
5.0107255
5.0543623
4.8229103
4.4034667
4.299003
5.495168
4.976929
4.844157
5.025272
4.863493
5.4164124
5.115439
4.3312078
4.7411413
5.49872
5.0780573
4.7240777
4.8947487
4.84495
5.2118187
5.0262346
6.0938997
4.718628
5.1288886
5.396081
4.6609845
5.186251
5.1429257
5.4518094
4.9651203
4.4990544
4.43775
4.790708
5.5170817
4.8326735
5.0721574
4.7396855
5.5031114
4.781719
3.9742265
4.5800433
5.4299006
4.5363135
4.5210414
5.389673
5.4100037
4.4687
4.711251
5.6752815
4.686854
5.1649346
5.7990537
4.688386
4.667044
4.439807
5.390884
5.984085
4.9683847
4.699034
5.816076
5.636503
6.0131993
4.97636
5.0418353
5.182488
5.0918617
4.729964
4.95245
4.9939966
4.835699
4.986536
4.8779387
4.8423815
4.9256377
4.9537888
4.8058863
4.7782564
4.960889
6.2650256
5.224127
5.49031
5.3915973
5.397621
4.5298743
5.337404
4.7633305
5.3907576
4.9337
4.612845
4.988567
4.034333
4.8844547
6.628652
5.3601084
5.3711643
4.9171176
4.7564015
5.2414827
5.294707
5.203041
5.4369698
4.9320555
5.3005166
4.683079
4.986259
4.6793365
5.1443095
4.7512693
4.899517
4.734804
5.1870275
5.9746633
4.2173862
4.646494
5.01266
4.960322
4.7862844
4.947895
5.0927663
4.9341693
5.263058
5.806653
5.500125
5.1114564
5.193698
4.992006
4.7354994
5.752206
4.6037884
4.517437
5.1584616
4.541384
4.7239413
4.507728
5.2946477
4.7722697
4.448636
4.4423385
5.578553
4.722166
5.5895233
4.350022
5.0375295
4.9391885
5.368617
5.487144
4.7768445
4.575873
4.859142
4.9653163
5.544075
4.7339535
4.4231896
4.9618616
4.955858
4.981659
5.49143
4.8066354
4.8095207
5.378572
5.3328986
5.2498198
5.675478
6.1494055
5.97561
6.0024943
4.907064
4.507712
4.889497
6.0489807
5.9081054
5.99578
5.429858
4.6467195
4.5754356
5.2244425
5.079664
5.161355
5.7914205
5.238992
5.769282
6.0508094
4.8775816
5.0929127
5.51437
5.630272
5.553763
5.6737957
5.8954186
5.4364285
5.72456
5.391491
5.391937
5.168392
4.9790096
5.416388
5.8713365
5.189254
5.5773144
4.9930496
5.2421923
5.1448445
5.354902
5.2643905
5.4485855
4.9782553
4.8424106
5.0549026
5.207709
5.5612044
5.2216444
5.4803314
5.17325
5.0616264
4.874598
5.354643
5.4304514
4.890374
5.3324485
5.5711074
5.712816
5.694637
5.2708297
5.3298163
5.1671596
5.556108
5.2547393
5.504042
5.5134516
4.793331
5.9088435
4.7444606
4.6746182
4.7425303
4.576717
4.833657
5.0263147
5.4045014
4.5806556
4.9194045
4.496266
5.3191133
5.0080624
5.002871
4.6934276
5.786208
5.317865
5.061142
5.1642895
4.9988027
4.9752383
4.890399
5.1451287
4.7317204
4.866996
4.8626814
5.1025615
5.031646
5.301943
5.137898
5.116942
5.0202117
4.853249
5.0242887
4.8804607
4.997317
4.7960553
4.8804607
5.031934
4.8018765
4.721996
5.008716
4.8216352
4.7939377
4.986079
5.1065893
4.862175
5.065699
5.07923
4.924152
4.7504168
7.0052266
5.0148435
5.2265
5.3961883
4.65821
4.3663487
5.5524416
5.1359463
5.0435486
4.8440437
6.848101
5.827922
4.8372865
4.870664
5.001268
5.2110376
5.761971
5.175683
4.8974285
4.8940864
4.7980466
5.607244
4.713622
4.781595
4.9012356
4.5838532
4.77328
4.948574
5.1242757
5.331628
4.91001
5.827537
4.729475
5.575882
4.8760986
5.3202286
4.894609
5.266787
5.6014314
4.901746
5.7829137
4.306284
5.1086006
4.7223806
4.821615
5.1545286
4.7732415
5.306234
4.921856
5.918628
5.949526
6.069521
5.695147
5.759218
5.107025
5.1317434
5.534761
4.735993
4.692055
4.735993
5.2224627
4.763516
4.7762713
4.7142296
4.997952
4.5172334
4.7384963
4.6809325
5.242487
4.439702
4.712551
4.7370043
4.3548174
4.7066407
4.990409
5.8391285
5.000009
4.5385613
4.837388
4.836126
4.4955783
5.0549145
5.447277
4.7767444
4.951976
4.8636484
4.9641857
5.0960164
4.717183
4.456939
5.3926306
4.858013
4.7759156
4.575879
4.815306
4.7779946
4.586737
4.8260527
5.134781
5.1308675
4.496276
5.1025634
5.022579
5.0014787
4.575553
4.5211053
4.9593396
5.153935
4.9174824
4.7167892
4.9366713
4.8594646
4.701455
4.6538024
5.1371603
5.5434074
5.149617
4.877791
4.9309325
5.33033
4.8511314
5.209612
5.0113854
5.175108
5.9557023
5.1618347
4.765449
5.049651
5.331762
4.887042
4.636263
5.5931525
4.67381
4.702387
5.5496063
4.6070204
4.837207
5.1604447
5.219404
5.1604447
5.3718653
5.847474
4.9883275
5.2751575
5.270664
5.2522817
5.49682
5.335777
5.439946
5.7276025
4.9500055
5.256106
5.057184
5.3697033
4.925183
5.287293
5.7276025
5.439946
5.875609
5.353733
5.009461
5.1190886
4.985362
5.107808
5.7082553
4.880881
4.99337
5.162004
5.8458743
5.399585
5.1498213
5.4402533
4.985362
5.107808
4.871456
5.0171237
4.8377414
4.948714
5.1412888
5.164959
5.2346354
5.1305356
4.7121005
5.3361864
5.8533955
4.907262
5.2809954
5.161906
5.041186
5.187063
4.828651
5.1316347
5.0547566
4.864891
5.0673833
5.0849886
4.98771
5.957556
5.262372
5.116605
5.017086
4.963931
5.2103066
5.3484626
5.5631127
4.834936
5.6038184
5.068023
4.7495866
5.0233464
5.3759375
5.0191474
4.880282
4.6070013
5.8023753
5.5834928
5.108093
4.8789573
4.704807
4.68829
4.9039593
4.7888365
4.958879
4.7202115
4.911023
5.0886526
5.331435
4.6795917
5.7811294
5.695597
4.9950686
5.6369605
5.577969
4.900093
5.412692
5.1369286
5.3532066
5.5566845
4.776821
4.7214746
5.3573785
4.906685
4.785547
5.197373
4.818037
5.40081
4.5058846
4.573052
4.565596
4.8609505
5.347301
4.8863964
4.9597673
5.091455
4.8284707
4.835686
5.124562
5.413875
4.705523
5.1938386
4.6225753
4.9898586
4.7278223
4.939702
5.7221055
5.4168515
5.443183
5.3005033
4.9999475
5.1293592
5.0265145
4.948714
4.957489
5.317333
5.2695184
4.7828503
5.037199
4.8377414
4.98308
5.889382
4.60729
5.120637
4.8650947
4.687289
4.510491
5.045161
4.9178686
5.198623
4.6319385
5.095341
5.0004315
4.976079
4.998432
4.8894024
5.156122
5.0373106
5.017099
4.6900344
4.8620553
4.8101563
4.883849
4.966839
4.862831
5.1527925
5.0022187
4.994223
5.3766723
5.160596
5.315874
5.362443
5.6532435
5.2198653
4.8501663
5.1395802
6.549361
5.8200336
6.5875816
5.9820623
5.330198
6.0885687
6.544763
6.692356
5.703773
5.9800696
6.0564795
6.589652
6.1779866
5.1381373
6.173293
6.6960125
6.241283
5.5617085
5.6453476
5.8314853
6.3538933
6.544763
5.8468847
6.7539415
5.8200336
6.973262
6.481561
6.553476
5.3742566
4.7579613
5.1664114
4.7693424
4.943983
4.989521
5.0791497
4.989521
4.9941235
5.058422
5.2333584
4.7427645
4.908714
5.044632
4.7355914
5.2628403
4.6307073
5.8193994
5.0720468
4.7427645
4.6235986
4.6321235
6.2960553
4.9975924
4.8250256
4.7899733
4.718641
4.676988
4.826001
4.697676
4.657625
5.1203456
5.039249
4.497625
4.6465635
4.8272123
4.780746
4.8748217
4.7917085
4.7300687
4.9267564
4.9002557
4.580583
4.728983
4.956771
5.0332136
4.6867847
5.34632
4.8368926
4.8824563
4.446584
4.750303
4.983336
4.8668437
4.982294
4.9170647
4.9641185
4.6879563
5.101968
4.843095
4.5404787
5.52351
4.818662
4.382122
4.7460012
4.8883123
4.742504
5.512073
4.73322
4.703771
4.3244767
6.815883
4.610693
4.7516775
5.3667808
4.7699685
5.146773
5.752639
4.929955
4.595055
5.2649255
4.912966
5.1280823
6.1618857
5.3750095
4.8695555
4.4986315
4.7352386
4.885689
4.8624163
4.722283
4.8176556
4.9565287
4.792382
4.7601967
5.534975
4.972224
5.3927407
5.090158
5.4244494
4.788018
5.3429823
5.936352
4.9475055
4.7879534
5.646777
4.3501782
5.4500766
5.0540843
5.5679903
4.4368315
4.8689547
4.548435
4.8280034
4.8507237
5.1686664
4.8813915
5.111919
4.8411894
4.9487576
4.833267
4.732741
4.60933
5.222693
5.2734866
4.8432674
4.986431
4.8989067
5.8527365
5.3761406
5.2319465
4.9997063
5.0286756
5.453976
5.970241
4.531485
4.90277
4.662845
4.777626
5.587213
5.26454
5.611045
5.1513515
4.8154435
4.791374
4.8633695
5.111906
5.794686
5.789898
5.1100736
5.0615306
5.896267
4.9500794
6.836541
4.849284
4.701991
5.5363626
5.4077854
4.6563406
4.7346573
4.784352
5.893357
5.0658417
5.3308663
5.3147736
4.844289
4.455945
5.2796893
4.5050344
5.2040772
5.4874477
4.646717
4.7124147
4.707162
4.711467
4.7331905
4.696901
5.9325266
5.499588
5.3129997
4.5969663
5.6534815
4.759399
5.5939364
4.7324586
4.5067215
4.742504
5.567528
4.9385524
5.3337016
4.726928
5.4530053
5.266729
5.136518
4.668293
5.125128
5.623014
5.301696
4.9853873
4.841541
4.687122
4.8896184
4.827108
4.9375997
4.728906
4.852266
5.1205783
4.665478
5.1470175
4.9218645
5.303338
4.872758
4.687122
5.0276957
4.8896184
5.0276957
5.171616
4.840131
4.886576
5.1205783
4.872758
5.1100206
5.135033
4.9194093
5.303338
4.886576
5.135033
4.852266
4.827108
5.1100206
5.143211
5.0276957
4.9518623
4.840131
4.7252665
5.13771
5.4194684
4.96524
5.4166017
5.0781746
5.2735763
4.76648
4.625619
5.151919
4.95588
5.0515137
5.1341753
5.0433035
4.7688246
4.4337387
4.7892237
4.9602637
4.4659452
4.6305485
4.9503417
5.350574
5.2885013
4.96472
4.9496264
4.657606
5.356675
4.778598
4.7861066
5.398925
5.146833
5.161771
4.9987245
4.6812444
6.0022283
5.636609
4.8306184
5.036639
5.7881737
5.9211645
5.310266
5.090014
5.9640284
4.6588173
5.4310393
4.9752154
5.305752
4.9790134
5.826435
4.830232
5.1463976
4.790502
5.330449
5.250215
5.250215
5.508435
6.2992587
6.445553
5.240355
5.508435
6.3054724
6.046265
5.9714794
5.809358
5.518224
6.323353
4.7128663
5.089657
6.2495255
5.240355
5.1278934
4.871316
5.4186563
6.289892
5.679498
5.330449
5.952962
4.79348
6.0630283
4.866962
4.79348
6.0630283
6.289892
5.1278934
5.80376
5.4970074
5.1185446
4.871316
5.4848185
5.1185446
5.0247526
4.8573484
4.666843
5.031005
4.83741
4.405328
5.0027514
4.8419027
4.912722
5.34433
4.880727
4.8733377
4.833922
5.293708
4.878333
4.868572
4.5747705
4.895312
5.153005
5.3003044
4.662865
4.968438
4.709995
4.7738047
4.7638144
4.8573484
4.85871
5.132455
4.904998
5.198908
5.057455
5.067839
4.4668827
4.98379
4.6246552
4.5564017
5.021069
4.98379
4.6608334
4.981024
5.1029277
5.170702
5.136107
4.938834
4.9660616
5.7570496
4.743302
5.6830716
5.210653
4.7380557
5.556036
5.9159784
5.779808
4.99915
6.070998
6.1714454
5.558686
5.0974617
5.31038
6.162546
5.694491
5.0311346
6.070898
4.7973456
4.8735166
5.311058
5.2585316
4.977031
4.925472
4.6228437
5.8802276
5.592902
5.3077583
5.428073
5.7394624
5.6548576
5.5762215
4.9230046
4.914045
4.933601
5.7267356
4.94293
4.7583404
5.344975
5.164628
5.153824
5.701173
5.0884037
5.934924
5.0606575
5.7996216
5.031124
4.933601
5.4181757
5.1956735
5.0921745
4.581941
5.210011
4.7860885
5.1715403
5.5061917
4.8443627
5.287353
4.8487415
6.1040998
5.3148646
4.7779894
4.713462
4.6293335
5.0053487
5.2984505
6.0750484
5.277896
5.5378737
5.058356
5.293627
4.801837
5.4181757
4.81153
4.9813795
4.81994
5.977701
5.1332498
5.274293
4.8914657
5.3696947
4.4002
4.8141375
5.468579
5.417328
4.9166164
5.236401
4.927569
5.7818737
5.2277985
5.0337567
5.2211323
5.1346803
5.197505
5.387151
4.817284
5.2012467
5.170067
4.564823
4.5857344
6.068502
5.1868787
5.0988164
5.0847983
5.2635384
4.921262
4.695523
5.988234
5.030289
5.092084
5.0575852
4.873219
4.93989
5.123498
5.157996
4.987419
5.353145
5.206821
5.2608085
5.223005
4.905656
4.858933
5.049827
5.059096
5.814375
5.1046014
5.40841
4.685386
4.901801
4.9692144
5.090211
5.121873
5.303454
4.3532734
6.3684826
4.8144836
6.012428
4.977919
4.9379826
4.8944583
4.7218843
5.0314846
4.8421063
4.8492155
5.3271694
5.0321198
5.3773527
4.66721
5.1145535
5.395771
4.8706627
4.851404
4.851404
5.6716747
4.7027297
4.851404
5.910774
6.1098056
4.795963
4.802264
5.0490665
4.8940125
5.45382
4.80441
4.851151
5.6280193
4.543426
5.1054506
5.103633
4.9269633
4.8948607
4.8168674
4.988766
4.642998
5.1511884
4.97677
4.8047204
4.8332753
5.083912
4.641621
4.8327365
5.554063
4.9675283
4.627434
4.937852
4.948336
4.9523315
5.05372
4.4488173
4.8731537
4.641931
5.3638506
5.0574203
5.0581155
4.911293
4.6355867
5.4400277
5.912852
5.014614
4.8440084
5.0008416
4.870552
5.0859165
4.850658
5.1320877
4.9435725
5.821024
5.3144555
5.0989714
4.9454665
4.86073
4.8597155
4.3784056
4.6697607
4.9284954
4.6869793
4.87688
5.031135
5.1326056
4.949632
4.8454843
5.2663326
4.8114886
5.333413
4.9608793
4.6243405
4.7800455
4.7630534
4.7611446
4.8622923
5.2293806
5.5730457
5.027053
5.2260346
4.621664
4.3124185
5.0972652
4.9047933
4.5518527
4.8384333
5.2479024
5.163217
4.976081
5.2086964
5.1226497
5.275181
4.989536
4.9181867
5.1963015
5.5708137
4.695628
5.3516912
4.8318734
4.9098086
5.6566553
5.6001234
4.9180064
4.836118
5.354188
4.941092
5.1103334
4.867862
5.4395175
5.387769
4.6472096
5.5708375
4.8509483
4.9925156
4.7416425
4.836962
5.2158065
4.649597
5.513699
5.1946783
5.5170927
4.75745
4.9075694
4.353044
5.0163765
5.1174884
5.3711586
4.790213
5.1863484
4.8732505
4.8433895
5.0203285
5.3232327
4.823434
5.261617
5.8356905
4.9296236
4.837908
5.392806
4.6272454
4.828987
4.8552947
5.2899594
4.8370986
4.587783
4.833657
5.2441506
5.5892725
5.1024327
5.003716
4.3335276
5.440322
5.214825
5.040244
4.2745686
5.449427
4.8842754
4.4422827
4.9813747
5.6237125
4.6672845
5.2926197
5.5657716
5.4742556
5.116215
4.962663
4.6211
4.92529
4.5238833
4.7991076
4.8176727
5.245432
4.549453
6.0456734
5.1311584
4.9612665
4.592833
5.096346
5.2013855
5.089741
4.6602674
5.019901
5.0241356
5.394093
5.045961
5.059964
5.1641707
4.8373685
4.7716746
4.784416
4.705414
4.5924797
4.5995793
4.636684
4.8865814
4.605495
5.089367
5.020376
5.0008984
5.1077867
4.9168706
5.644283
5.3232894
4.900005
4.682546
4.7845674
5.1794896
4.930279
4.857168
4.757658
4.609264
4.7331457
4.582187
4.538505
4.8465295
5.1503716
4.7192826
5.117569
4.8465295
4.6582947
4.799945
4.539672
5.0467954
4.590891
5.262536
4.995303
4.53967
4.8787165
4.626045
4.597913
4.5439625
5.0480304
4.857075
5.075156
4.964386
4.7433558
4.781051
4.9330535
5.376277
4.626207
5.3924956
4.874459
4.8861747
4.857175
4.940583
4.604446
4.67902
4.626054
4.590891
4.688345
4.8553405
4.582189
5.0039525
4.6996
4.575972
4.9170933
4.852054
5.627445
4.803496
6.4329185
4.3759103
5.627445
6.4329185
4.9173956
4.6874065
4.832296
4.9244146
4.5875964
4.896902
4.757776
4.67962
4.757776
4.995781
5.061746
4.7570057
4.6124945
4.744003
4.6643653
4.67324
4.7444754
4.99429
4.953162
4.6138096
4.8025713
5.0110617
4.7990537
4.733923
4.8689227
4.832296
4.7450943
4.7686806
4.77989
4.7892056
4.9080033
4.871192
5.069542
4.845525
4.6358795
4.7884893
4.71554
4.868485
4.545154
4.7418156
4.6222067
5.3624187
5.5789223
5.4379587
5.4912143
5.4669304
5.4669304
5.4669304
4.9448195
4.8451333
4.848415
4.670753
5.032835
4.994023
5.0396304
5.1598034
5.020762
4.793199
5.0396304
4.701825
4.626045
4.793199
4.793599
4.582189
5.573517
4.626054
4.582187
5.1192
4.793599
5.021111
5.441502
5.1192
4.823932
4.8593493
5.603721
5.0061107
6.604971
5.027403
4.619523
4.9389277
6.523593
6.5385904
4.777836
5.3185163
4.907293
5.0760994
5.036341
4.818506
5.757909
4.9787893
4.8680468
5.473919
4.7180734
5.409743
4.9754877
5.1402497
5.3052616
5.0642576
4.8097787
4.652177
4.791209
4.9790173
4.8870373
4.9293556
5.0287285
4.5542727
5.042299
4.853111
4.6642675
4.5881715
4.519171
4.5681114
4.733617
4.7209654
5.1260753
4.4032373
5.02049
4.9302783
4.6051292
4.0625257
4.9762664
4.1603785
4.59
5.3854394
5.8564754
5.322904
5.774875
5.952405
5.1063747
6.4642925
6.1377974
5.395303
4.8597093
6.1830025
5.7642913
5.2907887
5.690323
4.959248
5.243771
5.162669
5.79069
5.0528803
5.5182247
5.1871243
5.2907887
6.026011
5.5398474
5.445159
5.448554
5.297166
5.0528803
5.157436
6.198949
5.2907887
5.612367
5.4322896
5.220989
5.811634
5.3193617
5.0253963
5.792944
5.0528803
5.295647
6.3207564
5.174134
5.0011587
5.7628508
5.1338644
5.216681
5.5495195
4.596366
4.8695555
4.5050344
4.7346573
4.595055
4.7760735
4.8816023
4.8439093
4.6930537
5.1077843
5.1842012
5.2604585
5.0328884
4.1958647
5.2040143
5.225058
4.4259787
4.977569
4.7164783
4.716445
4.7656727
5.205518
5.1626415
5.5192804
5.972978
5.865755
5.8871875
5.74044
6.2094045
5.298101
4.746348
4.7560163
5.2916255
5.3298483
5.0254264
5.3851814
5.2510734
5.5989585
5.32131
5.812052
5.721953
4.7354064
5.146006
5.747921
5.1208262
4.9213777
4.4760914
5.045378
5.2037654
4.983583
6.122267
6.2874413
4.989533
5.1131163
6.198364
5.9604397
5.5815606
5.17297
4.5621943
5.8169518
6.0900345
4.442389
6.1209526
4.802297
4.937165
4.7410517
4.678498
5.25114
5.060508
5.299032
5.15943
4.9202657
4.5979795
4.8678765
5.1069736
4.9031906
5.028179
5.533014
5.4793186
4.804807
4.9513807
5.3759704
5.1846695
6.000062
4.7781363
5.3021107
5.623042
6.4694967
5.252506
6.8900843
7.1031704
4.9074917
4.4963884
5.3187714
5.188339
4.706896
5.135355
6.115098
5.833533
5.822046
5.6330876
5.6036267
4.9291205
5.68429
5.589884
6.006026
4.9113946
5.0802937
4.571918
5.162339
5.5274506
5.296513
5.8534536
4.7593203
4.712037
5.2682657
4.9374933
5.5274506
6.4595585
5.8534536
5.157431
5.2676105
5.162339
6.4595585
6.054544
5.2676105
6.2829504
5.646096
4.9374933
5.0041385
5.437558
5.423461
4.826257
4.987461
4.878118
6.054544
6.718821
4.7593203
4.744937
4.9227934
6.015733
5.296513
6.718821
6.317688
5.437558
5.423461
5.0041385
4.987461
5.2682657
5.646096
6.156983
6.913839
6.015733
4.878118
4.9227934
6.913839
4.826257
6.317688
6.6050334
6.156983
4.930298
4.6583576
4.583581
4.839581
5.142233
5.0005975
4.8320913
5.621704
6.3248906
5.3855753
5.3758793
5.3752894
5.04931
4.814832
5.019513
4.9607363
4.7438264
5.232666
6.315979
5.4239864
5.790706
5.11493
4.6233897
4.813014
5.0111203
4.813014
5.0111203
5.4104376
5.000237
4.813014
5.0111203
4.789738
4.6340356
5.0099297
4.866455
4.632772
4.6877155
4.553769
4.610284
4.797158
5.081644
5.032579
4.6258607
4.8220434
4.750577
4.654226
4.945923
5.022737
5.076713
4.9894433
4.9249516
4.890769
4.8200274
4.5789003
4.887804
5.02094
5.5282464
5.1320877
5.3144555
4.8440084
4.850658
4.814424
4.8925953
4.713681
4.5932293
4.5754175
4.5105367
4.7215376
4.7571354
4.571805
4.732981
4.7260013
4.7184854
4.579133
4.461246
4.8168945
4.8816657
4.492561
4.42045
4.7526407
5.430865
5.250495
4.657533
4.5446596
4.529642
4.566482
5.3907213
4.6768703
4.7668076
4.7219524
5.031264
4.8277254
5.3307095
5.3024135
4.8691883
4.931831
5.215125
5.705823
5.0318813
5.0847454
4.7378025
4.6061935
4.689753
4.8729906
4.742502
4.914996
4.9773293
5.1490245
5.219404
5.2331004
4.514381
5.154499
5.0838346
5.203487
4.6498003
4.704603
6.274233
4.5969934
4.7105026
4.3134575
5.923909
5.486065
4.885794
6.4919767
4.6204348
4.9686933
5.163082
4.7640233
6.339511
5.084018
4.954467
4.6988196
4.800661
4.8558383
5.012474
5.395964
4.767897
5.226125
5.0093384
4.851404
4.3150387
4.9098125
4.71686
5.4282646
4.9855766
4.7835
4.800114
5.1001534
4.904768
4.969866
5.000237
4.798345
4.4603014
4.757776
4.833575
4.5987525
4.676333
6.32092
4.5063562
5.0107565
4.3771853
4.722406
5.3157988
4.7418156
5.0520496
4.5527997
4.878892
4.6023455
4.5465546
5.0086007
4.900452
4.764067
5.00293
5.2618284
5.0969906
4.5551972
4.8970013
4.6365743
4.8315406
5.379659
4.970485
5.2035174
5.230046
4.8108454
5.062255
5.014954
4.7452607
4.6444945
5.242608
4.886308
4.233213
4.972188
5.3712893
4.8824534
6.2931104
5.129746
5.1784277
4.7244263
4.1914334
4.889551
4.770067
4.8229737
5.089913
5.2836003
4.662393
4.8475046
4.8995023
4.8355217
5.153988
5.2611814
5.2275553
4.2694483
5.063733
4.9072895
4.5322757
4.9326377
4.700427
5.4225287
4.870026
5.0345592
4.6933146
6.2500587
4.7746315
5.102544
5.3324327
5.460613
4.784862
5.1729646
4.559808
5.0360155
5.119837
5.812944
5.2451773
5.1555624
4.8556175
4.8329525
4.6383133
5.008807
5.0001564
4.8108454
5.260536
4.994023
5.202505
4.840658
5.2295413
5.000657
5.083969
5.1151767
5.1575994
5.241835
4.5133696
4.6439867
4.6585
5.050794
5.7221055
5.3299055
4.870026
4.7553887
5.234058
4.8726153
4.7933283
4.71679
4.7456856
4.665478
4.6116104
5.4970055
4.057719
5.4437795
4.627854
4.6585
4.9701467
4.8759627
4.8754215
4.81549
4.888581
5.3367224
5.118533
4.77347
5.154499
5.0143237
4.756574
4.93705
4.4909916
4.590891
4.4196763
5.0634756
5.143295
5.3941536
4.74766
5.0146465
4.9489694
5.235813
5.0752335
4.5068183
4.8793902
4.7521133
5.8781385
4.8744826
4.725738
4.687742
4.769772
4.1566305
4.9727383
5.532953
5.341763
5.028877
5.0210495
5.167028
4.8728576
4.4167595
4.885792
4.78219
4.9362226
5.04177
4.5322757
4.99017
4.7740283
5.2573
5.3230762
6.4890122
5.804288
4.91555
4.995622
5.614628
4.3214326
5.158796
4.9808598
5.435099
4.4847083
5.1591725
4.7520533
4.4551363
5.686206
4.9882026
4.786443
5.115438
5.132203
4.61041
5.7501907
6.1915607
4.783801
4.8333836
4.9828534
5.4017353
4.8514957
4.983453
4.6744847
4.9966164
4.7425876
4.739117
4.887874
5.762282
4.592881
4.642342
5.277994
5.5180197
5.9684744
5.4078403
4.7272744
5.4871507
4.659727
5.5284367
4.8566804
4.057021
4.5323915
4.740858
5.6072717
4.562797
5.4157043
4.7492537
4.5032773
4.952199
4.936812
5.1940184
4.970588
4.6380267
4.888581
4.8684216
4.9070787
4.4748034
4.6228933
4.6674957
4.789706
4.7985196
5.4787827
5.3091087
4.7425766
4.8178883
4.518522
4.9482183
4.2472277
4.6455393
4.039406
5.1721463
4.967358
5.006257
4.951897
5.069542
4.4500136
4.7641373
4.507905
4.681973
4.8447247
5.0191474
5.6870313
4.9353867
4.7089477
4.8862486
4.846607
4.835331
4.2733297
4.6999364
5.226351
5.131119
4.994023
5.806916
4.40674
4.7668104
6.355172
5.7656393
4.611064
5.0013986
4.609264
4.692813
4.7036633
5.712503
4.261361
5.121286
4.961731
4.644395
4.347659
5.8298492
4.8675823
4.6086073
4.7872615
4.8475046
4.666063
4.8702774
6.294682
4.7706804
4.402246
4.8270955
6.2829504
5.5284367
5.0961075
4.65769
4.717322
4.7160187
4.640906
4.6365743
5.480685
4.593364
4.6124167
5.111741
4.7827744
5.142259
5.069542
5.3533964
5.908598
5.1606007
5.6875663
5.130388
6.823461
5.39337
5.4078918
4.5210824
5.581638
5.0645776
4.864997
4.967164
5.4022946
4.839931
4.657219
4.8132744
4.545242
5.3650026
4.598162
4.8926406
5.451969
4.8454084
4.637988
4.5002756
4.6834764
4.8108454
4.948122
4.9187784
5.1256676
4.609264
4.673924
4.9786797
4.8555584
4.796654
5.731263
4.9725366
4.696251
4.641371
6.284825
5.2447376
4.6404514
5.109545
5.0481462
4.829184
4.956282
4.402246
4.8343716
4.09133
4.665478
4.5323915
4.5995827
5.4405866
4.864983
5.051595
5.281183
4.82572
4.5503154
4.7695084
4.8011317
4.588644
5.4552655
5.13724
4.917804
4.3771853
4.8892713
4.73527
4.91876
5.097245
4.9580755
5.3058095
4.9760175
4.414506
4.5836296
4.884061
5.129746
4.564404
5.1071987
4.833314
4.3424807
5.0348396
7.0650454
4.904768
4.8953276
5.106514
5.095041
4.623995
4.5624967
5.218872
5.074605
6.0078173
5.412571
5.1940184
5.4292254
5.813176
5.077906
5.012912
6.654
5.514495
5.020466
5.905099
5.03327
4.915215
4.818262
5.1033316
4.3134575
4.8299823
4.7668104
5.3823633
4.6570754
4.248619
5.056731
4.7641993
4.7725286
5.045244
5.7042055
5.7496166
5.4472733
5.0927052
5.565506
5.3357763
4.7982564
5.1885657
4.61819
4.8108454
4.6598887
5.1821327
4.9459233
5.038788
4.8194013
4.45553
4.6448464
4.8290014
4.6853642
5.141762
5.0405064
4.613379
4.4514217
4.98412
5.2569084
4.5321474
4.8817325
4.865838
4.7425876
5.068549
5.1490245
5.483768
4.970651
4.7641993
4.36389
4.739356
4.6737533
4.8352222
5.1774316
4.89759
4.623995
4.903838
5.1578674
5.228749
4.4814744
4.6924047
5.0012364
4.410851
5.078467
4.6627173
5.0318813
4.606123
4.803068
4.9204826
5.5056043
5.9573593
5.4279823
4.8957577
4.9121947
5.0735583
5.1574707
5.399585
5.0261354
5.655039
4.9753876
4.733695
5.160772
4.6872973
4.9948974
4.8664517
4.9024734
4.8994026
5.5997143
4.8393593
5.22377
5.6769233
5.130177
5.1533933
5.0773897
5.0114584
5.2112226
5.2901115
5.0850964
6.297155
4.668358
4.7213755
4.794922
5.144878
5.104725
5.1254587
4.832296
5.0746555
5.269079
4.76047
4.734024
4.7890153
4.407851
6.0947857
4.9719563
5.251602
4.841389
4.987035
5.6761503
4.91244
5.7116566
4.6094766
5.59552
4.67962
5.404596
6.0973654
5.362996
5.596958
5.2199583
5.448588
5.218867
4.8394136
5.133858
5.225064
4.7686563
5.639262
5.678264
5.5293307
5.0073886
4.8934965
4.971809
5.1254587
4.6288047
5.9100857
5.394642
5.274884
5.4467497
4.748689
4.3552294
6.0202327
5.1002345
4.689272
4.795706
4.0797987
4.8366075
5.654699
5.006736
5.6384516
5.1681056
4.9236336
4.957433
5.352404
5.448588
4.8363557
5.895936
5.0879807
4.978429
5.3588157
5.0765433
5.711399
4.597733
4.8860497
6.110413
4.8125954
5.2455225
4.663986
4.9678144
4.8336277
5.007112
5.390788
4.963839
5.139668
5.536352
5.096917
4.8468275
4.88505
5.5325346
5.2386174
4.691063
4.9841013
5.0073886
5.986035
5.9625134
4.623641
5.0688314
5.269367
5.4615564
5.007723
4.9565797
4.5697465
4.757658
4.7337575
4.7986255
4.9721107
5.889382
4.978429
5.269367
5.5889635
5.5127664
5.3127527
5.753313
4.872758
5.184412
4.635661
4.7387614
4.916876
4.9721107
4.9423366
5.678264
4.9743466
5.4860854
5.1318207
5.5110865
5.13389
5.238149
5.225064
5.184412
4.8093386
4.8465295
5.265768
4.8757772
4.6219153
5.122331
5.1707306
4.2901535
5.1638703
5.1132007
4.6059656
5.634097
4.9923897
4.744492
4.962718
5.5519123
4.9121428
4.883137
4.710047
5.6027555
5.157567
4.934049
4.892136
5.19566
4.91661
5.0062747
4.6912465
4.9852257
4.8717127
6.3976727
5.9545317
4.611447
5.7214646
4.9011936
5.27868
4.88768
4.4099336
4.8160114
5.9439125
5.060017
5.4823074
4.9748125
4.8659787
5.2385406
4.642789
4.929951
4.6414933
6.675785
4.967157
5.4247274
5.27253
4.6094785
4.995937
5.7116566
4.83474
4.9701486
4.7447705
5.5966997
5.1249166
5.501895
4.7552743
5.964276
5.1318207
4.866425
4.8336787
4.9852257
4.6769037
4.9871674
6.109987
5.1264305
5.034258
5.1411886
5.499295
4.8732457
4.896902
5.256462
4.851267
5.091455
4.7908115
4.8044987
4.551135
5.5519123
4.8414702
5.895936
6.0343814
5.155534
5.0688314
5.598879
4.879553
5.4860854
5.6027555
5.6927295
5.3873816
4.5107746
6.049801
4.9820657
5.848153
5.29067
4.737697
5.4993978
4.816576
5.654699
4.9756546
4.6257606
5.138782
4.9666834
6.1484127
5.502821
5.251602
6.2789207
5.061871
5.455783
5.4198995
5.110639
4.9646716
5.2386174
4.7841077
6.260846
4.9890227
5.355927
4.4665475
5.5098557
5.24376
5.482992
4.7078195
4.7269435
5.4187074
4.957447
5.5325346
5.0885496
6.045664
6.0407963
4.6983542
4.8196898
4.8761535
5.269367
5.1260376
5.3873816
4.9848547
4.597949
4.8929777
6.184993
4.970819
5.0073886
5.3803144
4.856341
4.2670493
5.054203
5.110639
5.1391587
5.394642
6.226955
4.741549
4.924164
4.411217
4.8768163
5.0997972
5.7082167
4.864479
5.5296845
4.8720784
5.267182
6.06748
5.964879
4.9828033
5.1079698
5.6463537
4.943912
4.9389334
4.9121947
5.352404
5.70113
4.7673426
5.619473
4.8623505
5.054262
4.841389
5.5166197
5.0668716
5.0328565
6.2840447
6.0181947
5.057837
4.9500856
4.7694283
4.764403
4.883326
5.2521124
5.274884
5.005759
5.0140443
5.299815
5.115059
4.8757772
5.035188
5.081603
4.981124
5.0850964
4.90655
5.8605113
4.9546003
5.7184405
6.1748776
5.70113
4.4900117
5.2436814
4.97579
4.6872973
5.501895
5.6562767
5.1533933
4.879553
4.9541206
4.752607
4.9164805
5.041189
4.551978
5.6927295
5.9100857
4.992221
4.91298
5.0694423
5.1781373
5.1620507
5.0376396
5.0079503
5.575647
4.666852
5.364384
4.969333
5.0073886
4.927015
4.9828033
5.496649
4.8717127
5.047205
5.0732613
5.9545317
5.753313
4.7986255
5.4003077
4.6464634
5.22535
5.0120683
5.064888
4.777235
4.911463
4.6907873
5.4978447
6.1209526
5.2813683
4.9895434
5.041189
4.865593
4.667321
5.1429873
4.472218
6.1220613
4.955903
5.0980616
4.9948974
5.1523705
5.010573
5.106423
4.7867537
5.6930494
5.1225944
4.9145494
4.5610867
RMSE : 1.8548911640057175 ; Pearson Correlation Coefficient : 0.19903442590966353
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
RMSE : 1.7273183993658947 ; Pearson Correlation Coefficient : 0.04453206966512105
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
RMSE : 1.756065730513885 ; Pearson Correlation Coefficient : 0.23311044821975194
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
RMSE : 1.1846278906245926 ; Pearson Correlation Coefficient : 0.6965380003561024
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论