From: https://www.kaggle.com/praxitelisk/inaturalist-2019-eda-dl
Author: Praxitelis-Nikolaos Kouroupetroglou
Score: 0.78337
Kudos to the author for the main ideas; the kernel is reproduced below for reference.
import os
import json

import cv2
import matplotlib.pyplot as plt
import numpy as np  # linear algebra
import pandas as pd  # data processing, CSV file I/O (e.g. pd.read_csv)

print(os.listdir("../input"))

from keras.models import Sequential, Model
from keras.layers import Dense, Flatten, Activation, Dropout, GlobalAveragePooling2D
from keras.preprocessing.image import ImageDataGenerator
from keras import optimizers, applications
from keras.callbacks import ModelCheckpoint, LearningRateScheduler, TensorBoard, EarlyStopping
['train2019.json', 'train_val2019', 'val2019.json', 'test2019', 'kaggle_sample_submission.csv', 'test2019.json']
Using TensorFlow backend.
# Load the training annotations and join image file names with category labels
ann_file = '../input/train2019.json'
with open(ann_file) as data_file:
    train_anns = json.load(data_file)

train_anns_df = pd.DataFrame(train_anns['annotations'])[['image_id', 'category_id']]
train_img_df = pd.DataFrame(train_anns['images'])[['id', 'file_name']].rename(columns={'id': 'image_id'})
df_train_file_cat = pd.merge(train_img_df, train_anns_df, on='image_id')
df_train_file_cat['category_id'] = df_train_file_cat['category_id'].astype(str)  # flow_from_dataframe expects string labels
df_train_file_cat.head()
| | image_id | file_name | category_id |
|---|---|---|---|
| 0 | 0 | train_val2019/Plants/400/d1322d13ccd856eb4236c... | 400 |
| 1 | 1 | train_val2019/Plants/570/15edbc1e2ef000d8ace48... | 570 |
| 2 | 2 | train_val2019/Reptiles/167/c87a32e8927cbf4f06d... | 167 |
| 3 | 3 | train_val2019/Birds/254/9fcdd1d37e96d8fd94dfdc... | 254 |
| 4 | 4 | train_val2019/Plants/739/ffa06f951e99de9d220ae... | 739 |
df_train_file_cat.shape
(265213, 3)
len(df_train_file_cat['category_id'].unique())
1010
# Example of images for category_id = 400
img_names = df_train_file_cat[df_train_file_cat['category_id'] == '400']['file_name'][:30]
plt.figure(figsize=[15, 15])
i = 1
for img_name in img_names:
    img = cv2.imread("../input/train_val2019/%s" % img_name)[..., [2, 1, 0]]  # reorder BGR (OpenCV) to RGB (matplotlib)
    plt.subplot(6, 5, i)
    plt.imshow(img)
    i += 1
plt.show()
# Load the validation annotations the same way
valid_ann_file = '../input/val2019.json'
with open(valid_ann_file) as data_file:
    valid_anns = json.load(data_file)

valid_anns_df = pd.DataFrame(valid_anns['annotations'])[['image_id', 'category_id']]
valid_anns_df.head()
| | image_id | category_id |
|---|---|---|
| 0 | 265213 | 644 |
| 1 | 265214 | 597 |
| 2 | 265215 | 883 |
| 3 | 265216 | 300 |
| 4 | 265217 | 881 |
valid_img_df = pd.DataFrame(valid_anns['images'])[['id', 'file_name']].rename(columns={'id':'image_id'})
valid_img_df.head()
| | image_id | file_name |
|---|---|---|
| 0 | 265213 | train_val2019/Plants/644/716a69838526f3ada3b2f... |
| 1 | 265214 | train_val2019/Plants/597/0942cc64d2e759c5ee059... |
| 2 | 265215 | train_val2019/Plants/883/acfdbfd9fa675f1c84558... |
| 3 | 265216 | train_val2019/Birds/300/5f3194ff536c7dd31d80b7... |
| 4 | 265217 | train_val2019/Plants/881/76acaf0b2841f91982d21... |
df_valid_file_cat = pd.merge(valid_img_df, valid_anns_df, on='image_id')
df_valid_file_cat['category_id'] = df_valid_file_cat['category_id'].astype(str)
df_valid_file_cat.head()
| | image_id | file_name | category_id |
|---|---|---|---|
| 0 | 265213 | train_val2019/Plants/644/716a69838526f3ada3b2f... | 644 |
| 1 | 265214 | train_val2019/Plants/597/0942cc64d2e759c5ee059... | 597 |
| 2 | 265215 | train_val2019/Plants/883/acfdbfd9fa675f1c84558... | 883 |
| 3 | 265216 | train_val2019/Birds/300/5f3194ff536c7dd31d80b7... | 300 |
| 4 | 265217 | train_val2019/Plants/881/76acaf0b2841f91982d21... | 881 |
nb_classes = 1010
batch_size = 128
img_size = 150
nb_epochs = 65
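A note on coverage: with batch_size = 128 and the steps_per_epoch = 80 used in fit_generator below, each epoch samples only 80 * 128 = 10,240 of the 265,213 training images. A minimal sketch (not in the original kernel) of sizing the steps to sweep the full set once per epoch:

# Hypothetical full-coverage setting; the kernel deliberately uses 80 steps to keep epochs short.
steps_full_epoch = int(np.ceil(len(df_train_file_cat) / batch_size))  # 2072 steps per epoch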
#from imblearn.over_sampling import RandomOverSampler
#ros = RandomOverSampler(random_state=0)
#X_resampled, y_resampled = ros.fit_resample(df_train_file_cat[["image_id", "file_name"]], df_train_file_cat["category_id"])
#train_df = pd.DataFrame(X_resampled, columns=["image_id", "file_name"])
#train_df["category_id"] = y_resampled
Here I applied the data augmentation technique from the Udacity course, as follows:
%%time
train_datagen = ImageDataGenerator(rescale=1./255,
                                   rotation_range=45,
                                   width_shift_range=.15,
                                   height_shift_range=.15,
                                   horizontal_flip=True,
                                   zoom_range=0.5)
train_generator = train_datagen.flow_from_dataframe(
    dataframe=df_train_file_cat,
    directory="../input/train_val2019",
    x_col="file_name",
    y_col="category_id",
    batch_size=batch_size,
    shuffle=True,
    class_mode="categorical",
    target_size=(img_size, img_size))
Found 265213 images belonging to 1010 classes.
CPU times: user 5.88 s, sys: 7.94 s, total: 13.8 s
Wall time: 1min 20s
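One caveat worth keeping in mind: because category_id was cast to string, flow_from_dataframe orders the classes lexicographically ('0', '1', '10', '100', ...), so the generator's internal class index is not the numeric category id. A quick way to inspect the mapping (the same inversion is used at prediction time further below):

idx_to_cat = {v: k for k, v in train_generator.class_indices.items()}  # class index -> original category_id string
print(list(train_generator.class_indices.items())[:5])  # e.g. [('0', 0), ('1', 1), ('10', 2), ...]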
# udacity_intro_to_tensorflow_for_deep_learning/l05c04_exercise_flowers_with_data_augmentation_solution.ipynb#scrollTo=jqb9OGoVKIOi
# This function plots images in a grid with 1 row and 5 columns, one image per column.
def plotImages(images_arr):
    fig, axes = plt.subplots(1, 5, figsize=(20, 20))
    axes = axes.flatten()
    for img, ax in zip(images_arr, axes):
        ax.imshow(img)
    plt.tight_layout()
    plt.show()

# Indexing train_generator[0] re-draws batch 0 with fresh random augmentation each time,
# so these are five differently augmented variants of the same underlying image.
augmented_images = [train_generator[0][0][0] for i in range(5)]
plotImages(augmented_images)
%%time
test_datagen = ImageDataGenerator(rescale=1./255)
valid_generator = test_datagen.flow_from_dataframe(
    dataframe=df_valid_file_cat,
    directory="../input/train_val2019",
    x_col="file_name",
    y_col="category_id",
    batch_size=batch_size,
    shuffle=True,  # note: shuffling here scrambles the order assumed by the classification report further below
    class_mode="categorical",
    target_size=(img_size, img_size))
Found 3030 images belonging to 1010 classes.
CPU times: user 72 ms, sys: 68 ms, total: 140 ms
Wall time: 829 ms
import gc
gc.collect();
#from keras.applications.vgg16 import VGG16
#from keras.applications.inception_v3 import InceptionV3
from keras.applications.inception_resnet_v2 import InceptionResNetV2
#from keras.applications.nasnet import NASNetLarge
#from keras.applications.densenet import DenseNet121
#model = VGG16(weights='imagenet', include_top=False, input_shape=(img_size, img_size, 3))
model = InceptionResNetV2(weights='imagenet', include_top=False, input_shape=(img_size, img_size, 3))
model_name = "InceptionResNetV2"
WARNING:tensorflow:From /opt/conda/lib/python3.6/site-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating: Colocations handled automatically by placer.
Downloading data from https://github.com/fchollet/deep-learning-models/releases/download/v0.7/inception_resnet_v2_weights_tf_dim_ordering_tf_kernels_notop.h5
219062272/219055592 [==============================] - 2s 0us/step
# Add a custom classification head on top of the pretrained base
model_final = Sequential()
model_final.add(model)
model_final.add(Flatten())
model_final.add(Dense(1024, activation='relu'))
model_final.add(Dropout(0.5))
model_final.add(Dense(nb_classes, activation='softmax'))

model_final.compile(optimizers.rmsprop(lr=0.0001, decay=1e-5),
                    loss='categorical_crossentropy',
                    metrics=['accuracy'])
WARNING:tensorflow:From /opt/conda/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:3445: calling dropout (from tensorflow.python.ops.nn_ops) with keep_prob is deprecated and will be removed in a future version. Instructions for updating: Please use `rate` instead of `keep_prob`. Rate should be set to `rate = 1 - keep_prob`.
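A side note on the head: GlobalAveragePooling2D is imported but never used. Flattening the (3, 3, 1536) feature map feeds 13,824 values into the dense layer (about 14.2M parameters); a pooling head, sketched here as an alternative rather than what this kernel ran, would cut that roughly nine-fold:

model_gap = Sequential()
model_gap.add(model)
model_gap.add(GlobalAveragePooling2D())        # (3, 3, 1536) -> (1536,)
model_gap.add(Dense(1024, activation='relu'))  # ~1.57M weights instead of ~14.2M
model_gap.add(Dropout(0.5))
model_gap.add(Dense(nb_classes, activation='softmax'))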
# Callbacks: checkpoint the best model by validation loss, stop early when it stalls
checkpoint = ModelCheckpoint(model_name, monitor='val_loss', verbose=1, save_best_only=True,
                             save_weights_only=False, mode='auto', period=1)
early = EarlyStopping(monitor='val_loss', min_delta=0, patience=5, verbose=1, mode='auto')
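LearningRateScheduler and TensorBoard are imported at the top but never wired in. If one wanted a decaying learning rate, a minimal sketch (hypothetical schedule, not part of the original run) would be:

def step_decay(epoch):
    return 1e-4 * (0.5 ** (epoch // 20))  # halve the starting lr every 20 epochs

lr_schedule = LearningRateScheduler(step_decay, verbose=1)
# then pass callbacks=[checkpoint, early, lr_schedule] to fit_generator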
model_final.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
inception_resnet_v2 (Model)  (None, 3, 3, 1536)        54336736
_________________________________________________________________
flatten_1 (Flatten)          (None, 13824)             0
_________________________________________________________________
dense_1 (Dense)              (None, 1024)              14156800
_________________________________________________________________
dropout_1 (Dropout)          (None, 1024)              0
_________________________________________________________________
dense_2 (Dense)              (None, 1010)              1035250
=================================================================
Total params: 69,528,786
Trainable params: 69,468,242
Non-trainable params: 60,544
_________________________________________________________________
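Note that nearly all 69.5M parameters are trainable, i.e. the whole InceptionResNetV2 base is being fine-tuned from the first epoch. A common transfer-learning variant, not used here, is to freeze the base first and train only the new head:

for layer in model.layers:  # 'model' is the InceptionResNetV2 base
    layer.trainable = False
model_final.compile(optimizers.rmsprop(lr=0.0001, decay=1e-5),
                    loss='categorical_crossentropy',
                    metrics=['accuracy'])  # recompile so the freeze takes effect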
%%time
history = model_final.fit_generator(generator=train_generator,
                                    steps_per_epoch=80,
                                    validation_data=valid_generator,
                                    validation_steps=40,
                                    epochs=nb_epochs,
                                    callbacks=[checkpoint, early],
                                    verbose=1)
WARNING:tensorflow:From /opt/conda/lib/python3.6/site-packages/tensorflow/python/ops/math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version. Instructions for updating: Use tf.cast instead.
Epoch 1/65 - 356s 4s/step - loss: 6.9083 - acc: 0.0037 - val_loss: 6.9055 - val_acc: 0.0045 (val_loss improved from inf to 6.90547, saving model to InceptionResNetV2)
Epoch 2/65 - 284s 4s/step - loss: 6.6840 - acc: 0.0079 - val_loss: 6.7927 - val_acc: 0.0097 (val_loss improved, model saved)
Epoch 3/65 - 286s 4s/step - loss: 6.3318 - acc: 0.0234 - val_loss: 6.5874 - val_acc: 0.0216 (val_loss improved, model saved)
Epoch 4/65 - 287s 4s/step - loss: 5.9367 - acc: 0.0368 - val_loss: 6.3406 - val_acc: 0.0353 (val_loss improved, model saved)
Epoch 5/65 - 287s 4s/step - loss: 5.6083 - acc: 0.0501 - val_loss: 6.2182 - val_acc: 0.0463 (val_loss improved, model saved)
Epoch 6/65 - 285s 4s/step - loss: 5.3328 - acc: 0.0677 - val_loss: 5.9901 - val_acc: 0.0550 (val_loss improved, model saved)
Epoch 7/65 - 286s 4s/step - loss: 5.1518 - acc: 0.0766 - val_loss: 5.8476 - val_acc: 0.0670 (val_loss improved, model saved)
Epoch 8/65 - 287s 4s/step - loss: 4.9322 - acc: 0.0919 - val_loss: 5.6232 - val_acc: 0.0764 (val_loss improved, model saved)
Epoch 9/65 - 284s 4s/step - loss: 4.7662 - acc: 0.1095 - val_loss: 5.4944 - val_acc: 0.0862 (val_loss improved, model saved)
Epoch 10/65 - 288s 4s/step - loss: 4.6913 - acc: 0.1072 - val_loss: 5.4311 - val_acc: 0.0861 (val_loss improved, model saved)
Epoch 11/65 - 287s 4s/step - loss: 4.5764 - acc: 0.1138 - val_loss: 5.3167 - val_acc: 0.0905 (val_loss improved, model saved)
Epoch 12/65 - 284s 4s/step - loss: 4.4179 - acc: 0.1352 - val_loss: 5.2237 - val_acc: 0.1088 (val_loss improved, model saved)
Epoch 13/65 - 287s 4s/step - loss: 4.3519 - acc: 0.1370 - val_loss: 5.1422 - val_acc: 0.1119 (val_loss improved, model saved)
Epoch 14/65 - 288s 4s/step - loss: 4.2697 - acc: 0.1420 - val_loss: 5.0044 - val_acc: 0.1267 (val_loss improved, model saved)
Epoch 15/65 - 286s 4s/step - loss: 4.1748 - acc: 0.1526 - val_loss: 5.1474 - val_acc: 0.1259 (val_loss did not improve)
Epoch 16/65 - 285s 4s/step - loss: 4.1067 - acc: 0.1642 - val_loss: 4.9083 - val_acc: 0.1314 (val_loss improved, model saved)
Epoch 17/65 - 291s 4s/step - loss: 4.0509 - acc: 0.1677 - val_loss: 4.8343 - val_acc: 0.1340 (val_loss improved, model saved)
Epoch 18/65 - 286s 4s/step - loss: 3.9883 - acc: 0.1786 - val_loss: 4.7462 - val_acc: 0.1362 (val_loss improved, model saved)
Epoch 19/65 - 287s 4s/step - loss: 3.9329 - acc: 0.1801 - val_loss: 4.7836 - val_acc: 0.1442 (val_loss did not improve)
Epoch 20/65 - 286s 4s/step - loss: 3.8636 - acc: 0.1883 - val_loss: 4.7084 - val_acc: 0.1481 (val_loss improved, model saved)
Epoch 21/65 - 284s 4s/step - loss: 3.8378 - acc: 0.1934 - val_loss: 4.6525 - val_acc: 0.1531 (val_loss improved, model saved)
Epoch 22/65 - 288s 4s/step - loss: 3.7650 - acc: 0.2045 - val_loss: 4.6476 - val_acc: 0.1591 (val_loss improved, model saved)
Epoch 23/65 - 284s 4s/step - loss: 3.7396 - acc: 0.2101 - val_loss: 4.5605 - val_acc: 0.1606 (val_loss improved, model saved)
Epoch 24/65 - 282s 4s/step - loss: 3.6721 - acc: 0.2151 - val_loss: 4.5303 - val_acc: 0.1595 (val_loss improved, model saved)
Epoch 25/65 - 287s 4s/step - loss: 3.6266 - acc: 0.2170 - val_loss: 4.3988 - val_acc: 0.1705 (val_loss improved, model saved)
Epoch 26/65 - 285s 4s/step - loss: 3.5826 - acc: 0.2298 - val_loss: 4.5749 - val_acc: 0.1581 (val_loss did not improve)
Epoch 27/65 - 280s 4s/step - loss: 3.5306 - acc: 0.2341 - val_loss: 4.5384 - val_acc: 0.1757 (val_loss did not improve)
Epoch 28/65 - 282s 4s/step - loss: 3.4787 - acc: 0.2397 - val_loss: 4.4502 - val_acc: 0.1753 (val_loss did not improve)
Epoch 29/65 - 285s 4s/step - loss: 3.4512 - acc: 0.2407 - val_loss: 4.3671 - val_acc: 0.1785 (val_loss improved, model saved)
Epoch 30/65 - 284s 4s/step - loss: 3.4433 - acc: 0.2481 - val_loss: 4.2958 - val_acc: 0.1775 (val_loss improved, model saved)
Epoch 31/65 - 286s 4s/step - loss: 3.4167 - acc: 0.2473 - val_loss: 4.3397 - val_acc: 0.1910 (val_loss did not improve)
Epoch 32/65 - 285s 4s/step - loss: 3.3980 - acc: 0.2479 - val_loss: 4.1655 - val_acc: 0.1946 (val_loss improved, model saved)
Epoch 33/65 - 286s 4s/step - loss: 3.3893 - acc: 0.2574 - val_loss: 4.4498 - val_acc: 0.1837 (val_loss did not improve)
Epoch 34/65 - 287s 4s/step - loss: 3.3685 - acc: 0.2514 - val_loss: 4.1476 - val_acc: 0.2044 (val_loss improved, model saved)
Epoch 35/65 - 285s 4s/step - loss: 3.3437 - acc: 0.2637 - val_loss: 4.1630 - val_acc: 0.1980 (val_loss did not improve)
Epoch 36/65 - 282s 4s/step - loss: 3.2950 - acc: 0.2611 - val_loss: 4.1488 - val_acc: 0.1938 (val_loss did not improve)
Epoch 37/65 - 282s 4s/step - loss: 3.3103 - acc: 0.2603 - val_loss: 4.0952 - val_acc: 0.2135 (val_loss improved, model saved)
Epoch 38/65 - 282s 4s/step - loss: 3.2796 - acc: 0.2687 - val_loss: 4.1278 - val_acc: 0.2103 (val_loss did not improve)
Epoch 39/65 - 282s 4s/step - loss: 3.2746 - acc: 0.2704 - val_loss: 4.2665 - val_acc: 0.2109 (val_loss did not improve)
Epoch 40/65 - 283s 4s/step - loss: 3.2272 - acc: 0.2821 - val_loss: 4.1167 - val_acc: 0.2044 (val_loss did not improve)
Epoch 41/65 - 285s 4s/step - loss: 3.2223 - acc: 0.2833 - val_loss: 4.0089 - val_acc: 0.2238 (val_loss improved, model saved)
Epoch 42/65 - 283s 4s/step - loss: 3.2335 - acc: 0.2779 - val_loss: 3.9840 - val_acc: 0.2186 (val_loss improved, model saved)
Epoch 43/65 - 287s 4s/step - loss: 3.1826 - acc: 0.2877 - val_loss: 4.1019 - val_acc: 0.2174 (val_loss did not improve)
Epoch 44/65 - 283s 4s/step - loss: 3.1778 - acc: 0.2867 - val_loss: 4.0194 - val_acc: 0.2276 (val_loss did not improve)
Epoch 45/65 - 284s 4s/step - loss: 3.1890 - acc: 0.2890 - val_loss: 4.0372 - val_acc: 0.2282 (val_loss did not improve)
Epoch 46/65 - 285s 4s/step - loss: 3.1507 - acc: 0.2865 - val_loss: 4.0892 - val_acc: 0.2212 (val_loss did not improve)
Epoch 47/65 - 290s 4s/step - loss: 3.1302 - acc: 0.2928 - val_loss: 4.0334 - val_acc: 0.2188 (val_loss did not improve)
Epoch 00047: early stopping
CPU times: user 4h 9min 19s, sys: 19min 9s, total: 4h 28min 29s
Wall time: 3h 45min 51s
import gc
gc.collect();
with open('history.json', 'w') as f:
    json.dump(history.history, f)
history_df = pd.DataFrame(history.history)
history_df[['loss', 'val_loss']].plot()
history_df[['acc', 'val_acc']].plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7f84f98710b8>
test_ann_file = '../input/test2019.json'
with open(test_ann_file) as data_file:
    test_anns = json.load(data_file)
test_img_df = pd.DataFrame(test_anns['images'])[['id', 'file_name']].rename(columns={'id':'image_id'})
test_img_df.head()
| | image_id | file_name |
|---|---|---|
| 0 | 268243 | test2019/e295f3c7046b1f1e80c0301401324aa9.jpg |
| 1 | 268244 | test2019/ad3dcbb6846ed0b4dab58d7b1a4210ba.jpg |
| 2 | 268245 | test2019/e697be8e296b4b140cff4f96f85c364f.jpg |
| 3 | 268246 | test2019/7e7ba55e6aa26ba99e814d63b15d0121.jpg |
| 4 | 268247 | test2019/6cb6372079d23702511c06923970f13f.jpg |
%%time
test_generator = test_datagen.flow_from_dataframe(
    dataframe=test_img_df,
    directory="../input/test2019",
    x_col="file_name",
    target_size=(img_size, img_size),
    batch_size=1,
    shuffle=False,
    class_mode=None)
Found 35350 images.
CPU times: user 800 ms, sys: 708 ms, total: 1.51 s
Wall time: 11.2 s
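With batch_size = 1 the prediction loop takes one forward pass per image (35,350 steps). A batched variant, a tweak rather than what the kernel ran, would be considerably faster:

test_generator_fast = test_datagen.flow_from_dataframe(
    dataframe=test_img_df,
    directory="../input/test2019",
    x_col="file_name",
    target_size=(img_size, img_size),
    batch_size=batch_size,
    shuffle=False,
    class_mode=None)
# steps = int(np.ceil(test_generator_fast.samples / batch_size))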
gc.collect();
%%time
predict_valid = model_final.predict_generator(valid_generator,
                                              steps=np.ceil(valid_generator.samples / valid_generator.batch_size),
                                              verbose=1)
24/24 [==============================] - 41s 2s/step
CPU times: user 47.7 s, sys: 2.04 s, total: 49.7 s
Wall time: 40.8 s
predict_valid_class = np.argmax(predict_valid, axis=1)
len(predict_valid_class)
3030
from sklearn.metrics import classification_report
print(classification_report(valid_generator.classes, predict_valid_class))
              precision    recall  f1-score   support

         109       0.05      0.33      0.09         3
         116       0.06      0.33      0.11         3

(all other 1008 classes: precision 0.00, recall 0.00, f1-score 0.00, support 3 each)

   micro avg       0.00      0.00      0.00      3030
   macro avg       0.00      0.00      0.00      3030
weighted avg       0.00      0.00      0.00      3030
/opt/conda/lib/python3.6/site-packages/sklearn/metrics/classification.py:1143: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples. 'precision', 'predicted', average, warn_for)
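The near-zero scores above are largely an artifact of ordering, not only of model quality: valid_generator was built with shuffle=True, so the row order of predict_generator's output does not correspond to valid_generator.classes. A sketch of an order-preserving evaluation (assuming the same dataframe and datagen):

valid_generator_eval = test_datagen.flow_from_dataframe(
    dataframe=df_valid_file_cat,
    directory="../input/train_val2019",
    x_col="file_name",
    y_col="category_id",
    batch_size=batch_size,
    shuffle=False,  # keep dataframe order so predictions align with .classes
    class_mode="categorical",
    target_size=(img_size, img_size))
predict_valid = model_final.predict_generator(
    valid_generator_eval,
    steps=int(np.ceil(valid_generator_eval.samples / valid_generator_eval.batch_size)),
    verbose=1)
print(classification_report(valid_generator_eval.classes, np.argmax(predict_valid, axis=1)))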
%%time
test_generator.reset()
predict = model_final.predict_generator(test_generator,
                                        steps=len(test_generator.filenames),
                                        verbose=1)
10952/35350 [========>.....................] - ETA: 32:23
predicted_class_indices = np.argmax(predict, axis=1)
gc.collect();
labels = train_generator.class_indices            # {category_id string: class index}
labels = dict((v, k) for k, v in labels.items())  # invert to {class index: category_id string}
predictions = [labels[k] for k in predicted_class_indices]
sam_sub_df = pd.read_csv('../input/kaggle_sample_submission.csv')
sam_sub_df.head()
| | id | predicted |
|---|---|---|
| 0 | 268243 | 842 |
| 1 | 268244 | 139 |
| 2 | 268245 | 988 |
| 3 | 268246 | 612 |
| 4 | 268247 | 468 |
sam_sub_df.shape
(35350, 2)
filenames = test_generator.filenames
results = pd.DataFrame({"file_name": filenames,
                        "predicted": predictions})
df_res = pd.merge(test_img_df, results, on='file_name')[['image_id', 'predicted']]\
    .rename(columns={'image_id': 'id'})
df_res.head()
| | id | predicted |
|---|---|---|
| 0 | 268243 | 4 |
| 1 | 268244 | 212 |
| 2 | 268245 | 370 |
| 3 | 268246 | 161 |
| 4 | 268247 | 476 |
df_res.to_csv("submission.csv",index=False)
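A final sanity check, not in the original kernel, to confirm the submission lines up with the sample file before uploading:

assert df_res.shape == sam_sub_df.shape            # (35350, 2)
assert set(df_res['id']) == set(sam_sub_df['id'])  # same image ids as the sample submission
print(df_res['predicted'].nunique(), "distinct classes predicted")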