diff --git a/results/FA.png b/results/FA.png
new file mode 100644
index 0000000..c2ae627
Binary files /dev/null and b/results/FA.png differ
diff --git a/results/FA_precision-recall.png b/results/FA_precision-recall.png
new file mode 100644
index 0000000..fa466d1
Binary files /dev/null and b/results/FA_precision-recall.png differ
diff --git a/results/checkpoint b/results/checkpoint
new file mode 100644
index 0000000..51b8018
--- /dev/null
+++ b/results/checkpoint
@@ -0,0 +1,2 @@
+model_checkpoint_path: "model_ges.ckpt"
+all_model_checkpoint_paths: "model_ges.ckpt"
diff --git a/results/data_resize.py b/results/data_resize.py
new file mode 100644
index 0000000..f236bc4
--- /dev/null
+++ b/results/data_resize.py
@@ -0,0 +1,199 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Oct  3 15:32:52 2017
+
+@author: nvlab
+"""
+
+# -*- coding: utf-8 -*-
+"""
+Created on Sat Sep 30 12:13:40 2017
+
+@author: nvlab
+"""
+
+# -*- coding: utf-8 -*-
+"""
+Created on Sat Sep 23 15:58:21 2017
+
+@author: nvlab
+"""
+
+import numpy as np
+from PIL import Image
+import scipy.misc
+
+global num_house1_L,num_house2_L,num_house3_L,num_lab1_L,num_lab2_L,num_lab3_L,num_lab4_L
+global num_house1_R,num_house2_R,num_house3_R,num_lab1_R,num_lab2_R,num_lab3_R,num_lab4_R
+global num_off1_R, num_off2_R, num_off3_R, num_off1_L, num_off2_L, num_off3_L
+global img_path
+num_house1_L = 1
+num_off1_L = 1
+num_off1_R = 1
+num_house2_L = 1
+num_off2_L = 1
+num_off2_R = 1
+num_house3_L = 1
+num_off3_L = 1
+num_off3_R = 1
+num_lab1_L = 1
+num_lab2_L = 1
+num_lab3_L = 1
+num_lab4_L = 1
+num_house1_R = 1
+num_house2_R = 1
+num_house3_R = 1
+num_lab1_R = 1
+num_lab2_R = 1
+num_lab3_R = 1
+num_lab4_R = 1
+def read_data(batch):
+
+    global num_house1_L,num_house2_L,num_house3_L,num_lab1_L,num_lab2_L,num_lab3_L,num_lab4_L
+    global num_house1_R,num_house2_R,num_house3_R,num_lab1_R,num_lab2_R,num_lab3_R,num_lab4_R
+    global num_off1_R, num_off2_R, num_off3_R, num_off1_L, num_off2_L, num_off3_L
+    global img_path
+    for i in range(batch):
+        if num_house1_L<=831:        
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/house/1/Lhand/Image%s.png"%(num_house1_L)
+            num_house1_L+=1
+        elif num_house1_R<=831:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/house/1/Rhand/Image%s.png"%(num_house1_R)
+            num_house1_R+=1
+        elif num_house2_L<=988:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/house/2/Lhand/Image%s.png"%(num_house2_L)
+            num_house2_L+=1
+        elif num_house2_R<=988:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/house/2/Rhand/Image%s.png"%(num_house2_R)
+            num_house2_R+=1
+        elif num_house3_L<=1229:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/house/3/Lhand/Image%s.png"%(num_house3_L)
+            num_house3_L+=1
+        elif num_house3_R<=1229:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/house/3/Rhand/Image%s.png"%(num_house3_R)
+            num_house3_R+=1
+        elif num_lab1_L<=501:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/lab/1/Lhand/Image%s.png"%(num_lab1_L)
+            num_lab1_L+=1
+        elif num_lab1_R<=501:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/lab/1/Rhand/Image%s.png"%(num_lab1_R)
+            num_lab1_R+=1
+        elif num_lab2_L<=589:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/lab/2/Lhand/Image%s.png"%(num_lab2_L)
+            num_lab2_L+=1
+        elif num_lab2_R<=589:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/lab/2/Rhand/Image%s.png"%(num_lab2_R)
+            num_lab2_R+=1
+        elif num_lab3_L<=730:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/lab/3/Lhand/Image%s.png"%(num_lab3_L)
+            num_lab3_L+=1
+        elif num_lab3_R<=730:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/lab/3/Rhand/Image%s.png"%(num_lab3_R)
+            num_lab3_R+=1
+        elif num_lab4_L<=660:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/lab/4/Lhand/Image%s.png"%(num_lab4_L)
+            num_lab4_L+=1
+        elif num_lab4_R<=660:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/lab/4/Rhand/Image%s.png"%(num_lab4_R)
+            num_lab4_R+=1
+        elif num_off1_L<=745:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/office/1/Lhand/Image%s.png"%(num_off1_L)
+            num_off1_L+=1
+        elif num_off1_R<=745:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/office/1/Rhand/Image%s.png"%(num_off1_R)
+            num_off1_R+=1
+        elif num_off2_L<=572:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/office/2/Lhand/Image%s.png"%(num_off2_L)
+            num_off2_L+=1
+        elif num_off2_R<=572:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/office/2/Rhand/Image%s.png"%(num_off2_R)
+            num_off2_R+=1
+        elif num_off3_L<=651:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/office/3/Lhand/Image%s.png"%(num_off3_L)
+            num_off3_L+=1
+        elif num_off3_R<=651:
+            img_path = "../../../../Disk2/cedl/handcam/frames/train/office/3/Rhand/Image%s.png"%(num_off3_R)
+            num_off3_R+=1
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.resize(img, [128,248,3])
+        x_img = x_img/255.0
+        
+        scipy.misc.imsave('frames_resize/Image%s.png'%i, x_img)
+    
+def read_data_test(batch):
+
+    global num_house1_L,num_house2_L,num_house3_L,num_lab1_L,num_lab2_L,num_lab3_L,num_lab4_L
+    global num_house1_R,num_house2_R,num_house3_R,num_lab1_R,num_lab2_R,num_lab3_R,num_lab4_R
+    global num_off1_R, num_off2_R, num_off3_R, num_off1_L, num_off2_L, num_off3_L
+    global img_path
+    for i in range(batch):
+        if num_house1_L<=830:        
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/house/1/Lhand/Image%s.png"%(num_house1_L)
+            num_house1_L+=1
+        elif num_house1_R<=830:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/house/1/Rhand/Image%s.png"%(num_house1_R)
+            num_house1_R+=1
+        elif num_house2_L<=887:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/house/2/Lhand/Image%s.png"%(num_house2_L)
+            num_house2_L+=1
+        elif num_house2_R<=887:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/house/2/Rhand/Image%s.png"%(num_house2_R)
+            num_house2_R+=1
+        elif num_house3_L<=929:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/house/3/Lhand/Image%s.png"%(num_house3_L)
+            num_house3_L+=1
+        elif num_house3_R<=929:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/house/3/Rhand/Image%s.png"%(num_house3_R)
+            num_house3_R+=1
+        elif num_lab1_L<=539:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/lab/1/Lhand/Image%s.png"%(num_lab1_L)
+            num_lab1_L+=1
+        elif num_lab1_R<=539:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/lab/1/Rhand/Image%s.png"%(num_lab1_R)
+            num_lab1_R+=1
+        elif num_lab2_L<=658:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/lab/2/Lhand/Image%s.png"%(num_lab2_L)
+            num_lab2_L+=1
+        elif num_lab2_R<=658:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/lab/2/Rhand/Image%s.png"%(num_lab2_R)
+            num_lab2_R+=1
+        elif num_lab3_L<=467:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/lab/3/Lhand/Image%s.png"%(num_lab3_L)
+            num_lab3_L+=1
+        elif num_lab3_R<=467:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/lab/3/Rhand/Image%s.png"%(num_lab3_R)
+            num_lab3_R+=1
+        elif num_lab4_L<=503:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/lab/4/Lhand/Image%s.png"%(num_lab4_L)
+            num_lab4_L+=1
+        elif num_lab4_R<=503:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/lab/4/Rhand/Image%s.png"%(num_lab4_R)
+            num_lab4_R+=1
+        elif num_off1_L<=590:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/office/1/Lhand/Image%s.png"%(num_off1_L)
+            num_off1_L+=1
+        elif num_off1_R<=590:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/office/1/Rhand/Image%s.png"%(num_off1_R)
+            num_off1_R+=1
+        elif num_off2_L<=419:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/office/2/Lhand/Image%s.png"%(num_off2_L)
+            num_off2_L+=1
+        elif num_off2_R<=419:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/office/2/Rhand/Image%s.png"%(num_off2_R)
+            num_off2_R+=1
+        elif num_off3_L<=566:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/office/3/Lhand/Image%s.png"%(num_off3_L)
+            num_off3_L+=1
+        elif num_off3_R<=566:
+            img_path = "../../../../Disk2/cedl/handcam/frames/test/office/3/Rhand/Image%s.png"%(num_off3_R)
+            num_off3_R+=1
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.resize(img, [128,248,3])
+        x_img = x_img/255.0
+        
+        scipy.misc.imsave('test_resize/Image%s.png'%i, x_img)
+    
+read_data_test(12776)
+read_data(14992)
\ No newline at end of file
diff --git a/results/ges.jpg b/results/ges.jpg
new file mode 100644
index 0000000..4a020a3
Binary files /dev/null and b/results/ges.jpg differ
diff --git a/results/ges_precision-recall.png b/results/ges_precision-recall.png
new file mode 100644
index 0000000..87e469e
Binary files /dev/null and b/results/ges_precision-recall.png differ
diff --git a/results/index.md b/results/index.md
index 96ce61c..ae10006 100644
--- a/results/index.md
+++ b/results/index.md
@@ -1,44 +1,124 @@
-# Your Name (id)
+# 邱煜淵 (105061634)
 
 #Project 5: Deep Classification
 
 ## Overview
-The project is related to 
-> quote
+The project is about object classification. There are two different cases: in one, every task has its own model; in the other, a single model handles multiple tasks (Obj, Ges and FA). Some experiment photos are not finished yet, so I will add them as soon as possible.
+> quote https://github.com/CEDL2017/homework1
 
 
 ## Implementation
-1. One
-	* item
-	* item
-2. Two
+### Result
+
Average FA loss : 0.743
+
Average FA accuracy : 0.509 (using a shallower network, trained for 50 epochs)
+
Average Ges loss : 1.7635
+
Average Ges accuracy : 0.9253 (using a deeper network, trained for 60 epochs)
+
Average Obj loss : 2.268
+
Average Obj accuracy : 0.538
+	
+### Spotlight code
 
 ```
-Code highlights
+with tf.name_scope('C1_Conv'):
+    W1 = weight([3,3,3,36])
+    b1 = bias([36])
+    Conv1 = conv2d(x_image, W1)+b1
+    Conv1_BN = BN(Conv1, 36)
+    C1_Conv = tf.nn.relu(Conv1_BN)  
+with tf.name_scope('C2_Conv'):
+    W2 = weight([3,3,36,36])
+    b2 = bias([36])
+    Conv2 = conv2d(C1_Conv, W2)+b2
+    Conv2_BN = BN(Conv2, 36)
+    C2_Conv = tf.nn.relu(Conv2_BN) 
+with tf.name_scope('C1_Pool'):
+    C1_Pool = max_pooling(C2_Conv)
+    
+with tf.name_scope('C3_Conv'):
+    W3 = weight([3,3,36,64])
+    b3 = bias([64])
+    Conv3 = conv2d(C1_Pool, W3)+b3
+    Conv3_BN = BN(Conv3, 64)
+    C3_Conv = tf.nn.relu(Conv3_BN)
+C3_concate = tf.concat([C1_Pool, C3_Conv],3)
+with tf.name_scope('C4_Conv'):
+    W4 = weight([3,3,100,128])
+    b4 = bias([128])
+    Conv4 = conv2d(C3_concate, W4)+b4
+    Conv4_BN = BN(Conv4, 128)
+    C4_Conv = tf.nn.relu(Conv4_BN)    
+with tf.name_scope('C2_Pool'):
+    C2_Pool = max_pooling(C4_Conv)
+    
+with tf.name_scope('C5_Conv'):
+    W8 = weight([3,3,128,128])
+    b8 = bias([128])
+    Conv5 = conv2d(C2_Pool, W8)+b8
+    Conv5_BN = BN(Conv5, 128)
+    C5_Conv = tf.nn.relu(Conv5_BN)
+C5_concate = tf.concat([C2_Pool, C5_Conv],3)
+with tf.name_scope('C6_Conv'):
+    W9 = weight([3,3,256,256])
+    b9 = bias([256])
+    Conv6 = conv2d(C5_concate, W9)+b9
+    Conv6_BN = BN(Conv6, 256)
+    C6_Conv = tf.nn.relu(Conv6_BN)    
+with tf.name_scope('C3_Pool'):
+    C3_Pool = max_pooling(C6_Conv)
+
+with tf.name_scope('C7_Conv'):
+    W10 = weight([3,3,256,256])
+    b10 = bias([256])
+    Conv7 = conv2d(C3_Pool, W10)+b10
+    Conv7_BN = BN(Conv7, 256)
+    C7_Conv = tf.nn.relu(Conv7_BN)
+C7_concate = tf.concat([C3_Pool, C7_Conv],3)
+with tf.name_scope('C8_Conv'):
+    W11 = weight([3,3,512,512])
+    b11 = bias([512])
+    Conv8 = conv2d(C7_concate, W11)+b11
+    Conv8_BN = BN(Conv8, 512)
+    C8_Conv = tf.nn.relu(Conv8_BN)    
+with tf.name_scope('C4_Pool'):
+    C4_Pool = max_pooling(C8_Conv)
 ```
 
 ## Installation
 * Other required packages.
+	* python2.7
+	* tensorflow
+	* numpy
+	* PIL
 * How to compile from source?
-
+	
First, you must resize the input image.
+	
+	>$ python data_resize.py
+	
+	Then you can choose which task you want to train.
+	
+	>$ python train.py or $ python train_obj.py
+	
+	Last, you can test your model.
+	
+	>$ python test.py
 ### Results
 
 
 
 | -  -  -  +  +  +     | 
 
 
 | -  -  -  -  +  +  +  +   | 
 
diff --git a/results/model.ckpt.index b/results/model.ckpt.index
new file mode 100644
index 0000000..e248aa8
Binary files /dev/null and b/results/model.ckpt.index differ
diff --git a/results/model.ckpt.meta b/results/model.ckpt.meta
new file mode 100644
index 0000000..2257583
Binary files /dev/null and b/results/model.ckpt.meta differ
diff --git a/results/model_FA.ckpt.index b/results/model_FA.ckpt.index
new file mode 100644
index 0000000..826eac9
Binary files /dev/null and b/results/model_FA.ckpt.index differ
diff --git a/results/model_FA.ckpt.meta b/results/model_FA.ckpt.meta
new file mode 100644
index 0000000..1515efc
Binary files /dev/null and b/results/model_FA.ckpt.meta differ
diff --git a/results/model_ges.ckpt.index b/results/model_ges.ckpt.index
new file mode 100644
index 0000000..294e64b
Binary files /dev/null and b/results/model_ges.ckpt.index differ
diff --git a/results/model_ges.ckpt.meta b/results/model_ges.ckpt.meta
new file mode 100644
index 0000000..331ae7f
Binary files /dev/null and b/results/model_ges.ckpt.meta differ
diff --git a/results/model_obj.ckpt.index b/results/model_obj.ckpt.index
new file mode 100644
index 0000000..adf8e07
Binary files /dev/null and b/results/model_obj.ckpt.index differ
diff --git a/results/model_obj.ckpt.meta b/results/model_obj.ckpt.meta
new file mode 100644
index 0000000..091fe2d
Binary files /dev/null and b/results/model_obj.ckpt.meta differ
diff --git a/results/multiple_acc.jpg b/results/multiple_acc.jpg
new file mode 100644
index 0000000..01ce33a
Binary files /dev/null and b/results/multiple_acc.jpg differ
diff --git a/results/multiple_loss.jpg b/results/multiple_loss.jpg
new file mode 100644
index 0000000..026fa85
Binary files /dev/null and b/results/multiple_loss.jpg differ
diff --git a/results/obj_precision-recall.png b/results/obj_precision-recall.png
new file mode 100644
index 0000000..7775ece
Binary files /dev/null and b/results/obj_precision-recall.png differ
diff --git a/results/read_data.py b/results/read_data.py
new file mode 100644
index 0000000..4edb32b
--- /dev/null
+++ b/results/read_data.py
@@ -0,0 +1,502 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Sat Sep 23 15:58:21 2017
+
+@author: nvlab
+"""
+
+import numpy as np
+from PIL import Image
+
+
+def random_order(num, batch):
+    order = np.random.randint(0, num-101,(1,batch))
+    return order
+    
+    
+def read_data(batch):
+    png = []
+    order = random_order(14992, batch)
+    for i in range(batch):
+        img_path = "frames_resize/Image%s.png"%order[0,i]
+        
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.reshape(img, [1,95232])
+        png.append(x_img)
+                      
+    png = np.array(png)
+    png = np.reshape(png, [batch,95232])
+    
+    order_obj_label = np.zeros([batch, 24])
+    order_ges_label = np.zeros([batch, 13])
+    order_FA_label = np.zeros([batch, 2])
+    
+    for i in range(batch):
+        order_obj_label[i, :] = obj_label[order[0,i],:]
+        order_ges_label[i,:] = ges_label[order[0,i],:]
+        order_FA_label[i,:] = FA_label[order[0,i],:]
+    train_label = np.hstack((order_obj_label,order_ges_label,order_FA_label))
+        
+    return png, train_label
+    
+def read_ges_labels():
+
+    obj_label = []
+    num_ges_label = np.zeros([14992,13])
+
+    k = 1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.array(obj_left_labels)    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+        
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    obj_label = np.array(obj_label)
+    obj_label = np.reshape(obj_label,[1,14992]) 
+    for i in range(0,14992):
+        a = obj_label[0,i]
+        #print(a)
+        num_ges_label[i,int(a)] = 1
+
+    return num_ges_label   
+
+def read_data_test(batch):
+
+    png = []
+    order = random_order(12776, batch)
+    for i in range(batch):
+        img_path = "test_resize/Image%s.png"%order[0,i]
+        
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.reshape(img, [1,95232])
+        png.append(x_img)
+                      
+    png = np.array(png)
+    png = np.reshape(png, [batch,95232])
+    
+    order_obj_label = np.zeros([batch, 24])
+    order_ges_label = np.zeros([batch, 13])
+    order_FA_label = np.zeros([batch, 2])
+    
+    for i in range(batch):
+        order_obj_label[i, :] = obj_label[order[0,i],:]
+        order_ges_label[i,:] = ges_label[order[0,i],:]
+        order_FA_label[i,:] = FA_label[order[0,i],:]
+    test_label = np.hstack((order_obj_label,order_ges_label,order_FA_label))    
+        
+    return png, test_label
+    
+def read_ges_labels_test():
+
+    obj_label = []
+    num_ges_label = np.zeros([12776,13])
+
+    k = 4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.array(obj_left_labels)    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+        
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =7
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =8
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    obj_label = np.array(obj_label)
+    obj_label = np.reshape(obj_label,[1,12776]) 
+    for i in range(0,12776):
+        a = obj_label[0,i]
+        #print(a)
+        num_ges_label[i,int(a)] = 1
+
+    return num_ges_label   
+    
+def read_FA_labels():
+
+    obj_label = []
+    num_obj_left_label = np.zeros([14992,2])
+
+    k = 1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.array(obj_left_labels)    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+        
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    obj_label = np.array(obj_label)
+    obj_label = np.reshape(obj_label,[1,14992]) 
+    for i in range(0,14992):
+        a = obj_label[0,i]
+        #print(a)
+        num_obj_left_label[i,int(a)] = 1
+
+    return num_obj_left_label 
+    
+def read_FA_labels_test():
+
+    obj_label = []
+    num_obj_left_label = np.zeros([12776,2])
+
+    k = 4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.array(obj_left_labels)    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+        
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =7
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =8
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    obj_label = np.array(obj_label)
+    obj_label = np.reshape(obj_label,[1,12776]) 
+    for i in range(0,12776):
+        a = obj_label[0,i]
+        #print(a)
+        num_obj_left_label[i,int(a)] = 1
+
+    return num_obj_left_label
+
+def read_obj_labels():
+
+    obj_label = []
+    num_obj_left_label = np.zeros([14992,24])
+
+    k = 1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_left%s.npy"%k)
+    obj_label = np.array(obj_left_labels)    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+        
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    obj_label = np.array(obj_label)
+    obj_label = np.reshape(obj_label,[1,14992]) 
+    for i in range(0,14992):
+        a = obj_label[0,i]
+        #print(a)
+        num_obj_left_label[i,int(a)] = 1
+
+    return num_obj_left_label   
+    
+def read_obj_labels_test():
+
+    obj_label = []
+    num_obj_left_label = np.zeros([12776,24])
+
+    k = 4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_left%s.npy"%k)
+    obj_label = np.array(obj_left_labels)    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+        
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =7
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =8
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    obj_label = np.array(obj_label)
+    obj_label = np.reshape(obj_label,[1,12776]) 
+    for i in range(0,12776):
+        a = obj_label[0,i]
+        #print(a)
+        num_obj_left_label[i,int(a)] = 1
+
+    return num_obj_left_label   
+    
+# One-hot label matrices built once at import time (training split).
+obj_label = read_obj_labels()
+ges_label = read_ges_labels()
+FA_label = read_FA_labels()
+
+# One-hot label matrices for the test split.
+obj_test_label = read_obj_labels_test()
+ges_test_label = read_ges_labels_test()
+FA_test_label = read_FA_labels_test()
diff --git a/results/read_data_FA.py b/results/read_data_FA.py
new file mode 100644
index 0000000..82588f4
--- /dev/null
+++ b/results/read_data_FA.py
@@ -0,0 +1,229 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Oct  5 10:10:49 2017
+
+@author: nvlab
+"""
+
+
+import numpy as np
+from PIL import Image
+global k
+k=0
+
+
+def random_order(num, batch):
+    order = np.random.randint(0, num-101,(1,batch))
+    return order
+    
+    
+def read_data(batch):
+    png = []
+    order = random_order(14992, batch)
+    for i in range(batch):
+        img_path = "frames_resize/Image%s.png"%order[0,i]
+        
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.reshape(img, [1,95232])
+        png.append(x_img)
+                      
+    png = np.array(png)
+    png = np.reshape(png, [batch,95232])
+    
+    order_FA_label = np.zeros([batch, 2])
+    
+    for i in range(batch):
+        order_FA_label[i,:] = FA_label[order[0,i],:]
+        
+    return png, order_FA_label
+  
+
+def read_data_test(batch):
+
+    png = []
+    order = random_order(12776, batch)
+    for i in range(batch):
+        img_path = "test_resize/Image%s.png"%order[0,i]
+        
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.reshape(img, [1,95232])
+        png.append(x_img)
+                      
+    png = np.array(png)
+    png = np.reshape(png, [batch,95232])
+    
+    order_FA_label = np.zeros([batch, 2])
+    
+    for i in range(batch):
+        order_FA_label[i,:] = FA_label[order[0,i],:]  
+        
+    return png, order_FA_label
+    
+def read_data_test_(batch):
+    png = []
+    global k
+    for i in range(batch):
+        img_path = "test_resize/Image%s.png"%k
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.reshape(img, [1,95232])
+        png.append(x_img)
+        k+=1
+        
+    png = np.array(png)
+    png = np.reshape(png, [batch,95232])
+              
+    return png
+    
+def read_FA_labels():
+
+    obj_label = []
+    num_obj_left_label = np.zeros([14992,2])
+
+    k = 1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.array(obj_left_labels)    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+        
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    obj_label = np.array(obj_label)
+    obj_label = np.reshape(obj_label,[1,14992]) 
+    for i in range(0,14992):
+        a = obj_label[0,i]
+        #print(a)
+        num_obj_left_label[i,int(a)] = 1
+
+    return num_obj_left_label 
+    
+def read_FA_labels_test():
+
+    obj_label = []
+    num_obj_left_label = np.zeros([12776,2])
+
+    k = 4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.array(obj_left_labels)    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+        
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =7
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =8
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/FA_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    obj_label = np.array(obj_label)
+    obj_label = np.reshape(obj_label,[1,12776]) 
+    for i in range(0,12776):
+        a = obj_label[0,i]
+        #print(a)
+        num_obj_left_label[i,int(a)] = 1
+        
+    ges_test_label = np.zeros([12736, 2])
+    
+    for i in range(0,12736):
+        ges_test_label[i,:] = num_obj_left_label[i,:]
+
+    return np.array(ges_test_label.astype(int)) 
+    
+# FA label matrices built once at import time (train / test splits).
+FA_label = read_FA_labels()
+
+FA_test_label = read_FA_labels_test()
diff --git a/results/read_data_ges.py b/results/read_data_ges.py
new file mode 100644
index 0000000..3d58ffc
--- /dev/null
+++ b/results/read_data_ges.py
@@ -0,0 +1,228 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Oct  5 10:10:49 2017
+
+@author: nvlab
+"""
+
+import numpy as np
+from PIL import Image
+global k
+k=0
+
+
+def random_order(num, batch):
+    order = np.random.randint(0, num-101,(1,batch))
+    return order
+    
+    
+def read_data(batch):
+    png = []
+    order = random_order(14992, batch)
+    for i in range(batch):
+        img_path = "frames_resize/Image%s.png"%order[0,i]
+        
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.reshape(img, [1,95232])
+        png.append(x_img)
+                      
+    png = np.array(png)
+    png = np.reshape(png, [batch,95232])
+    
+    order_ges_label = np.zeros([batch, 13])
+    
+    for i in range(batch):
+        order_ges_label[i,:] = ges_label[order[0,i],:]
+        
+    return png, order_ges_label
+    
+def read_ges_labels():
+
+    obj_label = []
+    num_ges_label = np.zeros([14992,13])
+
+    k = 1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.array(obj_left_labels)    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+        
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    obj_label = np.array(obj_label)
+    obj_label = np.reshape(obj_label,[1,14992]) 
+    for i in range(0,14992):
+        a = obj_label[0,i]
+        #print(a)
+        num_ges_label[i,int(a)] = 1
+
+    return num_ges_label   
+
+def read_data_test(batch):
+
+    png = []
+    order = random_order(12776, batch)
+    for i in range(batch):
+        img_path = "test_resize/Image%s.png"%order[0,i]
+        
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.reshape(img, [1,95232])
+        png.append(x_img)
+                      
+    png = np.array(png)
+    png = np.reshape(png, [batch,95232])
+    
+    order_ges_label = np.zeros([batch, 13])
+    
+    for i in range(batch):
+
+        order_ges_label[i,:] = ges_label[order[0,i],:]    
+        
+    return png, order_ges_label
+    
+def read_data_test_(batch):
+    png = []
+    global k
+    for i in range(batch):
+        img_path = "test_resize/Image%s.png"%k
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.reshape(img, [1,95232])
+        png.append(x_img)
+        k+=1
+        
+    png = np.array(png)
+    png = np.reshape(png, [batch,95232])
+              
+    return png
+    
+def read_ges_labels_test():
+
+    obj_label = []
+    num_ges_label = np.zeros([12776,13])
+
+    k = 4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.array(obj_left_labels)    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+        
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =7
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =8
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =5
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =6
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/ges_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    obj_label = np.array(obj_label)
+    obj_label = np.reshape(obj_label,[1,12776]) 
+    for i in range(0,12776):
+        a = obj_label[0,i]
+        #print(a)
+        num_ges_label[i,int(a)] = 1
+    ges_test_label = np.zeros([12736, 13])
+    
+    for i in range(0,12736):
+        ges_test_label[i,:] = num_ges_label[i,:]
+    
+    return np.array(ges_test_label.astype(int))
+    
+# Gesture label matrices built once at import time (train / test splits).
+ges_label = read_ges_labels()
+
+ges_test_label = read_ges_labels_test()
\ No newline at end of file
diff --git a/results/read_data_obj.py b/results/read_data_obj.py
new file mode 100644
index 0000000..9de8ad6
--- /dev/null
+++ b/results/read_data_obj.py
@@ -0,0 +1,226 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Oct  5 10:10:49 2017
+
+@author: nvlab
+"""
+
+import numpy as np
+from PIL import Image
+global k
+k=0
+
+def random_order(num, batch):
+    order = np.random.randint(0, num-1,(1,batch))
+    return order
+    
+    
+def read_data(batch):
+    png = []
+    order = random_order(14992, batch)
+    for i in range(batch):
+        img_path = "frames_resize/Image%s.png"%order[0,i]
+        
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.reshape(img, [1,95232])
+        png.append(x_img)
+                      
+    png = np.array(png)
+    png = np.reshape(png, [batch,95232])
+    
+    order_obj_label = np.zeros([batch, 24])
+    
+    for i in range(batch):
+        order_obj_label[i, :] = obj_label[order[0,i],:]
+        
+        
+    return png, order_obj_label
+    
+
+def read_data_test(batch):
+    """Load `batch` random test images and a (batch, 24) one-hot label array."""
+    png = []
+    order = random_order(12776, batch)
+    for i in range(batch):
+        img_path = "test_resize/Image%s.png"%order[0,i]
+        
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.reshape(img, [1,95232])
+        png.append(x_img)
+                      
+    png = np.array(png)
+    png = np.reshape(png, [batch,95232])
+    
+    order_obj_label = np.zeros([batch, 24])
+    
+    for i in range(batch):
+        # NOTE(review): indexes the *training* table obj_label for test
+        # images -- looks like a copy-paste bug; the test-split label table
+        # (built by read_obj_labels_test) is probably intended. Confirm
+        # against the end of this file before changing.
+        order_obj_label[i, :] = obj_label[order[0,i],:]   
+        
+    return png, order_obj_label
+    
+def read_data_test_(batch):
+    png = []
+    global k
+    for i in range(batch):
+        img_path = "test_resize/Image%s.png"%k
+        img = Image.open(img_path)
+        img = np.array(img)
+        x_img = np.reshape(img, [1,95232])
+        png.append(x_img)
+        k+=1
+        
+    png = np.array(png)
+    png = np.reshape(png, [batch,95232])
+              
+    return png    
+    
+def read_obj_labels():
+
+    obj_label = []
+    num_obj_left_label = np.zeros([14992,24])
+
+    k = 1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_left%s.npy"%k)
+    obj_label = np.array(obj_left_labels)    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+        
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/house/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =4
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/lab/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    k =1
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    k =2
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+
+    k =3
+    obj_left_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_left%s.npy"%k)
+    obj_label = np.hstack((obj_label, np.array(obj_left_labels)))    
+    obj_right_labels = np.load("../../../../Disk2/cedl/handcam/labels/office/obj_right%s.npy"%k)
+    obj_label = np.hstack((obj_label,np.array(obj_right_labels)))
+    
+    obj_label = np.array(obj_label)
+    obj_label = np.reshape(obj_label,[1,14992]) 
+    for i in range(0,14992):
+        a = obj_label[0,i]
+        #print(a)
+        num_obj_left_label[i,int(a)] = 1
+
+    return num_obj_left_label   
+    
def read_obj_labels_test():
    """Load the held-out (test) object labels as one-hot vectors.

    Reads the per-sequence left-/right-hand object label files of the
    test split (house 4-6, lab 5-8, office 4-6), concatenates them in
    that fixed order, one-hot encodes the 12776 class ids over 24 object
    classes, and returns the first 12736 rows as an int array of shape
    (12736, 24).

    Returns:
        numpy.ndarray: int one-hot label matrix of shape (12736, 24).
    """
    label_root = "../../../../Disk2/cedl/handcam/labels"
    # (scene, sequence ids) of the test split, in the exact order the
    # original hand-unrolled concatenation used.
    test_split = [("house", (4, 5, 6)),
                  ("lab", (5, 6, 7, 8)),
                  ("office", (4, 5, 6))]

    parts = []
    for scene, seq_ids in test_split:
        for k in seq_ids:
            # For each sequence the left-hand labels come first,
            # immediately followed by the right-hand labels.
            parts.append(np.load("%s/%s/obj_left%s.npy" % (label_root, scene, k)))
            parts.append(np.load("%s/%s/obj_right%s.npy" % (label_root, scene, k)))
    obj_label = np.hstack(parts).reshape(-1)

    # One-hot encode: row i gets a 1 in the column of its class id.
    num_obj_left_label = np.zeros([12776, 24])
    for i in range(12776):
        num_obj_left_label[i, int(obj_label[i])] = 1

    # Keep only the first 12736 = 199 * 64 rows so the label count matches
    # the number of test samples consumed in whole batches of 64.
    obj_test_label = num_obj_left_label[:12736, :]
    return np.array(obj_test_label.astype(int))
+    
# Materialize both label matrices once at import time; importing this
# module therefore performs the (slow) .npy loading immediately.
obj_label = read_obj_labels()  # one-hot training labels

obj_test_label = read_obj_labels_test()  # one-hot test labels, shape (12736, 24)
diff --git a/results/test_FA.py b/results/test_FA.py
new file mode 100644
index 0000000..efd9ea2
--- /dev/null
+++ b/results/test_FA.py
@@ -0,0 +1,215 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Fri Oct 13 22:39:16 2017
+
+@author: nvlab
+"""
+
+import tensorflow as tf
+from time import time
+import read_data_FA
+import numpy as np
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+from sklearn.metrics import precision_recall_curve
+from sklearn.metrics import average_precision_score
+from itertools import cycle
+
# Evaluation hyper-parameters for the FA (free/active hand) test script.
batchSize = 64
# Number of whole batches in the 12776-sample test split.  Floor division
# keeps this an int under Python 3 as well (plain "/" yields a float
# there, which would later break range(trainEpoch)).
trainEpoch = 12776 // batchSize
learning_rate = 0.00001
epoch_list = []
accuracy_list = []
loss_list = []
# Precision/recall curves and average precision, keyed by class index
# (0/1) or the string "micro" for the micro-averaged curve.
precision = dict()
recall = dict()
average_precision = dict()
+
def weight(shape):
    """Create a weight variable of `shape`, truncated-normal initialised."""
    init = tf.truncated_normal(shape, stddev=0.1)
    return tf.Variable(init, name='W', dtype=tf.float32)
def bias(shape):
    """Create a bias variable of `shape`, initialised to the constant 0.1."""
    init = tf.constant(0.1, shape=shape)
    return tf.Variable(init, name='b', dtype=tf.float32)
def conv2d(x, W):
    """2-D convolution of `x` with kernel `W`, stride 1, 'SAME' padding."""
    return tf.nn.conv2d(x, W, padding='SAME', strides=[1, 1, 1, 1])
def max_pooling(x):
    """2x2 max-pooling with stride 2 ('SAME' padding): halves H and W."""
    window = [1, 2, 2, 1]
    return tf.nn.max_pool(x, ksize=window, strides=window, padding='SAME')
def BN(Conv, out_size):
    # Batch-normalize `Conv` with learnable per-channel scale/shift.
    # NOTE(review): moments are taken over axis 0 only, so for a 4-D conv
    # activation each (H, W, C) position is normalised independently —
    # confirm this is intended rather than axes=[0, 1, 2].
    fc_mean, fc_var = tf.nn.moments(Conv, axes=[0])
    scale = tf.Variable(tf.ones([out_size]))
    shift = tf.Variable(tf.zeros([out_size]))
    epsilon=0.001
    # Track running statistics; the control dependency forces the EMA
    # update op to run whenever the batch statistics are evaluated.
    ema = tf.train.ExponentialMovingAverage(decay=0.5)
    def mean_var_with_update():
        ema_apply_op = ema.apply([fc_mean, fc_var])
        with tf.control_dependencies([ema_apply_op]):
            return tf.identity(fc_mean), tf.identity(fc_var)
    mean, var = mean_var_with_update()
    # NOTE(review): the current batch statistics (not the EMA averages)
    # are always used, even at test time — verify that is the intent.
    Conv = tf.nn.batch_normalization(Conv, mean, var, shift, scale, epsilon)
    return Conv
+                          
# ---- CNN graph definition (must match the trained FA checkpoint) ----
# Three conv stages (2 convs + 2x2 pool each), then two dense layers and
# a 2-way softmax output.

with tf.name_scope('Input_layer'):
    # Each sample is a flattened 128x248 RGB frame (95232 values).
    x = tf.placeholder("float", shape=[None, 95232])
    x_image = tf.reshape(x, [-1, 128,248, 3])

with tf.name_scope('C1_Conv'):
    W1 = weight([3,3,3,36])
    b1 = bias([36])
    Conv1 = conv2d(x_image, W1)+b1
    Conv1_BN = BN(Conv1, 36)
    C1_Conv = tf.nn.relu(Conv1_BN)
with tf.name_scope('C2_Conv'):
    W2 = weight([3,3,36,36])
    b2 = bias([36])
    Conv2 = conv2d(C1_Conv, W2)+b2
    Conv2_BN = BN(Conv2, 36)
    C2_Conv = tf.nn.relu(Conv2_BN)
with tf.name_scope('C1_Pool'):
    C1_Pool = max_pooling(C2_Conv)

with tf.name_scope('C3_Conv'):
    W3 = weight([3,3,36,64])
    b3 = bias([64])
    Conv3 = conv2d(C1_Pool, W3)+b3
    Conv3_BN = BN(Conv3, 64)
    C3_Conv = tf.nn.relu(Conv3_BN)
with tf.name_scope('C4_Conv'):
    W4 = weight([3,3,64,64])
    b4 = bias([64])
    Conv4 = conv2d(C3_Conv, W4)+b4
    Conv4_BN = BN(Conv4, 64)
    C4_Conv = tf.nn.relu(Conv4_BN)
with tf.name_scope('C2_Pool'):
    C2_Pool = max_pooling(C4_Conv)

with tf.name_scope('C5_Conv'):
    W8 = weight([3,3,64,128])
    b8 = bias([128])
    Conv5 = conv2d(C2_Pool, W8)+b8
    Conv5_BN = BN(Conv5, 128)
    C5_Conv = tf.nn.relu(Conv5_BN)
with tf.name_scope('C6_Conv'):
    W9 = weight([3,3,128,128])
    b9 = bias([128])
    Conv6 = conv2d(C5_Conv, W9)+b9
    Conv6_BN = BN(Conv6, 128)
    C6_Conv = tf.nn.relu(Conv6_BN)
with tf.name_scope('C3_Pool'):
    C3_Pool = max_pooling(C6_Conv)

with tf.name_scope('D_Flat'):
    # 63488 = 16 * 31 * 128 after three 2x2 poolings of the 128x248 input.
    D_Flat = tf.reshape(C3_Pool, [-1,63488])

# NOTE(review): both hidden layers reuse the scope name 'D_Hidden_Layer';
# kept as-is so node names stay compatible with the saved checkpoint.
with tf.name_scope('D_Hidden_Layer'):
    W5 = weight([63488, 1024])
    b5 = bias([1024])
    D_Hidden1 = tf.matmul(D_Flat, W5)+b5
    D_Hidden1_BN = BN(D_Hidden1, 1024)
    D_Hidden1_BN = tf.nn.relu(D_Hidden1_BN)
    # NOTE(review): dropout stays active (keep_prob=0.5) during this test
    # script's evaluation — confirm that is intended.
    D_Hidden1_Dropout = tf.nn.dropout(D_Hidden1_BN, keep_prob=0.5)
with tf.name_scope('D_Hidden_Layer'):
    W6 = weight([1024, 512])
    b6 = bias([512])
    D_Hidden2 = tf.matmul(D_Hidden1_Dropout, W6)+b6
    D_Hidden2_BN = BN(D_Hidden2, 512)
    D_Hidden2_BN = tf.nn.relu(D_Hidden2_BN)
    D_Hidden2_Dropout = tf.nn.dropout(D_Hidden2_BN, keep_prob=0.5)
with tf.name_scope('Output_Layer'):
    # 2-way softmax: free vs. active hand.
    W7 = weight([512, 2])
    b7 = bias([2])
    y_predict = tf.nn.softmax(tf.matmul(D_Hidden2_Dropout, W7)+b7)
+
with tf.name_scope('optimizer'):
    y_label = tf.placeholder("float", shape=[None, 2],
                             name='y_label')
    # NOTE(review): y_predict is already softmax output, but
    # softmax_cross_entropy_with_logits expects raw logits — softmax is
    # effectively applied twice.  Left unchanged here to keep the graph
    # identical to the one the checkpoint was trained with.
    loss_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits
                    (logits=y_predict, labels=y_label))
    # Unused in this script (no training step is ever run).
    optimizer = tf.train.AdamOptimizer(learning_rate).minimize(loss_function)

with tf.name_scope('evaluate_model'):
    # Accuracy = fraction of samples whose arg-max prediction matches.
    correct_prediction = tf.equal(tf.argmax(y_predict, 1),
                                  tf.argmax(y_label, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))

# Saver used below to restore the trained weights.
saver = tf.train.Saver()
+
# Restore the trained model, run the whole test set through it in batches,
# then compute and plot per-class and micro-averaged precision/recall.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    saver.restore(sess, "model/model_FA.ckpt")
    print('Model restored')
    # Accumulate predictions batch by batch; read_data_test_ is assumed to
    # advance an internal cursor on every call — TODO confirm.
    for k in range(trainEpoch):   
        if k ==0:        
            val_x = read_data_FA.read_data_test_(batchSize)    
            y_test_all = sess.run(y_predict, feed_dict={x:val_x})
        else:
            val_x = read_data_FA.read_data_test_(batchSize)    
            y_test = sess.run(y_predict, feed_dict={x:val_x})
            y_test_all = np.vstack((y_test_all,y_test))
    y_test_all = np.array(y_test_all)
    # Ground-truth one-hot labels for the same samples.
    y_score = read_data_FA.read_FA_labels_test()
    for i in range(2):
        precision[i], recall[i], _ = precision_recall_curve(y_score[:, i],
                                                        y_test_all[:, i])
        average_precision[i] = average_precision_score(y_score[:, i], y_test_all[:, i])

    # Micro-average: treat every (sample, class) cell as one prediction.
    precision["micro"], recall["micro"], _ = precision_recall_curve(y_score.ravel(),
            y_test_all.ravel())
    average_precision["micro"] = average_precision_score(y_score,y_test_all,
                                                     average="micro")
    print('Average precision score, micro-averaged over all classes: {0:0.2f}'
            .format(average_precision["micro"]))

    # First figure: the micro-averaged PR step curve (never saved to disk;
    # only the second figure is written at the bottom of the script).
    plt.figure()
    plt.step(recall['micro'], precision['micro'], color='b', alpha=0.2,
     where='post')
    plt.fill_between(recall["micro"], precision["micro"],alpha=0.2,color='b')

    plt.xlabel('Recall')
    plt.ylabel('Precision')
    plt.ylim([0.0, 1.05])
    plt.xlim([0.0, 1.0])
    plt.title('Average precision score, micro-averaged over all classes: AUC={0:0.2f}'
            .format(average_precision["micro"]))

    # Second figure: iso-F1 contours plus per-class and micro PR curves.
    colors = cycle(['navy', 'turquoise', 'darkorange', 'cornflowerblue', 'teal'])
    plt.figure(figsize=(7, 8))
    f_scores = np.linspace(0.2, 0.8, num=4)
    lines = []
    labels = []
    for f_score in f_scores:
        # NOTE: rebinds x/y locally, shadowing the graph placeholder name;
        # harmless because the session feeds are already done.
        x = np.linspace(0.01, 1)
        y = f_score * x / (2 * x - f_score)
        l, = plt.plot(x[y >= 0], y[y >= 0], color='gray', alpha=0.2)
        plt.annotate('f1={0:0.1f}'.format(f_score), xy=(0.9, y[45] + 0.02))

    lines.append(l)
    labels.append('iso-f1 curves')
    l, = plt.plot(recall["micro"], precision["micro"], color='gold', lw=2)
    lines.append(l)
    labels.append('micro-average Precision-recall (area = {0:0.2f})'
              ''.format(average_precision["micro"]))

    for i, color in zip(range(2), colors):
        l, = plt.plot(recall[i], precision[i], color=color, lw=2)
        lines.append(l)
        labels.append('Precision-recall for class {0} (area = {1:0.2f})'
                  ''.format(i, average_precision[i]))

    fig = plt.gcf()
    fig.subplots_adjust(bottom=0.25)
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('Recall')
    plt.ylabel('Precision')
    plt.title('Extension of FA Precision-Recall curve to multi-class')
    plt.legend(lines, labels, loc=(0.55, 0.55), prop=dict(size=8))

    # Redundant: the `with` block closes the session automatically.
    sess.close()

plt.savefig('FA_precision-recall.png')
+    
+                   
\ No newline at end of file
diff --git a/results/test_ges.py b/results/test_ges.py
new file mode 100644
index 0000000..be4afbd
--- /dev/null
+++ b/results/test_ges.py
@@ -0,0 +1,233 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Wed Oct  4 11:04:22 2017
+
+@author: nvlab
+"""
+
+import tensorflow as tf
+from time import time
+import read_data_ges
+import numpy as np
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+from sklearn.metrics import precision_recall_curve
+from sklearn.metrics import average_precision_score
+from itertools import cycle
+
# Evaluation hyper-parameters for the gesture test script.
batchSize = 64
# Number of whole batches in the 12776-sample test split.  Floor division
# keeps this an int under Python 3 as well (plain "/" yields a float
# there, which would later break range(trainEpoch)).
trainEpoch = 12776 // batchSize
learning_rate = 0.00001
epoch_list = []
accuracy_list = []
loss_list = []
# Precision/recall curves and average precision, keyed by class index
# (0-12) or the string "micro" for the micro-averaged curve.
precision = dict()
recall = dict()
average_precision = dict()
+
def weight(shape):
    """Create a weight variable of `shape`, truncated-normal initialised."""
    init = tf.truncated_normal(shape, stddev=0.1)
    return tf.Variable(init, name='W', dtype=tf.float32)
def bias(shape):
    """Create a bias variable of `shape`, initialised to the constant 0.1."""
    init = tf.constant(0.1, shape=shape)
    return tf.Variable(init, name='b', dtype=tf.float32)
def conv2d(x, W):
    """2-D convolution of `x` with kernel `W`, stride 1, 'SAME' padding."""
    return tf.nn.conv2d(x, W, padding='SAME', strides=[1, 1, 1, 1])
def max_pooling(x):
    """2x2 max-pooling with stride 2 ('SAME' padding): halves H and W."""
    window = [1, 2, 2, 1]
    return tf.nn.max_pool(x, ksize=window, strides=window, padding='SAME')
def BN(Conv, out_size):
    # Batch-normalize `Conv` with learnable per-channel scale/shift.
    # NOTE(review): moments are taken over axis 0 only, so for a 4-D conv
    # activation each (H, W, C) position is normalised independently —
    # confirm this is intended rather than axes=[0, 1, 2].
    fc_mean, fc_var = tf.nn.moments(Conv, axes=[0])
    scale = tf.Variable(tf.ones([out_size]))
    shift = tf.Variable(tf.zeros([out_size]))
    epsilon=0.001
    # Track running statistics; the control dependency forces the EMA
    # update op to run whenever the batch statistics are evaluated.
    ema = tf.train.ExponentialMovingAverage(decay=0.5)
    def mean_var_with_update():
        ema_apply_op = ema.apply([fc_mean, fc_var])
        with tf.control_dependencies([ema_apply_op]):
            return tf.identity(fc_mean), tf.identity(fc_var)
    mean, var = mean_var_with_update()
    # NOTE(review): the current batch statistics (not the EMA averages)
    # are always used, even at test time — verify that is the intent.
    Conv = tf.nn.batch_normalization(Conv, mean, var, shift, scale, epsilon)
    return Conv
+                          
# ---- CNN graph definition (must match the trained gesture checkpoint) ----
# Four conv stages with dense-style skip concatenations, then two dense
# layers and a 13-way softmax output.

with tf.name_scope('Input_layer'):
    # Each sample is a flattened 128x248 RGB frame (95232 values).
    x = tf.placeholder("float", shape=[None, 95232])
    x_image = tf.reshape(x, [-1, 128,248, 3])

with tf.name_scope('C1_Conv'):
    W1 = weight([3,3,3,36])
    b1 = bias([36])
    Conv1 = conv2d(x_image, W1)+b1
    Conv1_BN = BN(Conv1, 36)
    C1_Conv = tf.nn.relu(Conv1_BN)
with tf.name_scope('C2_Conv'):
    W2 = weight([3,3,36,36])
    b2 = bias([36])
    Conv2 = conv2d(C1_Conv, W2)+b2
    Conv2_BN = BN(Conv2, 36)
    C2_Conv = tf.nn.relu(Conv2_BN)
with tf.name_scope('C1_Pool'):
    C1_Pool = max_pooling(C2_Conv)

with tf.name_scope('C3_Conv'):
    W3 = weight([3,3,36,64])
    b3 = bias([64])
    Conv3 = conv2d(C1_Pool, W3)+b3
    Conv3_BN = BN(Conv3, 64)
    C3_Conv = tf.nn.relu(Conv3_BN)
# Channel-wise skip concat: 36 + 64 = 100 input channels for C4.
C3_concate = tf.concat([C1_Pool, C3_Conv],3)
with tf.name_scope('C4_Conv'):
    W4 = weight([3,3,100,64])
    b4 = bias([64])
    Conv4 = conv2d(C3_concate, W4)+b4
    Conv4_BN = BN(Conv4, 64)
    C4_Conv = tf.nn.relu(Conv4_BN)
with tf.name_scope('C2_Pool'):
    C2_Pool = max_pooling(C4_Conv)

with tf.name_scope('C5_Conv'):
    W8 = weight([3,3,64,128])
    b8 = bias([128])
    Conv5 = conv2d(C2_Pool, W8)+b8
    Conv5_BN = BN(Conv5, 128)
    C5_Conv = tf.nn.relu(Conv5_BN)
# Skip concat: 64 + 128 = 192 input channels for C6.
C5_concate = tf.concat([C2_Pool, C5_Conv],3)
with tf.name_scope('C6_Conv'):
    W9 = weight([3,3,192,128])
    b9 = bias([128])
    Conv6 = conv2d(C5_concate, W9)+b9
    Conv6_BN = BN(Conv6, 128)
    C6_Conv = tf.nn.relu(Conv6_BN)
with tf.name_scope('C3_Pool'):
    C3_Pool = max_pooling(C6_Conv)

with tf.name_scope('C7_Conv'):
    W10 = weight([3,3,128,256])
    b10 = bias([256])
    Conv7 = conv2d(C3_Pool, W10)+b10
    Conv7_BN = BN(Conv7, 256)
    C7_Conv = tf.nn.relu(Conv7_BN)
# Skip concat: 128 + 256 = 384 input channels for C8.
C7_concate = tf.concat([C3_Pool, C7_Conv],3)
with tf.name_scope('C8_Conv'):
    W11 = weight([3,3,384,256])
    b11 = bias([256])
    Conv8 = conv2d(C7_concate, W11)+b11
    Conv8_BN = BN(Conv8, 256)
    C8_Conv = tf.nn.relu(Conv8_BN)
with tf.name_scope('C4_Pool'):
    C4_Pool = max_pooling(C8_Conv)

with tf.name_scope('D_Flat'):
    # 32768 = 8 * 16 * 256 after four 2x2 poolings of the 128x248 input.
    D_Flat = tf.reshape(C4_Pool, [-1,32768])

with tf.name_scope('D_Hidden1_Layer'):
    W5 = weight([32768, 1024])
    b5 = bias([1024])
    D_Hidden1 = tf.matmul(D_Flat, W5)+b5
    D_Hidden1_BN = BN(D_Hidden1, 1024)
    D_Hidden1_BN = tf.nn.relu(D_Hidden1_BN)
    # NOTE(review): dropout stays active (keep_prob=0.5) during this test
    # script's evaluation — confirm that is intended.
    D_Hidden1_Dropout = tf.nn.dropout(D_Hidden1_BN, keep_prob=0.5)
with tf.name_scope('D_Hidden2_Layer'):
    W6 = weight([1024, 512])
    b6 = bias([512])
    D_Hidden2 = tf.matmul(D_Hidden1_Dropout, W6)+b6
    D_Hidden2_BN = BN(D_Hidden2, 512)
    D_Hidden2_BN = tf.nn.relu(D_Hidden2_BN)
    D_Hidden2_Dropout = tf.nn.dropout(D_Hidden2_BN, keep_prob=0.5)
with tf.name_scope('Output_Layer'):
    # 13-way softmax over gesture classes.
    W7 = weight([512, 13])
    b7 = bias([13])
    y_predict = tf.nn.softmax(tf.matmul(D_Hidden2_Dropout, W7)+b7)
+
with tf.name_scope('optimizer'):
    y_label = tf.placeholder("float", shape=[None, 13],
                             name='y_label')
    # NOTE(review): y_predict is already softmax output, but
    # softmax_cross_entropy_with_logits expects raw logits — softmax is
    # effectively applied twice.  Left unchanged here to keep the graph
    # identical to the one the checkpoint was trained with.
    loss_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits
                    (logits=y_predict, labels=y_label))
    # Unused in this script (no training step is ever run).
    optimizer = tf.train.AdamOptimizer(learning_rate).minimize(loss_function)

with tf.name_scope('evaluate_model'):
    # Accuracy = fraction of samples whose arg-max prediction matches.
    correct_prediction = tf.equal(tf.argmax(y_predict, 1),
                                  tf.argmax(y_label, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))

# Saver used below to restore the trained weights.
saver = tf.train.Saver()
+
# Restore the trained model, run the whole test set through it in batches,
# then compute and plot per-class and micro-averaged precision/recall.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    saver.restore(sess, "model/model_ges.ckpt")
    print('Model restored')
    # Accumulate predictions batch by batch; read_data_test_ is assumed to
    # advance an internal cursor on every call — TODO confirm.
    for k in range(trainEpoch):   
        if k ==0:        
            val_x = read_data_ges.read_data_test_(batchSize)    
            y_test_all = sess.run(y_predict, feed_dict={x:val_x})
        else:
            val_x = read_data_ges.read_data_test_(batchSize)    
            y_test = sess.run(y_predict, feed_dict={x:val_x})
            y_test_all = np.vstack((y_test_all,y_test))
    y_test_all = np.array(y_test_all)
    # Ground-truth one-hot labels for the same samples.
    y_score = read_data_ges.read_ges_labels_test()
    for i in range(13):
        precision[i], recall[i], _ = precision_recall_curve(y_score[:, i],
                                                        y_test_all[:, i])
        average_precision[i] = average_precision_score(y_score[:, i], y_test_all[:, i])

    # Micro-average: treat every (sample, class) cell as one prediction.
    precision["micro"], recall["micro"], _ = precision_recall_curve(y_score.ravel(),
            y_test_all.ravel())
    average_precision["micro"] = average_precision_score(y_score,y_test_all,
                                                     average="micro")
    print('Average precision score, micro-averaged over all classes: {0:0.2f}'
            .format(average_precision["micro"]))

    # First figure: the micro-averaged PR step curve (never saved to disk;
    # only the second figure is written at the bottom of the script).
    plt.figure()
    plt.step(recall['micro'], precision['micro'], color='b', alpha=0.2,
     where='post')
    plt.fill_between(recall["micro"], precision["micro"],alpha=0.2,color='b')

    plt.xlabel('Recall')
    plt.ylabel('Precision')
    plt.ylim([0.0, 1.05])
    plt.xlim([0.0, 1.0])
    plt.title('Average precision score, micro-averaged over all classes: AUC={0:0.2f}'
            .format(average_precision["micro"]))

    # Second figure: iso-F1 contours plus per-class and micro PR curves.
    colors = cycle(['navy', 'turquoise', 'darkorange', 'cornflowerblue', 'teal'])
    plt.figure(figsize=(7, 8))
    f_scores = np.linspace(0.2, 0.8, num=4)
    lines = []
    labels = []
    for f_score in f_scores:
        # NOTE: rebinds x/y locally, shadowing the graph placeholder name;
        # harmless because the session feeds are already done.
        x = np.linspace(0.01, 1)
        y = f_score * x / (2 * x - f_score)
        l, = plt.plot(x[y >= 0], y[y >= 0], color='gray', alpha=0.2)
        plt.annotate('f1={0:0.1f}'.format(f_score), xy=(0.9, y[45] + 0.02))

    lines.append(l)
    labels.append('iso-f1 curves')
    l, = plt.plot(recall["micro"], precision["micro"], color='gold', lw=2)
    lines.append(l)
    labels.append('micro-average Precision-recall (area = {0:0.2f})'
              ''.format(average_precision["micro"]))

    # Only 5 colors cycle across the 13 classes, so colors repeat.
    for i, color in zip(range(13), colors):
        l, = plt.plot(recall[i], precision[i], color=color, lw=2)
        lines.append(l)
        labels.append('Precision-recall for class {0} (area = {1:0.2f})'
                  ''.format(i, average_precision[i]))

    fig = plt.gcf()
    fig.subplots_adjust(bottom=0.25)
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('Recall')
    plt.ylabel('Precision')
    plt.title('Extension of Ges Precision-Recall curve to multi-class')
    plt.legend(lines, labels, loc=(0.55, 0.55), prop=dict(size=8))

    # Redundant: the `with` block closes the session automatically.
    sess.close()

plt.savefig('ges_precision-recall.png')
+    
+                   
\ No newline at end of file
diff --git a/results/test_obj.py b/results/test_obj.py
new file mode 100644
index 0000000..ac2a916
--- /dev/null
+++ b/results/test_obj.py
@@ -0,0 +1,233 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Fri Oct 13 22:39:16 2017
+
+@author: nvlab
+"""
+
+import tensorflow as tf
+from time import time
+import read_data_obj
+import numpy as np
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+from sklearn.metrics import precision_recall_curve
+from sklearn.metrics import average_precision_score
+from itertools import cycle
+
# Evaluation hyper-parameters for the object-category test script.
batchSize = 64
# Number of whole batches in the 12776-sample test split.  Floor division
# keeps this an int under Python 3 as well (plain "/" yields a float
# there, which would later break range(trainEpoch)).
trainEpoch = 12776 // batchSize
learning_rate = 0.00001
epoch_list = []
accuracy_list = []
loss_list = []
# Precision/recall curves and average precision, keyed by class index
# (0-23) or the string "micro" for the micro-averaged curve.
precision = dict()
recall = dict()
average_precision = dict()
+
def weight(shape):
    """Create a weight variable of `shape`, truncated-normal initialised."""
    init = tf.truncated_normal(shape, stddev=0.1)
    return tf.Variable(init, name='W', dtype=tf.float32)
def bias(shape):
    """Create a bias variable of `shape`, initialised to the constant 0.1."""
    init = tf.constant(0.1, shape=shape)
    return tf.Variable(init, name='b', dtype=tf.float32)
def conv2d(x, W):
    """2-D convolution of `x` with kernel `W`, stride 1, 'SAME' padding."""
    return tf.nn.conv2d(x, W, padding='SAME', strides=[1, 1, 1, 1])
def max_pooling(x):
    """2x2 max-pooling with stride 2 ('SAME' padding): halves H and W."""
    window = [1, 2, 2, 1]
    return tf.nn.max_pool(x, ksize=window, strides=window, padding='SAME')
def BN(Conv, out_size):
    # Batch-normalize `Conv` with learnable per-channel scale/shift.
    # NOTE(review): moments are taken over axis 0 only, so for a 4-D conv
    # activation each (H, W, C) position is normalised independently —
    # confirm this is intended rather than axes=[0, 1, 2].
    fc_mean, fc_var = tf.nn.moments(Conv, axes=[0])
    scale = tf.Variable(tf.ones([out_size]))
    shift = tf.Variable(tf.zeros([out_size]))
    epsilon=0.001
    # Track running statistics; the control dependency forces the EMA
    # update op to run whenever the batch statistics are evaluated.
    ema = tf.train.ExponentialMovingAverage(decay=0.5)
    def mean_var_with_update():
        ema_apply_op = ema.apply([fc_mean, fc_var])
        with tf.control_dependencies([ema_apply_op]):
            return tf.identity(fc_mean), tf.identity(fc_var)
    mean, var = mean_var_with_update()
    # NOTE(review): the current batch statistics (not the EMA averages)
    # are always used, even at test time — verify that is the intent.
    Conv = tf.nn.batch_normalization(Conv, mean, var, shift, scale, epsilon)
    return Conv
+                          
# ---- CNN graph definition (must match the trained object checkpoint) ----
# Four conv stages with dense-style skip concatenations, then two dense
# layers and a 24-way softmax output.

with tf.name_scope('Input_layer'):
    # Each sample is a flattened 128x248 RGB frame (95232 values).
    x = tf.placeholder("float", shape=[None, 95232])
    x_image = tf.reshape(x, [-1, 128,248, 3])

with tf.name_scope('C1_Conv'):
    W1 = weight([3,3,3,36])
    b1 = bias([36])
    Conv1 = conv2d(x_image, W1)+b1
    Conv1_BN = BN(Conv1, 36)
    C1_Conv = tf.nn.relu(Conv1_BN)
with tf.name_scope('C2_Conv'):
    W2 = weight([3,3,36,36])
    b2 = bias([36])
    Conv2 = conv2d(C1_Conv, W2)+b2
    Conv2_BN = BN(Conv2, 36)
    C2_Conv = tf.nn.relu(Conv2_BN)
with tf.name_scope('C1_Pool'):
    C1_Pool = max_pooling(C2_Conv)

with tf.name_scope('C3_Conv'):
    W3 = weight([3,3,36,64])
    b3 = bias([64])
    Conv3 = conv2d(C1_Pool, W3)+b3
    Conv3_BN = BN(Conv3, 64)
    C3_Conv = tf.nn.relu(Conv3_BN)
# Channel-wise skip concat: 36 + 64 = 100 input channels for C4.
C3_concate = tf.concat([C1_Pool, C3_Conv],3)
with tf.name_scope('C4_Conv'):
    W4 = weight([3,3,100,128])
    b4 = bias([128])
    Conv4 = conv2d(C3_concate, W4)+b4
    Conv4_BN = BN(Conv4, 128)
    C4_Conv = tf.nn.relu(Conv4_BN)
with tf.name_scope('C2_Pool'):
    C2_Pool = max_pooling(C4_Conv)

with tf.name_scope('C5_Conv'):
    W8 = weight([3,3,128,128])
    b8 = bias([128])
    Conv5 = conv2d(C2_Pool, W8)+b8
    Conv5_BN = BN(Conv5, 128)
    C5_Conv = tf.nn.relu(Conv5_BN)
# Skip concat: 128 + 128 = 256 input channels for C6.
C5_concate = tf.concat([C2_Pool, C5_Conv],3)
with tf.name_scope('C6_Conv'):
    W9 = weight([3,3,256,256])
    b9 = bias([256])
    Conv6 = conv2d(C5_concate, W9)+b9
    Conv6_BN = BN(Conv6, 256)
    C6_Conv = tf.nn.relu(Conv6_BN)
with tf.name_scope('C3_Pool'):
    C3_Pool = max_pooling(C6_Conv)

with tf.name_scope('C7_Conv'):
    W10 = weight([3,3,256,256])
    b10 = bias([256])
    Conv7 = conv2d(C3_Pool, W10)+b10
    Conv7_BN = BN(Conv7, 256)
    C7_Conv = tf.nn.relu(Conv7_BN)
# Skip concat: 256 + 256 = 512 input channels for C8.
C7_concate = tf.concat([C3_Pool, C7_Conv],3)
with tf.name_scope('C8_Conv'):
    W11 = weight([3,3,512,512])
    b11 = bias([512])
    Conv8 = conv2d(C7_concate, W11)+b11
    Conv8_BN = BN(Conv8, 512)
    C8_Conv = tf.nn.relu(Conv8_BN)
with tf.name_scope('C4_Pool'):
    C4_Pool = max_pooling(C8_Conv)

with tf.name_scope('D_Flat'):
    # 65536 = 8 * 16 * 512 after four 2x2 poolings of the 128x248 input.
    D_Flat = tf.reshape(C4_Pool, [-1,65536])

with tf.name_scope('D_Hidden1_Layer'):
    W5 = weight([65536, 1024])
    b5 = bias([1024])
    D_Hidden1 = tf.matmul(D_Flat, W5)+b5
    D_Hidden1_BN = BN(D_Hidden1, 1024)
    D_Hidden1_BN = tf.nn.relu(D_Hidden1_BN)
    # NOTE(review): dropout stays active (keep_prob=0.5) during this test
    # script's evaluation — confirm that is intended.
    D_Hidden1_Dropout = tf.nn.dropout(D_Hidden1_BN, keep_prob=0.5)
with tf.name_scope('D_Hidden2_Layer'):
    W6 = weight([1024, 512])
    b6 = bias([512])
    D_Hidden2 = tf.matmul(D_Hidden1_Dropout, W6)+b6
    D_Hidden2_BN = BN(D_Hidden2, 512)
    D_Hidden2_BN = tf.nn.relu(D_Hidden2_BN)
    D_Hidden2_Dropout = tf.nn.dropout(D_Hidden2_BN, keep_prob=0.5)
with tf.name_scope('Output_Layer'):
    # 24-way softmax over object categories.
    W7 = weight([512, 24])
    b7 = bias([24])
    y_predict = tf.nn.softmax(tf.matmul(D_Hidden2_Dropout, W7)+b7)
+
with tf.name_scope('optimizer'):
    y_label = tf.placeholder("float", shape=[None, 24],
                             name='y_label')
    # NOTE(review): y_predict is already softmax output, but
    # softmax_cross_entropy_with_logits expects raw logits — softmax is
    # effectively applied twice.  Left unchanged here to keep the graph
    # identical to the one the checkpoint was trained with.
    loss_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits
                    (logits=y_predict, labels=y_label))
    # Unused in this script (no training step is ever run).
    optimizer = tf.train.AdamOptimizer(learning_rate).minimize(loss_function)

with tf.name_scope('evaluate_model'):
    # Accuracy = fraction of samples whose arg-max prediction matches.
    correct_prediction = tf.equal(tf.argmax(y_predict, 1),
                                  tf.argmax(y_label, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))

# Saver used below to restore the trained weights.
saver = tf.train.Saver()
+
# Restore the trained model, run the whole test set through it in batches,
# then compute and plot per-class and micro-averaged precision/recall.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    saver.restore(sess, "model/model_obj.ckpt")
    print('Model restored')
    # Accumulate predictions batch by batch; read_data_test_ is assumed to
    # advance an internal cursor on every call — TODO confirm.
    for k in range(trainEpoch):   
        if k ==0:        
            val_x = read_data_obj.read_data_test_(batchSize)    
            y_test_all = sess.run(y_predict, feed_dict={x:val_x})
        else:
            val_x = read_data_obj.read_data_test_(batchSize)    
            y_test = sess.run(y_predict, feed_dict={x:val_x})
            y_test_all = np.vstack((y_test_all,y_test))
    y_test_all = np.array(y_test_all)
    # Ground-truth one-hot labels for the same samples.
    y_score = read_data_obj.read_obj_labels_test()
    for i in range(24):
        precision[i], recall[i], _ = precision_recall_curve(y_score[:, i],
                                                        y_test_all[:, i])
        average_precision[i] = average_precision_score(y_score[:, i], y_test_all[:, i])

    # Micro-average: treat every (sample, class) cell as one prediction.
    precision["micro"], recall["micro"], _ = precision_recall_curve(y_score.ravel(),
            y_test_all.ravel())
    average_precision["micro"] = average_precision_score(y_score,y_test_all,
                                                     average="micro")
    print('Average precision score, micro-averaged over all classes: {0:0.2f}'
            .format(average_precision["micro"]))

    # First figure: the micro-averaged PR step curve (never saved to disk;
    # only the second figure is written at the bottom of the script).
    plt.figure()
    plt.step(recall['micro'], precision['micro'], color='b', alpha=0.2,
     where='post')
    plt.fill_between(recall["micro"], precision["micro"],alpha=0.2,color='b')

    plt.xlabel('Recall')
    plt.ylabel('Precision')
    plt.ylim([0.0, 1.05])
    plt.xlim([0.0, 1.0])
    plt.title('Average precision score, micro-averaged over all classes: AUC={0:0.2f}'
            .format(average_precision["micro"]))

    # Second figure: iso-F1 contours plus per-class and micro PR curves.
    colors = cycle(['navy', 'turquoise', 'darkorange', 'cornflowerblue', 'teal'])
    plt.figure(figsize=(7, 8))
    f_scores = np.linspace(0.2, 0.8, num=4)
    lines = []
    labels = []
    for f_score in f_scores:
        # NOTE: rebinds x/y locally, shadowing the graph placeholder name;
        # harmless because the session feeds are already done.
        x = np.linspace(0.01, 1)
        y = f_score * x / (2 * x - f_score)
        l, = plt.plot(x[y >= 0], y[y >= 0], color='gray', alpha=0.2)
        plt.annotate('f1={0:0.1f}'.format(f_score), xy=(0.9, y[45] + 0.02))

    lines.append(l)
    labels.append('iso-f1 curves')
    l, = plt.plot(recall["micro"], precision["micro"], color='gold', lw=2)
    lines.append(l)
    labels.append('micro-average Precision-recall (area = {0:0.2f})'
              ''.format(average_precision["micro"]))

    # Only 5 colors cycle across the 24 classes, so colors repeat.
    for i, color in zip(range(24), colors):
        l, = plt.plot(recall[i], precision[i], color=color, lw=2)
        lines.append(l)
        labels.append('Precision-recall for class {0} (area = {1:0.2f})'
                  ''.format(i, average_precision[i]))

    fig = plt.gcf()
    fig.subplots_adjust(bottom=0.25)
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('Recall')
    plt.ylabel('Precision')
    plt.title('Extension of obj Precision-Recall curve to multi-class')
    plt.legend(lines, labels, loc=(0.55, 0.55), prop=dict(size=8))

    # Redundant: the `with` block closes the session automatically.
    sess.close()

plt.savefig('obj_precision-recall.png')
+    
+                   
\ No newline at end of file
diff --git a/results/train.py b/results/train.py
new file mode 100644
index 0000000..bde12dd
--- /dev/null
+++ b/results/train.py
@@ -0,0 +1,245 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Wed Oct  4 15:29:09 2017
+
+@author: nvlab
+"""
+
+import tensorflow as tf
+from time import time
+import read_data
+import numpy as np
+
+trainEpoch = 50
+batchSize = 64
+totalBatchs = int(14992/batchSize)
+learning_rate=0.00001
+epoch_list=[]
+obj_accuracy_list=[]
+obj_loss_list=[]
+ges_accuracy_list=[]
+ges_loss_list=[]
+FA_accuracy_list=[]
+FA_loss_list=[]
+
+def weight(shape):
+    return tf.Variable(tf.truncated_normal(shape, stddev=0.1), 
+                       name='W', dtype=tf.float32)                    
+def bias(shape):
+    return tf.Variable(tf.constant(0.1, shape=shape), name='b',
+                       dtype=tf.float32)    
+def conv2d(x, W):
+    return tf.nn.conv2d(x, W, strides=[1,1,1,1], padding='SAME')    
+def max_pooling(x):
+    return tf.nn.max_pool(x, ksize=[1,2,2,1], strides=[1,2,2,1],
+                          padding='SAME')
+def BN(Conv, out_size):
+    fc_mean, fc_var = tf.nn.moments(Conv, axes=[0])
+    scale = tf.Variable(tf.ones([out_size]))
+    shift = tf.Variable(tf.zeros([out_size]))
+    epsilon=0.001
+    ema = tf.train.ExponentialMovingAverage(decay=0.5)
+    def mean_var_with_update():
+        ema_apply_op = ema.apply([fc_mean, fc_var])
+        with tf.control_dependencies([ema_apply_op]):
+            return tf.identity(fc_mean), tf.identity(fc_var)
+    mean, var = mean_var_with_update()
+    Conv = tf.nn.batch_normalization(Conv, mean, var, shift, scale, epsilon)
+    return Conv
+                          
+with tf.name_scope('Input_layer'):
+    x = tf.placeholder("float", shape=[None, 95232])
+    x_image = tf.reshape(x, [-1, 128,248, 3])
+    
+with tf.name_scope('C1_Conv'):
+    W1 = weight([3,3,3,36])
+    b1 = bias([36])
+    Conv1 = conv2d(x_image, W1)+b1
+    Conv1_BN = BN(Conv1, 36)
+    C1_Conv = tf.nn.relu(Conv1_BN)  
+with tf.name_scope('C2_Conv'):
+    W2 = weight([3,3,36,36])
+    b2 = bias([36])
+    Conv2 = conv2d(C1_Conv, W2)+b2
+    Conv2_BN = BN(Conv2, 36)
+    C2_Conv = tf.nn.relu(Conv2_BN) 
+with tf.name_scope('C1_Pool'):
+    C1_Pool = max_pooling(C2_Conv)
+    
+with tf.name_scope('C3_Conv'):
+    W3 = weight([3,3,36,64])
+    b3 = bias([64])
+    Conv3 = conv2d(C1_Pool, W3)+b3
+    Conv3_BN = BN(Conv3, 64)
+    C3_Conv = tf.nn.relu(Conv3_BN)
+C3_concate = tf.concat([C1_Pool, C3_Conv],3)
+with tf.name_scope('C4_Conv'):
+    W4 = weight([3,3,100,128])
+    b4 = bias([128])
+    Conv4 = conv2d(C3_concate, W4)+b4
+    Conv4_BN = BN(Conv4, 128)
+    C4_Conv = tf.nn.relu(Conv4_BN)    
+with tf.name_scope('C2_Pool'):
+    C2_Pool = max_pooling(C4_Conv)
+    
+with tf.name_scope('C5_Conv'):
+    W8 = weight([3,3,128,128])
+    b8 = bias([128])
+    Conv5 = conv2d(C2_Pool, W8)+b8
+    Conv5_BN = BN(Conv5, 128)
+    C5_Conv = tf.nn.relu(Conv5_BN)
+C5_concate = tf.concat([C2_Pool, C5_Conv],3)
+with tf.name_scope('C6_Conv'):
+    W9 = weight([3,3,256,256])
+    b9 = bias([256])
+    Conv6 = conv2d(C5_concate, W9)+b9
+    Conv6_BN = BN(Conv6, 256)
+    C6_Conv = tf.nn.relu(Conv6_BN)    
+with tf.name_scope('C3_Pool'):
+    C3_Pool = max_pooling(C6_Conv)
+
+with tf.name_scope('C7_Conv'):
+    W10 = weight([3,3,256,256])
+    b10 = bias([256])
+    Conv7 = conv2d(C3_Pool, W10)+b10
+    Conv7_BN = BN(Conv7, 256)
+    C7_Conv = tf.nn.relu(Conv7_BN)
+C7_concate = tf.concat([C3_Pool, C7_Conv],3)
+with tf.name_scope('C8_Conv'):
+    W11 = weight([3,3,512,512])
+    b11 = bias([512])
+    Conv8 = conv2d(C7_concate, W11)+b11
+    Conv8_BN = BN(Conv8, 512)
+    C8_Conv = tf.nn.relu(Conv8_BN)    
+with tf.name_scope('C4_Pool'):
+    C4_Pool = max_pooling(C8_Conv)
+    
+with tf.name_scope('D_Flat'):
+    D_Flat = tf.reshape(C4_Pool, [-1,65536])
+    
+with tf.name_scope('D_Hidden1_Layer_obj'):
+    W5 = weight([65536, 1024])
+    b5 = bias([1024])
+    D_Hidden1_obj = tf.matmul(D_Flat, W5)+b5
+    D_Hidden1_obj_BN = BN(D_Hidden1_obj, 1024)
+    D_Hidden1_obj_BN = tf.nn.relu(D_Hidden1_obj_BN)
+    D_Hidden1_obj_Dropout = tf.nn.dropout(D_Hidden1_obj_BN, keep_prob=0.5)
+with tf.name_scope('D_Hidden2_Layer_obj'):
+    W6 = weight([1024, 512])
+    b6 = bias([512])
+    D_Hidden2_obj = tf.matmul(D_Hidden1_obj_Dropout, W6)+b6
+    D_Hidden2_obj_BN = BN(D_Hidden2_obj, 512)
+    D_Hidden2_obj_BN = tf.nn.relu(D_Hidden2_obj_BN)
+    D_Hidden2_obj_Dropout = tf.nn.dropout(D_Hidden2_obj_BN, keep_prob=0.5)    
+with tf.name_scope('Output_Layer_obj'):
+    W7 = weight([512, 24])
+    b7 = bias([24])
+    y_obj_predict = tf.nn.softmax(tf.matmul(D_Hidden2_obj_Dropout, W7)+b7)    
+
+with tf.name_scope('D_Hidden1_Layer_ges'):
+    W16 = weight([65536, 1024])  # renamed from W10: that name already belongs to the C7 conv layer above
+    b16 = bias([1024])
+    D_Hidden1_ges = tf.matmul(D_Flat, W16)+b16
+    D_Hidden1_ges_BN = BN(D_Hidden1_ges, 1024)
+    D_Hidden1_ges_BN = tf.nn.relu(D_Hidden1_ges_BN)
+    D_Hidden1_ges_Dropout = tf.nn.dropout(D_Hidden1_ges_BN, keep_prob=0.5)
+with tf.name_scope('D_Hidden2_Layer_ges'):
+    W17 = weight([1024, 512])  # renamed from W11: that name already belongs to the C8 conv layer above
+    b17 = bias([512])
+    D_Hidden2_ges = tf.matmul(D_Hidden1_ges_Dropout, W17)+b17
+    D_Hidden2_ges_BN = BN(D_Hidden2_ges, 512)
+    D_Hidden2_ges_BN = tf.nn.relu(D_Hidden2_ges_BN)
+    D_Hidden2_ges_Dropout = tf.nn.dropout(D_Hidden2_ges_BN, keep_prob=0.5)    
+with tf.name_scope('Output_Layer_ges'):
+    W12 = weight([512,13])
+    b12 = bias([13])
+    y_ges_predict = tf.nn.softmax(tf.matmul(D_Hidden2_ges_Dropout, W12)+b12)    
+
+with tf.name_scope('D_Hidden1_Layer_FA'):
+    W13 = weight([65536, 1024])
+    b13 = bias([1024])
+    D_Hidden1_FA = tf.matmul(D_Flat, W13)+b13
+    D_Hidden1_FA_BN = BN(D_Hidden1_FA, 1024)
+    D_Hidden1_FA_BN = tf.nn.relu(D_Hidden1_FA_BN)
+    D_Hidden1_FA_Dropout = tf.nn.dropout(D_Hidden1_FA_BN, keep_prob=0.5)
+with tf.name_scope('D_Hidden2_Layer_FA'):
+    W14 = weight([1024, 512])
+    b14 = bias([512])
+    D_Hidden2_FA = tf.matmul(D_Hidden1_FA_Dropout, W14)+b14
+    D_Hidden2_FA_BN = BN(D_Hidden2_FA, 512)
+    D_Hidden2_FA_BN = tf.nn.relu(D_Hidden2_FA_BN)
+    D_Hidden2_FA_Dropout = tf.nn.dropout(D_Hidden2_FA_BN, keep_prob=0.5)    
+with tf.name_scope('Output_Layer_FA'):
+    W15 = weight([512, 2])
+    b15 = bias([2])
+    y_FA_predict = tf.nn.softmax(tf.matmul(D_Hidden2_FA_Dropout, W15)+b15)    
+
+with tf.name_scope('optimizer'):
+    y_label = tf.placeholder("float", shape=[None, 39],
+                             name='y_label')
+    y_obj_label = y_label[:,0:24]
+    y_ges_label = y_label[:,24:37]
+    y_FA_label = y_label[:,37:39]
+ 
+    obj_loss_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits
+                    (logits=tf.matmul(D_Hidden2_obj_Dropout, W7)+b7, labels=y_obj_label))  # raw logits: the op applies softmax internally
+    ges_loss_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits
+                    (logits=tf.matmul(D_Hidden2_ges_Dropout, W12)+b12, labels=y_ges_label))
+    FA_loss_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits
+                    (logits=tf.matmul(D_Hidden2_FA_Dropout, W15)+b15, labels=y_FA_label))
+    loss_function = 0.6*obj_loss_function+0.3*ges_loss_function+0.1*FA_loss_function
+    optimizer = tf.train.AdamOptimizer(learning_rate).minimize(loss_function)
+    
+with tf.name_scope('evaluate_model'):
+    obj_correct_prediction = tf.equal(tf.argmax(y_obj_predict, 1),
+                                  tf.argmax(y_obj_label, 1))
+    obj_accuracy = tf.reduce_mean(tf.cast(obj_correct_prediction, "float"))
+    ges_correct_prediction = tf.equal(tf.argmax(y_ges_predict, 1),
+                                  tf.argmax(y_ges_label, 1))
+    ges_accuracy = tf.reduce_mean(tf.cast(ges_correct_prediction, "float"))
+    FA_correct_prediction = tf.equal(tf.argmax(y_FA_predict, 1),
+                                  tf.argmax(y_FA_label, 1))
+    FA_accuracy = tf.reduce_mean(tf.cast(FA_correct_prediction, "float"))
+#    acc = tf.reduce_mean(tf.concat(obj_accuracy,ges_accuracy,FA_accuracy))
+    
+    
+startTime = time()
+saver = tf.train.Saver(tf.global_variables())  # tf.all_variables() is deprecated
+with tf.Session() as sess:
+
+    sess.run(tf.global_variables_initializer())
+    print("start training")
+    n = 0
+    for epoch in range(trainEpoch):
+        for i in range(0, totalBatchs):
+            images_x,train_label = read_data.read_data(batchSize)
+            
+            sess.run(optimizer, feed_dict={x:images_x, y_label:train_label})
+            save_path = saver.save(sess, "./model.ckpt")  # NOTE(review): saving every batch is very slow; consider saving once per epoch
+
+        if epoch >20:
+            learning_rate = learning_rate*0.1  # NOTE(review): no effect — Adam was built with the original float; use a placeholder/Variable for lr
+        
+        test_x,test_label = read_data.read_data_test(batchSize)    
+        obj_loss, obj_acc = sess.run([obj_loss_function, obj_accuracy],
+                         feed_dict={x:test_x, y_label: test_label})
+        ges_loss, ges_acc = sess.run([ges_loss_function, ges_accuracy],
+                         feed_dict={x:test_x, y_label: test_label})
+        FA_loss, FA_acc = sess.run([FA_loss_function, FA_accuracy],
+                         feed_dict={x:test_x, y_label: test_label})
+                             
+        epoch_list.append(epoch)
+        obj_loss_list.append(obj_loss)
+        obj_accuracy_list.append(obj_acc)
+        ges_loss_list.append(ges_loss)
+        ges_accuracy_list.append(ges_acc)
+        FA_loss_list.append(FA_loss)
+        FA_accuracy_list.append(FA_acc)
+        print("Train Epoch:", '%02d'%(epoch+1), 
+              "Obj Loss=", "{:.9f}".format(obj_loss),"Obj Accuracy=","{:.9f}".format(obj_acc),
+                "Ges Loss=", "{:.9f}".format(ges_loss),"Ges Accuracy=","{:.9f}".format(ges_acc),
+                "FA Loss=", "{:.9f}".format(FA_loss),"FA Accuracy=","{:.9f}".format(FA_acc))
+    sess.close()
+          
+duration = time()-startTime
+print("Train Finished take:", duration)          
\ No newline at end of file
diff --git a/results/train_FA.py b/results/train_FA.py
new file mode 100644
index 0000000..cc76a63
--- /dev/null
+++ b/results/train_FA.py
@@ -0,0 +1,164 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Wed Oct  4 15:29:09 2017
+
+@author: nvlab
+"""
+
+import tensorflow as tf
+from time import time
+import read_data_FA
+import numpy as np
+
+trainEpoch = 30
+batchSize = 64
+totalBatchs = int(14992/batchSize)
+learning_rate=0.00001
+epoch_list=[]
+accuracy_list=[]
+loss_list=[]
+
+
+def weight(shape):
+    return tf.Variable(tf.truncated_normal(shape, stddev=0.1), 
+                       name='W', dtype=tf.float32)                    
+def bias(shape):
+    return tf.Variable(tf.constant(0.1, shape=shape), name='b',
+                       dtype=tf.float32)    
+def conv2d(x, W):
+    return tf.nn.conv2d(x, W, strides=[1,1,1,1], padding='SAME')    
+def max_pooling(x):
+    return tf.nn.max_pool(x, ksize=[1,2,2,1], strides=[1,2,2,1],
+                          padding='SAME')
+def BN(Conv, out_size):
+    fc_mean, fc_var = tf.nn.moments(Conv, axes=[0])
+    scale = tf.Variable(tf.ones([out_size]))
+    shift = tf.Variable(tf.zeros([out_size]))
+    epsilon=0.001
+    ema = tf.train.ExponentialMovingAverage(decay=0.5)
+    def mean_var_with_update():
+        ema_apply_op = ema.apply([fc_mean, fc_var])
+        with tf.control_dependencies([ema_apply_op]):
+            return tf.identity(fc_mean), tf.identity(fc_var)
+    mean, var = mean_var_with_update()
+    Conv = tf.nn.batch_normalization(Conv, mean, var, shift, scale, epsilon)
+    return Conv
+                          
+with tf.name_scope('Input_layer'):
+    x = tf.placeholder("float", shape=[None, 95232])
+    x_image = tf.reshape(x, [-1, 128,248, 3])
+    
+with tf.name_scope('C1_Conv'):
+    W1 = weight([3,3,3,36])
+    b1 = bias([36])
+    Conv1 = conv2d(x_image, W1)+b1
+    Conv1_BN = BN(Conv1, 36)
+    C1_Conv = tf.nn.relu(Conv1_BN)  
+with tf.name_scope('C2_Conv'):
+    W2 = weight([3,3,36,36])
+    b2 = bias([36])
+    Conv2 = conv2d(C1_Conv, W2)+b2
+    Conv2_BN = BN(Conv2, 36)
+    C2_Conv = tf.nn.relu(Conv2_BN) 
+with tf.name_scope('C1_Pool'):
+    C1_Pool = max_pooling(C2_Conv)
+    
+with tf.name_scope('C3_Conv'):
+    W3 = weight([3,3,36,64])
+    b3 = bias([64])
+    Conv3 = conv2d(C1_Pool, W3)+b3
+    Conv3_BN = BN(Conv3, 64)
+    C3_Conv = tf.nn.relu(Conv3_BN)
+with tf.name_scope('C4_Conv'):
+    W4 = weight([3,3,64,64])
+    b4 = bias([64])
+    Conv4 = conv2d(C3_Conv, W4)+b4
+    Conv4_BN = BN(Conv4, 64)
+    C4_Conv = tf.nn.relu(Conv4_BN)    
+with tf.name_scope('C2_Pool'):
+    C2_Pool = max_pooling(C4_Conv)
+    
+with tf.name_scope('C5_Conv'):
+    W8 = weight([3,3,64,128])
+    b8 = bias([128])
+    Conv5 = conv2d(C2_Pool, W8)+b8
+    Conv5_BN = BN(Conv5, 128)
+    C5_Conv = tf.nn.relu(Conv5_BN)
+with tf.name_scope('C6_Conv'):
+    W9 = weight([3,3,128,128])
+    b9 = bias([128])
+    Conv6 = conv2d(C5_Conv, W9)+b9
+    Conv6_BN = BN(Conv6, 128)
+    C6_Conv = tf.nn.relu(Conv6_BN)    
+with tf.name_scope('C3_Pool'):
+    C3_Pool = max_pooling(C6_Conv)
+    
+with tf.name_scope('D_Flat'):
+    D_Flat = tf.reshape(C3_Pool, [-1,63488])
+    
+with tf.name_scope('D_Hidden1_Layer'):
+    W5 = weight([63488, 1024])
+    b5 = bias([1024])
+    D_Hidden1 = tf.matmul(D_Flat, W5)+b5
+    D_Hidden1_BN = BN(D_Hidden1, 1024)
+    D_Hidden1_BN = tf.nn.relu(D_Hidden1_BN)
+    D_Hidden1_Dropout = tf.nn.dropout(D_Hidden1_BN, keep_prob=0.5)
+with tf.name_scope('D_Hidden2_Layer'):
+    W6 = weight([1024, 512])
+    b6 = bias([512])
+    D_Hidden2 = tf.matmul(D_Hidden1_Dropout, W6)+b6
+    D_Hidden2_BN = BN(D_Hidden2, 512)
+    D_Hidden2_BN = tf.nn.relu(D_Hidden2_BN)
+    D_Hidden2_Dropout = tf.nn.dropout(D_Hidden2_BN, keep_prob=0.5)    
+with tf.name_scope('Output_Layer'):
+    W7 = weight([512, 2])
+    b7 = bias([2])
+    y_predict = tf.nn.softmax(tf.matmul(D_Hidden2_Dropout, W7)+b7)    
+
+with tf.name_scope('optimizer'):
+    y_label = tf.placeholder("float", shape=[None, 2],
+                             name='y_label')
+    loss_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits
+                    (logits=tf.matmul(D_Hidden2_Dropout, W7)+b7, labels=y_label))  # raw logits: the op applies softmax internally
+    optimizer = tf.train.AdamOptimizer(learning_rate).minimize(loss_function)
+    
+with tf.name_scope('evaluate_model'):
+    correct_prediction = tf.equal(tf.argmax(y_predict, 1),
+                                  tf.argmax(y_label, 1))
+    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
+
+
+startTime = time()
+saver = tf.train.Saver(tf.global_variables())  # tf.all_variables() is deprecated
+f = open('FA.txt','a')  # NOTE(review): never closed; prefer a with-block
+with tf.Session() as sess:
+
+    sess.run(tf.global_variables_initializer())
+    print("start training")
+    for epoch in range(trainEpoch):
+        for i in range(0, totalBatchs):
+            images_x, batch_y = read_data_FA.read_data(batchSize)
+           
+            sess.run(optimizer, feed_dict={x:images_x, y_label:batch_y})
+            save_path = saver.save(sess, "./model/model_FA.ckpt")  # NOTE(review): saving every batch is very slow; consider saving once per epoch
+
+        if epoch >20:
+            learning_rate = learning_rate*0.1  # NOTE(review): no effect — Adam was built with the original float; use a placeholder/Variable for lr
+        
+        val_x, test_y = read_data_FA.read_data_test(batchSize)    
+        loss, acc = sess.run([loss_function, accuracy],
+                         feed_dict={x:val_x, y_label: test_y})
+                             
+#        epoch_list.append(epoch)
+#        loss_list.append(loss)
+#        accuracy_list.append(acc)
+        f.write(str(epoch)+"\n")
+        f.write(str(loss)+"\n")
+        f.write(str(acc)+"\n")
+        print("Train Epoch:", '%02d'%(epoch+1), 
+              "Loss=", "{:.9f}".format(loss),
+              "Accuracy=", acc)
+    sess.close()
+          
+duration = time()-startTime
+print("Train Finished take:", duration)          
\ No newline at end of file
diff --git a/results/train_ges.py b/results/train_ges.py
new file mode 100644
index 0000000..cb9b124
--- /dev/null
+++ b/results/train_ges.py
@@ -0,0 +1,196 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Wed Oct  4 15:15:07 2017
+
+@author: nvlab
+"""
+
+# -*- coding: utf-8 -*-
+"""
+Created on Sat Sep 30 12:13:38 2017
+
+@author: nvlab
+"""
+
+# -*- coding: utf-8 -*-
+"""
+Created on Sun Sep 24 19:44:11 2017
+
+@author: nvlab
+"""
+
+import tensorflow as tf
+from time import time
+import read_data_ges
+import numpy as np
+
+trainEpoch = 60
+batchSize = 64
+totalBatchs = int(14992/batchSize)
+learning_rate=0.0001
+epoch_list=[]
+accuracy_list=[]
+loss_list=[]
+
+
+def weight(shape):
+    return tf.Variable(tf.truncated_normal(shape, stddev=0.1), 
+                       name='W', dtype=tf.float32)                    
+def bias(shape):
+    return tf.Variable(tf.constant(0.1, shape=shape), name='b',
+                       dtype=tf.float32)    
+def conv2d(x, W):
+    return tf.nn.conv2d(x, W, strides=[1,1,1,1], padding='SAME')    
+def max_pooling(x):
+    return tf.nn.max_pool(x, ksize=[1,2,2,1], strides=[1,2,2,1],
+                          padding='SAME')
+def BN(Conv, out_size):
+    fc_mean, fc_var = tf.nn.moments(Conv, axes=[0])
+    scale = tf.Variable(tf.ones([out_size]))
+    shift = tf.Variable(tf.zeros([out_size]))
+    epsilon=0.001
+    ema = tf.train.ExponentialMovingAverage(decay=0.5)
+    def mean_var_with_update():
+        ema_apply_op = ema.apply([fc_mean, fc_var])
+        with tf.control_dependencies([ema_apply_op]):
+            return tf.identity(fc_mean), tf.identity(fc_var)
+    mean, var = mean_var_with_update()
+    Conv = tf.nn.batch_normalization(Conv, mean, var, shift, scale, epsilon)
+    return Conv
+                          
+with tf.name_scope('Input_layer'):
+    x = tf.placeholder("float", shape=[None, 95232])
+    x_image = tf.reshape(x, [-1, 128,248, 3])
+    
+with tf.name_scope('C1_Conv'):
+    W1 = weight([3,3,3,36])
+    b1 = bias([36])
+    Conv1 = conv2d(x_image, W1)+b1
+    Conv1_BN = BN(Conv1, 36)
+    C1_Conv = tf.nn.relu(Conv1_BN)  
+with tf.name_scope('C2_Conv'):
+    W2 = weight([3,3,36,36])
+    b2 = bias([36])
+    Conv2 = conv2d(C1_Conv, W2)+b2
+    Conv2_BN = BN(Conv2, 36)
+    C2_Conv = tf.nn.relu(Conv2_BN) 
+with tf.name_scope('C1_Pool'):
+    C1_Pool = max_pooling(C2_Conv)
+    
+with tf.name_scope('C3_Conv'):
+    W3 = weight([3,3,36,64])
+    b3 = bias([64])
+    Conv3 = conv2d(C1_Pool, W3)+b3
+    Conv3_BN = BN(Conv3, 64)
+    C3_Conv = tf.nn.relu(Conv3_BN)
+C3_concate = tf.concat([C1_Pool, C3_Conv],3)
+with tf.name_scope('C4_Conv'):
+    W4 = weight([3,3,100,64])
+    b4 = bias([64])
+    Conv4 = conv2d(C3_concate, W4)+b4
+    Conv4_BN = BN(Conv4, 64)
+    C4_Conv = tf.nn.relu(Conv4_BN)    
+with tf.name_scope('C2_Pool'):
+    C2_Pool = max_pooling(C4_Conv)
+    
+with tf.name_scope('C5_Conv'):
+    W8 = weight([3,3,64,128])
+    b8 = bias([128])
+    Conv5 = conv2d(C2_Pool, W8)+b8
+    Conv5_BN = BN(Conv5, 128)
+    C5_Conv = tf.nn.relu(Conv5_BN)
+C5_concate = tf.concat([C2_Pool, C5_Conv],3)
+with tf.name_scope('C6_Conv'):
+    W9 = weight([3,3,192,128])
+    b9 = bias([128])
+    Conv6 = conv2d(C5_concate, W9)+b9
+    Conv6_BN = BN(Conv6, 128)
+    C6_Conv = tf.nn.relu(Conv6_BN)    
+with tf.name_scope('C3_Pool'):
+    C3_Pool = max_pooling(C6_Conv)
+
+with tf.name_scope('C7_Conv'):
+    W10 = weight([3,3,128,256])
+    b10 = bias([256])
+    Conv7 = conv2d(C3_Pool, W10)+b10
+    Conv7_BN = BN(Conv7, 256)
+    C7_Conv = tf.nn.relu(Conv7_BN)
+C7_concate = tf.concat([C3_Pool, C7_Conv],3)
+with tf.name_scope('C8_Conv'):
+    W11 = weight([3,3,384,256])
+    b11 = bias([256])
+    Conv8 = conv2d(C7_concate, W11)+b11
+    Conv8_BN = BN(Conv8, 256)
+    C8_Conv = tf.nn.relu(Conv8_BN)    
+with tf.name_scope('C4_Pool'):
+    C4_Pool = max_pooling(C8_Conv)
+    
+with tf.name_scope('D_Flat'):
+    D_Flat = tf.reshape(C4_Pool, [-1,32768])
+    
+with tf.name_scope('D_Hidden1_Layer'):
+    W5 = weight([32768, 1024])
+    b5 = bias([1024])
+    D_Hidden1 = tf.matmul(D_Flat, W5)+b5
+    D_Hidden1_BN = BN(D_Hidden1, 1024)
+    D_Hidden1_BN = tf.nn.relu(D_Hidden1_BN)
+    D_Hidden1_Dropout = tf.nn.dropout(D_Hidden1_BN, keep_prob=0.5)
+with tf.name_scope('D_Hidden2_Layer'):
+    W6 = weight([1024, 512])
+    b6 = bias([512])
+    D_Hidden2 = tf.matmul(D_Hidden1_Dropout, W6)+b6
+    D_Hidden2_BN = BN(D_Hidden2, 512)
+    D_Hidden2_BN = tf.nn.relu(D_Hidden2_BN)
+    D_Hidden2_Dropout = tf.nn.dropout(D_Hidden2_BN, keep_prob=0.5)    
+with tf.name_scope('Output_Layer'):
+    W7 = weight([512, 13])
+    b7 = bias([13])
+    y_predict = tf.nn.softmax(tf.matmul(D_Hidden2_Dropout, W7)+b7)    
+
+with tf.name_scope('optimizer'):
+    y_label = tf.placeholder("float", shape=[None, 13],
+                             name='y_label')
+    loss_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits
+                    (logits=tf.matmul(D_Hidden2_Dropout, W7)+b7, labels=y_label))  # raw logits: the op applies softmax internally
+    optimizer = tf.train.AdamOptimizer(learning_rate).minimize(loss_function)
+    
+with tf.name_scope('evaluate_model'):
+    correct_prediction = tf.equal(tf.argmax(y_predict, 1),
+                                  tf.argmax(y_label, 1))
+    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
+
+
+startTime = time()
+saver = tf.train.Saver(tf.global_variables())  # tf.all_variables() is deprecated
+f = open('ges.txt','a')  # NOTE(review): never closed; prefer a with-block
+with tf.Session() as sess:
+
+    sess.run(tf.global_variables_initializer())
+    print("start training")
+    for epoch in range(trainEpoch):
+        for i in range(0, totalBatchs):
+            images_x, batch_y = read_data_ges.read_data(batchSize)
+            
+            sess.run(optimizer, feed_dict={x:images_x, y_label:batch_y})
+            save_path = saver.save(sess, "./model/model_ges.ckpt")  # NOTE(review): saving every batch is very slow; consider saving once per epoch
+
+        if epoch >25:
+            learning_rate = learning_rate*0.1  # NOTE(review): no effect — Adam was built with the original float; use a placeholder/Variable for lr
+        val_x, test_y = read_data_ges.read_data_test(batchSize)    
+        loss, acc = sess.run([loss_function, accuracy],
+                         feed_dict={x:val_x, y_label: test_y})
+                             
+#        epoch_list.append(epoch)
+#        loss_list.append(loss)
+#        accuracy_list.append(acc)f.write(epoch)
+        f.write(str(epoch)+"\n")
+        f.write(str(loss)+"\n")
+        f.write(str(acc)+"\n")
+        print("Train Epoch:", '%02d'%(epoch+1), 
+              "Loss=", "{:.9f}".format(loss),
+              "Accuracy=", acc)
+    
+    sess.close()
+          
+duration = time()-startTime
+print("Train Finished take:", duration)          
\ No newline at end of file
diff --git a/results/train_obj.py b/results/train_obj.py
new file mode 100644
index 0000000..c1ce68f
--- /dev/null
+++ b/results/train_obj.py
@@ -0,0 +1,189 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Sat Sep 30 12:13:38 2017
+
+@author: nvlab
+"""
+
+# -*- coding: utf-8 -*-
+"""
+Created on Sun Sep 24 19:44:11 2017
+
+@author: nvlab
+"""
+
+import tensorflow as tf
+from time import time
+import read_data_obj
+import numpy as np
+
+trainEpoch = 50
+batchSize = 64
+totalBatchs = int(14992/batchSize)
+learning_rate=0.00005
+epoch_list=[]
+accuracy_list=[]
+loss_list=[]
+
+
+def weight(shape):
+    return tf.Variable(tf.truncated_normal(shape, stddev=0.1), 
+                       name='W', dtype=tf.float32)                    
+def bias(shape):
+    return tf.Variable(tf.constant(0.1, shape=shape), name='b',
+                       dtype=tf.float32)    
+def conv2d(x, W):
+    return tf.nn.conv2d(x, W, strides=[1,1,1,1], padding='SAME')    
+def max_pooling(x):
+    return tf.nn.max_pool(x, ksize=[1,2,2,1], strides=[1,2,2,1],
+                          padding='SAME')
+def BN(Conv, out_size):
+    fc_mean, fc_var = tf.nn.moments(Conv, axes=[0])
+    scale = tf.Variable(tf.ones([out_size]))
+    shift = tf.Variable(tf.zeros([out_size]))
+    epsilon=0.001
+    ema = tf.train.ExponentialMovingAverage(decay=0.5)
+    def mean_var_with_update():
+        ema_apply_op = ema.apply([fc_mean, fc_var])
+        with tf.control_dependencies([ema_apply_op]):
+            return tf.identity(fc_mean), tf.identity(fc_var)
+    mean, var = mean_var_with_update()
+    Conv = tf.nn.batch_normalization(Conv, mean, var, shift, scale, epsilon)
+    return Conv
+                          
+with tf.name_scope('Input_layer'):
+    x = tf.placeholder("float", shape=[None, 95232])
+    x_image = tf.reshape(x, [-1, 128,248, 3])
+    
+with tf.name_scope('C1_Conv'):
+    W1 = weight([3,3,3,36])
+    b1 = bias([36])
+    Conv1 = conv2d(x_image, W1)+b1
+    Conv1_BN = BN(Conv1, 36)
+    C1_Conv = tf.nn.relu(Conv1_BN)  
+with tf.name_scope('C2_Conv'):
+    W2 = weight([3,3,36,36])
+    b2 = bias([36])
+    Conv2 = conv2d(C1_Conv, W2)+b2
+    Conv2_BN = BN(Conv2, 36)
+    C2_Conv = tf.nn.relu(Conv2_BN) 
+with tf.name_scope('C1_Pool'):
+    C1_Pool = max_pooling(C2_Conv)
+    
+with tf.name_scope('C3_Conv'):
+    W3 = weight([3,3,36,64])
+    b3 = bias([64])
+    Conv3 = conv2d(C1_Pool, W3)+b3
+    Conv3_BN = BN(Conv3, 64)
+    C3_Conv = tf.nn.relu(Conv3_BN)
+C3_concate = tf.concat([C1_Pool, C3_Conv],3)
+with tf.name_scope('C4_Conv'):
+    W4 = weight([3,3,100,128])
+    b4 = bias([128])
+    Conv4 = conv2d(C3_concate, W4)+b4
+    Conv4_BN = BN(Conv4, 128)
+    C4_Conv = tf.nn.relu(Conv4_BN)    
+with tf.name_scope('C2_Pool'):
+    C2_Pool = max_pooling(C4_Conv)
+    
+with tf.name_scope('C5_Conv'):
+    W8 = weight([3,3,128,128])
+    b8 = bias([128])
+    Conv5 = conv2d(C2_Pool, W8)+b8
+    Conv5_BN = BN(Conv5, 128)
+    C5_Conv = tf.nn.relu(Conv5_BN)
+C5_concate = tf.concat([C2_Pool, C5_Conv],3)
+with tf.name_scope('C6_Conv'):
+    W9 = weight([3,3,256,256])
+    b9 = bias([256])
+    Conv6 = conv2d(C5_concate, W9)+b9
+    Conv6_BN = BN(Conv6, 256)
+    C6_Conv = tf.nn.relu(Conv6_BN)    
+with tf.name_scope('C3_Pool'):
+    C3_Pool = max_pooling(C6_Conv)
+
+with tf.name_scope('C7_Conv'):
+    W10 = weight([3,3,256,256])
+    b10 = bias([256])
+    Conv7 = conv2d(C3_Pool, W10)+b10
+    Conv7_BN = BN(Conv7, 256)
+    C7_Conv = tf.nn.relu(Conv7_BN)
+C7_concate = tf.concat([C3_Pool, C7_Conv],3)
+with tf.name_scope('C8_Conv'):
+    W11 = weight([3,3,512,512])
+    b11 = bias([512])
+    Conv8 = conv2d(C7_concate, W11)+b11
+    Conv8_BN = BN(Conv8, 512)
+    C8_Conv = tf.nn.relu(Conv8_BN)    
+with tf.name_scope('C4_Pool'):
+    C4_Pool = max_pooling(C8_Conv)
+    
+with tf.name_scope('D_Flat'):
+    D_Flat = tf.reshape(C4_Pool, [-1,65536])
+    
+with tf.name_scope('D_Hidden1_Layer'):
+    W5 = weight([65536, 1024])
+    b5 = bias([1024])
+    D_Hidden1 = tf.matmul(D_Flat, W5)+b5
+    D_Hidden1_BN = BN(D_Hidden1, 1024)
+    D_Hidden1_BN = tf.nn.relu(D_Hidden1_BN)
+    D_Hidden1_Dropout = tf.nn.dropout(D_Hidden1_BN, keep_prob=0.5)
+with tf.name_scope('D_Hidden2_Layer'):
+    W6 = weight([1024, 512])
+    b6 = bias([512])
+    D_Hidden2 = tf.matmul(D_Hidden1_Dropout, W6)+b6
+    D_Hidden2_BN = BN(D_Hidden2, 512)
+    D_Hidden2_BN = tf.nn.relu(D_Hidden2_BN)
+    D_Hidden2_Dropout = tf.nn.dropout(D_Hidden2_BN, keep_prob=0.5)    
+with tf.name_scope('Output_Layer'):
+    W7 = weight([512, 24])
+    b7 = bias([24])
+    y_predict = tf.nn.softmax(tf.matmul(D_Hidden2_Dropout, W7)+b7)    
+
+with tf.name_scope('optimizer'):
+    y_label = tf.placeholder("float", shape=[None, 24],
+                             name='y_label')
+    loss_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits
+                    (logits=tf.matmul(D_Hidden2_Dropout, W7)+b7, labels=y_label))  # raw logits: the op applies softmax internally
+    optimizer = tf.train.AdamOptimizer(learning_rate).minimize(loss_function)
+    
+with tf.name_scope('evaluate_model'):
+    correct_prediction = tf.equal(tf.argmax(y_predict, 1),
+                                  tf.argmax(y_label, 1))
+    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
+
+
+startTime = time()
+saver = tf.train.Saver(tf.global_variables())  # tf.all_variables() is deprecated
+f = open('obj.txt','a')  # NOTE(review): never closed; prefer a with-block
+with tf.Session() as sess:
+
+    sess.run(tf.global_variables_initializer())
+    print("start training")
+    n = 0
+    for epoch in range(trainEpoch):
+        for i in range(0, totalBatchs):
+            images_x, batch_y = read_data_obj.read_data(batchSize)
+            
+            sess.run(optimizer, feed_dict={x:images_x, y_label:batch_y})
+            save_path = saver.save(sess, "./model/model_obj.ckpt")  # NOTE(review): saving every batch is very slow; consider saving once per epoch
+
+        if epoch >20:
+            learning_rate = learning_rate*0.1  # NOTE(review): no effect — Adam was built with the original float; use a placeholder/Variable for lr
+        val_x, test_y = read_data_obj.read_data_test(batchSize)    
+        loss, acc = sess.run([loss_function, accuracy],
+                         feed_dict={x:val_x, y_label: test_y})
+                             
+#        epoch_list.append(epoch)
+#        loss_list.append(loss)
+#        accuracy_list.append(acc)f.write(epoch)
+        f.write(str(epoch)+"\n")
+        f.write(str(loss)+"\n")
+        f.write(str(acc)+"\n")
+        print("Train Epoch:", '%02d'%(epoch+1), 
+              "Loss=", "{:.9f}".format(loss),
+              "Accuracy=", acc)
+    sess.close()
+          
+duration = time()-startTime
+print("Train Finished take:", duration)          
\ No newline at end of file