|
| 1 | +from __future__ import print_function, division |
| 2 | +from flask import Flask , redirect , url_for , request , render_template , jsonify , json |
| 3 | +import pandas as pd |
| 4 | +import os |
| 5 | +import random |
| 6 | +from collections import Counter |
| 7 | +import numpy as np |
| 8 | +import tensorflow as tf |
| 9 | +import json |
| 10 | +import sys |
| 11 | + |
| 12 | + |
| 13 | + |
# Flask application object; the POST route below is registered on it.
app = Flask(__name__)
| 15 | + |
| 16 | + |
# @app.route("/predict" , methods=['POST'])
@app.route('/' , methods=['POST'])
def hello():
    """Classify the image whose path is given in the POST body.

    The raw request body is interpreted as a filesystem path to a JPEG
    on this server.  The frozen Inception retrain graph in
    ./output_graph.pb is run on it and the top-1 label from
    ./output_labels.txt is returned as a plain string ('None' when the
    file does not exist).

    NOTE(review): the graph is re-imported into the default TF graph on
    every request; for real traffic this should happen once at startup.
    """
    data = request.data
    print(data)
    # request.data is bytes under Python 3; TF's file APIs want a str path.
    imagePath = data.decode('utf-8') if isinstance(data, bytes) else data
    modelFullPath = './output_graph.pb'
    labelsFullPath = './output_labels.txt'

    def create_graph():
        """Creates a graph from saved GraphDef file and returns a saver."""
        # Creates graph from saved graph_def.pb.
        with tf.gfile.FastGFile(modelFullPath, 'rb') as f:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())
            _ = tf.import_graph_def(graph_def, name='')

    def run_inference_on_image():
        """Run the softmax head on the image; return the best label or None."""
        answer = None

        if not tf.gfile.Exists(imagePath):
            tf.logging.fatal('File does not exist %s', imagePath)
            return answer

        image_data = tf.gfile.FastGFile(imagePath, 'rb').read()

        # Creates graph from saved GraphDef.
        create_graph()

        with tf.Session() as sess:
            softmax_tensor = sess.graph.get_tensor_by_name('final_result:0')
            predictions = sess.run(softmax_tensor,
                                   {'DecodeJpeg/contents:0': image_data})
            predictions = np.squeeze(predictions)

            # Index of the single highest-scoring class ([-1:] keeps one).
            top_k = predictions.argsort()[-1:][::-1]

            # BUG FIX: the labels file was opened in 'rb' mode; under
            # Python 3, str(b'label\n') yields "b'label\\n'" so the
            # .replace("\n", "") never matched and the returned label was
            # wrapped in b'...'.  Read as text, strip newlines, and close
            # the handle via a context manager.
            with open(labelsFullPath, 'r') as f:
                labels = [line.rstrip('\n') for line in f]

            answer = labels[top_k[0]]
            return answer

    answer = run_inference_on_image()
    return '%s' % answer
| 71 | + |
| 72 | + |
| 73 | + |
| 74 | + # if __name__ == '__main__': |
| 75 | + |
| 76 | + |
| 77 | + |
| 78 | + |
if __name__ == '__main__':
    # imagePath = './image1.jpg'
    # NOTE(review): the modelFullPath / labelsFullPath assignments that
    # used to live here were dead code -- hello() defines locals with the
    # same names, so these module-level values were never read.
    # app.run(debug = True)
    # Listen on all interfaces, port 5000.
    app.run('0.0.0.0' , 5000)
0 commit comments