I am new to deep learning research. I loaded a saved model with TensorFlow and built an API with Flask, but when I call predict I get the error 'Container localhost does not exist.' Please help me fix it. Thank you.
tensorflow version 1.13.1
keras version 2.2.4
flask version 1.0.3
I run it with the command 'python app.py'.
This is my code:
from flask import Flask, request
from tensorflow.python.keras.models import load_model
import numpy as np
import tensorflow as tf
from tensorflow.python.keras.applications import imagenet_utils
from tensorflow.python.keras.preprocessing.image import img_to_array
from tensorflow.python.keras.preprocessing.image import load_img
from PIL import Image
import io
app = Flask(__name__)
model = None
labels = ['AchatinaFulice', 'Riptortus', 'SquashBug']
def load_model_insect():
    global model
    model = load_model('insect2.h5')
    global graph
    graph = tf.get_default_graph()
def predict(image):
    image = image.resize((200, 200))
    image = img_to_array(image)
    image = np.expand_dims(image, 0)
    image = imagenet_utils.preprocess_input(image)
    image = image / 255
    pred = np.argmax(model.predict(image))
    return labels[pred]
@app.route('/')
def upload():
    return '''<html>
    <body>
        <form action = "http://localhost:5000/uploader" method = "POST"
              enctype = "multipart/form-data">
            <input type = "file" name = "file" />
            <input type = "submit"/>
        </form>
    </body>
    </html>'''
@app.route('/uploader', methods=['GET', 'POST'])
def upload_file():
    if request.method == 'POST':
        image = request.files["file"].read()
        image = Image.open(io.BytesIO(image))
        with graph.as_default():
            label = predict(image)
        return label
if __name__ == "__main__":
    load_model_insect()
    app.run(host='localhost')
The full log:
C:\ProgramData\Anaconda3\python.exe "C:\Program Files\JetBrains\PyCharm 2019.1.3\helpers\pydev\pydevconsole.py" --mode=client --port=53173
import sys; print('Python %s on %s' % (sys.version, sys.platform))
sys.path.extend(['C:\\Users\\hoho303\\PycharmProjects\\Insect', 'C:/Users/hoho303/PycharmProjects/Insect'])
Python 3.7.3 (default, Mar 27 2019, 17:13:21) [MSC v.1915 64 bit (AMD64)]
Type 'copyright', 'credits' or 'license' for more information
IPython 7.4.0 -- An enhanced Interactive Python. Type '?' for help.
PyDev console: using IPython 7.4.0
Python 3.7.3 (default, Mar 27 2019, 17:13:21) [MSC v.1915 64 bit (AMD64)] on win32
runfile('C:/Users/hoho303/PycharmProjects/Insect/app.py', wdir='C:/Users/hoho303/PycharmProjects/Insect')
WARNING:tensorflow:From C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\ops\resource_variable_ops.py:435: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating:
Colocations handled automatically by placer.
WARNING:tensorflow:From C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\keras\layers\core.py:143: calling dropout (from tensorflow.python.ops.nn_ops) with keep_prob is deprecated and will be removed in a future version.
Instructions for updating:
Please use `rate` instead of `keep_prob`. Rate should be set to `rate = 1 - keep_prob`.
2019-06-09 17:53:44.811258: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2
WARNING:tensorflow:No training configuration found in save file: the model was *not* compiled. Compile it manually.
* Serving Flask app "app" (lazy loading)
* Environment: production
WARNING: Do not use the development server in a production environment.
Use a production WSGI server instead.
* Debug mode: off
* Running on http://localhost:5000/ (Press CTRL+C to quit)
127.0.0.1 - - [09/Jun/2019 17:53:54] "GET / HTTP/1.1" 200 -
[2019-06-09 17:54:02,496] ERROR in app: Exception on /uploader [POST]
Traceback (most recent call last):
File "C:\ProgramData\Anaconda3\lib\site-packages\flask\app.py", line 2292, in wsgi_app
response = self.full_dispatch_request()
File "C:\ProgramData\Anaconda3\lib\site-packages\flask\app.py", line 1815, in full_dispatch_request
rv = self.handle_user_exception(e)
File "C:\ProgramData\Anaconda3\lib\site-packages\flask\app.py", line 1718, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "C:\ProgramData\Anaconda3\lib\site-packages\flask\_compat.py", line 35, in reraise
raise value
File "C:\ProgramData\Anaconda3\lib\site-packages\flask\app.py", line 1813, in full_dispatch_request
rv = self.dispatch_request()
File "C:\ProgramData\Anaconda3\lib\site-packages\flask\app.py", line 1799, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "C:/Users/hoho303/PycharmProjects/Insect/app.py", line 68, in upload_file
pred = np.argmax(model.predict(image))
File "C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py", line 1113, in predict
self, x, batch_size=batch_size, verbose=verbose, steps=steps)
File "C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\keras\engine\training_arrays.py", line 329, in model_iteration
batch_outs = f(ins_batch)
File "C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\keras\backend.py", line 3076, in __call__
run_metadata=self.run_metadata)
File "C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\client\session.py", line 1439, in __call__
run_metadata_ptr)
File "C:\ProgramData\Anaconda3\lib\site-packages\tensorflow\python\framework\errors_impl.py", line 528, in __exit__
c_api.TF_GetCode(self.status.status))
tensorflow.python.framework.errors_impl.FailedPreconditionError: Error while reading resource variable dense_1/bias from Container: localhost. This could mean that the variable was uninitialized. Not found: Container localhost does not exist. (Could not find resource: localhost/dense_1/bias)
[[{{node dense_1/BiasAdd/ReadVariableOp}}]]
I also used this advice to create a new session variable.
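A minimal sketch of that idea (the linked advice itself is not shown here, so this is my assumption of the pattern): capture the session and graph that tf.keras created, right after the model is loaded, and keep them in globals.

from tensorflow.python.keras import backend as K

def load_model_insect():
    global model, graph, session
    model = load_model('insect2.h5')
    # keep handles to the session and graph tf.keras used to load the model,
    # so Flask request threads can re-enter them later
    session = K.get_session()
    graph = tf.get_default_graph()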
Then I followed this advice and it worked. Specifically, I nested with session.as_default(): and, inside of that, with graph.as_default():
@app.route("/api/doodlePredict", methods=["POST"])
def predictAPI():
    global model, graph
    print(request.get_data())
    with session.as_default():
        with graph.as_default():
            response = perpareDataAndPredict(model, testJson)
    print("this is the response: ", response)
    return jsonify(response.tolist())
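Applied to the original app.py from the question, the same pattern would look roughly like this (a sketch, assuming session and graph are the globals captured in load_model_insect as in the snippet above):

@app.route('/uploader', methods=['GET', 'POST'])
def upload_file():
    if request.method == 'POST':
        image = request.files["file"].read()
        image = Image.open(io.BytesIO(image))
        # run the prediction inside the same session and graph the model was loaded with
        with session.as_default():
            with graph.as_default():
                label = predict(image)
        return label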