How to Run Inference Using a SavedModel (TensorFlow)
# using tensorflow 2.0.0
import tensorflow as tf
import cv2
import numpy as np

# load inference model
loaded = tf.saved_model.load('./saved_model/1')
infer = loaded.signatures["serving_default"]

# preprocessing for input data
VGG_MEAN = [104, 117, 123]
img = cv2.imread('path/to/input_img.jpg')
img = cv2.resize(img, (224, 224))
img = img - VGG_MEAN
img = np.expand_dims(img, 0).astype('float32')

# inference
result = infer(tf.convert_to_tensor(img))

# check result
print(result)
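Printing result dumps the raw output dictionary that the serving signature returns (output names mapped to tensors). Below is a minimal sketch of turning that into a usable prediction, assuming a classification model whose output key is 'predictions' (an assumed name; check list(result.keys()) for the real one):

# 'predictions' is an assumed output name; replace it with the key shown by list(result.keys())
logits = result['predictions']
# assumes the signature returns raw logits for the single-image batch
probs = tf.nn.softmax(logits, axis=-1)
print('predicted class index:', int(tf.argmax(probs, axis=-1)[0]))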
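If the expected input shape, dtype, or the output names are unclear, the loaded signature itself can be inspected; structured_input_signature and structured_outputs are standard attributes of the concrete function returned by loaded.signatures["serving_default"]:

# show the expected input spec (name, shape, dtype) and the output spec
print(infer.structured_input_signature)
print(infer.structured_outputs)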