Commit
·
3bb7bc8
1
Parent(s):
f9c2c31
app update
Browse files
app.py
CHANGED
|
@@ -1,25 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import tensorflow as tf
|
| 2 |
from google.protobuf.struct_pb2 import Struct
|
|
|
|
|
|
|
|
|
|
| 3 |
|
| 4 |
def create_proto_message(text):
|
| 5 |
message = Struct()
|
| 6 |
message.fields["task_data"].string_value = text
|
| 7 |
return message.SerializeToString()
|
| 8 |
|
| 9 |
-
class TFProtoModel:
|
| 10 |
-
def __init__(self, model_path):
|
| 11 |
self.model = tf.saved_model.load(model_path)
|
| 12 |
self.infer = self.model.signatures['serving_default']
|
| 13 |
|
| 14 |
-
def
|
|
|
|
|
|
|
|
|
|
| 15 |
proto_data = create_proto_message(text)
|
| 16 |
-
|
|
|
|
|
|
|
| 17 |
result = self.infer(inputs=input_tensor)
|
| 18 |
return result['outputs'].numpy()
|
| 19 |
|
| 20 |
-
|
| 21 |
-
|
|
|
|
|
|
|
| 22 |
|
| 23 |
-
#
|
| 24 |
-
|
| 25 |
-
return model.predict(text)
|
|
|
|
| 1 |
+
# import tensorflow as tf
|
| 2 |
+
# from google.protobuf.struct_pb2 import Struct
|
| 3 |
+
# import struct2tensor.ops.gen_decode_proto_sparse
|
| 4 |
+
|
| 5 |
+
# def create_proto_message(text):
|
| 6 |
+
# message = Struct()
|
| 7 |
+
# message.fields["task_data"].string_value = text
|
| 8 |
+
# return message.SerializeToString()
|
| 9 |
+
|
| 10 |
+
# class TFProtoModel:
|
| 11 |
+
# def __init__(self, model_path):
|
| 12 |
+
# self.model = tf.saved_model.load(model_path)
|
| 13 |
+
# self.infer = self.model.signatures['serving_default']
|
| 14 |
+
|
| 15 |
+
# def predict(self, text):
|
| 16 |
+
# proto_data = create_proto_message(text)
|
| 17 |
+
# input_tensor = tf.constant([proto_data], dtype=tf.string)
|
| 18 |
+
# result = self.infer(inputs=input_tensor)
|
| 19 |
+
# return result['outputs'].numpy()
|
| 20 |
+
|
| 21 |
+
# # Initialize model when the file is loaded
|
| 22 |
+
# model = TFProtoModel("model")
|
| 23 |
+
|
| 24 |
+
# # This is the function Hugging Face will call
|
| 25 |
+
# def pipeline(text):
|
| 26 |
+
# return model.predict(text)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
import tensorflow as tf
|
| 30 |
from google.protobuf.struct_pb2 import Struct
|
| 31 |
+
from transformers import Pipeline
|
| 32 |
+
import struct2tensor.ops.gen_decode_proto_sparse
|
| 33 |
+
|
| 34 |
|
| 35 |
def create_proto_message(text):
    """Pack *text* into a protobuf ``Struct`` under the ``"task_data"`` key.

    Returns the ``Struct`` serialized to bytes, the wire format the
    SavedModel's serving signature consumes.
    """
    payload = Struct()
    payload.fields["task_data"].string_value = text
    return payload.SerializeToString()
|
| 39 |
|
| 40 |
+
class TFProtoModel(Pipeline):
    """Hugging Face pipeline wrapping a TensorFlow SavedModel.

    Text is serialized into a protobuf ``Struct`` (see
    ``create_proto_message``), fed to the model's ``serving_default``
    signature, and the first value of the ``'outputs'`` tensor is
    returned as a plain-float score.
    """

    def __init__(self, model_path="model"):
        # NOTE(review): Pipeline.__init__ is not invoked here, so any
        # attributes the transformers base class would set are absent —
        # confirm the Hugging Face loader tolerates this.
        self.model = tf.saved_model.load(model_path)
        self.infer = self.model.signatures['serving_default']

    def _sanitize_parameters(self, **kwargs):
        # No call-time parameters are supported: one empty kwargs dict
        # each for preprocess, _forward, and postprocess.
        return dict(), dict(), dict()

    def preprocess(self, text):
        """Serialize *text* and lift it into a 1-element string tensor."""
        serialized = create_proto_message(text)
        return tf.constant([serialized], dtype=tf.string)

    def _forward(self, input_tensor):
        """Run the serving signature; return the 'outputs' tensor as NumPy."""
        signature_result = self.infer(inputs=input_tensor)
        return signature_result['outputs'].numpy()

    def postprocess(self, model_outputs):
        """Reduce the model output to ``{"score": <float>}``.

        Assumes ``model_outputs[0]`` is scalar-convertible — TODO confirm
        against the SavedModel's actual output shape.
        """
        score = float(model_outputs[0])
        return {"score": score}
|
| 58 |
+
|
| 59 |
+
# Module-level entry points consumed by the Hugging Face custom-pipeline
# loader: `pipeline` names the Pipeline class to instantiate.
pipeline = TFProtoModel

# Task identifier advertised for this pipeline.
task = "text-classification"  # or another appropriate task type
|
|
|