import json

import numpy as np
import tensorflow as tf
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.preprocessing.text import tokenizer_from_json
from transformers import Pipeline


class NewsClassifierPipeline(Pipeline):
    def __init__(self, model_path="news_classifier.h5", tokenizer_path="tokenizer.json"):
        # Pipeline.__init__ is deliberately not called: it expects a
        # transformers PreTrainedModel, not a raw Keras model, and fails
        # when invoked without one.
        self.model = load_model(model_path)
        with open(tokenizer_path, "r") as f:
            # tokenizer_from_json expects the raw JSON string, not a parsed dict.
            self.tokenizer = tokenizer_from_json(f.read())

    def _sanitize_parameters(self, **kwargs):
        # Required by the Pipeline interface; this pipeline accepts no
        # extra preprocess/forward/postprocess parameters.
        return {}, {}, {}

    def preprocess(self, inputs):
        # Convert raw text into a padded integer sequence for the model.
        sequences = self.tokenizer.texts_to_sequences([inputs])
        return pad_sequences(sequences, maxlen=128)

    def _forward(self, model_inputs):
        # model_inputs has already been through preprocess(); do not
        # tokenize again here, just run the model.
        return self.model.predict(model_inputs)

    def postprocess(self, model_outputs):
        # Map raw model outputs to a label/score dict.
        scores = tf.nn.softmax(model_outputs, axis=1).numpy()
        label = int(np.argmax(scores))
        return [{"label": "foxnews" if label == 0 else "nbc", "score": float(scores[0, label])}]

    def __call__(self, inputs):
        # Base Pipeline.__call__ relies on state set up by Pipeline.__init__,
        # which is skipped above, so chain the three stages explicitly.
        return self.postprocess(self._forward(self.preprocess(inputs)))
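

# Minimal usage sketch: assumes news_classifier.h5 and tokenizer.json exist in
# the working directory (the default paths above); the headline and the output
# shown in the comment are illustrative only.
if __name__ == "__main__":
    pipe = NewsClassifierPipeline()
    print(pipe("Breaking: markets rally after rate decision"))
    # e.g. [{'label': 'foxnews', 'score': 0.87}]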