# NOTE(review): the lines below were Hugging Face Spaces page-header residue
# ("Spaces: Sleeping") left over from a web scrape — kept as a comment so the
# file parses as Python.
from flask import Flask, request, jsonify
from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM
import os
import time

app = Flask(__name__)

# Load tokenizer and model from the local 'model/' directory so the service
# runs without hitting the Hugging Face hub at request time.
tokenizer = AutoTokenizer.from_pretrained("./model")
model = AutoModelForSeq2SeqLM.from_pretrained("./model")
summarizer = pipeline("summarization", model=model, tokenizer=tokenizer)

# One throwaway inference at import time so the first real request does not
# pay the lazy-initialization cost.
summarizer("Warm-up input.", max_length=30, min_length=5, do_sample=False)
# NOTE(review): the route decorator was missing, so this view was never
# registered with Flask and the endpoint was unreachable. The path
# "/summarize" is assumed (likely lost in the scrape) — confirm against clients.
@app.route("/summarize", methods=["POST"])
def summarize():
    """Summarize posted text and return the summary with its token count.

    Expects a JSON body: {"text": str, "max_length": int (default 150),
    "min_length": int (default 30)}.
    Returns 200 with {"summary", "token_count"}, or 400 with {"error"} on
    any failure (missing body or 'text' key, non-integer lengths, model error).
    """
    try:
        data = request.get_json()
        # Explicit validation instead of letting a bare KeyError produce an
        # opaque "'text'" error message.
        if not data or "text" not in data:
            return jsonify({"error": "JSON body with a 'text' field is required"}), 400
        text = data["text"]
        max_len = int(data.get("max_length", 150))  # default to 150 if not provided
        min_len = int(data.get("min_length", 30))   # default to 30 if not provided
        summary_output = summarizer(
            text,
            max_length=max_len,
            min_length=min_len,
            do_sample=False,
        )
        summary = summary_output[0]["summary_text"]
        # Token count of the summary as seen by the model's own tokenizer.
        tokenized = tokenizer(summary, return_tensors="pt")
        token_count = len(tokenized["input_ids"][0])
        return jsonify({
            "summary": summary,
            "token_count": token_count,
        })
    except Exception as e:
        # Broad catch at the HTTP boundary: convert any failure into a JSON 400
        # rather than a 500 with an HTML traceback.
        return jsonify({"error": str(e)}), 400
if __name__ == "__main__":
    # Listen on all interfaces; 7860 matches the Hugging Face Spaces convention.
    bind_host, bind_port = "0.0.0.0", 7860
    app.run(host=bind_host, port=bind_port)