Dc-4nderson committed on
Commit
894acf1
·
verified ·
1 Parent(s): 88c0bb3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -17
app.py CHANGED
@@ -1,26 +1,26 @@
1
- import gradio as gr
 
2
  from transformers import pipeline
3
 
4
# Load your model from the Hub.
# NOTE(review): the "summarization" task tag is used because it is an
# officially supported pipeline tag for this model's metadata.
generator = pipeline(
    "summarization",  # officially supported tag
    model="andersondequan5/checkin-bart-model",
    tokenizer="andersondequan5/checkin-bart-model"
)
10
 
11
# Expand a terse task note into a full check-in write-up.
def expand_checkin(task: str):
    """Return the model's expansion of a short task description.

    Uses deterministic beam search (do_sample=False) so the same task
    always yields the same check-in text.
    """
    outputs = generator(task, max_length=200, num_beams=4, do_sample=False)
    return outputs[0]["summary_text"]
15
 
16
# Gradio Interface: a single textbox in, expanded check-in text out.
demo = gr.Interface(
    fn=expand_checkin,
    inputs=gr.Textbox(lines=3, placeholder="Enter a short task like: 'fix API routes and write docs'"),
    outputs="text",
    title="Check-In Expansion API",
    description="Submit a short input task and get a polished daily check-in expansion."
)

# Launch the web UI only when executed as a script (not on import).
if __name__ == "__main__":
    demo.launch()
 
 
 
 
1
+ from fastapi import FastAPI
2
+ from pydantic import BaseModel
3
  from transformers import pipeline
4
 
5
# Load the fine-tuned check-in model from the Hugging Face Hub.
# The same repo id supplies both the model weights and the tokenizer,
# so it is named once instead of being duplicated in two literals.
MODEL_ID = "andersondequan5/checkin-bart-model"

generator = pipeline(
    "summarization",  # use summarization since text2text-generation is deprecated in HF metadata
    model=MODEL_ID,
    tokenizer=MODEL_ID,
)
11
 
12
# FastAPI app exposing the check-in expansion endpoint.
app = FastAPI(title="Check-In Expansion API")
 
 
14
 
15
# Request schema for the /predict endpoint.
class CheckInRequest(BaseModel):
    # Short task description to expand, e.g. "fix API routes and write docs".
    task: str
18
+
19
# Response schema for the /predict endpoint.
class CheckInResponse(BaseModel):
    # The generated, polished check-in text.
    checkin: str
 
 
21
 
22
@app.post("/predict", response_model=CheckInResponse)
def predict(request: CheckInRequest):
    """Expand a short input into a detailed check-in.

    Runs the summarization pipeline with beam search and sampling
    disabled, so output is deterministic for a given task string.
    """
    outputs = generator(request.task, max_length=200, num_beams=4, do_sample=False)
    expanded = outputs[0]["summary_text"]
    return {"checkin": expanded}