import os

from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException
from groq import Groq
from pydantic import BaseModel
|
|
| |
| load_dotenv() |
|
|
| api_key = os.getenv("EXTRACTSUMMARY_API_KEY") |
|
|
| if not api_key: |
| raise ValueError("ExtractSummary API Key not found in environment variables!") |
|
|
| client = Groq(api_key=api_key) |
|
|
| app = FastAPI() |
|
|
@app.get("/")
def read_root():
    """Root endpoint: return a static greeting (useful as a liveness check)."""
    greeting = {"message": "Hello, FastAPI!"}
    return greeting
|
|
class TextInput(BaseModel):
    """Request body for POST /summarize."""

    text: str  # the text to be summarized
    max_words: int = 30  # soft upper bound on the summary length, in words
|
|
class ExtractInput(BaseModel):
    """Request body for POST /extract."""

    text: str  # the source text to extract information from
    fields: list[str]  # names of the fields to extract (e.g. ["name", "date"])
|
|
@app.post("/summarize")
def summarize_text(input_data: TextInput):
    """Summarize ``input_data.text`` in at most ``input_data.max_words`` words.

    Returns:
        {"summary": <str>} with the model's summary.

    Raises:
        HTTPException(400): if ``text`` is blank or ``max_words`` < 1.
        HTTPException(500): if the upstream LLM call fails.
    """
    # Validate up front (outside the try) so bad requests surface as 400s,
    # not as 500s from a degenerate prompt or a zero token budget.
    if not input_data.text.strip():
        raise HTTPException(status_code=400, detail="'text' must be non-empty.")
    if input_data.max_words < 1:
        raise HTTPException(status_code=400, detail="'max_words' must be at least 1.")

    prompt = (
        f"Summarize the following text in at most {input_data.max_words} words:\n\n"
        f"{input_data.text}"
    )

    messages = [
        {"role": "system", "content": "You are a helpful, concise summarizer."},
        {"role": "user", "content": prompt},
    ]

    # Keep the try body minimal: only the upstream call and response parsing
    # can legitimately fail here.
    try:
        completion = client.chat.completions.create(
            model="meta-llama/llama-4-scout-17b-16e-instruct",
            messages=messages,
            temperature=0.7,
            # Rough words->tokens budget: ~3 tokens per word leaves headroom.
            max_completion_tokens=input_data.max_words * 3,
            top_p=1,
            stream=False,
            stop=None,
        )
        summary = completion.choices[0].message.content.strip()
    except Exception as e:
        # Surface upstream/client failures as a 500, preserving the cause.
        raise HTTPException(status_code=500, detail=str(e)) from e

    return {"summary": summary}
|
|
@app.post("/extract")
def extract_fields(input_data: ExtractInput):
    """Extract the requested ``fields`` from ``input_data.text`` via the LLM.

    Returns:
        {"extracted": <str>} — the model's response, expected to be JSON text
        (the model is instructed to return JSON; the output is not parsed here).

    Raises:
        HTTPException(400): if ``text`` is blank or ``fields`` is empty.
        HTTPException(500): if the upstream LLM call fails.
    """
    # Validate up front (outside the try) so bad requests surface as 400s
    # instead of producing a nonsense prompt or an opaque 500.
    if not input_data.text.strip():
        raise HTTPException(status_code=400, detail="'text' must be non-empty.")
    if not input_data.fields:
        raise HTTPException(status_code=400, detail="'fields' must contain at least one field name.")

    fields_formatted = ', '.join(input_data.fields)
    prompt = (
        f"Extract the following fields from the text: {fields_formatted}.\n\n"
        f"Text:\n{input_data.text}\n\n"
        "Provide the extracted fields in JSON format."
    )

    messages = [
        {"role": "system", "content": "You are an information extractor that returns clean JSON."},
        {"role": "user", "content": prompt},
    ]

    # Keep the try body minimal: only the upstream call and response parsing
    # can legitimately fail here.
    try:
        completion = client.chat.completions.create(
            model="meta-llama/llama-4-scout-17b-16e-instruct",
            messages=messages,
            # Low temperature: extraction should be deterministic, not creative.
            temperature=0.2,
            max_completion_tokens=300,
            top_p=1,
            stream=False,
            stop=None,
        )
        extracted_info = completion.choices[0].message.content.strip()
    except Exception as e:
        # Surface upstream/client failures as a 500, preserving the cause.
        raise HTTPException(status_code=500, detail=str(e)) from e

    return {"extracted": extracted_info}
|
|
|
|
|
|