# FastAPI service exposing Llama-3 chat endpoints (Hugging Face Space).
| from fastapi import FastAPI, Request, HTTPException | |
| from pydantic import BaseModel | |
| from huggingface_hub import InferenceClient | |
| import json | |
| from typing import List, Dict, Tuple | |
# FastAPI application instance; the endpoint handlers below are served from it.
app = FastAPI()

# Inisialisasi HuggingFace client -> Hugging Face Inference API client for the
# Llama-3 8B instruct model.
# NOTE(review): no auth token is passed — presumably relies on ambient HF
# credentials or a public endpoint; confirm deployment configuration.
client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")
class ChatRequest(BaseModel):
    """Request body for the full-history chat endpoint."""

    # Whole conversation as OpenAI-style message dicts, each with
    # "role" and "content" keys.
    chat_history: List[Dict[str, str]]
class ChatMemRequest(BaseModel):
    """Request body for the tuple-history (Gradio-style) chat endpoint."""

    # The new user message to answer.
    message: str
    # Prior turns as (user_message, assistant_reply) pairs.
    chat_history: List[Tuple[str, str]]
class ProcessJsonRequest(BaseModel):
    """Request body carrying a chat history serialized as a JSON string."""

    # JSON text expected to decode to a list of message dicts.
    json_input: str
def chat_llama(chat_history: List[Dict[str, str]], max_tokens: int = 500) -> List[Dict[str, str]]:
    """Complete a conversation with the Llama-3 model.

    Args:
        chat_history: OpenAI-style message dicts ("role"/"content") to send.
        max_tokens: Generation cap forwarded to the inference API
            (defaults to the previous hard-coded 500).

    Returns:
        A NEW list: the input messages plus the assistant's reply appended.
        The caller's list is no longer mutated in place (the original
        appended to — and returned — the argument itself, silently
        altering shared state).
    """
    chat_completion = client.chat_completion(
        messages=chat_history,
        max_tokens=max_tokens,
    )
    reply = chat_completion.choices[0].message.content
    return chat_history + [{"role": "assistant", "content": reply}]
def chat_mem(message: str, chat_history: List[Tuple[str, str]]) -> Tuple[str, List[Tuple[str, str]]]:
    """Answer *message* given a (user, assistant) tuple-pair history.

    Args:
        message: New user message.
        chat_history: Prior turns as (user_message, assistant_reply) pairs.

    Returns:
        ("", updated_history) — the empty string clears a text-input widget
        (Gradio convention), and the history gains the new (message, reply)
        pair at the end.
    """
    # Convert the tuple history into OpenAI-style role messages for the model.
    messages = [{"role": "system", "content": "You are a helpful assistant."}]
    for user_msg, assistant_msg in chat_history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    chat_completion = client.chat_completion(
        messages=messages,
        max_tokens=500,
    )
    reply = chat_completion.choices[0].message.content

    # The original rebuilt the tuple history by flattening every message's
    # content and re-pairing with index arithmetic (a[i*2+1], a[i*2+2]);
    # appending the new pair directly is equivalent and far clearer.
    updated = [(u, a) for u, a in chat_history] + [(message, reply)]
    return "", updated
def process_json(json_input: str) -> Tuple[str, str]:
    """Run a JSON-encoded chat history through chat_llama.

    Args:
        json_input: JSON text that must decode to a list of message dicts.

    Returns:
        (output_json, error_message): on success, the updated history as
        pretty-printed JSON and an empty error string; on a parse or
        validation failure, an empty output and the error description.
    """
    try:
        chat_history = json.loads(json_input)
        if not isinstance(chat_history, list):
            raise ValueError("Input should be a list of message dictionaries.")
    except (json.JSONDecodeError, ValueError) as e:
        # Bug fix: the error text was previously returned in the FIRST slot,
        # which the endpoint reports as output_json; it belongs in the
        # second slot (error_message).
        return "", f"Error parsing JSON: {str(e)}"
    chat_history = chat_llama(chat_history)
    return json.dumps(chat_history, indent=2), ""
# Bug fix: the handler was defined but never registered on `app` (which was
# otherwise unused), so the API exposed no route for it. Path name chosen
# from the function name — confirm against any existing clients.
@app.post("/chat_llama")
async def chat_llama_endpoint(request: ChatRequest):
    """Append the model's reply to the submitted chat history."""
    chat_history = request.chat_history
    response = chat_llama(chat_history)
    return {"chat_history": response}
# Bug fix: the handler was defined but never registered on `app`, so the API
# exposed no route for it. Path name chosen from the function name — confirm
# against any existing clients.
@app.post("/chat_mem")
async def chat_mem_endpoint(request: ChatMemRequest):
    """Answer a message against a (user, assistant) tuple history."""
    message = request.message
    chat_history = request.chat_history
    response = chat_mem(message, chat_history)
    return {"message": response[0], "chat_history": response[1]}
# Bug fix: the handler was defined but never registered on `app`, so the API
# exposed no route for it. Path name chosen from the function name — confirm
# against any existing clients.
@app.post("/process_json")
async def process_json_endpoint(request: ProcessJsonRequest):
    """Process a JSON-encoded chat history and return result or error."""
    json_input = request.json_input
    response = process_json(json_input)
    return {"output_json": response[0], "error_message": response[1]}
if __name__ == "__main__":
    # Serve the API directly with uvicorn when run as a script.
    # NOTE(review): 7860 is the conventional Hugging Face Spaces port.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)