File size: 2,919 Bytes
58d3981
 
0ba8344
58d3981
 
0ba8344
 
f86db8c
9baa4cb
f86db8c
1133401
58d3981
 
 
 
 
 
 
 
 
 
 
1635dbd
f86db8c
1635dbd
 
f86db8c
 
fdb8106
58d3981
12bc661
58d3981
 
 
1635dbd
 
 
 
 
 
 
f86db8c
 
 
 
fdb8106
1635dbd
e3d560c
58d3981
1635dbd
f86db8c
 
1635dbd
 
 
 
f86db8c
 
e3d560c
0ba8344
58d3981
 
0ba8344
58d3981
0ba8344
 
58d3981
 
 
0ba8344
58d3981
0ba8344
 
58d3981
 
0ba8344
58d3981
e3172a9
0ba8344
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
from fastapi import FastAPI, Request, HTTPException
from pydantic import BaseModel
from huggingface_hub import InferenceClient
import json
from typing import List, Dict, Tuple

# FastAPI application exposing the chat endpoints defined below.
app = FastAPI()

# Initialize the HuggingFace inference client for the Llama-3 8B instruct model.
client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")

class ChatRequest(BaseModel):
    # Full conversation so far, as a list of {"role": ..., "content": ...} dicts
    # in the shape the chat-completion API expects.
    chat_history: List[Dict[str, str]]

class ChatMemRequest(BaseModel):
    # New user message to send to the model.
    message: str
    # Prior turns as (user_message, assistant_message) pairs.
    chat_history: List[Tuple[str, str]]

class ProcessJsonRequest(BaseModel):
    # JSON-encoded chat history; must decode to a list of message dicts.
    json_input: str

def chat_llama(chat_history: List[Dict[str, str]]) -> List[Dict[str, str]]:
    """Send the role-tagged history to the model and append its reply.

    Mutates *chat_history* in place by appending the assistant message,
    then returns the same list.
    """
    completion = client.chat_completion(
        messages=chat_history,
        max_tokens=500,
    )
    reply = completion.choices[0].message.content
    chat_history.append({"role": "assistant", "content": reply})
    return chat_history

def chat_mem(message: str, chat_history: List[Tuple[str, str]]) -> Tuple[str, List[Tuple[str, str]]]:
    """Run one conversation turn and return ("", updated pair history).

    The empty first element clears the caller's input box (Gradio-style
    convention); the second element is the history with the new
    (message, reply) pair appended.
    """
    # Rebuild the role-tagged message list the chat-completion API expects:
    # system prompt, then alternating user/assistant turns, then the new message.
    messages = [{"role": "system", "content": "You are a helpful assistant."}]
    for user_turn, assistant_turn in chat_history:
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    completion = client.chat_completion(
        messages=messages,
        max_tokens=500,
    )
    reply = completion.choices[0].message.content

    # New history = old pairs plus the turn just completed. This is what the
    # original index-arithmetic reconstruction over the flattened contents
    # computed; written directly here.
    updated_history = [(u, a) for u, a in chat_history]
    updated_history.append((message, reply))
    return "", updated_history

def process_json(json_input: str) -> Tuple[str, str]:
    """Parse *json_input* as a chat history, run it through the model, and
    return ``(output_json, error_message)``.

    On success the first element is the updated history serialized as JSON and
    the error string is empty. On failure the output is empty and the error
    string describes the problem.
    """
    try:
        chat_history = json.loads(json_input)
        if not isinstance(chat_history, list):
            raise ValueError("Input should be a list of message dictionaries.")
    except (json.JSONDecodeError, ValueError) as e:
        # Bug fix: the error text was previously returned in the *output* slot
        # (`return error, ""`), so the endpoint surfaced it under "output_json"
        # with an empty "error_message". Keep the (output, error) order
        # consistent with the success path below.
        return "", f"Error parsing JSON: {str(e)}"

    chat_history = chat_llama(chat_history)
    return json.dumps(chat_history, indent=2), ""

@app.post("/chat_llama")
async def chat_llama_endpoint(request: ChatRequest):
    """Run the posted chat history through the model; return the updated history."""
    updated = chat_llama(request.chat_history)
    return {"chat_history": updated}

@app.post("/chat_mem")
async def chat_mem_endpoint(request: ChatMemRequest):
    """One conversation turn over a (user, assistant) pair history."""
    cleared_input, updated_history = chat_mem(request.message, request.chat_history)
    return {"message": cleared_input, "chat_history": updated_history}

@app.post("/process_json")
async def process_json_endpoint(request: ProcessJsonRequest):
    """Parse a JSON-encoded history, run it through the model, report errors."""
    output_json, error_message = process_json(request.json_input)
    return {"output_json": output_json, "error_message": error_message}

if __name__ == "__main__":
    import uvicorn
    # Listen on all interfaces; port 7860 — presumably for HuggingFace Spaces,
    # which uses that port by convention — TODO confirm deployment target.
    uvicorn.run(app, host="0.0.0.0", port=7860)