Spaces:
Sleeping
Sleeping
| import json | |
| import os | |
| from datetime import datetime | |
| from zoneinfo import ZoneInfo | |
| import gradio as gr | |
| from extract import extract | |
| import app_util | |
| from pgsoft.pgconst.const import service_list, functionality_list | |
| from pgsoft.pghost import ais | |
| from pgsoft.pgdate.date_utils import beijing | |
| import call_pgai | |
| from str_util import normalize_text | |
#######################
# proxy version
#######################
proxy_version = "1.0.0-2024-01-18-a"  # reconstruct ai calling

# Log the startup moment in both Beijing and Seattle local time.
t = datetime.now().astimezone(ZoneInfo("Asia/Shanghai"))
print(f"[Beijing]: {t.replace(microsecond=0)}")
t = t.astimezone(ZoneInfo("America/Los_Angeles"))
print(f"[Seattle]: {t.replace(microsecond=0)}")

# Deployment identity comes from the environment; default to "watermelon".
identity = os.environ.get("identity")
print(f"identity: {identity}")
if not identity:
    identity = "watermelon"

# Choose the backing AI space for this identity, else the dev space.
ai = ais[identity] if identity in ais else "stevez-ai-dev"

# Optional database token; echo only a short prefix as a sanity check.
db_token = os.environ.get("db_token")
if db_token:
    print(db_token[:5])
def run(hf_token, service, game, functionality, nlp_command):
    """
    Gradio event handler: validate the request, dispatch it to either the
    file service or the PG AI backend, and return the result as a string.

    Parameters:
        hf_token: reused as a JSON string; decoded by `extract` into
            (token, user, redirect, source, username, _).
        service: one of `service_list`.
        game: target game name (required for non-shared services).
        functionality: chosen AI functionality (required likewise).
        nlp_command: free-text command (must be non-empty for "AI").

    Returns:
        A plain error message string, or the JSON-encoded result payload
        annotated with proxy metadata.
    """
    # reuse hf_token field as json string
    token, user, redirect, source, username, _ = extract(hf_token)
    if user is None:
        user = "__fake__"
    # Resolve a per-request redirect target WITHOUT mutating the module-level
    # `ai` (the previous `global ai; ai = redirect` leaked one caller's
    # redirect into every subsequent request).
    target_ai = redirect if redirect is not None else ai
    ai_url = f"https://{target_ai}.hf.space"
    if token is None or token == "":
        return "please specify hf token"
    # Non-shared services require a game, a functionality, and (for "AI")
    # a non-empty command.
    if service not in service_list[1:]:
        if game is None:
            return "please specify which game"
        if functionality is None:
            return "please choose the AI functionality"
        if functionality == "AI":
            if nlp_command in ["", None]:
                return "please make sure the command is not empty"
    service_start = beijing()
    print(f"<<<<<<<<<<<<<< service starts at {service_start} <<<<<<<<<<<<<<")
    if service in ["download game", "upload game", "list games"]:
        # File services are handled locally; nlp_command carries the payload.
        res = app_util.file_service(service, nlp_command, db_token)
        if res is None:
            outp = {"status": "Failure"}
        else:
            outp = {"status": "OK", "result": res}
    else:
        assert "games" in service_list
        if service == "games":
            print(f"{beijing()} [{user}] [{game}] {nlp_command}")
        nlp_command = normalize_text(nlp_command)
        call_pgai.from_cache = True
        outp = call_pgai.call_pgai(
            service,
            game,
            functionality,
            nlp_command,
            ai_url,
            token,
        )
    if outp is None:
        return "no output"
    if isinstance(outp, str):
        return outp
    # add proxy version info to the output
    outp["timestamp"] = str(beijing())
    outp["proxy-version"] = proxy_version
    outp["user"] = user
    outp["username"] = username
    outp["game"] = game
    outp["source"] = source
    outp["cache"] = call_pgai.from_cache
    app_util.call_logger(outp, identity, token)
    service_end = beijing()
    timecost = service_end.timestamp() - service_start.timestamp()
    print(
        f">>>>>>>>>>>>>>> service ends at {service_end}, "
        + f"costs {timecost:.2f}s >>>>>>>>>>>>>>>\n"
    )
    return json.dumps(outp, indent=4)
# Build the demo UI: token text field, three radio selectors, and a
# free-text command box, all wired to the `run` handler.
service_radio = gr.Radio(
    service_list,
    value=service_list[0],
    info="Shared services",
)
game_radio = gr.Radio(
    ["watermelon"],
    value="watermelon",
    info="Which game you want the AI to support?",
)
functionality_radio = gr.Radio(
    functionality_list,
    value=functionality_list[0],
    info="What functionality?",
)
demo = gr.Interface(
    fn=run,
    inputs=["text", service_radio, game_radio, functionality_radio, "text"],
    outputs="text",
    title="Demo",
    allow_flagging="never",
)
demo.launch()