steveagi commited on
Commit
3f7e375
·
unverified ·
2 Parent(s): 2a13540 42e71ec

Merge pull request #10 from east-and-west-magic/feature-cache-query

Browse files
Files changed (2) hide show
  1. app.py +25 -19
  2. cache.py +39 -0
app.py CHANGED
@@ -8,13 +8,14 @@ import gradio as gr
8
  from gradio_client import Client
9
  from extract import extract
10
  import app_util
 
11
  from pgsoft.pgconst.const import service_list, functionality_list, game_list
12
  from pgsoft.pgdate.date_utils import beijing
13
 
14
  #######################
15
  # proxy version
16
  #######################
17
- proxy_version = "1.0.0-2023-12-01-a" # use pgconst
18
 
19
  t = datetime.now()
20
  t = t.astimezone(ZoneInfo("Asia/Shanghai"))
@@ -37,6 +38,8 @@ if not identity:
37
  space = "stevez-ai"
38
  if identity in spaces:
39
  space = spaces[identity]
 
 
40
 
41
 
42
  def run(hf_token, service, game, functionality, nlp_command):
@@ -92,24 +95,27 @@ def run(hf_token, service, game, functionality, nlp_command):
92
  assert "games" in service_list
93
  if service == "games":
94
  print(f"{beijing()} [{user_name}] [{game}] {nlp_command}")
95
- client = Client(
96
- url,
97
- hf_token=token,
98
- verbose=False,
99
- )
100
- calling_start = beijing()
101
- print(f"calling ai starts at {calling_start}")
102
- res = client.predict(
103
- service,
104
- game,
105
- functionality,
106
- nlp_command, # hidden,
107
- api_name="/predict",
108
- )
109
- calling_end = beijing()
110
- timecost = calling_end.timestamp() - calling_start.timestamp()
111
- print(f"calling ai ends at {calling_end}, costs {timecost:.2f}s")
112
- outp = json.loads(res)
 
 
 
113
  # add proxy version info to the output
114
  outp["proxy-version"] = proxy_version
115
  outp["user"] = user_name
 
8
  from gradio_client import Client
9
  from extract import extract
10
  import app_util
11
+ import cache
12
  from pgsoft.pgconst.const import service_list, functionality_list, game_list
13
  from pgsoft.pgdate.date_utils import beijing
14
 
15
  #######################
16
  # proxy version
17
  #######################
18
+ proxy_version = "1.0.0-2023-12-12-a" # use cache
19
 
20
  t = datetime.now()
21
  t = t.astimezone(ZoneInfo("Asia/Shanghai"))
 
38
  space = "stevez-ai"
39
  if identity in spaces:
40
  space = spaces[identity]
41
+ filepath = os.sep.join(["cache", "cached_ai.json"])
42
+ cache.load_cache(filepath)
43
 
44
 
45
  def run(hf_token, service, game, functionality, nlp_command):
 
95
  assert "games" in service_list
96
  if service == "games":
97
  print(f"{beijing()} [{user_name}] [{game}] {nlp_command}")
98
+ outp = cache.get_cache(nlp_command)
99
+ if outp is None:
100
+ client = Client(
101
+ url,
102
+ hf_token=token,
103
+ verbose=False,
104
+ )
105
+ calling_start = beijing()
106
+ print(f"calling ai starts at {calling_start}")
107
+ res = client.predict(
108
+ service,
109
+ game,
110
+ functionality,
111
+ nlp_command, # hidden,
112
+ api_name="/predict",
113
+ )
114
+ calling_end = beijing()
115
+ timecost = calling_end.timestamp() - calling_start.timestamp()
116
+ print(f"calling ai ends at {calling_end}, costs {timecost:.2f}s")
117
+ outp = json.loads(res)
118
+ cache.add_cache(nlp_command, outp)
119
  # add proxy version info to the output
120
  outp["proxy-version"] = proxy_version
121
  outp["user"] = user_name
cache.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import json
3
+ from pgsoft.pgdate.date_utils import beijing
4
+
5
+ cache_ai = {}
6
+
7
+
8
def normalize_text(text: str) -> str:
    """Lowercase *text* and collapse runs of spaces into single spaces.

    Used to canonicalize commands so that cache lookups are insensitive
    to case and extra spacing.
    """
    words = [w for w in text.lower().split(" ") if w]
    return " ".join(words)
13
+
14
+
15
def load_cache(filepath: str):
    """Load previously cached AI-call results from a JSON file into the
    module-level ``cache_ai`` dict.

    Silently does nothing when *filepath* does not exist, so a fresh
    deployment simply starts with an empty cache.
    """
    global cache_ai
    if os.path.exists(filepath):
        # "r" instead of "r+": we only read here, and read-write mode
        # would fail on read-only filesystems (e.g. container mounts).
        # Explicit UTF-8 avoids locale-dependent decoding of the JSON.
        with open(filepath, "r", encoding="utf-8") as f:
            cache_ai = json.load(f)
21
+
22
+
23
def add_cache(command: str, result: dict):
    """Store *result* in the in-memory cache under the normalized *command*.

    NOTE: *result* is mutated in place — its "command" key is set to the
    normalized command text before storing, so callers see that key in
    the dict they passed in (app.py relies on the same object).
    """
    command = normalize_text(command)
    result["command"] = command

    # `global` is not required for item assignment on a module-level
    # dict, only for rebinding the name; kept out for clarity.
    cache_ai[command] = result
    print(f'[cache] added "{command}"')
31
+
32
+
33
def get_cache(command: str) -> dict | None:
    """Return a copy of the cached result for *command*, or None on a miss.

    The "timestamp" field of the returned dict is refreshed to the
    current Beijing time.  A shallow copy is returned so that callers
    who add keys afterwards (app.py sets "proxy-version" and "user" on
    the result) do not mutate the cached entry itself — previously the
    stored dict was returned by reference and accumulated caller edits.
    """
    command = normalize_text(command)
    cached = cache_ai.get(command)
    if cached is None:
        # Explicit None check: a falsy-but-present entry is still a hit.
        return None
    outp = dict(cached)  # shallow copy; nested values are still shared
    outp["timestamp"] = str(beijing())
    return outp