botsi committed on
Commit
b20408a
·
verified ·
1 Parent(s): 99bd41e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -38
app.py CHANGED
@@ -1,4 +1,6 @@
1
- # Original code from https://huggingface.co/spaces/huggingface-projects/llama-2-7b-chat and https://huggingface.co/spaces/radames/gradio-chatbot-read-query-param
 
 
2
  import gradio as gr
3
  import time
4
  import random
@@ -6,22 +8,21 @@ import json
6
  import mysql.connector
7
  import os
8
  import csv
9
- from huggingface_hub import Repository, hf_hub_download
10
- from datetime import datetime
11
-
12
  import spaces
13
  import torch
 
14
  from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
15
  from threading import Thread
16
  from typing import Iterator
 
 
17
 
18
- # data_fetcher.py
19
  import mysql.connector
20
  import urllib.parse
21
  import urllib.request
22
 
23
-
24
- # Save chat history as JSON
25
  import atexit
26
  import os
27
  from huggingface_hub import HfApi, HfFolder
@@ -69,10 +70,10 @@ DESCRIPTION = """\
69
  This is your personal space to chat.
70
  You can ask anything from strategic questions regarding the game or just chat as you like.
71
  """
 
 
72
  '''LICENSE = """
73
  <p/>
74
-
75
- ---
76
  As a derivate work of [Llama-2-13b-chat](https://huggingface.co/meta-llama/Llama-2-13b-chat) by Meta,
77
  this demo is governed by the original [license](https://huggingface.co/spaces/huggingface-projects/llama-2-13b-chat/blob/main/LICENSE.txt) and [acceptable use policy](https://huggingface.co/spaces/huggingface-projects/llama-2-13b-chat/blob/main/USE_POLICY.md).
78
  """
@@ -88,7 +89,6 @@ if torch.cuda.is_available():
88
  tokenizer = AutoTokenizer.from_pretrained(model_id)
89
  tokenizer.use_default_system_prompt = False
90
 
91
- import mysql.connector
92
 
93
  def fetch_personalized_data(session_index):
94
  try:
@@ -193,31 +193,11 @@ def fetch_personalized_data(session_index):
193
  return None
194
 
195
 
196
- js = """
197
- function() {
198
- const params = new URLSearchParams(window.location.search);
199
- const url_params = Object.fromEntries(params);
200
- return url_params;
201
- }
202
- """
203
-
204
- def get_window_url_params():
205
- return """
206
- function() {
207
- const params = new URLSearchParams(window.location.search);
208
- const url_params = Object.fromEntries(params);
209
- return url_params;
210
- }
211
- """
212
-
213
- ## trust-game-llama-2-7b-chat
214
- # app.py
215
  @spaces.GPU
216
  def generate(
217
- request: gr.Request,
218
  message: str,
219
  chat_history: list[tuple[str, str]],
220
- # system_prompt: str,
221
  max_new_tokens: int = 1024,
222
  temperature: float = 0.6,
223
  top_p: float = 0.9,
@@ -225,14 +205,12 @@ def generate(
225
  repetition_penalty: float = 1.2,
226
  ) -> Iterator[str]: # Change return type hint to Iterator[str]
227
 
 
228
  params = request.query_params
229
- print(params)
230
 
231
- # Construct the input prompt using the functions from the system_prompt_config module
232
  input_prompt = construct_input_prompt(chat_history, message)
233
-
234
- # Use the global variable to store the chat history
235
- # global global_chat_history
236
 
237
  conversation = []
238
 
@@ -300,8 +278,7 @@ examples=[
300
  ],
301
  )
302
 
303
- with gr.Blocks(js = js, css="style.css") as demo:
304
- #url_params = gr.JSON({}, visible=False, label="URL Params")
305
  #session_index = get_session_index(url_params)
306
  session_index = 'eb3636167d3a63fbeee32934610e5b2f'
307
  personalized_data = fetch_personalized_data(session_index)
 
1
+ # Original code from https://huggingface.co/spaces/huggingface-projects/llama-2-7b-chat
2
+ # Modified for trust game purposes
3
+
4
  import gradio as gr
5
  import time
6
  import random
 
8
  import mysql.connector
9
  import os
10
  import csv
 
 
 
11
  import spaces
12
  import torch
13
+
14
  from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
15
  from threading import Thread
16
  from typing import Iterator
17
+ from huggingface_hub import Repository, hf_hub_download
18
+ from datetime import datetime
19
 
20
+ # for fetch_personalized_data
21
  import mysql.connector
22
  import urllib.parse
23
  import urllib.request
24
 
25
+ # for saving chat history as JSON
 
26
  import atexit
27
  import os
28
  from huggingface_hub import HfApi, HfFolder
 
70
  This is your personal space to chat.
71
  You can ask anything from strategic questions regarding the game or just chat as you like.
72
  """
73
+
74
+ # LICENSE commented out
75
  '''LICENSE = """
76
  <p/>
 
 
77
  As a derivate work of [Llama-2-13b-chat](https://huggingface.co/meta-llama/Llama-2-13b-chat) by Meta,
78
  this demo is governed by the original [license](https://huggingface.co/spaces/huggingface-projects/llama-2-13b-chat/blob/main/LICENSE.txt) and [acceptable use policy](https://huggingface.co/spaces/huggingface-projects/llama-2-13b-chat/blob/main/USE_POLICY.md).
79
  """
 
89
  tokenizer = AutoTokenizer.from_pretrained(model_id)
90
  tokenizer.use_default_system_prompt = False
91
 
 
92
 
93
  def fetch_personalized_data(session_index):
94
  try:
 
193
  return None
194
 
195
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
196
  @spaces.GPU
197
  def generate(
198
+ request: gr.Request, # In order to fetch query params
199
  message: str,
200
  chat_history: list[tuple[str, str]],
 
201
  max_new_tokens: int = 1024,
202
  temperature: float = 0.6,
203
  top_p: float = 0.9,
 
205
  repetition_penalty: float = 1.2,
206
  ) -> Iterator[str]: # Change return type hint to Iterator[str]
207
 
208
+ # Fetch query params
209
  params = request.query_params
210
+ print('those are the query params:' + params)
211
 
212
+ # Construct the input prompt from the chat history and message via the construct_input_prompt helper
213
  input_prompt = construct_input_prompt(chat_history, message)
 
 
 
214
 
215
  conversation = []
216
 
 
278
  ],
279
  )
280
 
281
+ with gr.Blocks(css="style.css") as demo:
 
282
  #session_index = get_session_index(url_params)
283
  session_index = 'eb3636167d3a63fbeee32934610e5b2f'
284
  personalized_data = fetch_personalized_data(session_index)