Afeezee commited on
Commit
7673c2b
·
verified ·
1 Parent(s): dc45bd4

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +83 -0
app.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os

from groq import Groq
import gradio as gr

# SECURITY: the Groq API key was previously hard-coded in this file. A key
# committed to source control must be treated as leaked — revoke it and
# provide a fresh one via the GROQ_API_KEY environment variable.
client = Groq(
    api_key=os.environ.get("GROQ_API_KEY"),
)

# Running conversation history shared by both game functions below. It starts
# with a system prompt that frames the assistant's role for every completion.
conversation_history = [
    {"role": "system", "content": "You are an assistant in a trivia game focused on Nigerian music."}
]
# Opens the trivia session: sends the fixed opening prompt and returns the
# model's first question.
def start_trivia_game():
    """Send the opening prompt to the model and return its streamed reply.

    Side effects: appends both the opening user prompt and the assistant's
    reply to the module-level ``conversation_history``.
    """
    opening_prompt = "Start a Trivia on Nigeria music. Assess the answer and provide the percentage score"
    conversation_history.append({"role": "user", "content": opening_prompt})

    # Request a streamed completion over the full history so far.
    stream = client.chat.completions.create(
        model="llama-3.1-70b-versatile",
        messages=conversation_history,
        temperature=1,
        max_tokens=1024,
        top_p=1,
        stream=True,
        stop=None,
    )

    # Stitch the streamed chunks into a single reply string; a chunk's delta
    # content may be None, hence the `or ""` fallback.
    reply = "".join(chunk.choices[0].delta.content or "" for chunk in stream)

    # Record the assistant's turn so follow-up calls see the full dialogue.
    conversation_history.append({"role": "assistant", "content": reply})
    return reply
# Advances the trivia session: feeds the player's answer to the model and
# returns its assessment / next question.
def continue_trivia_game(user_response):
    """Append *user_response* to the dialogue and return the model's streamed reply.

    Side effects: appends both the user's answer and the assistant's reply
    to the module-level ``conversation_history``.
    """
    conversation_history.append({"role": "user", "content": user_response})

    # Request a streamed completion over the full history so far.
    stream = client.chat.completions.create(
        model="llama-3.1-70b-versatile",
        messages=conversation_history,
        temperature=1,
        max_tokens=1024,
        top_p=1,
        stream=True,
        stop=None,
    )

    # Stitch the streamed chunks into a single reply string; a chunk's delta
    # content may be None, hence the `or ""` fallback.
    reply = "".join(chunk.choices[0].delta.content or "" for chunk in stream)

    # Record the assistant's turn so the next round sees the full dialogue.
    conversation_history.append({"role": "assistant", "content": reply})
    return reply
# Fetch the opening trivia question up front so it is already visible when
# the page first loads.
# NOTE(review): this runs at import time and makes a network call — if the
# API key is missing or the service is unreachable, the app fails to start.
initial_output = start_trivia_game()

# Gradio widgets: the model's text appears in a large read-back textbox, and
# the player types answers into a smaller input box.
llm_output = gr.Textbox(
    label="LLM Output",
    placeholder="The output from the LLM will appear here",
    lines=10,
    value=initial_output,
)
user_response = gr.Textbox(label="Your Response", placeholder="Type your response here", lines=3)

# Each submit routes the player's text through continue_trivia_game and shows
# the model's assessment / next question in the output box.
demo = gr.Interface(
    fn=continue_trivia_game,
    inputs=user_response,
    outputs=llm_output,
    title="TriviaVilla--Nigerian Music Trivia Game",
    description="How much do you know Nigerian Music? Here is a simple trivia game on Nigerian Music using LLama 3.1",
)

# Guard the launch so importing this module (e.g. from tests or another app)
# does not start a web server as a side effect.
if __name__ == "__main__":
    demo.launch()