deepali1021 commited on
Commit
5144aba
·
1 Parent(s): 749f801

Changed the model and prompt

Browse files
Files changed (1) hide show
  1. app.py +82 -80
app.py CHANGED
@@ -1,80 +1,82 @@
1
- # You can find this code for Chainlit python streaming here (https://docs.chainlit.io/concepts/streaming/python)
2
-
3
- # OpenAI Chat completion
4
- import os
5
- from openai import AsyncOpenAI # importing openai for API usage
6
- import chainlit as cl # importing chainlit for our app
7
- from chainlit.prompt import Prompt, PromptMessage # importing prompt tools
8
- from chainlit.playground.providers import ChatOpenAI # importing ChatOpenAI tools
9
- from dotenv import load_dotenv
10
-
11
- load_dotenv()
12
-
13
- # ChatOpenAI Templates
14
- system_template = """You are a helpful assistant who always speaks in a pleasant tone!
15
- """
16
-
17
- user_template = """{input}
18
- Think through your response step by step.
19
- """
20
-
21
-
22
- @cl.on_chat_start # marks a function that will be executed at the start of a user session
23
- async def start_chat():
24
- settings = {
25
- "model": "gpt-3.5-turbo",
26
- "temperature": 0,
27
- "max_tokens": 500,
28
- "top_p": 1,
29
- "frequency_penalty": 0,
30
- "presence_penalty": 0,
31
- }
32
-
33
- cl.user_session.set("settings", settings)
34
-
35
-
36
- @cl.on_message # marks a function that should be run each time the chatbot receives a message from a user
37
- async def main(message: cl.Message):
38
- settings = cl.user_session.get("settings")
39
-
40
- client = AsyncOpenAI()
41
-
42
- print(message.content)
43
-
44
- prompt = Prompt(
45
- provider=ChatOpenAI.id,
46
- messages=[
47
- PromptMessage(
48
- role="system",
49
- template=system_template,
50
- formatted=system_template,
51
- ),
52
- PromptMessage(
53
- role="user",
54
- template=user_template,
55
- formatted=user_template.format(input=message.content),
56
- ),
57
- ],
58
- inputs={"input": message.content},
59
- settings=settings,
60
- )
61
-
62
- print([m.to_openai() for m in prompt.messages])
63
-
64
- msg = cl.Message(content="")
65
-
66
- # Call OpenAI
67
- async for stream_resp in await client.chat.completions.create(
68
- messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
69
- ):
70
- token = stream_resp.choices[0].delta.content
71
- if not token:
72
- token = ""
73
- await msg.stream_token(token)
74
-
75
- # Update the prompt object with the completion
76
- prompt.completion = msg.content
77
- msg.prompt = prompt
78
-
79
- # Send and close the message stream
80
- await msg.send()
 
 
 
1
+ # You can find this code for Chainlit python streaming here (https://docs.chainlit.io/concepts/streaming/python)
2
+
3
+ # OpenAI Chat completion
4
+ import os
5
+ from openai import AsyncOpenAI # importing openai for API usage
6
+ import chainlit as cl # importing chainlit for our app
7
+ from chainlit.prompt import Prompt, PromptMessage # importing prompt tools
8
+ from chainlit.playground.providers import ChatOpenAI # importing ChatOpenAI tools
9
+ from dotenv import load_dotenv
10
+
11
+ load_dotenv()
12
+
13
# ChatOpenAI Templates
# system_template: fixed persona instruction sent as the "system" message.
system_template = """You are a helpful assistant who always speaks in a pleasant tone!
"""

# user_template: wraps the raw user input ({input}) with answering guidelines.
# Fixed typo "conside" -> "concise" in item 2 of the instructions.
user_template = """{input}
1. Think through your response step by step.
2. Provide a clear and concise answer.
3. Provide answer in bulleted form for text summarization and generation.
"""
22
+
23
+
24
@cl.on_chat_start  # executed once, at the start of each user session
async def start_chat():
    """Store the default OpenAI chat-completion settings in the user session.

    The settings dict is read back by the message handler and splatted
    straight into ``client.chat.completions.create``.
    """
    default_settings = dict(
        model="gpt-4o-mini",
        temperature=0,
        max_tokens=500,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0,
    )
    cl.user_session.set("settings", default_settings)
36
+
37
+
38
@cl.on_message  # executed for every message the chatbot receives
async def main(message: cl.Message):
    """Answer one user message by streaming an OpenAI chat completion.

    Builds a Chainlit ``Prompt`` from the session templates, streams the
    completion token-by-token into a Chainlit message, then attaches the
    prompt/completion pair so it shows up in the playground.
    """
    settings = cl.user_session.get("settings")
    client = AsyncOpenAI()

    print(message.content)

    # Assemble the playground-compatible prompt: fixed system persona plus
    # the user template formatted with the incoming message text.
    system_msg = PromptMessage(
        role="system",
        template=system_template,
        formatted=system_template,
    )
    user_msg = PromptMessage(
        role="user",
        template=user_template,
        formatted=user_template.format(input=message.content),
    )
    prompt = Prompt(
        provider=ChatOpenAI.id,
        messages=[system_msg, user_msg],
        inputs={"input": message.content},
        settings=settings,
    )

    # Convert once and reuse for both the debug print and the API call.
    openai_messages = [m.to_openai() for m in prompt.messages]
    print(openai_messages)

    msg = cl.Message(content="")

    # Call OpenAI and forward each streamed token into the Chainlit message.
    stream = await client.chat.completions.create(
        messages=openai_messages, stream=True, **settings
    )
    async for chunk in stream:
        # delta.content may be None on role/terminal chunks; stream "" then.
        await msg.stream_token(chunk.choices[0].delta.content or "")

    # Update the prompt object with the completion for the playground.
    prompt.completion = msg.content
    msg.prompt = prompt

    # Send and close the message stream.
    await msg.send()