Update utils/mistral.py
utils/mistral.py CHANGED (+3 -3)
@@ -102,7 +102,7 @@ def Model_ProfessionalDetails_Output(resume, client):
     #for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True):#, temperature=0.35):
     #    response += message.choices[0].delta.content
 
-    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False)
+    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35)
     print("This is without stream data ",data.choices[0].message.content)
     response = data.choices[0].message.content
 
@@ -142,7 +142,7 @@ def Model_EducationalDetails_Output(resume, client):
     #response = ""
     #for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True):#, temperature=0.35):
     #    response += message.choices[0].delta.content
-    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False)
+    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35)
     print("This is without stream data ",data.choices[0].message.content)
     response = data.choices[0].message.content
 
@@ -184,7 +184,7 @@ def Model_PersonalDetails_Output(resume, client):
     #for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=True):#, temperature=0.35):
     #    response += message.choices[0].delta.content
 
-    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False)
+    data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35)
     print("This is without stream data ",data.choices[0].message.content)
     response = data.choices[0].message.content
 
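The change is identical in all three extractor functions: the commented-out streaming loop stays disabled, and the live non-streaming chat_completion call gains an explicit temperature=0.35. A minimal sketch of the resulting call pattern, assuming client is a huggingface_hub.InferenceClient and using a placeholder model name and resume text (neither appears in this diff):

from huggingface_hub import InferenceClient

# Assumed setup: the Space builds its client elsewhere; the model name here is a placeholder.
client = InferenceClient(model="mistralai/Mistral-7B-Instruct-v0.3")

resume = "John Doe, Software Engineer at Acme Corp, 2019-2024 ..."  # placeholder resume text
system_role = {"role": "system", "content": "Extract the professional details from the resume as JSON."}
user_prompt = {"role": "user", "content": resume}

# Non-streaming request, as in the updated lines: a single completion object is returned,
# so the text is read from choices[0].message.content instead of being accumulated from chunks.
data = client.chat_completion(
    messages=[system_role, user_prompt],
    max_tokens=3000,
    stream=False,
    temperature=0.35,
)
response = data.choices[0].message.content
print(response)

With stream=True the same call instead yields chunks whose text is in message.choices[0].delta.content, which is what the commented-out loops were accumulating.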