Update utils/mistral.py
utils/mistral.py (CHANGED, +3 -3)
@@ -70,7 +70,7 @@ def Model_ProfessionalDetails_Output(resume, client):
         response += message.choices[0].delta.content
 
     data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35)
-    print("This is without stream data ",data)
+    print("This is without stream data ",data.choices[0].message.content)
 
     try:
         clean_response = Data_Cleaner(response)
@@ -108,7 +108,7 @@ def Model_EducationalDetails_Output(resume, client):
     for message in client.chat_completion(messages=[system_role, user_prompt], max_tokens=4096, stream=True, temperature=0.35):
         response += message.choices[0].delta.content
     data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35)
-    print("This is without stream data ",data)
+    print("This is without stream data ",data.choices[0].message.content)
 
     try:
         clean_response = Data_Cleaner(response)
@@ -148,7 +148,7 @@ def Model_PersonalDetails_Output(resume, client):
         response += message.choices[0].delta.content
 
     data = client.chat_completion(messages=[system_role, user_prompt], max_tokens=3000, stream=False, temperature=0.35)
-    print("This is without stream data ",data)
+    print("This is without stream data ",data.choices[0].message.content)
 
     # Handle cases where the response might have formatting issues
     try:
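The change is the same in all three functions: the non-streaming chat_completion call returns a response object, and printing the object itself dumps the full structure instead of the generated text, which lives at choices[0].message.content. The sketch below illustrates the two access patterns, assuming client is a huggingface_hub InferenceClient; the model id and messages are placeholders, not taken from this repo.

# Minimal sketch of the accessor difference; model id and messages are illustrative only.
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")  # hypothetical model id

messages = [
    {"role": "system", "content": "Extract details from the resume as JSON."},
    {"role": "user", "content": "resume text goes here"},
]

# Non-streaming call: returns a single completion object.
data = client.chat_completion(messages=messages, max_tokens=3000, stream=False, temperature=0.35)

# print(data) would dump the whole object; the generated text is here:
print(data.choices[0].message.content)

# Streaming call: yields chunks whose text sits under .delta, as in the loops above.
response = ""
for chunk in client.chat_completion(messages=messages, max_tokens=4096, stream=True, temperature=0.35):
    response += chunk.choices[0].delta.content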