Yasu777 committed on
Commit
650fdad
·
verified ·
1 Parent(s): fec4fb2

Update chat.py

Browse files
Files changed (1) hide show
  1. chat.py +6 -6
chat.py CHANGED
@@ -236,7 +236,7 @@ def generate_chain_response(user_input, api_key):
236
  {"role": "system", "content": mistral_system_prompt},
237
  {"role": "user", "content": mistral_prompt}
238
  ],
239
- temperature=0.3,
240
  max_tokens=2000
241
  )
242
 
@@ -264,8 +264,8 @@ def generate_chain_response(user_input, api_key):
264
  {"role": "system", "content": deepseek_system_prompt},
265
  {"role": "user", "content": deepseek_prompt}
266
  ],
267
- temperature=0.3,
268
- max_tokens=2000
269
  )
270
 
271
  deepseek_knowledge = deepseek_response.choices[0].message.content
@@ -356,8 +356,8 @@ def generate_chain_response(user_input, api_key):
356
  {"role": "system", "content": design_system_prompt},
357
  {"role": "user", "content": design_prompt}
358
  ],
359
- temperature=0.4,
360
- max_tokens=3000
361
  )
362
 
363
  design_doc = design_response.choices[0].message.content
@@ -453,7 +453,7 @@ def generate_chain_response(user_input, api_key):
453
  {"role": "system", "content": error_system_prompt},
454
  {"role": "user", "content": error_prompt}
455
  ],
456
- temperature=0.4,
457
  max_tokens=4000
458
  )
459
 
 
236
  {"role": "system", "content": mistral_system_prompt},
237
  {"role": "user", "content": mistral_prompt}
238
  ],
239
+ temperature=1.0,
240
  max_tokens=2000
241
  )
242
 
 
264
  {"role": "system", "content": deepseek_system_prompt},
265
  {"role": "user", "content": deepseek_prompt}
266
  ],
267
+ temperature=0.6,
268
+ max_tokens=3000
269
  )
270
 
271
  deepseek_knowledge = deepseek_response.choices[0].message.content
 
356
  {"role": "system", "content": design_system_prompt},
357
  {"role": "user", "content": design_prompt}
358
  ],
359
+ temperature=0.6,
360
+ max_tokens=4000
361
  )
362
 
363
  design_doc = design_response.choices[0].message.content
 
453
  {"role": "system", "content": error_system_prompt},
454
  {"role": "user", "content": error_prompt}
455
  ],
456
+ temperature=0.6,
457
  max_tokens=4000
458
  )
459