| import openai |
| from utilities import prompt_constants, constants |
|
|
def Completion(summary_messages, model="gpt-3.5-turbo"):
    """Send a chat-completion request and return the assistant's reply text.

    Args:
        summary_messages: List of OpenAI chat message dicts of the form
            {"role": ..., "content": ...}.
        model: Chat model name; defaults to "gpt-3.5-turbo" to preserve the
            original hard-coded behavior while letting callers override it.

    Returns:
        The ``content`` string of the first choice in the API response.

    Note:
        Uses the legacy ``openai.ChatCompletion`` interface (openai<1.0).
        Network I/O happens here; API/rate-limit errors propagate to the
        caller unhandled.
    """
    response = openai.ChatCompletion.create(
        model=model,
        messages=summary_messages)
    return response["choices"][0]["message"]["content"]
|
|
def SummarizeCompletion(code, sample, max_code_len=14000):
    """Build a summarization (or translation) prompt for *code*, query the
    model, and return the transcript plus an annotated summary string.

    Args:
        code: Source code to summarize; only the first *max_code_len*
            characters are sent to the model.
        sample: If non-empty, a translation sample spliced into the
            translation prompt; if "", the plain summary prompt is used.
        max_code_len: Character limit for the code sent to the model
            (default 14000, the previously hard-coded value).

    Returns:
        Tuple ``(summary_messages, summary)`` where ``summary_messages`` is
        the full chat transcript (system, user, assistant) and ``summary``
        is the model reply, prefixed with a truncation notice when *code*
        was cut and suffixed with the character count actually sent.
    """
    # Slice unconditionally — harmless when code is already short enough,
    # and guarantees truncated_code is always defined.
    truncated_code = code[:max_code_len]
    truncated_prefix = ""
    if len(code) > max_code_len:
        # Tell the caller the summary covers only a prefix of the code.
        truncated_prefix = constants.SUMMARIZATION_TRUNCATION

    summary_messages = []
    if sample != "":
        # Translation mode: wrap the caller-supplied sample in the
        # translation prompt scaffolding.
        summary_messages.append({
            "role": "system",
            "content": (prompt_constants.TRANSLATION_PROMPT_PREFIX
                        + sample
                        + prompt_constants.TRANSLATION_PROMPT_SUFFIX),
        })
        summary_messages.append({"role": "user", "content": truncated_code})
        print("Sample not '': ", sample, " summary_messages: ", summary_messages)
    else:
        # Summary mode: plain summarization system prompt.
        summary_messages.append({"role": "system",
                                 "content": prompt_constants.SUMMARY_PROMPT})
        summary_messages.append({"role": "user", "content": truncated_code})
        print("Sample: ", sample, " summary_messages: ", summary_messages)

    response_message = Completion(summary_messages)
    summary_messages.append({"role": "assistant", "content": response_message})

    return summary_messages, (truncated_prefix + response_message
                              + constants.CHAR_COUNT
                              + str(len(truncated_code)))