File size: 1,333 Bytes
e7d050a 1e987dc e7d050a 4e93024 e7d050a 1e987dc e7d050a 4e93024 e7d050a 18d13eb e7d050a 1e987dc | 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 | import openai
from utilities import prompt_constants, constants
def Completion(summary_messages):
    """Send *summary_messages* to the gpt-3.5-turbo chat endpoint.

    Args:
        summary_messages: list of ``{"role": ..., "content": ...}`` dicts in
            the OpenAI chat format.

    Returns:
        The assistant reply text (content of the first choice).
    """
    api_result = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=summary_messages,
    )
    # Only the text of the first (and only requested) completion is needed.
    return api_result["choices"][0]["message"]["content"]
def SummarizeCompletion(code, sample):
    """Summarize *code* with the chat model, or translate it when *sample* is given.

    Builds the chat message list (translation prompt when ``sample`` is a
    non-empty style sample, otherwise the plain summary prompt), sends it via
    :func:`Completion`, and records the assistant reply in the message list.

    Args:
        code: source text to summarize/translate; truncated to 14,000 chars.
        sample: example output style for translation; "" selects summarization.

    Returns:
        Tuple of (summary_messages, result_string) where result_string is the
        optional truncation notice + model reply + CHAR_COUNT marker + the
        length of the (possibly truncated) code actually sent.
    """
    truncated_prefix = ""
    if len(code) > 14000:
        # Warn the caller that the input was cut off before the model saw it.
        truncated_prefix = constants.SUMMARIZATION_TRUNCATION
    # BUG FIX: assign unconditionally. When this slice lived inside the
    # branch above, inputs of <= 14000 chars left `truncated_code` unbound
    # and raised NameError below. Slicing is a no-op for short strings.
    truncated_code = code[:14000]

    summary_messages = []
    if sample != "":
        # Translation mode: wrap the caller-provided sample in the prompt.
        summary_messages.append({"role": "system", "content": prompt_constants.TRANSLATION_PROMPT_PREFIX + sample + prompt_constants.TRANSLATION_PROMPT_SUFFIX})
        summary_messages.append({"role": "user", "content": truncated_code})
        print("Sample not '': ", sample, " summary_messages: ", summary_messages)
    else:
        # Summarization mode: fixed system prompt.
        summary_messages.append({"role": "system", "content": prompt_constants.SUMMARY_PROMPT})
        summary_messages.append({"role": "user", "content": truncated_code})
        print("Sample: ", sample, " summary_messages: ", summary_messages)

    response_message = Completion(summary_messages)
    # Keep the assistant turn in the history so callers can continue the chat.
    summary_messages.append({"role": "assistant", "content": response_message})
    return summary_messages, truncated_prefix + response_message + constants.CHAR_COUNT + str(len(truncated_code))