Update main.py
Browse files
main.py
CHANGED
|
@@ -130,6 +130,13 @@ def generate_topics(user_input, num_topics, previous_queries):
|
|
| 130 |
subtopics = json_from_text(response_topics)
|
| 131 |
return subtopics
|
| 132 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 133 |
def generate_report(topic, description):
|
| 134 |
prompt = f"""create a detailed report on: {topic} by following the instructions: {description}"""
|
| 135 |
md_report = together_response(prompt, model = "meta-llama/Llama-3-70b-chat-hf", SysPrompt = SysPromptMdOffline)
|
|
|
|
| 130 |
subtopics = json_from_text(response_topics)
|
| 131 |
return subtopics
|
| 132 |
|
| 133 |
+
def generate_subtopics(main_task,user_input,num_topics,excluded_topics):
    """Ask the LLM for subtopics of *main_task*, steering it away from topics already used.

    Parameters
    ----------
    main_task : the overall task description inserted into the prompt template
    user_input : the user's original request
    num_topics : how many subtopics to ask the model for
    excluded_topics : iterable of topic strings to exclude (joined with commas)

    Returns whatever ``json_from_text`` extracts from the model reply —
    presumably a list of subtopic strings; confirm against callers.
    """
    # The prompt template expects a single comma-separated string, not a list.
    excluded_csv = ",".join(excluded_topics)
    filled_prompt = prompt_subtopics.format(
        main_task=main_task,
        user_input=user_input,
        num_topics=num_topics,
        excluded_topics=excluded_csv,
    )
    # temperature=1 keeps suggestions varied across repeated calls.
    llm_reply = together_response(
        filled_prompt,
        model="meta-llama/Llama-3-70b-chat-hf",
        SysPrompt=SysPromptList,
        temperature=1,
    )
    return json_from_text(llm_reply)
|
| 139 |
+
|
| 140 |
def generate_report(topic, description):
|
| 141 |
prompt = f"""create a detailed report on: {topic} by following the instructions: {description}"""
|
| 142 |
md_report = together_response(prompt, model = "meta-llama/Llama-3-70b-chat-hf", SysPrompt = SysPromptMdOffline)
|