Spaces:
Runtime error
Runtime error
shaocongma
committed on
Commit
·
036df68
1
Parent(s):
c42190b
Fix unexpected behavior: section generation order changed.
Browse files- auto_backgrounds.py +7 -1
auto_backgrounds.py
CHANGED
|
@@ -114,14 +114,20 @@ def generate_backgrounds(title, description="", template="ICLR2022", model="gpt-
|
|
| 114 |
|
| 115 |
def generate_draft(title, description="", template="ICLR2022",
|
| 116 |
tldr=True, max_kw_refs=10, max_num_refs=30, sections=None, bib_refs=None, model="gpt-4"):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 117 |
# pre-processing `sections` parameter;
|
| 118 |
print("================START================")
|
| 119 |
print(f"Generating {title}.")
|
| 120 |
print("================PRE-PROCESSING================")
|
| 121 |
if sections is None:
|
| 122 |
sections = ["introduction", "related works", "backgrounds", "methodology", "experiments", "conclusion", "abstract"]
|
|
|
|
|
|
|
| 123 |
|
| 124 |
-
# todo: add more parameters; select which section to generate; select maximum refs.
|
| 125 |
if model == "gpt-4":
|
| 126 |
max_tokens = 4096
|
| 127 |
else:
|
|
|
|
| 114 |
|
| 115 |
def generate_draft(title, description="", template="ICLR2022",
|
| 116 |
tldr=True, max_kw_refs=10, max_num_refs=30, sections=None, bib_refs=None, model="gpt-4"):
|
| 117 |
+
|
| 118 |
+
def _filter_sections(sections):
|
| 119 |
+
ordered_sections = ["introduction", "related works", "backgrounds", "methodology", "experiments", "conclusion",
|
| 120 |
+
"abstract"]
|
| 121 |
+
return [section for section in ordered_sections if section in sections]
|
| 122 |
# pre-processing `sections` parameter;
|
| 123 |
print("================START================")
|
| 124 |
print(f"Generating {title}.")
|
| 125 |
print("================PRE-PROCESSING================")
|
| 126 |
if sections is None:
|
| 127 |
sections = ["introduction", "related works", "backgrounds", "methodology", "experiments", "conclusion", "abstract"]
|
| 128 |
+
else:
|
| 129 |
+
sections = _filter_sections(sections)
|
| 130 |
|
|
|
|
| 131 |
if model == "gpt-4":
|
| 132 |
max_tokens = 4096
|
| 133 |
else:
|