p-christ committed on
Commit
a68fee5
·
1 Parent(s): 53464b6

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +0 -31
README.md CHANGED
@@ -1,31 +0,0 @@
1
- ```
2
def wrap_prompt(prompt, input, model=None):
    """Wrap a system prompt and user input into a model-specific template.

    Each supported model family expects a different instruction format, so the
    same (prompt, input) pair is rendered into the matching chat/instruction
    template.

    Args:
        prompt: The system/instruction text.
        input: The user-provided text to transform.
        model: Model identifier to select the template. Defaults to the
            module-level ``model_name`` global when not given, which keeps the
            original two-argument call sites working unchanged.

    Returns:
        The fully formatted prompt string for the selected model.

    Raises:
        ValueError: If the model identifier matches no known template.
    """
    if model is None:
        # Preserve original behavior: fall back to the module-level global.
        model = model_name
    if model == "HuggingFaceH4/zephyr-7b-alpha":
        return f"### Instructions:\n{prompt}" + "\n---\n" + input + "\n\n### Response: "
    elif model == "stabilityai/stablelm-3b-4e1t":
        return f"{prompt}" + "Input: " + input + "\n Output: "
    elif "LLaMA" in model:
        # Llama-2-style chat template, e.g. princeton-nlp/Sheared-LLaMA-1.3B.
        return f"<s>[INST] <<SYS>>\n{prompt}\n<</SYS>>\n\n{input} [/INST] "
    else:
        # Include the offending name so the failure is diagnosable.
        raise ValueError(f"Unrecognised model: {model!r}")
12
-
13
-
14
def add_prompt(input: str):
    """Build the flashcard-generation instructions and wrap them around *input*.

    Joins the fixed instruction lines into one system prompt, then delegates to
    ``wrap_prompt`` to render it in the active model's template.
    """
    instruction_lines = (
        "You prepare students for exams by turning some text they provide into flashcards.",
        "The flashcards must be statements and not question & answers",
        "You try to represent everything in the text but ignore information that wouldn't be useful in an exam e.g. bibliographies, references, advertisements.",
        "Each piece of information should be in the same language that it was written in the text. Do not translate it to english.",
        "The flashcards should be independent, concise, formulated in a single sentence, unambiguous and factual.",
        "The flashcards should make sense on their own without any additional context e.g. they shouldn't use words like 'he', 'it', 'they' unless it's clear what they're referring to",
        "The flashcards should only contain knowledge found directly in the text. Do not include information from other sources",
        "Copy the formatting style they use for the text, e.g. if the text is a list of Term: Definition inputs then keep the same Term: Definition format",
        "Write out any equations exactly as they appear in the text, do not remove maths symbols like '+' or '='",
        # start_item_icon / end_item_icon are module-level globals — TODO confirm
        # they are defined before this is called.
        f"Before each flashcard write {start_item_icon} and after each flashcard write {end_item_icon}",
    )
    system_prompt = '\n'.join(instruction_lines)
    return wrap_prompt(system_prompt, input)
27
-
28
- ```
29
-
30
-
31
- sheared-llama-2.7b no quantization