andrewzamai committed on
Commit
2331e72
·
verified ·
1 Parent(s): 5cf3fa1

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +3 -3
README.md CHANGED
@@ -313,7 +313,7 @@ An inverse trend can be observed, with SLIMER emerging as the most effective in
313
  <div class="template">
314
  <pre>{
315
  "description": "SLIMER prompt",
316
- "prompt_input": "<|start_header_id|>system<|end_header_id|>You are given a text chunk (delimited by triple quotes) and an instruction.\nRead the text and answer to the instruction in the end.\n\"\"\"\n{<span class="highlight-orange">input</span>}\n\"\"\"\nInstruction: Extract the Named Entities of type {<span class="highlight-orange">NE_name</span>} from the text chunk you have read. You are given a DEFINITION and some GUIDELINES.\nDEFINITION: {<span class="highlight-orange">definition</span>}\nGUIDELINES: {<span class="highlight-orange">guidelines</span>}\nReturn a JSON list of instances of this Named Entity type (for example [\"text_span_1\", \"text_span_2\"]. Return an empty list [] if no instances are present. Return only the JSON list, no further motivations or introduction to the answer.<|eot_id|>\n<|start_header_id|>assistant<|end_header_id|>\n\n"
317
  }</pre>
318
  </div>
319
  </body>
@@ -323,9 +323,9 @@ An inverse trend can be observed, with SLIMER emerging as the most effective in
323
  ```python
324
  from vllm import LLM, SamplingParams
325
 
326
- vllm_model = LLM(model="expertai/SLIMER")
327
 
328
- sampling_params = SamplingParams(temperature=0, max_tokens=128, stop=['</s>'])
329
 
330
  prompts = [prompter.generate_prompt(instruction, input) for instruction, input in instruction_input_pairs]
331
  responses = vllm_model.generate(prompts, sampling_params)
 
313
  <div class="template">
314
  <pre>{
315
  "description": "SLIMER prompt",
316
+ "prompt_input": "<|start_header_id|>system<|end_header_id|>\n\nYou are an expert in Named Entity Recognition designed to output JSON only.<|eot_id|>\n<|start_header_id|>user<|end_header_id|>\n\nYou are given a text chunk (delimited by triple quotes) and an instruction.\nRead the text and answer to the instruction in the end.\n\"\"\"\n{<span class="highlight-orange">input</span>}\n\"\"\"\nInstruction: Extract the Named Entities of type {<span class="highlight-orange">NE_name</span>} from the text chunk you have read. You are given a DEFINITION and some GUIDELINES.\nDEFINITION: {<span class="highlight-orange">definition</span>}\nGUIDELINES: {<span class="highlight-orange">guidelines</span>}\nReturn a JSON list of instances of this Named Entity type (for example [\"text_span_1\", \"text_span_2\"]. Return an empty list [] if no instances are present. Return only the JSON list, no further motivations or introduction to the answer.<|eot_id|>\n<|start_header_id|>assistant<|end_header_id|>\n\n"
317
  }</pre>
318
  </div>
319
  </body>
 
323
  ```python
324
  from vllm import LLM, SamplingParams
325
 
326
+ vllm_model = LLM(model="expertai/SLIMER-LLaMA3")
327
 
328
+ sampling_params = SamplingParams(temperature=0, max_tokens=128)
329
 
330
  prompts = [prompter.generate_prompt(instruction, input) for instruction, input in instruction_input_pairs]
331
  responses = vllm_model.generate(prompts, sampling_params)