import os

from huggingface_hub import login

from easyllm.clients import huggingface
from easyllm.prompt_utils import build_llama2_prompt

# Hugging Face access token, read from the environment; None when unset.
# NOTE(review): login(token=None) falls back to interactive prompt / cached
# credentials — confirm that is the intended behavior when TOKEN is missing.
TOKEN = os.environ.get("TOKEN")

login(token=TOKEN)

# Route every prompt through the Llama-2 chat template before it is sent.
huggingface.prompt_builder = build_llama2_prompt
|
|
def translate(schema_input, schema_target):
    """Translate a schema metadata file into another schema format via an LLM.

    Parameters
    ----------
    schema_input : str
        Raw text of the schema metadata file to translate.
    schema_target : str
        Name of the target schema to translate into.

    Returns
    -------
    str
        Text of the first completion choice returned by the model.
    """
    # The triple quotes are a deliberate part of the prompt: they delimit the
    # schema text for the model. This f-string produces the exact same string
    # the original str.format() call built.
    prompt = f'"""{schema_input} \n Translate the schema metadata file above to the schema: {schema_target}"""'
    response = huggingface.Completion.create(
        model="princeton-nlp/Sheared-LLaMA-1.3B",
        prompt=prompt,
        temperature=0.9,
        top_p=0.6,
        max_tokens=250,
    )
    print(response)  # NOTE(review): leftover debug output — consider logging instead
    return response['choices'][0]['text']