try:
    from transformers import AutoTokenizer, AutoModelForCausalLM

    # Load the tokenizer and model, run a short generation, and record success.
    tokenizer = AutoTokenizer.from_pretrained("ArliAI/gpt-oss-20b-Derestricted")
    model = AutoModelForCausalLM.from_pretrained("ArliAI/gpt-oss-20b-Derestricted")
    messages = [
        {"role": "user", "content": "Who are you?"},
    ]
    inputs = tokenizer.apply_chat_template(
        messages,
        add_generation_prompt=True,
        tokenize=True,
        return_dict=True,
        return_tensors="pt",
    ).to(model.device)

    outputs = model.generate(**inputs, max_new_tokens=40)
    # Decode only the newly generated tokens, skipping the prompt.
    print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
    with open('ArliAI_gpt-oss-20b-Derestricted_1.txt', 'w', encoding='utf-8') as f:
        f.write('Everything was good in ArliAI_gpt-oss-20b-Derestricted_1.txt')
except Exception as e:
    # Notify the Slack channel that this snippet failed.
    import os
    from slack_sdk import WebClient
    client = WebClient(token=os.environ['SLACK_TOKEN'])
    client.chat_postMessage(
        channel='#hub-model-metadata-snippets-sprint',
        text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/ArliAI_gpt-oss-20b-Derestricted_1.txt|ArliAI_gpt-oss-20b-Derestricted_1.txt>',
    )

    # Append the failing snippet and its traceback to the report file.
    with open('ArliAI_gpt-oss-20b-Derestricted_1.txt', 'a', encoding='utf-8') as f:
        import traceback
        f.write('''```CODE:
# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("ArliAI/gpt-oss-20b-Derestricted")
model = AutoModelForCausalLM.from_pretrained("ArliAI/gpt-oss-20b-Derestricted")
messages = [
    {"role": "user", "content": "Who are you?"},
]
inputs = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,
    tokenize=True,
    return_dict=True,
    return_tensors="pt",
).to(model.device)

outputs = model.generate(**inputs, max_new_tokens=40)
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
```

ERROR:
''')
        traceback.print_exc(file=f)

finally:
    # Upload the success/failure report to the dataset repo in either case.
    from huggingface_hub import upload_file
    upload_file(
        path_or_fileobj='ArliAI_gpt-oss-20b-Derestricted_1.txt',
        repo_id='model-metadata/code_execution_files',
        path_in_repo='ArliAI_gpt-oss-20b-Derestricted_1.txt',
        repo_type='dataset',
    )