# Install the required third-party packages before running this script.
# This is a shell command, NOT Python — it must not appear as bare code
# in a .py file (it would be a SyntaxError):
#   pip install transformers huggingface_hub gradio torch
import os

from huggingface_hub import login

# Authenticate with the Hugging Face Hub so the push_to_hub calls below
# are authorized.
# SECURITY: never hard-code access tokens in source files. Read the token
# from the HF_TOKEN environment variable; the original "your_token"
# placeholder is kept only as a fallback so behavior is unchanged when
# the variable is unset (login will then fail with an obvious error).
login(os.environ.get("HF_TOKEN", "your_token"))
from transformers import AutoTokenizer, AutoModelForCausalLM

# Checkpoint to use: Salesforce CodeGen, 350M parameters, trained on
# multiple programming languages. Named once so tokenizer and model are
# guaranteed to come from the same checkpoint.
checkpoint = "Salesforce/codegen-350M-multi"

# Downloads (and caches) the tokenizer and the weights on first run.
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint)
| | |
# Prompt: a bare function signature; the model is asked to complete the body.
text = "def bubble_sort(list_elements):"

# Tokenize the prompt into PyTorch tensors suitable for the model.
encoded = tokenizer(text, return_tensors="pt")

# Generate a single completion, capped at 200 tokens total (prompt
# included). pad_token_id is set to the EOS id — presumably because this
# tokenizer defines no dedicated pad token; TODO confirm against the
# tokenizer config.
outputs = model.generate(
    encoded.input_ids,
    max_length=200,
    num_return_sequences=1,
    pad_token_id=tokenizer.eos_token_id,
)

# Decode the first (and only) sequence back to text, dropping special
# tokens, and show the completed source code.
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
# NOTE(review): Repository appears unused in the visible code — verify
# against the rest of the file before removing the import.
from huggingface_hub import HfApi, Repository

# Destination repo on the Hub, in "<username>/<repo-name>" form; replace
# the placeholder username with your own account before running.
repo_name = "your-username/codegen-350M-multi-bubble-sort"

# Ensure the destination repository exists (no-op when it already does,
# thanks to exist_ok=True).
hub_api = HfApi()
hub_api.create_repo(repo_name, exist_ok=True)

# Upload the model weights first, then the tokenizer files, mirroring
# the original upload order.
for artifact in (model, tokenizer):
    artifact.push_to_hub(repo_name)

print(f"Model and tokenizer pushed to Hugging Face Hub under: {repo_name}")
| |
|