# Load the tokenizer and model directly.
from transformers import AutoTokenizer, AutoModelForCausalLM

# Single source of truth for the checkpoint id (used by both loads below).
# NOTE(review): this is a 13B-parameter checkpoint — loading it requires
# substantial RAM/VRAM.
MODEL_ID = "LearningOpt/pie-conditioned-13b"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)

# Quick Links
A 13B CodeLlama model fine-tuned on performance-conditioned data to optimize C++ programs. For more details on how the data was collected and how training was done, refer to the project webpage.
Downloads last month: 3
# Use a pipeline as a high-level helper.
from transformers import pipeline

# "text-generation" wraps the causal-LM checkpoint behind a simple call API.
pipe = pipeline("text-generation", model="LearningOpt/pie-conditioned-13b")