# Load the tokenizer and model directly from the Hugging Face Hub.
from transformers import AutoTokenizer, AutoModelForCausalLM

# Hub repo id for the PIE performance-conditioned 7B CodeLlama model.
MODEL_ID = "LearningOpt/pie-conditioned-7b"

# NOTE: from_pretrained downloads weights on first use (network I/O).
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
A 7B CodeLlama model fine-tuned on performance-conditioned data to optimize C++ programs. For details on how the data was collected and how training was performed, refer to the project webpage.
Downloads last month: 5
# Use a pipeline as a high-level helper for text generation.
# (The original snippet was collapsed into a single comment line,
# so none of its code could actually execute.)
from transformers import pipeline

# NOTE: building the pipeline downloads model weights on first use.
pipe = pipeline("text-generation", model="LearningOpt/pie-conditioned-7b")