# NATO-1000-Cortex / cortex_model.py
# Uploaded to the Hugging Face Hub via huggingface_hub (commit 87ac3cd, verified).
import torch
import torch.nn as nn
from transformers import AutoModelForCausalLM, AutoTokenizer
class NATO1000Cortex(nn.Module):
    """Thin wrapper around a causal language model for prompt-to-text generation.

    Loads a Hugging Face tokenizer/model pair once at construction and exposes
    a single ``forward(prompt) -> str`` entry point that tokenizes, generates,
    and decodes.

    Parameters
    ----------
    model_name : str
        Hugging Face model id passed to ``from_pretrained``
        (default: ``codellama/CodeLlama-7b-hf``).
    max_length : int
        Upper bound used both for input truncation and as ``generate``'s
        ``max_length``. NOTE(review): the default of 1_000_000 far exceeds
        CodeLlama's context window (~16k tokens) — ``generate`` will stop at
        the model's own limit; confirm the intended cap with the author.
    """

    def __init__(self, model_name="codellama/CodeLlama-7b-hf", max_length=1000000):
        super().__init__()
        # Downloads (or reads from cache) the tokenizer and model weights.
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
        self.model = AutoModelForCausalLM.from_pretrained(model_name)
        self.max_length = max_length

    def forward(self, prompt):
        """Generate a continuation of ``prompt`` and return it as one string.

        The returned string includes the prompt itself, since the full
        ``generate`` output is decoded (original behavior, preserved).
        """
        inputs = self.tokenizer(
            prompt,
            return_tensors="pt",
            max_length=self.max_length,
            truncation=True,  # clip over-long prompts instead of erroring
        )
        # inference_mode: generation is read-only w.r.t. autograd — avoids
        # building a graph and retaining activations for a 7B model.
        with torch.inference_mode():
            generate_ids = self.model.generate(
                inputs.input_ids,
                # Fix: pass the attention mask explicitly; omitting it makes
                # transformers guess (and warn), and yields wrong results when
                # the tokenizer pads.
                attention_mask=inputs.attention_mask,
                max_length=self.max_length,
            )
        return self.tokenizer.batch_decode(
            generate_ids,
            skip_special_tokens=True,
            clean_up_tokenization_spaces=False,
        )[0]
# Example usage:
# model = NATO1000Cortex()
# code_prompt = "def fibonacci(n):\n if n <= 0:\n return []\n elif n == 1:\n return [0]\n else:\n list_fib = [0, 1]\n while len(list_fib) < n:\n next_fib = list_fib[-1] + list_fib[-2]\n list_fib.append(next_fib)\n return list_fib\n\n# Write a unit test for the fibonacci function"
# generated_code = model(code_prompt)
# print(generated_code)