How to use marcorez8/llama-cpp-python-windows-blackwell-cuda with llama-cpp-python:
# Install the client library first:
# !pip install llama-cpp-python
from llama_cpp import Llama

# Fetch the model from the Hugging Face Hub and load it locally.
# NOTE: replace {{GGUF_FILE}} with the name of an actual GGUF file in the repo.
llm = Llama.from_pretrained(
    repo_id="marcorez8/llama-cpp-python-windows-blackwell-cuda",
    filename="{{GGUF_FILE}}",
)

# Run a text completion; echo=True includes the prompt in the returned text.
prompt = "Once upon a time,"
output = llm(prompt, max_tokens=512, echo=True)
print(output)