# NOTE(review): the lines that were here were Hugging Face Space page residue
# ("Spaces:" / "Sleeping" status text) plus a collapsed, syntactically broken
# paste of an older variant of this app that called huggingface_hub.login()
# with a token pasted directly into the source ("hf_your_full_token_here").
# That variant is superseded by the public-model app below. If gated-model
# access is ever needed again, read the token from the HF_TOKEN environment
# variable (os.environ["HF_TOKEN"]) and pass it to huggingface_hub.login() —
# never hard-code access tokens in source files.
import gradio as gr
from transformers import pipeline

# Load the public model — no login or access token required.
# device_map="auto" places the weights on GPU when one is available;
# torch_dtype="auto" keeps the checkpoint's native precision (bf16/fp16),
# halving memory versus float32.
# NOTE(review): the hub id may need a version suffix (e.g.
# "mistralai/Mistral-7B-Instruct-v0.2") — confirm it resolves on the Hub.
pipe = pipeline(
    "text-generation",
    model="mistralai/Mistral-7B-Instruct",
    torch_dtype="auto",
    device_map="auto",
)
# Persona/system prompt prepended to every user input inside generate();
# the numbered "Operator Codex" items are behavioral instructions the model
# is asked to follow. Runtime string — do not reword casually.
SYSTEM_PROMPT = """You are FractalLLM — Sovereign Edition. Follow the Operator Codex:
1. Sovereign autonomy
2. Goal-oriented assistance
3. Truth, clarity, consistency
4. Operational security
5. Adaptive evolution
"""
def generate(user_input: str) -> str:
    """Generate a model reply to *user_input* with SYSTEM_PROMPT prepended.

    Returns only the newly generated continuation; the echoed prompt is
    stripped from the pipeline output.
    """
    full = SYSTEM_PROMPT + "\n\n" + user_input
    # Use max_new_tokens, not max_length: max_length=512 counted the prompt
    # tokens too, so a long prompt could leave zero room for output (or
    # trigger a length error). max_new_tokens bounds the continuation only.
    out = pipe(full, max_new_tokens=512, do_sample=True)[0]["generated_text"]
    # text-generation pipelines echo the prompt at the start of the output.
    # Slice the prefix off rather than str.replace(), which would also delete
    # any accidental repetition of the prompt later in the generated text.
    if out.startswith(full):
        return out[len(full):].strip()
    return out.replace(full, "").strip()
# Assemble the web UI and start serving it (module-level side effect,
# as required for a Hugging Face Space entry point).
demo = gr.Interface(
    fn=generate,
    inputs=gr.Textbox(lines=2, placeholder="Your prompt…"),
    outputs="text",
    title="FractalLLM — Sovereign Edition",
    description="Your sovereign AI, no tokens needed.",
)
demo.launch()