from ctransformers import AutoModelForCausalLM
import gradio as gr
import os
from huggingface_hub import login
import re
# Authenticate with the Hugging Face Hub (HF_TOKEN is expected in the environment,
# e.g. the Space's secrets).
login(token=os.getenv("HF_TOKEN"))

# Download the quantized TinyLlama GGUF weights once; later runs reuse the local file.
if not os.path.exists("model.gguf"):
    # NOTE(review): shells out to wget via os.system; a subprocess.run([...]) list
    # call would avoid the shell, but the URL is a fixed constant so injection risk is nil.
    status = os.system(
        "wget https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf -O model.gguf"
    )
    # Bug fix: the old code ignored wget's exit status. `wget -O` creates the output
    # file even when the transfer fails, so a failed download left a partial/empty
    # model.gguf that the existence check above would never retry.
    if status != 0:
        if os.path.exists("model.gguf"):
            os.remove("model.gguf")  # drop the partial file so the next run retries
        raise RuntimeError(f"Failed to download model.gguf (wget exit status {status})")
    print("model.gguf downloaded.")
# Load GGUF model with ctransformers.
# Points at the current directory and loads the local "model.gguf" file downloaded
# above; model_type="llama" selects the llama.cpp-family backend.
llm = AutoModelForCausalLM.from_pretrained(
"./",
model_file="model.gguf",
model_type="llama",
max_new_tokens=256,  # cap on tokens generated per call
temperature=0.7,  # sampling temperature (higher = more random)
top_p=0.9  # nucleus-sampling cutoff
)
# Clean and generate response with multi-language code formatting
def clean_response(raw):
    """Normalize raw model output and wrap detected code in a Markdown fence.

    Strips curly braces, converts escaped newlines to real ones, and — when the
    text matches a known language's keyword heuristics — wraps it in a fenced
    code block so the Gradio Markdown output renders it with highlighting.
    """
    # NOTE(review): this drops *all* braces, which would mangle JS/Java-style
    # code bodies — presumably meant to scrub chat-template residue; confirm.
    raw = re.sub(r"[{}]+", "", raw)
    raw = raw.replace("\\n", "\n")  # model may emit literal "\n"; make them real newlines
    raw = raw.strip()
    # Already fenced by the model: return unchanged. (Bug fix: a Markdown fence
    # is THREE backticks; the old check required four and so never matched,
    # causing already-fenced output to be wrapped in a second fence below.)
    if raw.startswith("```"):
        return raw
    # Keyword heuristics, checked in order; the first language with any hit wins.
    language_blocks = [
        ("python", ["def ", "print(", "class ", "import ", "return "]),
        ("javascript", ["console.log(", "function ", "let ", "const "]),
        ("bash", ["#!/bin/bash", "sudo ", "echo ", "apt-get"]),
        ("sql", ["SELECT ", "FROM ", "WHERE "]),
        ("java", ["public static void main", "System.out.println"]),
    ]
    for lang, keywords in language_blocks:
        if any(kw in raw for kw in keywords):
            return f"```{lang}\n{raw}\n```"
    return raw
def chat(prompt):
    """Run one user prompt through the model and return a Markdown-ready reply.

    Blank input short-circuits to an empty string; any runtime failure is
    reported back to the UI as an error message instead of raising.
    """
    if not prompt.strip():
        return ""
    persona = (
        "You are Fermata-LightCoder, a helpful, fast, and concise AI coding assistant created by Rangga Fermata. "
        "You specialize in writing and explaining programming code, shell scripts, algorithms, and small dev utilities. "
        "When the user asks in another language like Indonesian, respond completely in that language.\n\n"
    )
    full_prompt = f"{persona}User: {prompt}\nAssistant:"
    try:
        completion = llm(full_prompt)
        return clean_response(str(completion))
    except Exception as e:
        return f"❌ Error: {str(e)}"
# Gradio UI for Fermata-LightCoder.
# Single-turn text-in / Markdown-out interface; the Markdown output component lets
# the fenced code blocks produced by clean_response render with highlighting.
demo = gr.Interface(
fn=chat,
inputs=gr.Textbox(lines=5, label="Ask Fermata-LightCoder"),
outputs=gr.Markdown(label="Fermata-LightCoder Says:"),
title="🧠 Fermata-LightCoder (TinyLlama 1.1B via ctransformers)",
description="A compact, CPU-optimized AI assistant that specializes in generating code and scripts. Powered by TinyLlama GGUF + ctransformers.",
allow_flagging="never"  # NOTE(review): deprecated in newer Gradio in favor of flagging_mode — confirm installed version
)
# Launch App.
# Fix: the launch call had lost its indentation under the guard (IndentationError as written).
if __name__ == "__main__":
    # mcp_server=True additionally exposes the app's functions over MCP.
    demo.launch(mcp_server=True)