Spaces: Build error
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# --- Model setup -----------------------------------------------------------
# Both models are loaded once at import time so each Gradio request reuses the
# same pipelines. `trust_remote_code=True` is required because both repos ship
# custom modeling code; `device_map="auto"` shards across available devices and
# `torch_dtype="auto"` picks the checkpoint's native precision.
# NOTE(review): these are very large checkpoints — loading them needs
# substantial GPU memory/disk; confirm the Space hardware can hold both.

# DeepSeek: generates the initial code from the user's request.
deepseek_tokenizer = AutoTokenizer.from_pretrained(
    "deepseek-ai/DeepSeek-R1", trust_remote_code=True
)
deepseek_model = AutoModelForCausalLM.from_pretrained(
    "deepseek-ai/DeepSeek-R1",
    trust_remote_code=True,
    torch_dtype="auto",
    device_map="auto",
)
deepseek_pipe = pipeline(
    "text-generation", model=deepseek_model, tokenizer=deepseek_tokenizer
)

# LLaMA: enhances the DeepSeek output with the user's requested features.
llama_tokenizer = AutoTokenizer.from_pretrained(
    "meta-llama/Llama-4-Scout-17B-16E-Instruct", trust_remote_code=True
)
llama_model = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    trust_remote_code=True,
    torch_dtype="auto",
    device_map="auto",
)
llama_pipe = pipeline(
    "text-generation", model=llama_model, tokenizer=llama_tokenizer
)
def generate_and_enhance_code(code_request: str, features: str) -> str:
    """Generate code with DeepSeek, then ask LLaMA to add extra features.

    Args:
        code_request: Free-text description of the code the user wants.
        features: Free-text list of features LLaMA should add to the code.

    Returns:
        The LLaMA-enhanced code as a string.
    """
    # Generate base code from DeepSeek. `return_full_text=False` strips the
    # prompt echo so only the newly generated code is kept (by default,
    # text-generation pipelines prepend the prompt to the output).
    base_output = deepseek_pipe(
        code_request,
        max_new_tokens=512,
        do_sample=True,
        temperature=0.7,
        return_full_text=False,
    )[0]["generated_text"]

    # Enhance with LLaMA; again strip the prompt so the UI shows only the
    # enhanced code, not the full instruction text.
    enhancement_prompt = f"Hey Llama! can you please add some more features in my code?\n\nOriginal code:\n{base_output}\n\nFeatures to add:\n{features}\n\nAdd the features and pass me the code without any extra asking!"
    enhanced_output = llama_pipe(
        enhancement_prompt,
        max_new_tokens=1024,
        do_sample=True,
        temperature=0.6,
        return_full_text=False,
    )[0]["generated_text"]
    return enhanced_output
# --- Gradio UI -------------------------------------------------------------
# Two text inputs (code request + features), one button, and a code viewer
# that shows the enhanced result.
with gr.Blocks() as demo:
    gr.Markdown("## MINEOGO: DeepSeek + LLaMA Code Assistant")
    with gr.Row():
        code_input = gr.Textbox(lines=5, label="What code do you want?")
        feature_input = gr.Textbox(lines=3, label="What features should LLaMA add?")
    submit_btn = gr.Button("Generate & Enhance Code")
    output_box = gr.Code(label="Enhanced Code")
    # Wire the button to the two-stage generate-then-enhance pipeline.
    submit_btn.click(
        fn=generate_and_enhance_code,
        inputs=[code_input, feature_input],
        outputs=output_box,
    )

demo.launch()