Spaces:
Sleeping
Sleeping
File size: 2,049 Bytes
5f5d50c d166ec4 5f5d50c 5ef1731 5f5d50c |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 |
import gradio as gr
import spaces
import os
from mistralai import Mistral, UserMessage, SystemMessage
# Get the API key and endpoint.
# NOTE(review): token is None when GITHUB_TOKEN is unset — presumably the
# Space injects it as a secret; the client would then fail on the first
# request, not here. Verify the secret is configured in the Space settings.
token = os.environ.get("GITHUB_TOKEN")
# GitHub Models inference endpoint used in place of the default Mistral API.
endpoint = "https://models.github.ai/inference"
model_name = "mistral-ai/Codestral-2501"
# Initialize the Mistral client (server_url points it at the GitHub endpoint).
client = Mistral(api_key=token, server_url=endpoint)
def generate_response(query, diagram_type):
    """Convert PlantUML source into code via the Codestral model.

    Parameters
    ----------
    query : str or None
        PlantUML source text; surrounding whitespace is stripped.
        ``None`` (e.g. an empty Gradio submission) is treated as "".
    diagram_type : str
        One of "ERD", "Use Case Diagram", "Class Diagram" or
        "Sequence Diagram"; any other value falls back to a generic
        coding-assistant system prompt.

    Returns
    -------
    str
        The content of the model's first completion choice.
    """
    # Map each supported diagram type to its system prompt; a single dict
    # lookup replaces the original if/elif chain and keeps all prompts in
    # one place. Class and Sequence diagrams share the Java prompt.
    java_prompt = (
        "You are a code generation expert. Convert the following PlantUML code into "
        "equivalent Java code. Give only code as output."
    )
    prompts = {
        "ERD": (
            "You are a code generation expert. Convert the following PlantUML code into "
            "equivalent SQL code. Give only code as output."
        ),
        "Use Case Diagram": (
            "You are a code generation expert. Convert the following PlantUML code into "
            "equivalent Java REST API controller methods."
        ),
        "Class Diagram": java_prompt,
        "Sequence Diagram": java_prompt,
    }
    system_prompt = prompts.get(diagram_type, "You are a helpful coding assistant.")
    # Guard against None so an empty submission doesn't raise AttributeError.
    user_prompt = (query or "").strip()
    response = client.chat.complete(
        model=model_name,
        messages=[
            SystemMessage(content=system_prompt),
            UserMessage(content=user_prompt),
        ],
        max_tokens=1000,
    )
    return response.choices[0].message.content
# ---------------- Gradio Interface ----------------
def predict(query, diagram_type):
    """Gradio callback: forward both inputs straight to generate_response."""
    return generate_response(query, diagram_type)
# Build the two input components up front so the Interface call stays short.
plantuml_box = gr.Textbox(lines=15, label="PlantUML Code")
diagram_picker = gr.Dropdown(
    choices=["ERD", "Use Case Diagram", "Class Diagram", "Sequence Diagram"],
    label="Diagram Type",
)

iface = gr.Interface(
    fn=predict,
    inputs=[plantuml_box, diagram_picker],
    outputs="text",
    title="PlantUML-To-Code Converter",
    description="Enter PlantUML code to generate equivalent SQL or Java code based on the diagram type.",
)
iface.launch()
|