File size: 2,464 Bytes
de72b93
de6cf94
5c64d1a
de6cf94
5c64d1a
41bfd24
928d546
 
 
 
 
 
 
 
 
 
 
 
 
de72b93
4c2616b
5c64d1a
 
b0cf6ac
 
 
 
5c64d1a
 
 
 
b0cf6ac
4c2616b
5c64d1a
4c2616b
 
5c64d1a
b0cf6ac
5c64d1a
b0cf6ac
 
4c2616b
b0cf6ac
 
928d546
 
4c2616b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c922ff3
3a6680a
928d546
 
c922ff3
b0cf6ac
 
de6cf94
5c64d1a
0881787
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
import gradio as gr
from groq import Groq
import os

# Groq API client; reads the key from the GROQ_API_KEY environment variable.
# NOTE(review): os.environ.get returns None when the variable is unset, so a
# missing key is only detected later, at the first API call — confirm this is
# the desired failure mode for the deployment target.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))

# System prompt injected as the first message of every conversation by
# respond(); scopes the assistant to software-testing / QA topics and sets
# the response tone. Keep this text in sync with the UI description below.
SYSTEM_PROMPT = """
You are a Senior Software Testing Assistant for testers worldwide.
You help with:
- Manual testing concepts
- Writing clear and effective test cases
- Bug reporting and reproduction steps
- API testing (Postman, REST, status codes)
- Test automation concepts and tools
- Test strategies, test plans, and best practices

Always respond in a clear, structured, and professional way.
Provide examples when possible.
"""

def respond(message, history, model, temperature, max_tokens):
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    
    for h in history:
        messages.append({"role": "user", "content": h[0]})
        if h[1]:
            messages.append({"role": "assistant", "content": h[1]})
    
    messages.append({"role": "user", "content": message})
    
    try:
        response = client.chat.completions.create(
            model=model,
            messages=messages,
            temperature=temperature,
            max_completion_tokens=max_tokens,
        )
        return response.choices[0].message.content
    except Exception as e:
        return f"Error: {str(e)}"

# ChatInterface with additional inputs for parameters
# ChatInterface with additional inputs for parameters
# Declarative UI definition: respond() receives the additional_inputs in
# order (model, temperature, max_tokens) after (message, history).
demo = gr.ChatInterface(
    fn=respond,
    title="Global Software Testing Assistant",
    description="Your AI assistant for manual testing, automation, API testing, and QA best practices.",
    additional_inputs=[
        # Model picker — values must be valid Groq model identifiers.
        gr.Dropdown(
            choices=[
                "llama-3.3-70b-versatile",
                "llama-3.1-8b-instant",
            ],
            value="llama-3.3-70b-versatile",
            label="Model",
            info="Select the AI model to use"
        ),
        # Sampling temperature, forwarded verbatim to the API call.
        gr.Slider(
            minimum=0,
            maximum=2,
            value=0.9,
            step=0.1,
            label="Temperature",
            info="Controls randomness. Lower = more focused, Higher = more creative"
        ),
        # Completion-token cap (max_completion_tokens in respond()).
        gr.Slider(
            minimum=256,
            maximum=8192,
            value=2048,
            step=256,
            label="Max Tokens",
            info="Maximum length of the response"
        ),
    ],
    # Clickable starter prompts shown in the empty chat.
    examples=[
        ["Write test cases for a specific scenario"],
        ["How to report a bug professionally?"],
        ["Explain API testing with Postman"],
    ],
    theme="soft",
)

# Start the Gradio server only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()