# NOTE(review): removed web-scrape residue that preceded the code (a file-size
# banner, git-blame commit hashes, and a duplicated line-number gutter). It was
# not valid Python and was never part of the program.
import sys

# Third-party dependencies. Fail fast with a clear message if any are missing.
try:
    import gradio as gr
    from huggingface_hub import InferenceClient
    from transformers import pipeline  # noqa: F401 -- kept from original; presumably an availability check
    import numpy as np  # noqa: F401 -- kept from original; presumably an availability check
except ImportError as e:
    # Report to stderr and exit non-zero so shells/CI can detect the failure.
    # (The original used bare exit(), which terminates with status 0 = success.)
    print(f"Error: {e}. Please ensure all required libraries are installed.", file=sys.stderr)
    sys.exit(1)

# Initialize the inference client for the hosted Zephyr-7B chat model.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion for *message* given the conversation *history*.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list[tuple[str, str]]
        Prior (user_text, assistant_text) pairs as kept by gr.ChatInterface.
    system_message : str
        System prompt prepended to the conversation.
    max_tokens : int
        Maximum number of new tokens to generate.
    temperature : float
        Sampling temperature forwarded to the endpoint.
    top_p : float
        Nucleus-sampling threshold forwarded to the endpoint.

    Yields
    ------
    str
        The partial assistant response, growing as tokens stream in.
    """
    # On the very first turn, seed the conversation with the welcome greeting.
    # Fix: the greeting is *assistant* text, so it belongs in the assistant
    # slot of the pair — the original placed it in the user slot, which sent
    # it to the model with role "user".
    if not history:
        history.append(("", "Welcome to the interview chatbot. Please choose your field of expertise from the following categories: Engineering, Marketing, Finance, Healthcare, or Education. After selecting, please provide a brief introduction about yourself."))

    # Flatten the (user, assistant) pairs into the chat-completion format,
    # skipping empty halves of a pair.
    messages = [{"role": "system", "content": system_message}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    response = ""

    # Fix: the loop variable was named `message`, shadowing the function
    # parameter; renamed to `chunk`.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        # Fix: streamed deltas can carry content=None (e.g. the final chunk);
        # guard before concatenating to avoid `str + None` -> TypeError.
        if token:
            response += token
        yield response

# --- Gradio UI ---------------------------------------------------------------
# The long system prompt and the tuning widgets are hoisted into named
# module-level values so the interface construction below stays readable.

SYSTEM_PROMPT = "You are a professional interviewer chatbot. This GPT acts as an interviewer for candidates from various fields, facilitating introductions and conducting interviews. It should provide a welcoming atmosphere, ask relevant interview questions based on the candidate's field, and offer feedback or follow-up questions to guide the conversation. The GPT should customize questions based on the specific stream selected by the candidate, using available data and current trends in the field. It should avoid overly complex or sensitive questions and ensure a respectful and professional tone throughout the interaction. It should clarify questions when needed and provide a smooth and engaging interview experience. The GPT should adopt the tone and demeanor of a professional recruiting analyst or technical panelist, ensuring interactions are both professional and supportive. Please provide the candidate with the category of the streams and once they select let's start asking questions. also once we reach 10 questions for a candidate please ask about their expectations and also their Notice period. finally provide a summary of the interview and how well the candidate did the interview along with feedback including areas to improve as well. Before starting the conversation please welcome the candidate and ask them to choose the stream."

# Extra widgets feed the trailing parameters of respond(), in order:
# system_message, max_tokens, temperature, top_p.
_system_message_box = gr.Textbox(value=SYSTEM_PROMPT, label="System message")
_max_tokens_slider = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
_temperature_slider = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
_top_p_slider = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")

# Chat interface wired to the streaming respond() generator, light theme.
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        _system_message_box,
        _max_tokens_slider,
        _temperature_slider,
        _top_p_slider,
    ],
    theme="light",
)

# Launch the app only when executed as a script, not when imported.
if __name__ == "__main__":
    demo.launch()