# Hugging Face Spaces page residue (Space status: Sleeping) — kept as a comment
# so the scraped header no longer breaks the Python file.
import json
import os

import gradio as gr
import openai

# SECURITY: the original hard-coded a live OpenAI secret key in source.
# That key is compromised the moment the file is shared and must be rotated.
# Read the key from the environment instead (set OPENAI_API_KEY before launch).
openai.api_key = os.environ.get("OPENAI_API_KEY", "")
def save_conversation(history=None):
    """Persist the chat transcript to conversation.json.

    Args:
        history: list of message dicts to write; when None (the default,
            and the only way existing callers invoke this), the module-level
            ``messages`` list is saved, preserving the original behavior.
    """
    data = messages if history is None else history
    # Explicit UTF-8 so the transcript round-trips regardless of locale.
    with open('conversation.json', 'w', encoding='utf-8') as f:
        json.dump(data, f)
def load_conversation():
    """Load the saved chat transcript from conversation.json.

    Returns:
        The list of message dicts from a previous session, or an empty list
        when no transcript exists or the file is unreadable/corrupt.
    """
    try:
        with open('conversation.json', 'r', encoding='utf-8') as f:
            return json.load(f)
    # A missing file is normal on first run; a corrupt/truncated file would
    # otherwise crash startup with json.JSONDecodeError — start fresh instead.
    except (FileNotFoundError, json.JSONDecodeError):
        return []
# Restore any transcript left over from a previous session; on a fresh start
# seed the history with the assistant's system persona.
messages = load_conversation()
if not messages:
    system_prompt = {"role": "system", "content": "You are a knowledgeable assistant specialized in recruiting and hiring, and familiar with ADP Workforce Now Recruitment and various hiring and CRM tools."}
    messages.append(system_prompt)
# Cap on how many recent messages are sent per request.  NOTE: this bounds
# message COUNT, not tokens — the original sliced messages[-4096:], confusing
# the model's 4096-TOKEN window with a message count (a near no-op that could
# also drop the system prompt).  A true token budget would need a tokenizer.
MAX_HISTORY_MESSAGES = 40


def CustomChatGPT(user_input):
    """Record the user's message, query the model, and return its reply.

    Args:
        user_input: the text the user typed into the Gradio textbox.

    Returns:
        The assistant's reply string, or an apology message if the API
        call fails for any reason.
    """
    messages.append({"role": "user", "content": user_input})
    # Bound the request size while always keeping the system prompt
    # (messages[0]) plus the most recent exchanges.
    if len(messages) > MAX_HISTORY_MESSAGES:
        conversation = messages[:1] + messages[-(MAX_HISTORY_MESSAGES - 1):]
    else:
        conversation = messages
    try:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=conversation,
            max_tokens=1000,
            temperature=0.7)
    # BUGFIX: the original caught openai.api_resources.request_error.RequestError,
    # a path that does not exist in the openai package — the except clause itself
    # raised AttributeError, so the fallback reply was unreachable.  Catch broadly
    # at this UI boundary so any API failure degrades gracefully.
    except Exception as e:
        print(f"Received error from OpenAI: {e}")
        return "I'm sorry, but I'm unable to generate a response at this time."
    ChatGPT_reply = response["choices"][0]["message"]["content"]
    messages.append({"role": "assistant", "content": ChatGPT_reply})
    save_conversation()
    return ChatGPT_reply
# Expose the chat function through a minimal Gradio web UI and start serving.
interface = gr.Interface(
    fn=CustomChatGPT,
    inputs="textbox",
    outputs="textbox",
    title="HR HELPER",
    description="Chat with a specialized assistant that can answer questions about recruiting, hiring, and various HR and CRM tools. Developed by A. Leschik.",
)
interface.launch()