# ORU IT Helpdesk Assistant — Hugging Face Space app
# (Non-code page header from the Spaces web scrape removed: status badge,
#  file size, commit hashes, and line-number gutter were not part of the file.)
import gradio as gr
import boto3
import json
import os
from botocore.exceptions import ClientError, NoCredentialsError
# Configuration - Set via Hugging Face Spaces environment variables
KNOWLEDGE_BASE_ID = os.environ.get("KNOWLEDGE_BASE_ID", "PLEASE_SET_IN_SPACES_SETTINGS")
AWS_REGION = os.environ.get("AWS_REGION", "us-east-1")
MODEL_ARN = os.environ.get(
    "MODEL_ARN",
    "arn:aws:bedrock:us-east-1::foundation-model/amazon.titan-text-express-v1",
)

# Demo mode is active when no Knowledge Base ID has been configured:
# the UI still works but returns canned responses instead of calling AWS.
DEMO_MODE = KNOWLEDGE_BASE_ID == "PLEASE_SET_IN_SPACES_SETTINGS"

# Initialize the AWS Bedrock Agent Runtime client at import time.
# On failure, keep the error text so the UI can surface it instead of crashing.
bedrock_agent = None
initialization_error = None

if not DEMO_MODE:
    try:
        bedrock_agent = boto3.client(
            'bedrock-agent-runtime',
            region_name=AWS_REGION,
            aws_access_key_id=os.environ.get('AWS_ACCESS_KEY_ID'),
            aws_secret_access_key=os.environ.get('AWS_SECRET_ACCESS_KEY')
        )
        # BUGFIX: this message was split across two lines by a garbled emoji
        # in the scraped source (unterminated f-string); rejoined here.
        print(f"β AWS Bedrock client initialized in region: {AWS_REGION}")
    except Exception as e:
        print(f"β Error initializing AWS client: {e}")
        initialization_error = str(e)
else:
    print("π§ Running in DEMO MODE - Configure environment variables to enable AWS integration")
def search_knowledge_base(query, search_type="generate"):
    """Search the ORU IT Knowledge Base using Amazon Bedrock.

    Args:
        query: The user's natural-language IT question.
        search_type: "generate" runs RetrieveAndGenerate (RAG answer plus
            citations); any other value runs a plain Retrieve (sources only).

    Returns:
        A ``(response_text, sources_text, status)`` tuple, where ``status``
        is one of "success", "warning", "demo", or "error".
    """
    if not query.strip():
        return "Please enter a question!", "", "error"

    # Demo mode: no AWS credentials configured, return a canned walkthrough.
    if DEMO_MODE:
        demo_response = f"""
## π§ Demo Mode Active
**Your Query:** "{query}"
**Demo Response:** This is a demonstration of the ORU IT Helpdesk Assistant interface.
To enable full functionality:
1. Configure your Amazon Bedrock Knowledge Base
2. Set up environment variables in Hugging Face Spaces settings
3. Upload your ORU IT documentation to the knowledge base
"""
        demo_sources = """
**Demo Sources:**
- password_authentication_001_student_password_change.txt
- mobile_applications_002_oru_app_experience_fix.txt
- software_applications_003_word_mac_view_only_fix.txt
"""
        return demo_response, demo_sources, "demo"

    if not bedrock_agent:
        error_msg = "β AWS Bedrock client not initialized."
        if initialization_error:
            error_msg += f" Error: {initialization_error}"
        return error_msg, "", "error"

    try:
        if search_type == "generate":
            # BUGFIX: the RetrieveAndGenerate API takes `modelArn` directly
            # inside `knowledgeBaseConfiguration`. The previous top-level
            # `modelConfiguration` key is not a valid request parameter and
            # raises a ParamValidationError in boto3.
            response = bedrock_agent.retrieve_and_generate(
                input={'text': query},
                retrieveAndGenerateConfiguration={
                    'type': 'KNOWLEDGE_BASE',
                    'knowledgeBaseConfiguration': {
                        'knowledgeBaseId': KNOWLEDGE_BASE_ID,
                        'modelArn': MODEL_ARN,
                        'retrievalConfiguration': {
                            'vectorSearchConfiguration': {
                                'numberOfResults': 5
                            }
                        }
                    }
                }
            )

            # Extract the generated answer text.
            generated_text = response.get('output', {}).get('text', 'No response generated.')

            # Collect citations: each citation may carry several retrieved
            # references; number them "i.j" for display.
            sources_info = []
            citations = response.get('citations', [])
            for i, citation in enumerate(citations, 1):
                retrieved_refs = citation.get('retrievedReferences', [])
                for j, ref in enumerate(retrieved_refs, 1):
                    content = ref.get('content', {}).get('text', 'No content available')
                    location = ref.get('location', {}).get('s3Location', {}).get('uri', 'Unknown source')
                    sources_info.append(
                        f"**Source {i}.{j}:**\n{content[:300]}{'...' if len(content) > 300 else ''}\n*From: {location}*\n"
                    )

            sources_text = "\n".join(sources_info) if sources_info else "No sources found."
            return generated_text, sources_text, "success"

        else:  # retrieve only
            response = bedrock_agent.retrieve(
                knowledgeBaseId=KNOWLEDGE_BASE_ID,
                retrievalQuery={'text': query},
                retrievalConfiguration={
                    'vectorSearchConfiguration': {
                        'numberOfResults': 5
                    }
                }
            )

            retrieval_results = response.get('retrievalResults', [])
            if not retrieval_results:
                return "No relevant sources found.", "", "warning"

            sources_info = []
            for i, result in enumerate(retrieval_results, 1):
                content = result.get('content', {}).get('text', 'No content available')
                score = result.get('score', 0)
                location = result.get('location', {}).get('s3Location', {}).get('uri', 'Unknown source')
                sources_info.append(
                    f"**Source {i} (Confidence: {score:.2f}):**\n"
                    f"{content[:400]}{'...' if len(content) > 400 else ''}\n"
                    f"*From: {location}*\n"
                )

            sources_text = "\n".join(sources_info)
            response_text = f"Found {len(retrieval_results)} relevant sources from the ORU IT Knowledge Base:"
            return response_text, sources_text, "success"

    except ClientError as e:
        error_code = e.response['Error']['Code']
        error_message = e.response['Error']['Message']
        return f"β AWS Error ({error_code}): {error_message}", "", "error"
    except NoCredentialsError:
        return "β AWS credentials not found. Please configure your credentials in Spaces settings.", "", "error"
    except Exception as e:
        return f"β Unexpected error: {str(e)}", "", "error"
def format_response(query, search_type):
    """Process *query* and return (formatted_response, sources, status_msg) for the UI.

    ``search_type`` selects between the AI-generated answer ("generate")
    and the raw retrieved sources (anything else).
    """
    if not query.strip():
        return (
            "Please enter your IT question above and click 'Get AI Answer' or 'Show Sources'.",
            "",
            "Enter a question to get started!"
        )

    response_text, sources_text, status = search_knowledge_base(query, search_type)

    if status == "demo":
        formatted_response = response_text
        status_msg = "π§ Demo Mode - Configure AWS credentials to enable full functionality"
    elif search_type == "generate":
        if status == "success":
            formatted_response = f"## π€ AI Assistant Response\n\n{response_text}"
            # BUGFIX: this status string was split across two lines by a
            # garbled emoji in the scraped source (unterminated literal).
            status_msg = "β Response generated successfully from ORU IT Knowledge Base!"
        else:
            formatted_response = response_text
            status_msg = "β Error generating response"
    else:
        if status == "success":
            formatted_response = f"## π Retrieved Sources\n\n{response_text}"
            # BUGFIX: rejoined split literal (same garbling as above).
            status_msg = "β Sources retrieved successfully from ORU IT Knowledge Base!"
        else:
            formatted_response = response_text
            status_msg = "β Error retrieving sources"

    return formatted_response, sources_text, status_msg
# One-click sample prompts rendered as shortcut buttons beside the search box.
SAMPLE_QUERIES = [
    "How do I change my student password?",
    "My ORU app isn't showing the right tiles",
    "Word is showing view only mode on my Mac",
    "How do I set up multi-factor authentication?",
    "I can't edit Microsoft Word documents",
    "How do I connect to ORU WiFi network?",
    "My ORU email isn't syncing to my phone",
    "How do I access Vision portal from off-campus?",
    "VPN connection keeps dropping",
    "Can't print to campus printers from my laptop",
]
# Build Gradio UI.
# NOTE(review): indentation was stripped in the scraped source, so the exact
# nesting of the layout containers below is reconstructed — confirm against
# the deployed Space if the layout looks off.
with gr.Blocks(title="ORU IT Helpdesk Assistant") as app:
    gr.Markdown("## ποΈ Oral Roberts University IT Helpdesk Assistant\nPowered by Amazon Bedrock Knowledge Bases & S3 Vectors")

    with gr.Row():
        with gr.Column(scale=2):
            query_input = gr.Textbox(label="π Ask your IT question:")
            with gr.Row():
                search_btn = gr.Button("π€ Get AI Answer", variant="primary")
                retrieve_btn = gr.Button("π Show Sources", variant="secondary")
        with gr.Column():
            # Sample-query shortcuts. `x=q` binds the current query at lambda
            # definition time, avoiding the late-binding closure pitfall.
            for q in SAMPLE_QUERIES:
                btn = gr.Button(q, size="sm")
                btn.click(lambda x=q: x, outputs=query_input)
        with gr.Column(scale=3):
            status_output = gr.HTML()
            response_output = gr.Markdown()
            sources_output = gr.Markdown(visible=True)

    # Wire both buttons and the textbox's Enter key to the formatter.
    search_btn.click(
        fn=lambda query: format_response(query, "generate"),
        inputs=[query_input],
        outputs=[response_output, sources_output, status_output]
    )
    retrieve_btn.click(
        fn=lambda query: format_response(query, "retrieve"),
        inputs=[query_input],
        outputs=[response_output, sources_output, status_output]
    )
    query_input.submit(
        fn=lambda query: format_response(query, "generate"),
        inputs=[query_input],
        outputs=[response_output, sources_output, status_output]
    )

# Always launch on Hugging Face.
# BUGFIX: this marker comment was split by a garbled emoji in the scraped
# source, leaving a bare (syntactically invalid) line; rejoined here.
print("ποΈ Starting ORU IT Helpdesk Assistant...")
print(f"π Knowledge Base ID: {KNOWLEDGE_BASE_ID}")
print(f"π AWS Region: {AWS_REGION}")
print(f"π§ Demo Mode: {'Active' if DEMO_MODE else 'Disabled'}")

app.launch(
    server_name="0.0.0.0",
    server_port=int(os.environ.get("PORT", 7860))
)