"""Streamlit Q&A app: answers user questions about the Adrega P.I. user
manual using a Hugging Face hosted chat model, grounded in the manual text."""

import os

import docx
import streamlit as st
from huggingface_hub import InferenceClient

# HF_API_KEY must be set in the environment; InferenceClient sends it as the
# bearer token on every request.
api_key = os.getenv("HF_API_KEY")
client = InferenceClient(api_key=api_key)

file_path = "Adrega_P.I._User_Manual.docx"


def read_docx(path):
    """Return the full plain text of a .docx file, one paragraph per line.

    Parameters:
        path: filesystem path to the .docx document.

    Returns:
        str: paragraph texts joined with newlines.
    """
    doc = docx.Document(path)
    return "\n".join(paragraph.text for paragraph in doc.paragraphs)


user_input = st.text_input('Ask me a question')

if st.button("Submit"):
    if user_input:
        # Ground the answer in the manual. (read_docx/file_path were defined
        # in the original but never used, so the model had no manual context.)
        manual_text = read_docx(file_path)
        messages = [
            {
                "role": "system",
                "content": (
                    "Answer the user's question using the following manual:\n"
                    + manual_text
                ),
            },
            {"role": "user", "content": user_input},
        ]
        # Call the API only after Submit is pressed with a non-empty question.
        # The original issued the request at module top level, i.e. on every
        # Streamlit rerun, regardless of the button or input state.
        completion = client.chat.completions.create(
            model="Qwen/Qwen2.5-Coder-32B-Instruct",
            messages=messages,
            max_tokens=500,
        )
        # The returned message is a dataclass-style object; attribute access
        # (.content) is the stable interface, unlike the original's
        # dict-style response['content'].
        answer = completion.choices[0].message.content
        st.write(f"Adrega AI: {answer}")
    else:
        st.write("Please enter a question.")