Spaces:
Sleeping
Sleeping
GitHub Actions committed on
Commit ·
8117f33
1
Parent(s): 0fcd45d
Sync from GitHub Actions
Browse files- app.py +23 -25
- codeInsight/logger/__init__.py +4 -1
app.py
CHANGED
|
@@ -13,7 +13,6 @@ def load_pipeline():
|
|
| 13 |
try:
|
| 14 |
pipeline = PredictionPipeline()
|
| 15 |
return pipeline
|
| 16 |
-
|
| 17 |
except Exception as e:
|
| 18 |
logging.error("Failed to load pipeline in Streamlit app")
|
| 19 |
st.error(f"Failed to load model pipeline: {e}")
|
|
@@ -25,30 +24,29 @@ st.title("🤖 CodeInsight Assistant")
|
|
| 25 |
st.caption("Your fine-tuned CodeLlama-7b model, ready to help with Python.")
|
| 26 |
|
| 27 |
if pipeline:
|
| 28 |
-
if "messages" not in st.session_state:
|
| 29 |
st.session_state.messages = [
|
| 30 |
{"role": "assistant", "content": "Hello! How can I help you with Python programming today?"}
|
| 31 |
]
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
st.error("The prediction pipeline could not be loaded. Please check the logs.")
|
|
|
|
| 13 |
try:
|
| 14 |
pipeline = PredictionPipeline()
|
| 15 |
return pipeline
|
|
|
|
| 16 |
except Exception as e:
|
| 17 |
logging.error("Failed to load pipeline in Streamlit app")
|
| 18 |
st.error(f"Failed to load model pipeline: {e}")
|
|
|
|
| 24 |
st.caption("Your fine-tuned CodeLlama-7b model, ready to help with Python.")
|
| 25 |
|
| 26 |
if pipeline:
|
| 27 |
+
if "messages" not in st.session_state:
|
| 28 |
st.session_state.messages = [
|
| 29 |
{"role": "assistant", "content": "Hello! How can I help you with Python programming today?"}
|
| 30 |
]
|
| 31 |
+
|
| 32 |
+
for message in st.session_state.messages:
|
| 33 |
+
with st.chat_message(message["role"]):
|
| 34 |
+
st.markdown(message["content"])
|
| 35 |
+
|
| 36 |
+
prompt = st.chat_input("Ask me to write python code")
|
| 37 |
+
|
| 38 |
+
if prompt:
|
| 39 |
+
st.session_state.messages.append({"role": "user", "content": prompt})
|
| 40 |
+
with st.chat_message("user"):
|
| 41 |
+
st.markdown(prompt)
|
| 42 |
+
|
| 43 |
+
with st.chat_message("assistant"):
|
| 44 |
+
with st.spinner("Thinking..."):
|
| 45 |
+
response = pipeline.predict(prompt)
|
| 46 |
+
formatted_response = f"```python\n{response}\n```"
|
| 47 |
+
st.markdown(formatted_response)
|
| 48 |
+
|
| 49 |
+
st.session_state.messages.append({"role": "assistant", "content": formatted_response})
|
| 50 |
+
|
| 51 |
+
else:
|
| 52 |
+
st.error("The prediction pipeline could not be loaded. Please check the logs.")
|
|
|
codeInsight/logger/__init__.py
CHANGED
|
@@ -20,4 +20,7 @@ console_handler.setFormatter(formetter)
|
|
| 20 |
logging.basicConfig(
|
| 21 |
level=logging.DEBUG,
|
| 22 |
handlers=[file_handler, console_handler],
|
| 23 |
-
)
|
|
|
|
|
|
|
|
|
|
|
|
| 20 |
logging.basicConfig(
|
| 21 |
level=logging.DEBUG,
|
| 22 |
handlers=[file_handler, console_handler],
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
for noisy_logger in ["watchdog", "watchdog.observer", "watchdog.observers"]:
|
| 26 |
+
logging.getLogger(noisy_logger).setLevel(logging.CRITICAL)
|