Spaces:
Sleeping
Sleeping
Nathan Butters
committed on
Commit
·
b16d173
1
Parent(s):
d7744dc
attempting another way
Browse files
- app.py +7 -9
- helpers/chat.py +0 -9
- tempDir/example1.png +0 -0
- tempDir/example2.png +0 -0
app.py
CHANGED
|
@@ -8,8 +8,8 @@ import logging
|
|
| 8 |
from helpers.constant import *
|
| 9 |
from helpers.chat import basicChat, guidedMM, mmChat
|
| 10 |
import os
|
| 11 |
-
import requests
|
| 12 |
import tempfile
|
|
|
|
| 13 |
|
| 14 |
logger = logging.getLogger(__name__)
|
| 15 |
logging.basicConfig(filename='app.log', level=logging.INFO)
|
|
@@ -49,15 +49,13 @@ enable = st.checkbox("Enable camera")
|
|
| 49 |
picture = st.camera_input("Take a picture of your math work", disabled=not enable)
|
| 50 |
|
| 51 |
if picture is not None:
|
| 52 |
-
|
| 53 |
-
|
|
|
|
| 54 |
with open(temp_image_path, "wb") as f:
|
| 55 |
-
f.write(picture.
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
base_url = requests.get("https://ipv4.icanhazip.com")
|
| 59 |
-
img_url = f"https://{base_url}/" + temp_image_path
|
| 60 |
-
logger.info(img_url)
|
| 61 |
guidedMM(st.session_state.systemPrompt, img_url)
|
| 62 |
else:
|
| 63 |
basicChat()
|
|
|
|
| 8 |
from helpers.constant import *
|
| 9 |
from helpers.chat import basicChat, guidedMM, mmChat
|
| 10 |
import os
|
|
|
|
| 11 |
import tempfile
|
| 12 |
+
import requests
|
| 13 |
|
| 14 |
logger = logging.getLogger(__name__)
|
| 15 |
logging.basicConfig(filename='app.log', level=logging.INFO)
|
|
|
|
| 49 |
picture = st.camera_input("Take a picture of your math work", disabled=not enable)
|
| 50 |
|
| 51 |
if picture is not None:
|
| 52 |
+
# Save the image to a temporary file
|
| 53 |
+
temp_dir = tempfile.mkdtemp()
|
| 54 |
+
temp_image_path = os.path.join(temp_dir, "picture.png")
|
| 55 |
with open(temp_image_path, "wb") as f:
|
| 56 |
+
f.write(picture.getbuffer())
|
| 57 |
+
extIP = requests.get("https://ipv4.icanhazip.com").text + ":8501"
|
| 58 |
+
img_url = f"http://{extIP}{temp_image_path}"
|
|
|
|
|
|
|
|
|
|
| 59 |
guidedMM(st.session_state.systemPrompt, img_url)
|
| 60 |
else:
|
| 61 |
basicChat()
|
helpers/chat.py
CHANGED
|
@@ -8,15 +8,6 @@ logger = logging.getLogger(__name__)
|
|
| 8 |
api_key = os.environ.get('hf_api')
|
| 9 |
client = InferenceClient(api_key=api_key)
|
| 10 |
|
| 11 |
-
def hf_stream(model_name: str, messages: dict):
|
| 12 |
-
stream = client.chat.completions.create(
|
| 13 |
-
model=model_name,
|
| 14 |
-
messages=messages,
|
| 15 |
-
max_tokens=1000,
|
| 16 |
-
stream=True)
|
| 17 |
-
for chunk in stream:
|
| 18 |
-
chunk.choices[0].delta.content, end=""
|
| 19 |
-
|
| 20 |
def hf_generator(model,prompt,data,system=None):
|
| 21 |
if system:
|
| 22 |
messages = [
|
|
|
|
| 8 |
api_key = os.environ.get('hf_api')
|
| 9 |
client = InferenceClient(api_key=api_key)
|
| 10 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 11 |
def hf_generator(model,prompt,data,system=None):
|
| 12 |
if system:
|
| 13 |
messages = [
|
tempDir/example1.png
ADDED
|
tempDir/example2.png
ADDED
|