odaly committed on
Commit
f348b55
Β·
verified Β·
1 Parent(s): 1119232

delete 02_πŸŒ‹_Multimodal.py

Browse files
Files changed (1) hide show
  1. 02_πŸŒ‹_Multimodal.py +0 -198
02_πŸŒ‹_Multimodal.py DELETED
@@ -1,198 +0,0 @@
1
- import streamlit as st
2
- import requests
3
- import base64
4
- from PIL import Image
5
- from io import BytesIO
6
- import json
7
- import ollama
8
-
9
# Page-level configuration; Streamlit requires this to be the first
# Streamlit command executed in the script.
st.set_page_config(
    page_title="LLaVA Playground",
    page_icon="πŸŒ‹",
    layout="wide",
    initial_sidebar_state="expanded",
)
15
-
16
-
17
def img_to_base64(image):
    """
    Encode an image as a base64 string of its PNG bytes.

    Args:
        image: PIL.Image - The image to be converted.
    Returns:
        str: The base64 encoded image.
    """
    with BytesIO() as buffer:
        # Serialize to PNG in memory, then grab the raw bytes before the
        # buffer is closed.
        image.save(buffer, format="PNG")
        png_bytes = buffer.getvalue()
    return base64.b64encode(png_bytes).decode()
29
-
30
-
31
def get_allowed_model_names(models_info: dict) -> tuple:
    """
    Return the allowed model names that are installed locally.

    Args:
        models_info: dict - response of ``ollama.list()``; assumes a
            ``"models"`` list whose entries have a ``"name"`` key — TODO
            confirm against the installed ollama client version.
    Returns:
        tuple: the subset of the allowed models that are present, in the
        allowed-list's order.
    """
    allowed_models = ["bakllava:latest", "llava:latest"]
    # Build the installed-name set once; the original rebuilt the name list
    # inside the generator condition for every allowed model.
    installed = {m["name"] for m in models_info["models"]}
    return tuple(model for model in allowed_models if model in installed)
41
-
42
-
43
def _pull_model(model_name: str) -> None:
    """Download *model_name* via ollama and refresh the page on success.

    Shows a success toast or an error banner. ``st.rerun()`` is moved to the
    ``else`` clause: it raises a control-flow exception that the broad
    ``except Exception`` would otherwise catch and misreport as a failure.
    """
    try:
        ollama.pull(model_name)
    except Exception as e:
        st.error(
            f"Failed to download model: {model_name}. Error: {str(e)}",
            icon="😳",
        )
    else:
        st.toast(f"Downloaded model: {model_name}", icon="βœ…")
        st.rerun()


def main():
    """Render the LLaVA playground page.

    Sections: model management (pull/delete allowed models), a local-model
    picker, an image uploader, and a chat pane that sends the question plus
    the base64-encoded image to a local Ollama server and streams back the
    model's answer.
    """
    st.subheader("LLaVA 1.6 Playground", divider="red", anchor=False)

    models_info = ollama.list()
    available_models = get_allowed_model_names(models_info)
    missing_models = {"bakllava:latest", "llava:latest"} - set(available_models)

    col_1, col_2 = st.columns(2)
    with col_1.popover("βš™οΈ Model Management", help="Manage models here"):
        if not available_models:
            st.error("No allowed models are available.", icon="😳")
            model_to_download = st.selectbox(
                "Select a model to download", ["bakllava:latest", "llava:latest"]
            )
            if st.button(f"Download {model_to_download}"):
                _pull_model(model_to_download)
        else:
            if missing_models:
                model_to_download = st.selectbox(
                    ":green[**πŸ“₯ DOWNLOAD MODEL**]", list(missing_models)
                )
                if st.button(f":green[Download **_{model_to_download}_**]"):
                    _pull_model(model_to_download)

            selected_model = st.selectbox(
                ":red[**⛔️ DELETE MODEL**]", available_models
            )
            if st.button(f"Delete **_{selected_model}_**", type="primary"):
                try:
                    ollama.delete(selected_model)
                except Exception as e:
                    st.error(
                        f"Failed to delete model: {selected_model}. Error: {str(e)}",
                        icon="😳",
                    )
                else:
                    # rerun outside the try for the same reason as _pull_model
                    st.toast(f"Deleted model: {selected_model}", icon="βœ…")
                    st.rerun()

    # Nothing to chat with until at least one allowed model is installed.
    if not available_models:
        return

    selected_model = col_2.selectbox(
        "Pick a model available locally on your system ↓", available_models, key=1
    )

    # Persist chat history and the uploaded image bytes across Streamlit reruns.
    if "chats" not in st.session_state:
        st.session_state.chats = []

    if "uploaded_file_state" not in st.session_state:
        st.session_state.uploaded_file_state = None

    uploaded_file = st.file_uploader(
        "Upload an image for analysis", type=["png", "jpg", "jpeg"]
    )

    col1, col2 = st.columns(2)

    with col2:
        container1 = st.container(height=500, border=True)
        with container1:
            if uploaded_file is not None:
                st.session_state.uploaded_file_state = uploaded_file.getvalue()
                image = Image.open(BytesIO(st.session_state.uploaded_file_state))
                st.image(image, caption="Uploaded image")

    with col1:
        container2 = st.container(height=500, border=True)

    if uploaded_file is not None:
        # Replay the conversation so far.
        for message in st.session_state.chats:
            avatar = "πŸŒ‹" if message["role"] == "assistant" else "🫠"
            with container2.chat_message(message["role"], avatar=avatar):
                st.markdown(message["content"])

        if user_input := st.chat_input(
            "Question about the image...", key="chat_input"
        ):
            st.session_state.chats.append({"role": "user", "content": user_input})
            container2.chat_message("user", avatar="🫠").markdown(user_input)

            # `image` is bound above because uploaded_file is not None here.
            image_base64 = img_to_base64(image)
            API_URL = "http://localhost:11434/api/generate"
            headers = {
                "Content-Type": "application/json",
                "Accept": "application/json",
            }
            data = {
                "model": selected_model,
                "prompt": user_input,
                "images": [image_base64],
            }

            # Initialize before the request so the history append below can
            # never NameError (the original only bound this on the 200 path).
            llava_response = ""
            with container2.chat_message("assistant", avatar="πŸŒ‹"):
                with st.spinner(":blue[processing...]"):
                    response = requests.post(API_URL, json=data, headers=headers)
                    if response.status_code == 200:
                        # Ollama streams one JSON object per line; stitch the
                        # "response" fragments together, skipping blanks and
                        # unparseable lines.
                        for line in response.text.split("\n"):
                            if not line.strip():
                                continue
                            try:
                                response_data = json.loads(line)
                            except json.JSONDecodeError:
                                continue
                            if "response" in response_data:
                                llava_response += response_data["response"]
                        if llava_response:
                            st.markdown(llava_response)
                        else:
                            st.error(
                                f"No response received from {selected_model}.",
                                icon="😳",
                            )
                    else:
                        st.error(
                            f"Failed to get a response from {selected_model}.",
                            icon="😳",
                        )

            st.session_state.chats.append(
                {"role": "assistant", "content": llava_response}
            )
195
-
196
-
197
if __name__ == "__main__":
    # Script entry point (run via `streamlit run`).
    main()