Spestly committed on
Commit
77ae47f
·
verified ·
1 Parent(s): 823d236

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +52 -48
app.py CHANGED
@@ -14,13 +14,13 @@ if not os.path.exists(AI_PFP) or not os.path.exists(USER_PFP):
14
  st.stop()
15
 
16
  model_info = {
17
- "c4ai-aya-expanse-8b": {"description": "Aya Expanse is a highly performant 8B multilingual model, designed to rival monolingual performance through innovations in instruction tuning with data arbitrage, preference training, and model merging. Serves 23 languages.", "context": "4K", "output": "4K"},
18
- "c4ai-aya-expanse-32b": {"description": "Aya Expanse is a highly performant 32B multilingual model, designed to rival monolingual performance through innovations in instruction tuning with data arbitrage, preference training, and model merging. Serves 23 languages.", "context": "128K", "output": "4K"},
19
- "c4ai-aya-vision-8b": {"description": "Aya Vision is a state-of-the-art multimodal model excelling at a variety of critical benchmarks for language, text, and image capabilities. This 8 billion parameter variant is focused on low latency and best-in-class performance.", "context": "16K", "output": "4K"},
20
- "c4ai-aya-vision-32b": {"description": "Aya Vision is a state-of-the-art multimodal model excelling at a variety of critical benchmarks for language, text, and image capabilities. Serves 23 languages. This 32 billion parameter variant is focused on state-of-art multilingual performance.", "context": "16k", "output": "4K"},
21
- "command-a-03-2025": {"description": "Command A is our most performant model to date, excelling at tool use, agents, retrieval augmented generation (RAG), and multilingual use cases. Command A has a context length of 256K, only requires two GPUs to run, and has 150% higher throughput compared to Command R+ 08-2024.", "context": "256K", "output": "8K"},
22
- "command-r7b-12-2024": {"description": "command-r7b-12-2024 is a small, fast update delivered in December 2024. It excels at RAG, tool use, agents, and similar tasks requiring complex reasoning and multiple steps.", "context": "128K", "output": "4K"},
23
- "command-r-plus-04-2024": {"description": "Command R+ is an instruction-following conversational model that performs language tasks at a higher quality, more reliably, and with a longer context than previous models. It is best suited for complex RAG workflows and multi-step tool use.", "context": "128K", "output": "4K"},
24
  }
25
 
26
  with st.sidebar:
@@ -29,10 +29,10 @@ with st.sidebar:
29
  st.title("Settings")
30
  api_key = st.text_input("Cohere API Key", type="password")
31
  selected_model = st.selectbox("Model", options=list(model_info.keys()))
32
- if st.button("Clear Chat"):
33
  st.session_state.messages = []
34
  st.session_state.first_message_sent = False
35
- st.experimental_rerun()
36
  st.divider()
37
  st.image(AI_PFP, width=60)
38
  st.subheader(selected_model)
@@ -45,53 +45,57 @@ if "messages" not in st.session_state:
45
  st.session_state.messages = []
46
  if "first_message_sent" not in st.session_state:
47
  st.session_state.first_message_sent = False
 
 
48
 
49
- main = st.container()
50
- with main:
51
- if not st.session_state.first_message_sent:
52
- st.markdown(
53
- "<h1 style='text-align:center; color:#4a4a4a; margin-top:100px;'>How can Cohere help you today?</h1>",
54
- unsafe_allow_html=True
55
- )
56
- for msg in st.session_state.messages:
57
- avatar = USER_PFP if msg["role"] == "user" else AI_PFP
58
- with st.chat_message(msg["role"], avatar=avatar):
59
- st.markdown(msg["content"])
60
 
61
- col1, col2 = st.columns([1, 4])
62
- with col1:
63
- if selected_model.startswith("c4ai-aya-vision"):
64
- uploaded = st.file_uploader("Upload image", type=["png", "jpg", "jpeg"])
65
- else:
66
- uploaded = None
67
- with col2:
68
- prompt = st.chat_input("Message...")
69
 
70
- if prompt:
 
 
 
 
 
 
 
 
 
 
 
 
71
  if not api_key:
72
  st.error("API key required")
73
  st.stop()
74
- st.session_state.first_message_sent = True
75
- st.session_state.messages.append({"role": "user", "content": prompt})
76
- with st.chat_message("user", avatar=USER_PFP):
77
- st.markdown(prompt)
78
-
 
 
 
 
 
 
 
 
 
 
79
  try:
80
  co = cohere.ClientV2(api_key)
81
- user_content = [{"type": "text", "text": prompt}]
82
- if uploaded:
83
- raw = uploaded.read()
84
- b64 = base64.b64encode(raw).decode("utf-8")
85
- data_url = f"data:image/jpeg;base64,{b64}"
86
- user_content.append({"type": "image_url", "image_url": {"url": data_url}})
87
- response = co.chat(
88
- model=selected_model,
89
- messages=[{"role": "user", "content": user_content}]
90
- )
91
- content_items = response.message.content
92
- reply = "".join(getattr(item, 'text', '') for item in content_items)
93
- st.session_state.messages.append({"role": "assistant", "content": reply})
94
  with st.chat_message("assistant", avatar=AI_PFP):
95
  st.markdown(reply)
 
96
  except Exception as e:
97
- st.error(f"Error: {str(e)}")
 
14
  st.stop()
15
 
16
  model_info = {
17
+ "c4ai-aya-expanse-8b": {"description": "Aya Expanse is a highly performant 8B multilingual model...", "context": "4K", "output": "4K"},
18
+ "c4ai-aya-expanse-32b": {"description": "Aya Expanse is a highly performant 32B multilingual model...", "context": "128K", "output": "4K"},
19
+ "command-a-03-2025": {"description": "Command A is our most performant model to date...", "context": "256K", "output": "8K"},
20
+ "command-r7b-12-2024": {"description": "command-r7b-12-2024 is a small, fast update...", "context": "128K", "output": "4K"},
21
+ "command-r-plus-04-2024": {"description": "Command R+ is an instruction-following conversational model...", "context": "128K", "output": "4K"},
22
+ "c4ai-aya-vision-8b": {"description": "Aya Vision is an 8B vision-language model...", "context": "4K", "output": "4K"},
23
+ "c4ai-aya-vision-32b": {"description": "Aya Vision is a 32B vision-language model...", "context": "128K", "output": "4K"}
24
  }
25
 
26
  with st.sidebar:
 
29
  st.title("Settings")
30
  api_key = st.text_input("Cohere API Key", type="password")
31
  selected_model = st.selectbox("Model", options=list(model_info.keys()))
32
+ def clear_chat():
33
  st.session_state.messages = []
34
  st.session_state.first_message_sent = False
35
+ st.button("Clear Chat", on_click=clear_chat)
36
  st.divider()
37
  st.image(AI_PFP, width=60)
38
  st.subheader(selected_model)
 
45
  st.session_state.messages = []
46
  if "first_message_sent" not in st.session_state:
47
  st.session_state.first_message_sent = False
48
+ if "uploaded_image" not in st.session_state:
49
+ st.session_state.uploaded_image = None
50
 
51
+ if not st.session_state.first_message_sent:
52
+ st.markdown(
53
+ "<h1 style='text-align:center; color:#4a4a4a; margin-top:100px;'>How can Cohere help you today?</h1>",
54
+ unsafe_allow_html=True
55
+ )
 
 
 
 
 
 
56
 
57
+ for msg in st.session_state.messages:
58
+ avatar = USER_PFP if msg["role"] == "user" else AI_PFP
59
+ with st.chat_message(msg["role"], avatar=avatar):
60
+ st.markdown(msg["content"])
 
 
 
 
61
 
62
+ col1, col2 = st.columns([1, 8])
63
+ with col1:
64
+ if selected_model.startswith("c4ai-aya-vision"):
65
+ img = st.file_uploader(label="📷", key="uploader", type=["png","jpg","jpeg"], accept_multiple_files=False)
66
+ if img is not None:
67
+ st.session_state.uploaded_image = img
68
+ st.image(img, width=80)
69
+ else:
70
+ st.write("")
71
+ with col2:
72
+ prompt = st.chat_input("Message...")
73
+
74
+ if prompt or st.session_state.uploaded_image:
75
  if not api_key:
76
  st.error("API key required")
77
  st.stop()
78
+ user_items = []
79
+ if prompt:
80
+ st.session_state.first_message_sent = True
81
+ st.session_state.messages.append({"role": "user", "content": prompt})
82
+ with st.chat_message("user", avatar=USER_PFP):
83
+ st.markdown(prompt)
84
+ user_items.append({"type": "text", "text": prompt})
85
+ if st.session_state.uploaded_image:
86
+ raw = st.session_state.uploaded_image.read()
87
+ b64 = base64.b64encode(raw).decode("utf-8")
88
+ url = f"data:image/jpeg;base64,{b64}"
89
+ user_items.append({"type": "image_url", "image_url": {"url": url}})
90
+ with st.chat_message("user", avatar=USER_PFP):
91
+ st.image(raw, width=200)
92
+ st.session_state.uploaded_image = None
93
  try:
94
  co = cohere.ClientV2(api_key)
95
+ response = co.chat(model=selected_model, messages=[{"role":"user","content":user_items}])
96
+ reply = "".join(getattr(item,'text','') for item in response.message.content)
 
 
 
 
 
 
 
 
 
 
 
97
  with st.chat_message("assistant", avatar=AI_PFP):
98
  st.markdown(reply)
99
+ st.session_state.messages.append({"role":"assistant","content":reply})
100
  except Exception as e:
101
+ st.error(f"Error: {e}")