Update app.py
Browse files
app.py
CHANGED
|
@@ -1,15 +1,19 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
-
from transformers import AutoModelForCausalLM
|
| 3 |
-
from transformers.models.qwen2.tokenization_qwen2 import Qwen2Tokenizer
|
| 4 |
import torch
|
| 5 |
from PIL import Image
|
| 6 |
import io
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 7 |
|
| 8 |
@st.cache_resource
|
| 9 |
def load_model():
|
| 10 |
model_name = "Qwen/Qwen2-VL-7B-Instruct"
|
| 11 |
try:
|
| 12 |
-
tokenizer =
|
| 13 |
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto", trust_remote_code=True)
|
| 14 |
return tokenizer, model
|
| 15 |
except Exception as e:
|
|
@@ -37,10 +41,14 @@ def generate_response(prompt, image, tokenizer, model):
|
|
| 37 |
|
| 38 |
# --- Page layout: title, model bootstrap, and load-status banner ---
st.title("Чат с Qwen VL-7B-Instruct")

tokenizer, model = load_model()

# load_model() is expected to return (None, None) on failure; report status.
if tokenizer is not None and model is not None:
    st.success("Модель успешно загружена!")
else:
    st.warning("Модель не загружена. Приложение может работать некорректно.")
|
| 46 |
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
+
from transformers import AutoTokenizer, AutoModelForCausalLM
|
|
|
|
| 3 |
import torch
|
| 4 |
from PIL import Image
|
| 5 |
import io
|
| 6 |
+
import importlib
|
| 7 |
+
|
| 8 |
+
def check_transformers_version():
    """Return the installed ``transformers`` package version string.

    Imported lazily inside the function so the app module can load even
    before the check is performed.
    """
    from transformers import __version__ as installed_version
    return installed_version
|
| 11 |
|
| 12 |
@st.cache_resource
|
| 13 |
def load_model():
|
| 14 |
model_name = "Qwen/Qwen2-VL-7B-Instruct"
|
| 15 |
try:
|
| 16 |
+
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
|
| 17 |
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto", trust_remote_code=True)
|
| 18 |
return tokenizer, model
|
| 19 |
except Exception as e:
|
|
|
|
| 41 |
|
| 42 |
# --- Page layout: title, environment info, model bootstrap, status banner ---
st.title("Чат с Qwen VL-7B-Instruct")

# Surface the installed transformers version to help diagnose load failures.
tf_version = check_transformers_version()
st.info(f"Версия transformers: {tf_version}")

tokenizer, model = load_model()

# load_model() is expected to return (None, None) on failure; report status
# and suggest the usual remedy (upgrading transformers).
if tokenizer is not None and model is not None:
    st.success("Модель успешно загружена!")
else:
    st.warning("Модель не загружена. Приложение может работать некорректно.")
    st.info("Попробуйте установить последнюю версию transformers: pip install transformers --upgrade")
|
| 54 |
|