"""Minimal Flask app that loads a causal LM at startup and serves a few pages.

The model and tokenizer are loaded once at import time; the routes only
render static text (the home page also reports the loaded model name).
"""
from flask import Flask
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "inclusionAI/Ring-mini-2.0"

print(f"load model {model_name}")
# NOTE(review): trust_remote_code=True executes Python shipped inside the
# model repository — acceptable only because the repo is pinned above;
# do not make model_name user-configurable without revisiting this.
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype="auto",
    device_map="auto",
    trust_remote_code=True,
)

print(f"load tokenizer {model_name}")
tokenizer = AutoTokenizer.from_pretrained(model_name)

app = Flask(__name__)
print("Flask app")


# Home page
@app.route("/")
def home():
    """Return the home page text, including the currently loaded model name."""
    return f"""

Главная страница

Добро пожаловать!

current model {model_name}

"""


# "About" page
@app.route("/about")
def about():
    """Return the static "about" page text."""
    return """

О нас

Мы изучаем Flask!

"""


# "Contacts" page
@app.route("/contact")
def contact():
    """Return the static contact-information page text."""
    return """

Контакты

Свяжитесь с нами: email@example.com

"""


if __name__ == "__main__":
    # 0.0.0.0:7860 is the conventional binding for a Hugging Face Space.
    app.run(debug=False, host="0.0.0.0", port=7860)