# SPDX-FileCopyrightText: Hadad
# SPDX-License-Identifier: Apache-2.0
#
# Environment variables / secrets expected by this Space:
# ---------------------------------------------
# | OLLAMA_API_BASE_URL | /v1 | ENV or SECRET |
# |---------------------|-----|---------------|
# | OLLAMA_API_KEY      |     | SECRET        |
# ---------------------------------------------

# Ollama model tag pulled from the Hugging Face GGUF repository (Q4_K_M quant).
MODEL = "hf.co/LiquidAI/LFM2.5-1.2B-Instruct-GGUF:Q4_K_M"

# Multi-line description shown in the playground UI; the string stays open
# here and is closed further down in the file.
INFO = """

Ollama Inference Playground is part of the Demo Playground and the UltimaX Intelligence project


This space runs the LFM2.5 (1.2B) model from LiquidAI, hosted on a server using Ollama and accessed via the OpenAI Python SDK.

Official documentation for using Ollama with the OpenAI-Compatible API can be found here.

LFM2.5 (1.2B) runs entirely on a dual-core CPU. Thanks to its small size, the model can operate efficiently on minimal hardware.

The LFM2.5 (1.2B) model can also be viewed or downloaded from the official repository here.

Like this project? You can support me by buying a coffee.
"""

# Bind address for the app server; 0.0.0.0 exposes it on all interfaces,
# as required for the hosted Space to be reachable.
HOST = "0.0.0.0"