from huggingface_hub import InferenceClient
class DeepSeek_V3:
    """Thin wrapper around Hugging Face's InferenceClient for the DeepSeek-V3 chat model."""

    def __init__(self, token):
        # Authenticated hub client; the model is selected per-request via model_id.
        self.client = InferenceClient(token=token)
        self.model_id = "deepseek-ai/DeepSeek-V3"

    def generate_stream(self, prompt, max_tokens=1500, temperature=0.1):
        """Yield response text fragments as they arrive from the chat endpoint.

        Any failure (connection, rate limit, etc.) is surfaced as a single
        yielded string rather than a raised exception, so streaming
        consumers never crash mid-stream.
        """
        try:
            stream = self.client.chat_completion(
                model=self.model_id,
                messages=[{"role": "user", "content": prompt}],
                max_tokens=max_tokens,
                temperature=temperature,
                stream=True,
            )
            for chunk in stream:
                # Skip keep-alive/empty chunks and deltas with no text.
                if not chunk.choices:
                    continue
                piece = chunk.choices[0].delta.content
                if piece:
                    yield piece
        except Exception as e:
            yield f" DeepSeek API Busy: {e}"

    def generate(self, prompt, max_tokens=500, temperature=0.1):
        """Return the full completion as one string (non-streaming convenience)."""
        fragments = self.generate_stream(
            prompt, max_tokens=max_tokens, temperature=temperature
        )
        return "".join(fragments)