OzonConsultant / test_model.py
teslatony's picture
Create test_model.py
531f4dd verified
raw
history blame contribute delete
465 Bytes
"""Smoke-test a chat model through the Hugging Face Inference API.

Reads an access token from the HF_TOKEN environment variable, sends a
single math prompt to DeepSeek-R1-Distill-Qwen-32B, and prints the reply.
Exits with status 1 (message on stderr) when the token is missing.
"""
import os
import sys

from huggingface_hub import InferenceClient


def main() -> None:
    """Send one chat-completion request and print the model's answer."""
    token = os.getenv("HF_TOKEN")
    if not token:
        # Fail loudly: non-zero exit code and stderr so callers/CI notice
        # (the original used bare exit(), which returns status 0).
        print("Установи HF_TOKEN!", file=sys.stderr)
        sys.exit(1)

    client = InferenceClient(token=token)
    messages = [{"role": "user", "content": "Решить 2+2=?"}]
    response = client.chat_completion(
        model="deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
        messages=messages,
        max_tokens=50,
        temperature=0.5,
    )
    # The returned message is a dataclass-like object; attribute access
    # (.content) is the documented API rather than dict-style indexing.
    print("Ответ:", response.choices[0].message.content)


if __name__ == "__main__":
    # Guard so importing this module does not trigger a network request.
    main()