# coder-backend-ai/utils.py
import torch

from model import model, tokenizer


def generate(prompt: str, max_new_tokens: int = 100) -> str:
    # Tokenize the prompt (input_ids + attention_mask) and move it to the model's device.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=max_new_tokens,
            do_sample=False,  # greedy decoding; a temperature would be ignored here
            pad_token_id=tokenizer.eos_token_id,
        )
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # Strip the prompt from the response, since generate() returns prompt + completion.
    if response.startswith(prompt):
        response = response[len(prompt):].strip()
    return response
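

# Usage sketch: assumes model.py loads a causal LM and its tokenizer
# (e.g. via transformers' AutoModelForCausalLM / AutoTokenizer); the
# prompt below is illustrative only.
if __name__ == "__main__":
    completion = generate("def fibonacci(n):", max_new_tokens=64)
    print(completion)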