from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the fine-tuned model and its tokenizer from the Hub
model = AutoModelForCausalLM.from_pretrained("foryui/ktherapist-1b-sft3500_ep4")
tokenizer = AutoTokenizer.from_pretrained("foryui/ktherapist-1b-sft3500_ep4")


def format_chatml(messages):
    # Render the message list with the model's chat template and append
    # the assistant header so generation starts a fresh reply
    return tokenizer.apply_chat_template(
        messages,
        tokenize=False,
        add_generation_prompt=True
    )
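# Optional: the card lists the tensor type as BF16, so on a CUDA GPU you could
# load the weights in that dtype directly (the torch_dtype/device_map choices
# below are an illustrative assumption, not from the original card):
#   import torch
#   model = AutoModelForCausalLM.from_pretrained(
#       "foryui/ktherapist-1b-sft3500_ep4",
#       torch_dtype=torch.bfloat16, device_map="auto")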

with open("system prompt.txt", "r", encoding="utf-8") as f:
    therapist_prompt = f.read()

# dialogue_context is assumed to hold the conversation so far, one utterance
# per line, each prefixed with the speaker: '상담사:' (counselor) or
# '내담자:' (client), e.g.
# dialogue_context = "내담자: 요즘 잠이 잘 안 와요."  # "I haven't been sleeping well lately."
messages = [{"role": "system", "content": therapist_prompt}]
for utt in dialogue_context.split('\n'):
    if utt.startswith('상담사:'):    # counselor turns -> assistant role
        messages.append({"role": "assistant", "content": utt})
    elif utt.startswith('내담자:'):  # client turns -> user role
        messages.append({"role": "user", "content": utt})
    else:
        # Continuation line: append it to the previous utterance
        messages[-1]['content'] = messages[-1]['content'] + '\n' + utt
prompt = format_chatml(messages)
# Force the model to begin its reply with a <think> reasoning block
prompt += '<think>\n'

inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

# Nucleus sampling; reuse EOS as the pad token to silence the
# missing-pad-token warning
output_ids = model.generate(
    **inputs,
    max_new_tokens=256,
    do_sample=True,
    temperature=0.7,
    top_p=0.95,
    pad_token_id=tokenizer.eos_token_id
)

# Decode only the newly generated tokens (everything after the prompt)
generated_text = tokenizer.decode(
    output_ids[0][inputs['input_ids'].shape[-1]:],
    skip_special_tokens=True
)
# Keep text only up to and including the closing </answer> tag, if present
if '</answer>' in generated_text:
    response = generated_text[:generated_text.find('</answer>') + len('</answer>')]
else:
    response = generated_text


print("๋ชจ๋ธ ์‘๋‹ต:")
print(response)
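
Since the prompt forces a '<think>\n' prefix, the text kept above still contains the model's reasoning block. Below is a minimal sketch for pulling out only the final answer, assuming the model wraps its reply in <answer>...</answer> tags; the extract_answer helper and that exact tag layout are assumptions, not guaranteed by the original card.

import re

def extract_answer(response):
    # Hypothetical helper (assumes <answer>...</answer> tags in the output):
    # return only the answer body, dropping the <think> reasoning block
    match = re.search(r'<answer>(.*?)</answer>', response, re.DOTALL)
    return match.group(1).strip() if match else response.strip()

print("Answer only:")
print(extract_answer(response))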
Model size: 1B params · Tensor type: BF16 · Format: Safetensors