HelpingAI2-4x6B: Emotionally Intelligent Conversational AI
Usage Code

The snippet below loads the model and tokenizer with transformers, builds a prompt from the chat template, and samples a response:
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# trust_remote_code is required because the model uses a custom architecture.
model = AutoModelForCausalLM.from_pretrained("Abhaykoul/HelpingAI2-4x6B", trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained("Abhaykoul/HelpingAI2-4x6B", trust_remote_code=True)
# The system turn sets the persona; the user turn is the message to respond to.
chat = [
    {"role": "system", "content": "You are HelpingAI, an emotional AI. Always answer my questions in the HelpingAI style."},
    {"role": "user", "content": "I'm excited because I just got accepted into my dream school! I wanted to share the good news with someone."},
]
# Tokenize the conversation with the model's chat template and move it to the model's device.
inputs = tokenizer.apply_chat_template(
    chat,
    add_generation_prompt=True,
    return_tensors="pt",
).to(model.device)
# Sample a response; temperature and top_p control how varied the reply is.
outputs = model.generate(
    inputs,
    max_new_tokens=256,
    do_sample=True,
    temperature=0.6,
    top_p=0.9,
    eos_token_id=tokenizer.eos_token_id,
)
# Decode only the newly generated tokens, skipping the echoed prompt.
response = outputs[0][inputs.shape[-1]:]
print(tokenizer.decode(response, skip_special_tokens=True))
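
To print tokens as they are generated instead of waiting for the full completion, you can pass a transformers TextStreamer to generate. A minimal sketch, reusing the model, tokenizer, and inputs from above:

from transformers import TextStreamer

# Stream decoded text to stdout as it is generated; skip_prompt hides the echoed chat template.
streamer = TextStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
outputs = model.generate(
    inputs,
    max_new_tokens=256,
    do_sample=True,
    temperature=0.6,
    top_p=0.9,
    eos_token_id=tokenizer.eos_token_id,
    streamer=streamer,
)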
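
On GPUs with limited memory, the model can also be loaded in 4-bit instead of full precision. This is an optional sketch, not part of the usage code above; it assumes the bitsandbytes and accelerate packages are installed and a CUDA GPU is available:

import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Optional: 4-bit NF4 quantization (assumes bitsandbytes + accelerate + CUDA).
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.float16,
)
model = AutoModelForCausalLM.from_pretrained(
    "Abhaykoul/HelpingAI2-4x6B",
    quantization_config=bnb_config,
    device_map="auto",  # lets accelerate place layers across available devices
    trust_remote_code=True,
)

The rest of the snippet (chat template, generate, decode) works unchanged with the quantized model.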