
Usage

Here is an example of how to load the fine-tuned adapter and its tokenizer:

import torch
from peft import AutoPeftModelForCausalLM
from transformers import AutoTokenizer, GenerationConfig

tokenizer = AutoTokenizer.from_pretrained("mwz/zephyr-khaadi")

# Load the base model together with the fine-tuned LoRA adapter
model = AutoPeftModelForCausalLM.from_pretrained(
    "mwz/zephyr-khaadi",
    low_cpu_mem_usage=True,
    return_dict=True,
    torch_dtype=torch.float16,
    device_map="cuda",
)
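
If GPU memory is tight, the same adapter can also be loaded on top of a 4-bit quantized base model. This is a minimal sketch, assuming bitsandbytes is installed; it is not part of the original recipe:

from transformers import BitsAndBytesConfig

# Hypothetical alternative: quantize the base model to 4-bit before attaching the adapter
bnb_config = BitsAndBytesConfig(load_in_4bit=True, bnb_4bit_compute_dtype=torch.float16)
model = AutoPeftModelForCausalLM.from_pretrained(
    "mwz/zephyr-khaadi",
    quantization_config=bnb_config,
    device_map="cuda",
)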

# Generation settings; with top_k=1 sampling is effectively greedy
generation_config = GenerationConfig(
    do_sample=True,
    top_k=1,
    temperature=0.1,
    max_new_tokens=150,
    pad_token_id=tokenizer.eos_token_id,
)

def process_data_sample(messages):
    # Build a Zephyr-style prompt string from a list of chat messages
    processed_example = ""

    for message in messages:
        role = message["role"]
        content = message["content"]
        processed_example += f"<|{role}|>\n {content}\n"

    return processed_example
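
If the tokenizer ships a chat template (check tokenizer.chat_template; this is an assumption, not something the card guarantees), the helper above can be replaced with transformers' built-in chat templating:

# Alternative to process_data_sample, assuming a chat template is defined
inp_str = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)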

Inference can then be performed as with any Hugging Face model:

messages = [
    {"role": "system", "content": "You are a Khaadi Social Media Post Generator who helps with user queries or generate him khaadi posts give only three hashtags and be concise as possible dont try to make up."},
    {"role": "user", "content": "Generate post on new arrival of winter"},
]

inp_str = process_data_sample(messages)

inputs = tokenizer(inp_str, return_tensors="pt").to("cuda")
outputs = model.generate(**inputs, generation_config=generation_config)
answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(answer)
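
For repeated inference you can optionally merge the LoRA weights into the base model so generation no longer goes through the PEFT wrapper. A minimal sketch using PEFT's merge_and_unload, not part of the original card:

# Optional: fold the adapter into the base weights (assumes the full fp16 model fits in memory)
merged_model = model.merge_and_unload()
outputs = merged_model.generate(**inputs, generation_config=generation_config)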

The expected output is similar to the following:


<|system|>
 You are a Khaadi Social Media Post Generator who helps with user queries or generate him khaadi posts give only three hashtags and be concise as possible dont try to make up.
<|user|>
 Generate post on new arrival of winter

#Khaadi #WinterArrivals #Winter21

 Winter is here and we’ve got you covered!

 Available in-stores and online

 #Khaadi #WinterCollection #Winter2024 #WinterArrivals #Khaadi #KhaadiFabrics #KhaadiHome