# Source: KashiwaByte — initial commit 7c93e9d (Hugging Face file-page header
# captured during extraction; preserved here as a comment so the file parses)
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from transformers.generation.utils import GenerationConfig
from peft import PeftModel, PeftConfig
import json
import csv
# Checkpoint location. The commented path below is an earlier merged-LoRA
# checkpoint kept for reference.
# model_path = "/root/merge_models"
model_path = "/root/lanyun-tmp/ZhipuAI/chatglm3-6b"

# Load the causal LM in fp16 and let HF shard/place it across available
# devices; trust_remote_code is required for ChatGLM's custom modeling code.
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    torch_dtype=torch.float16,
    device_map="auto",
    trust_remote_code=True,
)
# model.generation_config = GenerationConfig.from_pretrained(model_path)

# use_fast=False selects the checkpoint's own (slow) tokenizer implementation
# — presumably needed by its custom code; verify against the repo.
tokenizer = AutoTokenizer.from_pretrained(
    model_path,
    use_fast=False,
    trust_remote_code=True,
)
# Read the evaluation CSV (the original comment said "JSONL", but the file is
# parsed with csv.DictReader) and write one model answer per row.
filename = '/root/lanyun-tmp/Dataset/val_triviaqa.csv'
data = []  # kept for backward compatibility with the original script; unused
with open(filename, newline='', encoding="utf-8") as csvfile:
    reader = csv.DictReader(csvfile)
    files = 'TriviaQA_ChatGLM3_Nlora.csv'
    # Distinct name for the output handle so it no longer shadows the input
    # file object (`csvfile`).
    with open(files, 'w', newline='', encoding='utf-8') as outfile:
        writer = csv.writer(outfile)
        # Evaluate only rows 0..1000.
        for index, row in enumerate(reader):
            if index > 1000:
                # The original kept scanning the whole file while skipping
                # rows; stopping early avoids the useless full pass.
                break
            context = row['context']
            question = row['question']
            # Build the prompt from THIS row.  The original hard-coded only
            # the one-shot example below, so `context`/`question` were never
            # used and every row asked the same question.  The example
            # user/assistant turns are kept verbatim as a one-shot
            # demonstration, followed by the actual row as the final user turn.
            messages = str([
                {'role': 'system', 'content': 'Don t output "[" !!!, As a reading comprehension expert, you will receive context and question. Please understand the given Context first and then output the answer of the question based on the Context'},
                {'role': 'user', 'content': '{\'context\': \'[DOC] [TLE] richard marx had an 80s No 1 hit with Hold On To The Nights? \', \'question\': \'Who had an 80s No 1 hit with Hold On To The Nights?\'}'},
                {'role': 'assistant', 'content': "richard marx"},
                {'role': 'user', 'content': str({'context': context, 'question': question})},
            ])
            response = model.chat(tokenizer, messages)
            # NOTE(review): ChatGLM3's chat() is commonly documented to return
            # (response_str, history); response[0][0] would then be only the
            # FIRST CHARACTER of the answer.  Kept as-is — confirm against the
            # checkpoint's trust_remote_code implementation before relying on
            # the output.
            answer = response[0][0]
            print(answer)
            # csv.writer.writerow expects a sequence of fields; passing the
            # bare string wrote one character per column.  Wrap in a list so
            # the answer lands in a single cell.
            writer.writerow([answer])