KeerthiVM committed on
Commit
2b8678a
·
1 Parent(s): 9a54325
Files changed (1) hide show
  1. SkinGPT.py +19 -4
SkinGPT.py CHANGED
@@ -253,10 +253,15 @@ class SkinGPT4(nn.Module):
253
  # ### Response:
254
  # """
255
 
256
- prompt = """### Skin Diagnosis Analysis ###
 
 
 
 
 
 
257
  <IMAGE>
258
  Could you describe the skin condition in this image?
259
- Please provide a detailed analysis including possible diagnoses.
260
  ### Response:"""
261
 
262
 
@@ -314,6 +319,18 @@ class SkinGPT4(nn.Module):
314
  print(f"\n[DEBUG] After replacement:")
315
  print(f"Image token embedding (after):\n{input_embeddings[0, replace_positions[0][1], :5]}...")
316
 
 
 
 
 
 
 
 
 
 
 
 
 
317
  outputs = self.llama.generate(
318
  inputs_embeds=input_embeddings,
319
  max_new_tokens=max_new_tokens,
@@ -322,8 +339,6 @@ class SkinGPT4(nn.Module):
322
  top_p=0.9,
323
  repetition_penalty=1.1,
324
  do_sample=True,
325
- pad_token_id = self.tokenizer.eos_token_id,
326
- eos_token_id = self.tokenizer.eos_token_id
327
  )
328
 
329
 
 
253
  # ### Response:
254
  # """
255
 
256
+ # prompt = """### Skin Diagnosis Analysis ###
257
+ # <IMAGE>
258
+ # Could you describe the skin condition in this image?
259
+ # Please provide a detailed analysis including possible diagnoses.
260
+ # ### Response:"""
261
+
262
+ prompt = """### Instruction:
263
  <IMAGE>
264
  Could you describe the skin condition in this image?
 
265
  ### Response:"""
266
 
267
 
 
319
  print(f"\n[DEBUG] After replacement:")
320
  print(f"Image token embedding (after):\n{input_embeddings[0, replace_positions[0][1], :5]}...")
321
 
322
+ # outputs = self.llama.generate(
323
+ # inputs_embeds=input_embeddings,
324
+ # max_new_tokens=max_new_tokens,
325
+ # temperature=0.7,
326
+ # top_k=40,
327
+ # top_p=0.9,
328
+ # repetition_penalty=1.1,
329
+ # do_sample=True,
330
+ # pad_token_id = self.tokenizer.eos_token_id,
331
+ # eos_token_id = self.tokenizer.eos_token_id
332
+ # )
333
+
334
  outputs = self.llama.generate(
335
  inputs_embeds=input_embeddings,
336
  max_new_tokens=max_new_tokens,
 
339
  top_p=0.9,
340
  repetition_penalty=1.1,
341
  do_sample=True,
 
 
342
  )
343
 
344