ybelkada committed
Commit 949641a
Parent: 93fcb3c

Update README.md

Files changed (1):
  1. README.md +2 -2
README.md CHANGED
@@ -42,7 +42,7 @@ pipe = pipeline("image-to-text", model=model_id)
 url = "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/tasks/ai2d-demo.jpg"

 image = Image.open(requests.get(url, stream=True).raw)
-prompt = "<image>\nUSER: What does the label 15 represent? (1) lava (2) core (3) tunnel (4) ash cloud\nASSISTANT:"
+prompt = "USER: <image>\nWhat does the label 15 represent? (1) lava (2) core (3) tunnel (4) ash cloud\nASSISTANT:"

 outputs = pipe(image, prompt=prompt, generate_kwargs={"max_new_tokens": 200})
 print(outputs)
@@ -62,7 +62,7 @@ from transformers import AutoProcessor, LlavaForConditionalGeneration

 model_id = "llava-hf/llava-1.5-7b-hf"

-prompt = "<image> \nUSER: What are these?\nASSISTANT:"
+prompt = "USER: <image>\nWhat are these?\nASSISTANT:"
 image_file = "http://images.cocodataset.org/val2017/000000039769.jpg"

 model = LlavaForConditionalGeneration.from_pretrained(
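
Both hunks make the same fix: the `<image>` placeholder now follows the `USER:` role tag instead of preceding it, matching the LLaVA-1.5 prompt template. Assembled from the first hunk's context, the updated pipeline snippet reads roughly as below; the imports and the `pipe = pipeline(...)` setup are inferred from the hunk header rather than shown in the diff itself.

```python
import requests
from PIL import Image
from transformers import pipeline

model_id = "llava-hf/llava-1.5-7b-hf"
# Pipeline construction taken from the hunk-1 header context
pipe = pipeline("image-to-text", model=model_id)

url = "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/tasks/ai2d-demo.jpg"
image = Image.open(requests.get(url, stream=True).raw)

# Corrected format: the <image> token comes after the role tag
prompt = "USER: <image>\nWhat does the label 15 represent? (1) lava (2) core (3) tunnel (4) ash cloud\nASSISTANT:"

outputs = pipe(image, prompt=prompt, generate_kwargs={"max_new_tokens": 200})
print(outputs)  # e.g. [{'generated_text': ...}]
```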
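
The second hunk applies the identical prompt fix to the lower-level `AutoProcessor` + `LlavaForConditionalGeneration` example. The diff truncates that snippet at the `from_pretrained(` call, so the sketch below fills in the loading arguments and the generation step as assumptions, not as the README's exact code.

```python
import requests
import torch
from PIL import Image
from transformers import AutoProcessor, LlavaForConditionalGeneration

model_id = "llava-hf/llava-1.5-7b-hf"

prompt = "USER: <image>\nWhat are these?\nASSISTANT:"
image_file = "http://images.cocodataset.org/val2017/000000039769.jpg"

# Assumed loading arguments; the diff cuts off after from_pretrained(
model = LlavaForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype=torch.float16,
    low_cpu_mem_usage=True,
)
processor = AutoProcessor.from_pretrained(model_id)

raw_image = Image.open(requests.get(image_file, stream=True).raw)
inputs = processor(text=prompt, images=raw_image, return_tensors="pt")

# Assumed generation step, mirroring the max_new_tokens=200 used in the pipeline example
output = model.generate(**inputs, max_new_tokens=200, do_sample=False)
print(processor.decode(output[0], skip_special_tokens=True))
```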