Update README.md
Browse files
README.md
CHANGED
@@ -54,8 +54,8 @@ import requests
|
|
54 |
from PIL import Image
|
55 |
from transformers import BlipProcessor, Blip2ForConditionalGeneration
|
56 |
|
57 |
-
processor = BlipProcessor.from_pretrained("Salesforce/blip2-opt-2.7b")
|
58 |
-
model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-opt-2.7b")
|
59 |
|
60 |
img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
|
61 |
raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB')
|
@@ -81,8 +81,8 @@ import requests
|
|
81 |
from PIL import Image
|
82 |
from transformers import Blip2Processor, Blip2ForConditionalGeneration
|
83 |
|
84 |
-
processor = Blip2Processor.from_pretrained("Salesforce/blip2-opt-2.7b")
|
85 |
-
model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-opt-2.7b")
|
86 |
|
87 |
img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
|
88 |
raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB')
|
@@ -107,8 +107,8 @@ import requests
|
|
107 |
from PIL import Image
|
108 |
from transformers import Blip2Processor, Blip2ForConditionalGeneration
|
109 |
|
110 |
-
processor = Blip2Processor.from_pretrained("Salesforce/blip2-opt-2.7b")
|
111 |
-
model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-opt-2.7b")
|
112 |
|
113 |
img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
|
114 |
raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB')
|
@@ -133,8 +133,8 @@ import requests
|
|
133 |
from PIL import Image
|
134 |
from transformers import Blip2Processor, Blip2ForConditionalGeneration
|
135 |
|
136 |
-
processor = Blip2Processor.from_pretrained("Salesforce/blip2-opt-2.7b")
|
137 |
-
model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-opt-2.7b")
|
138 |
|
139 |
img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
|
140 |
raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB')
|
|
|
54 |
from PIL import Image
|
55 |
from transformers import BlipProcessor, Blip2ForConditionalGeneration
|
56 |
|
57 |
+
processor = BlipProcessor.from_pretrained("Salesforce/blip2-opt-2.7b")
|
58 |
+
model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-opt-2.7b")
|
59 |
|
60 |
img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
|
61 |
raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB')
|
|
|
81 |
from PIL import Image
|
82 |
from transformers import Blip2Processor, Blip2ForConditionalGeneration
|
83 |
|
84 |
+
processor = Blip2Processor.from_pretrained("Salesforce/blip2-opt-2.7b")
|
85 |
+
model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-opt-2.7b", device_map="auto")
|
86 |
|
87 |
img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
|
88 |
raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB')
|
|
|
107 |
from PIL import Image
|
108 |
from transformers import Blip2Processor, Blip2ForConditionalGeneration
|
109 |
|
110 |
+
processor = Blip2Processor.from_pretrained("Salesforce/blip2-opt-2.7b")
|
111 |
+
model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-opt-2.7b", torch_dtype=torch.float16, device_map="auto")
|
112 |
|
113 |
img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
|
114 |
raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB')
|
|
|
133 |
from PIL import Image
|
134 |
from transformers import Blip2Processor, Blip2ForConditionalGeneration
|
135 |
|
136 |
+
processor = Blip2Processor.from_pretrained("Salesforce/blip2-opt-2.7b")
|
137 |
+
model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-opt-2.7b", load_in_8bit=True, device_map="auto")
|
138 |
|
139 |
img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
|
140 |
raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB')
|