howard-hou committed • commit 69698e1 • parent e129b57
Update app.py
app.py CHANGED
@@ -124,53 +124,115 @@ def evaluate(
     gc.collect()
     yield out_str.strip()
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-Cheng Xin and Guan Yifan entered the spaceship, and Sophon came in last. She had long since stopped wearing that gorgeous kimono; now she wore camouflage and was once again a light, lethal warrior, her body hung with weapons and survival gear, the most striking of which was the katana slung across her back.
-"Don't worry. As long as I exist, the two of you exist!" Sophon said to her two human friends.
-The fusion engines came to life, the thrusters glowed a ghostly blue, and the ship slowly passed through the door of the universe.
-Only the drifting bottle and the ecological sphere were left in the mini-universe. The drifting bottle faded into the darkness, and in this universe a thousand meters on a side, only the little sun inside the ecological sphere gave off light. In that tiny world of life, a few clear spheres of water floated quietly in the weightlessness. A small fish leapt out of one water sphere and into another, swimming nimbly among the green algae. In a tuft of grass on a small patch of land, a dewdrop slipped free of a blade of grass, spun as it drifted upward, and refracted a thread of crystalline sunlight into space.''', 333, 1, 0.3, 0, 1],
-]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+import gradio as gr
+import os, gc
+from datetime import datetime
+from huggingface_hub import hf_hub_download
+
+ctx_limit = 3500
+title = "rwkv1b5-vitl336p14-577token_mix665k_rwkv"
+
+os.environ["RWKV_JIT_ON"] = '1'
+os.environ["RWKV_CUDA_ON"] = '0' # if '1' then use CUDA kernel for seq mode (much faster)
+
+from rwkv.model import RWKV
+model_path = hf_hub_download(repo_id="howard-hou/visualrwkv-5", filename=f"{title}.pth")
+model = RWKV(model=model_path, strategy='cpu fp32')
+from rwkv.utils import PIPELINE, PIPELINE_ARGS
+pipeline = PIPELINE(model, "rwkv_vocab_v20230424")
 
 ##########################################################################
+from model import VisualEncoder, EmbeddingMixer, VisualEncoderConfig
+emb_mixer = EmbeddingMixer(model.w["emb.weight"], num_image_embeddings=4096)
+config = VisualEncoderConfig(n_embd=model.args.n_embd,
+                             vision_tower_name='openai/clip-vit-large-patch14-336',
+                             grid_size=-1)
+visual_encoder = VisualEncoder(config)
+##########################################################################
+def generate_prompt(instruction, input=""):
+    instruction = instruction.strip().replace('\r\n','\n').replace('\n\n','\n')
+    input = input.strip().replace('\r\n','\n').replace('\n\n','\n')
+    if input:
+        return f"""Instruction: {instruction}
+
+Input: {input}
+
+Response:"""
+    else:
+        return f"""User: hi
+
+Assistant: Hi. I am your assistant and I will provide expert full response in full details. Please feel free to ask any question and I will always answer it.
+
+User: {instruction}
+
+Assistant:"""
+
+def evaluate(
+    ctx,
+    token_count=200,
+    temperature=1.0,
+    top_p=0.7,
+    presencePenalty = 0.1,
+    countPenalty = 0.1,
+):
+    args = PIPELINE_ARGS(temperature = max(0.2, float(temperature)), top_p = float(top_p),
+                         alpha_frequency = countPenalty,
+                         alpha_presence = presencePenalty,
+                         token_ban = [], # ban the generation of some tokens
+                         token_stop = [0]) # stop generation whenever you see any token here
+    ctx = ctx.strip()
+    all_tokens = []
+    out_last = 0
+    out_str = ''
+    occurrence = {}
+    state = None
+    for i in range(int(token_count)):
+        out, state = model.forward(pipeline.encode(ctx)[-ctx_limit:] if i == 0 else [token], state)
+        for n in occurrence:
+            out[n] -= (args.alpha_presence + occurrence[n] * args.alpha_frequency)
+
+        token = pipeline.sample_logits(out, temperature=args.temperature, top_p=args.top_p)
+        if token in args.token_stop:
+            break
+        all_tokens += [token]
+        for xxx in occurrence:
+            occurrence[xxx] *= 0.996
+        if token not in occurrence:
+            occurrence[token] = 1
+        else:
+            occurrence[token] += 1
+
+        tmp = pipeline.decode(all_tokens[out_last:])
+        if '\ufffd' not in tmp:
+            out_str += tmp
+            yield out_str.strip()
+            out_last = i + 1
+
+    del out
+    del state
+    gc.collect()
+    yield out_str.strip()
+
+
+##########################################################################
+examples = [
+    [
+        "./extreme_ironing.jpg",
+        "What is unusual about this image?",
+    ],
+    [
+        "./waterview.jpg",
+        "What are the things I should be cautious about when I visit here?",
+    ]
+]
+def test(image, question):
+    return question
+demo = gr.Interface(fn=test,
+                    inputs=["image", "text"],
+                    outputs="text",
+                    examples=examples,
+                    title=title,
+                    description="VisualRWKV-v5.0")
 
 demo.queue(concurrency_count=1, max_size=10)
 demo.launch(share=False)
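
Note on the added code: evaluate() is a generator that yields the full decoded string after each sampled token, which is what lets a Gradio UI stream partial completions; the test() handler actually wired into gr.Interface is still a stub that echoes the question, and visual_encoder / emb_mixer are instantiated but not yet consumed anywhere. A minimal usage sketch for the text-only path, assuming the objects defined in app.py above are in scope (the prompt text below is illustrative, not from the commit):

# Sketch only: drives the evaluate() generator directly, outside the Gradio UI.
prompt = generate_prompt("Explain what a vision-language model does.")
final = ""
for partial in evaluate(prompt, token_count=64, temperature=1.0, top_p=0.7):
    final = partial  # each yield is the complete decoded text so far
print(final)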