Kevin676 committed on
Commit
b916a45
·
1 Parent(s): 8192aca

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -25
app.py CHANGED
@@ -15,6 +15,20 @@ tokenizer = AutoTokenizer.from_pretrained(CKPT)
15
  device = 0 if torch.cuda.is_available() else -1
16
 
17
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
  def translate(text):
19
  """
20
  Translate the text from source lang to target lang
@@ -34,25 +48,14 @@ def translate(text):
34
  result = translation_pipeline(text)
35
  return result[0]['translation_text']
36
 
37
- #prompt
38
-
39
- stable_diffusion = gr.Blocks.load(name="spaces/runwayml/stable-diffusion-v1-5")
40
- clip_interrogator_2 = gr.Blocks.load(name="spaces/fffiloni/CLIP-Interrogator-2")
41
 
42
- def get_images(prompt):
43
- gallery_dir = stable_diffusion(prompt, fn_index=2)
44
- img_results = [os.path.join(gallery_dir, img) for img in os.listdir(gallery_dir)]
45
- return img_results[0]
46
-
47
- def get_new_prompt(img, mode):
48
- interrogate = clip_interrogator_2(img, mode, 12, api_name="clipi2")
49
- return interrogate
50
-
51
- def infer(input):
52
- prompt = pipe1(input+',', num_return_sequences=1)[0]["generated_text"]
53
- img = get_images(prompt)
54
- result = get_new_prompt(img, 'fast')
55
- return result[0]
56
 
57
  with gr.Blocks() as demo:
58
  gr.Markdown(
@@ -70,13 +73,6 @@ with gr.Blocks() as demo:
70
 
71
  button.click(translate, [inp1], [out1])
72
 
73
- with gr.Row().style(equal_height=True):
74
- inp2 = out1
75
- btn1 = gr.Button("让您的提示词更详细一下吧")
76
- out2 = gr.Textbox(label = "翻译后的英文内容", lines=1)
77
-
78
- btn1.click(infer, [inp2], [out2])
79
-
80
  gr.Markdown(
81
  """ ### <center>注意❗:请不要输入或生成会对个人以及组织造成侵害的内容,此程序仅供科研、学习及娱乐使用。用户输入或生成的内容与程序开发者无关,请自觉合法合规使用,违反者一切后果自负。</center>
82
 
 
15
  device = 0 if torch.cuda.is_available() else -1
16
 
17
 
18
+ import gradio as gr
19
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
20
+ import torch
21
+ from langs import LANGS
22
+
23
+ TASK = "translation"
24
+ CKPT = "facebook/nllb-200-distilled-600M"
25
+
26
+ model = AutoModelForSeq2SeqLM.from_pretrained(CKPT)
27
+ tokenizer = AutoTokenizer.from_pretrained(CKPT)
28
+
29
+ device = 0 if torch.cuda.is_available() else -1
30
+
31
+
32
  def translate(text):
33
  """
34
  Translate the text from source lang to target lang
 
48
  result = translation_pipeline(text)
49
  return result[0]['translation_text']
50
 
 
 
 
 
51
 
52
+ gr.Interface(
53
+ translate,
54
+ [
55
+ gr.components.Textbox(label="Text"),
56
+ ],
57
+ ["text"],
58
+ ).launch()
 
 
 
 
 
 
 
59
 
60
  with gr.Blocks() as demo:
61
  gr.Markdown(
 
73
 
74
  button.click(translate, [inp1], [out1])
75
 
 
 
 
 
 
 
 
76
  gr.Markdown(
77
  """ ### <center>注意❗:请不要输入或生成会对个人以及组织造成侵害的内容,此程序仅供科研、学习及娱乐使用。用户输入或生成的内容与程序开发者无关,请自觉合法合规使用,违反者一切后果自负。</center>
78