sms07 committed on
Commit
557cd0f
·
1 Parent(s): 80bcc31

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +128 -1
app.py CHANGED
@@ -4,4 +4,131 @@ from transformers import (
4
  pipeline,
5
  BlenderbotTokenizer,
6
  BlenderbotForConditionalGeneration,
7
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4
  pipeline,
5
  BlenderbotTokenizer,
6
  BlenderbotForConditionalGeneration,
7
+ )
8
+
9
+ image_path = ""
10
+ current_image_path = ""
11
+ question = ""
12
+ first_turn = True
13
+
14
+ try:
15
+ while image_path != str(1) and question != str(1):
16
+ ###############################################################################
17
+
18
+ if first_turn:
19
+ image_path = input("Please enter the image path (type 1 to EXIT)\n")
20
+ else:
21
+ image_path = input(
22
+ "Please enter the image path (type 1 to EXIT) (type 2 to reuse image)\n"
23
+ )
24
+
25
+ ###############################################################################
26
+
27
+ if image_path == str(1):
28
+ break
29
+ elif image_path == str(2) and first_turn == False:
30
+ pass
31
+ else:
32
+ image = Image.open(str(image_path))
33
+
34
+ question = input("Please enter your question (type 1 to EXIT)\n")
35
+
36
+ ###############################################################################
37
+
38
+ # 5 MODEL INFERENCES.
39
+ # User Input = Image + Question About The Image.
40
+ # User -> Model 1 -> Model 2 -> Model 3 -> Model 4 -> Model 5
41
+
42
+ # Model 1.
43
+
44
+ vqa_pipeline_output = vqa_pipeline(image, question, top_k=5)[0]
45
+
46
+ # Model 2.
47
+
48
+ text = (
49
+ "I love "
50
+ + str(vqa_pipeline_output["answer"])
51
+ + " and I would like to know how to [MASK]."
52
+ )
53
+ bbu_pipeline_output = bbu_pipeline(text)
54
+
55
+ # Model 3.
56
+
57
+ utterance = bbu_pipeline_output[0]["sequence"]
58
+ inputs = tokenizer(utterance, return_tensors="pt")
59
+ result = facebook_model.generate(**inputs)
60
+ facebook_model_output = tokenizer.decode(result[0])
61
+
62
+ # Model 4.
63
+
64
+ facebook_model_output = facebook_model_output.replace("<s> ", "")
65
+ facebook_model_output = facebook_model_output.replace("<s>", "")
66
+ facebook_model_output = facebook_model_output.replace("</s>", "")
67
+ gpt2_pipeline_output = gpt2_pipeline(facebook_model_output)[0]["generated_text"]
68
+
69
+ # Model 5.
70
+
71
+ topic, prob = topic_model_1.transform(gpt2_pipeline_output)
72
+ topic_model_1_output = topic_model_1.get_topic_info(topic[0])["Representation"][
73
+ 0
74
+ ]
75
+
76
+ topic, prob = topic_model_2.transform(gpt2_pipeline_output)
77
+ topic_model_2_output = topic_model_2.get_topic_info(topic[0])["Representation"][
78
+ 0
79
+ ]
80
+ ###############################################################################
81
+
82
+ print()
83
+
84
+ print("-" * 150)
85
+ print("vqa_pipeline_output = ", vqa_pipeline_output)
86
+ print("bbu_pipeline_output =", bbu_pipeline_output)
87
+ print("facebook_model_output =", facebook_model_output)
88
+ print("gpt2_pipeline_output =", gpt2_pipeline_output)
89
+ print("topic_model_1_output =", topic_model_1_output)
90
+ print("topic_model_2_output =", topic_model_2_output)
91
+
92
+ print()
93
+
94
+ print("-" * 150)
95
+ print("SUMMARY")
96
+ print("-" * 7)
97
+ print("Your Image:", image)
98
+ print("Your Question:", question)
99
+ print("-" * 100)
100
+ print(
101
+ "1. Highest Predicted Answer For Your Question:",
102
+ vqa_pipeline_output["answer"],
103
+ "\n",
104
+ )
105
+ print(text)
106
+ print(
107
+ "2. Highest Predicted Sequence On [MASK] Based on 1.:",
108
+ bbu_pipeline_output[0]["sequence"],
109
+ "\n",
110
+ )
111
+ print(
112
+ "3. Conversation Based On Previous Answer Based on 2.:",
113
+ facebook_model_output,
114
+ "\n",
115
+ )
116
+ print(
117
+ "4. Text Generated Based On Previous Answer Based on 3.:",
118
+ gpt2_pipeline_output,
119
+ "\n",
120
+ )
121
+ print(
122
+ "5. Highest Predicted Topic Model_1 For Previous The Answer Based on 4.:",
123
+ topic_model_1_output,
124
+ "\n",
125
+ )
126
+ print(
127
+ "6. Highest Predicted Topic Model_2 For Previous The Answer Based on 4.:",
128
+ topic_model_2_output,
129
+ )
130
+ print("-" * 150)
131
+
132
+ first_turn = False
133
+ except Exception as e:
134
+ print("Error:", e)