mszel commited on
Commit
0b73c8b
·
1 Parent(s): 35248ff

fixing the lynxscribe demo

Browse files
examples/LynxScribe Image RAG CHANGED
@@ -1,12 +1,5 @@
1
  {
2
  "edges": [
3
- {
4
- "id": "LynxScribe Image RAG Builder 1 LynxScribe Image RAG Query 1",
5
- "source": "LynxScribe Image RAG Builder 1",
6
- "sourceHandle": "output",
7
- "target": "LynxScribe Image RAG Query 1",
8
- "targetHandle": "rag_graph"
9
- },
10
  {
11
  "id": "Input chat 1 LynxScribe Image RAG Query 1",
12
  "source": "Input chat 1",
@@ -21,6 +14,13 @@
21
  "target": "View image 1",
22
  "targetHandle": "embedding_similarities"
23
  },
 
 
 
 
 
 
 
24
  {
25
  "id": "LynxScribe Image Describer 1 LynxScribe Image RAG Builder 1",
26
  "source": "LynxScribe Image Describer 1",
@@ -29,87 +29,22 @@
29
  "targetHandle": "image_describer"
30
  },
31
  {
32
- "id": "LynxScribe RAG Vector Store 1 LynxScribe Image RAG Builder 1",
33
- "source": "LynxScribe RAG Vector Store 1",
34
  "sourceHandle": "output",
35
  "target": "LynxScribe Image RAG Builder 1",
36
  "targetHandle": "rag_graph"
37
  },
38
  {
39
- "id": "Cloud-sourced Image Loader 1 LynxScribe Image RAG Builder 1",
40
- "source": "Cloud-sourced Image Loader 1",
41
  "sourceHandle": "output",
42
- "target": "LynxScribe Image RAG Builder 1",
43
- "targetHandle": "image_urls"
44
  }
45
  ],
46
  "env": "LynxScribe",
47
  "nodes": [
48
- {
49
- "data": {
50
- "__execution_delay": 0.0,
51
- "collapsed": false,
52
- "display": null,
53
- "error": null,
54
- "meta": {
55
- "inputs": {
56
- "image_describer": {
57
- "name": "image_describer",
58
- "position": "bottom",
59
- "type": {
60
- "type": "<class 'inspect._empty'>"
61
- }
62
- },
63
- "image_urls": {
64
- "name": "image_urls",
65
- "position": "left",
66
- "type": {
67
- "type": "<class 'inspect._empty'>"
68
- }
69
- },
70
- "rag_graph": {
71
- "name": "rag_graph",
72
- "position": "bottom",
73
- "type": {
74
- "type": "<class 'inspect._empty'>"
75
- }
76
- }
77
- },
78
- "name": "LynxScribe Image RAG Builder",
79
- "outputs": {
80
- "output": {
81
- "name": "output",
82
- "position": "right",
83
- "type": {
84
- "type": "None"
85
- }
86
- }
87
- },
88
- "params": {
89
- "image_rag_out_path": {
90
- "default": "image_test_rag_graph.pickle",
91
- "name": "image_rag_out_path",
92
- "type": {
93
- "type": "<class 'str'>"
94
- }
95
- }
96
- },
97
- "type": "basic"
98
- },
99
- "params": {},
100
- "status": "done",
101
- "title": "LynxScribe Image RAG Builder"
102
- },
103
- "dragHandle": ".bg-primary",
104
- "height": 298.0,
105
- "id": "LynxScribe Image RAG Builder 1",
106
- "position": {
107
- "x": 202.17177613422314,
108
- "y": 209.6180585281515
109
- },
110
- "type": "basic",
111
- "width": 479.0
112
- },
113
  {
114
  "data": {
115
  "__execution_delay": 0.0,
@@ -140,7 +75,7 @@
140
  "type": "basic"
141
  },
142
  "params": {
143
- "chat": "Show me a picture about meditation"
144
  },
145
  "status": "done",
146
  "title": "Input chat"
@@ -219,7 +154,7 @@
219
  "data": {
220
  "__execution_delay": null,
221
  "collapsed": false,
222
- "display": "https://storage.googleapis.com/lynxkite_public_data/lynxscribe-images/image-rag-test/yoga-3053487_1280.jpg",
223
  "error": null,
224
  "meta": {
225
  "inputs": {
@@ -315,8 +250,8 @@
315
  "height": 363.0,
316
  "id": "LynxScribe Image Describer 1",
317
  "position": {
318
- "x": 16.941021961247145,
319
- "y": 664.8693601063997
320
  },
321
  "type": "basic",
322
  "width": 401.0
@@ -327,7 +262,7 @@
327
  "error": null,
328
  "meta": {
329
  "inputs": {},
330
- "name": "LynxScribe RAG Vector Store",
331
  "outputs": {
332
  "output": {
333
  "name": "output",
@@ -375,8 +310,8 @@
375
  }
376
  },
377
  "position": {
378
- "x": 831.0,
379
- "y": 734.0
380
  },
381
  "type": "basic"
382
  },
@@ -388,25 +323,27 @@
388
  "text_embedder_model_name_or_path": "text-embedding-3-large"
389
  },
390
  "status": "done",
391
- "title": "LynxScribe RAG Vector Store"
392
  },
393
  "dragHandle": ".bg-primary",
394
- "height": 445.0,
395
- "id": "LynxScribe RAG Vector Store 1",
396
  "position": {
397
- "x": 485.48864677068343,
398
- "y": 669.7387202127995
399
  },
400
  "type": "basic",
401
- "width": 442.0
402
  },
403
  {
404
  "data": {
 
 
405
  "display": null,
406
  "error": null,
407
  "meta": {
408
  "inputs": {},
409
- "name": "Cloud-sourced Image Loader",
410
  "outputs": {
411
  "output": {
412
  "name": "output",
@@ -417,6 +354,13 @@
417
  }
418
  },
419
  "params": {
 
 
 
 
 
 
 
420
  "cloud_provider": {
421
  "default": "gcp",
422
  "name": "cloud_provider",
@@ -433,27 +377,87 @@
433
  }
434
  },
435
  "position": {
436
- "x": 141.0,
437
- "y": 421.0
438
  },
439
  "type": "basic"
440
  },
441
  "params": {
 
442
  "cloud_provider": "gcp",
443
  "folder_URL": "https://storage.googleapis.com/lynxkite_public_data/lynxscribe-images/image-rag-test"
444
  },
445
  "status": "done",
446
- "title": "Cloud-sourced Image Loader"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
447
  },
448
  "dragHandle": ".bg-primary",
449
- "height": 267.0,
450
- "id": "Cloud-sourced Image Loader 1",
451
  "position": {
452
- "x": -451.2626989986675,
453
- "y": 225.06333310959974
454
  },
455
  "type": "basic",
456
- "width": 437.0
457
  }
458
  ]
459
  }
 
1
  {
2
  "edges": [
 
 
 
 
 
 
 
3
  {
4
  "id": "Input chat 1 LynxScribe Image RAG Query 1",
5
  "source": "Input chat 1",
 
14
  "target": "View image 1",
15
  "targetHandle": "embedding_similarities"
16
  },
17
+ {
18
+ "id": "Cloud-sourced File Loader 1 LynxScribe Image RAG Builder 1",
19
+ "source": "Cloud-sourced File Loader 1",
20
+ "sourceHandle": "output",
21
+ "target": "LynxScribe Image RAG Builder 1",
22
+ "targetHandle": "file_urls"
23
+ },
24
  {
25
  "id": "LynxScribe Image Describer 1 LynxScribe Image RAG Builder 1",
26
  "source": "LynxScribe Image Describer 1",
 
29
  "targetHandle": "image_describer"
30
  },
31
  {
32
+ "id": "LynxScribe RAG Graph Vector Store 1 LynxScribe Image RAG Builder 1",
33
+ "source": "LynxScribe RAG Graph Vector Store 1",
34
  "sourceHandle": "output",
35
  "target": "LynxScribe Image RAG Builder 1",
36
  "targetHandle": "rag_graph"
37
  },
38
  {
39
+ "id": "LynxScribe Image RAG Builder 1 LynxScribe Image RAG Query 1",
40
+ "source": "LynxScribe Image RAG Builder 1",
41
  "sourceHandle": "output",
42
+ "target": "LynxScribe Image RAG Query 1",
43
+ "targetHandle": "rag_graph"
44
  }
45
  ],
46
  "env": "LynxScribe",
47
  "nodes": [
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
48
  {
49
  "data": {
50
  "__execution_delay": 0.0,
 
75
  "type": "basic"
76
  },
77
  "params": {
78
+ "chat": "Show me a picture about pills!"
79
  },
80
  "status": "done",
81
  "title": "Input chat"
 
154
  "data": {
155
  "__execution_delay": null,
156
  "collapsed": false,
157
+ "display": "https://storage.googleapis.com/lynxkite_public_data/lynxscribe-images/image-rag-test/capsules-1079838_1280.jpg",
158
  "error": null,
159
  "meta": {
160
  "inputs": {
 
250
  "height": 363.0,
251
  "id": "LynxScribe Image Describer 1",
252
  "position": {
253
+ "x": 127.85361236096924,
254
+ "y": 687.0518781863441
255
  },
256
  "type": "basic",
257
  "width": 401.0
 
262
  "error": null,
263
  "meta": {
264
  "inputs": {},
265
+ "name": "LynxScribe RAG Graph Vector Store",
266
  "outputs": {
267
  "output": {
268
  "name": "output",
 
310
  }
311
  },
312
  "position": {
313
+ "x": 790.0,
314
+ "y": 633.0
315
  },
316
  "type": "basic"
317
  },
 
323
  "text_embedder_model_name_or_path": "text-embedding-3-large"
324
  },
325
  "status": "done",
326
+ "title": "LynxScribe RAG Graph Vector Store"
327
  },
328
  "dragHandle": ".bg-primary",
329
+ "height": 436.0,
330
+ "id": "LynxScribe RAG Graph Vector Store 1",
331
  "position": {
332
+ "x": 595.4558693958389,
333
+ "y": 688.0989202130276
334
  },
335
  "type": "basic",
336
+ "width": 339.0
337
  },
338
  {
339
  "data": {
340
+ "__execution_delay": 0.0,
341
+ "collapsed": null,
342
  "display": null,
343
  "error": null,
344
  "meta": {
345
  "inputs": {},
346
+ "name": "Cloud-sourced File Loader",
347
  "outputs": {
348
  "output": {
349
  "name": "output",
 
354
  }
355
  },
356
  "params": {
357
+ "accepted_file_types": {
358
+ "default": ".jpg, .jpeg, .png",
359
+ "name": "accepted_file_types",
360
+ "type": {
361
+ "type": "<class 'str'>"
362
+ }
363
+ },
364
  "cloud_provider": {
365
  "default": "gcp",
366
  "name": "cloud_provider",
 
377
  }
378
  },
379
  "position": {
380
+ "x": 248.0,
381
+ "y": 419.0
382
  },
383
  "type": "basic"
384
  },
385
  "params": {
386
+ "accepted_file_types": ".jpg, .jpeg, .png",
387
  "cloud_provider": "gcp",
388
  "folder_URL": "https://storage.googleapis.com/lynxkite_public_data/lynxscribe-images/image-rag-test"
389
  },
390
  "status": "done",
391
+ "title": "Cloud-sourced File Loader"
392
+ },
393
+ "dragHandle": ".bg-primary",
394
+ "height": 291.0,
395
+ "id": "Cloud-sourced File Loader 1",
396
+ "position": {
397
+ "x": -479.7367372966062,
398
+ "y": 213.81581567584843
399
+ },
400
+ "type": "basic",
401
+ "width": 512.0
402
+ },
403
+ {
404
+ "data": {
405
+ "display": null,
406
+ "error": null,
407
+ "meta": {
408
+ "inputs": {
409
+ "file_urls": {
410
+ "name": "file_urls",
411
+ "position": "left",
412
+ "type": {
413
+ "type": "<class 'inspect._empty'>"
414
+ }
415
+ },
416
+ "image_describer": {
417
+ "name": "image_describer",
418
+ "position": "bottom",
419
+ "type": {
420
+ "type": "<class 'inspect._empty'>"
421
+ }
422
+ },
423
+ "rag_graph": {
424
+ "name": "rag_graph",
425
+ "position": "bottom",
426
+ "type": {
427
+ "type": "<class 'inspect._empty'>"
428
+ }
429
+ }
430
+ },
431
+ "name": "LynxScribe Image RAG Builder",
432
+ "outputs": {
433
+ "output": {
434
+ "name": "output",
435
+ "position": "right",
436
+ "type": {
437
+ "type": "None"
438
+ }
439
+ }
440
+ },
441
+ "params": {},
442
+ "position": {
443
+ "x": 480.0,
444
+ "y": 388.0
445
+ },
446
+ "type": "basic"
447
+ },
448
+ "params": {},
449
+ "status": "done",
450
+ "title": "LynxScribe Image RAG Builder"
451
  },
452
  "dragHandle": ".bg-primary",
453
+ "height": 313.0,
454
+ "id": "LynxScribe Image RAG Builder 1",
455
  "position": {
456
+ "x": 243.62049392420903,
457
+ "y": 215.6136303371116
458
  },
459
  "type": "basic",
460
+ "width": 526.0
461
  }
462
  ]
463
  }
examples/LynxScribe demo CHANGED
@@ -1,56 +1,126 @@
1
  {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
  "env": "LynxScribe",
3
  "nodes": [
4
  {
5
- "id": "Input chat 1",
6
- "type": "basic",
7
  "data": {
8
- "title": "Input chat",
9
- "params": {
10
- "chat": "who is the CTO of Lynx?"
11
- },
12
  "display": null,
13
  "error": null,
14
- "collapsed": null,
15
  "meta": {
16
  "inputs": {},
17
- "params": {
18
- "chat": {
19
- "default": null,
20
- "type": {
21
- "type": "<class 'str'>"
22
- },
23
- "name": "chat"
24
- }
25
- },
26
  "outputs": {
27
  "output": {
28
  "name": "output",
 
29
  "type": {
30
  "type": "None"
31
- },
32
- "position": "right"
 
 
 
 
 
 
 
 
33
  }
34
  },
35
- "name": "Input chat",
36
  "type": "basic"
37
  },
38
- "__execution_delay": 0.0
 
 
 
 
39
  },
 
 
 
 
40
  "position": {
41
- "x": -493.5496596237119,
42
- "y": 20.90123252513356
43
  },
44
- "height": 186.0,
45
- "width": 259.0,
46
- "parentId": null
47
  },
48
  {
49
- "id": "View 1",
50
- "type": "table_view",
51
  "data": {
52
- "title": "View",
53
- "params": {},
54
  "display": {
55
  "dataframes": {
56
  "df": {
@@ -59,7 +129,7 @@
59
  ],
60
  "data": [
61
  [
62
- "TheThe Chief Technology Officer (CTO) of Lynx Analytics is Chema Lizano. He leads the technology strategy and roadmap at the company, overseeing the vision, development, and implementation of solutions across various clients and environments. If you have any more questions regarding our team or services, feel free to ask!\n\nPlease visit https://www.lynxanalytics.com/board for further information."
63
  ]
64
  ]
65
  }
@@ -67,686 +137,488 @@
67
  },
68
  "error": null,
69
  "meta": {
70
- "type": "table_view",
71
- "name": "View",
72
  "inputs": {
73
  "input": {
74
  "name": "input",
 
75
  "type": {
76
  "type": "<class 'inspect._empty'>"
77
- },
78
- "position": "left"
79
  }
80
  },
 
81
  "outputs": {},
82
- "params": {}
83
- }
 
 
 
 
84
  },
 
 
 
 
85
  "position": {
86
- "x": 731.7440706129762,
87
- "y": -716.4943976910913
88
  },
89
- "width": 1256.0,
90
- "parentId": null,
91
- "height": 950.0
92
  },
93
  {
94
- "id": "LLM 1",
95
- "type": "basic",
96
  "data": {
97
- "title": "LLM",
98
- "params": {
99
- "name": "openai"
100
- },
101
  "display": null,
102
  "error": null,
103
  "meta": {
104
  "inputs": {},
 
105
  "outputs": {
106
  "output": {
 
 
107
  "type": {
108
  "type": "None"
109
- },
110
- "name": "output",
111
- "position": "top"
112
  }
113
  },
114
- "type": "basic",
115
  "params": {
116
- "name": {
117
- "default": "openai",
118
- "name": "name",
119
  "type": {
120
- "type": "<class 'str'>"
121
  }
122
  }
123
  },
124
- "name": "LLM"
125
- }
 
 
 
 
 
126
  },
 
 
 
 
127
  "position": {
128
- "x": -312.5774211084781,
129
- "y": 1093.4019527511366
130
  },
131
- "parentId": null,
132
- "width": 200.0,
133
- "height": 200.0
134
  },
135
  {
136
- "id": "Scenario selector 1",
137
- "type": "basic",
138
  "data": {
139
- "title": "Scenario selector",
140
- "params": {
141
- "scenario_file": "uploads/chat_api/scenarios.yaml",
142
- "node_types": "intent_cluster"
143
- },
144
  "display": null,
145
  "error": null,
146
  "meta": {
147
- "params": {
148
- "scenario_file": {
149
- "type": {
150
- "type": "<class 'str'>"
151
- },
152
- "name": "scenario_file",
153
- "default": null
154
- },
155
- "node_types": {
156
- "default": "intent_cluster",
157
  "type": {
158
- "type": "<class 'str'>"
159
- },
160
- "name": "node_types"
161
  }
162
  },
163
- "inputs": {},
164
  "outputs": {
165
  "output": {
166
- "position": "top",
167
  "name": "output",
 
168
  "type": {
169
  "type": "None"
170
  }
171
  }
172
  },
173
- "type": "basic",
174
- "name": "Scenario selector"
175
- }
 
 
 
176
  },
 
 
 
 
177
  "position": {
178
- "x": -549.1300345090008,
179
- "y": 1086.4852248156676
180
  },
181
- "parentId": null,
182
- "height": 200.0,
183
- "width": 200.0
184
  },
185
  {
186
- "id": "Chat API 1",
187
- "type": "basic",
188
  "data": {
189
- "title": "Chat API",
190
- "params": {
191
- "model": "gpt-4o-mini"
192
- },
193
  "display": null,
194
  "error": null,
195
  "meta": {
196
- "name": "Chat API",
197
- "type": "basic",
198
  "outputs": {
199
  "output": {
200
- "type": {
201
- "type": "None"
202
- },
203
  "position": "top",
204
- "name": "output"
205
- }
206
- },
207
- "inputs": {
208
- "chatbot": {
209
- "name": "chatbot",
210
- "type": {
211
- "type": "<class 'inspect._empty'>"
212
- },
213
- "position": "bottom"
214
- },
215
- "chat_processor": {
216
- "name": "chat_processor",
217
- "position": "bottom",
218
  "type": {
219
- "type": "<class 'inspect._empty'>"
220
  }
221
- },
222
- "knowledge_base": {
223
- "type": {
224
- "type": "<class 'inspect._empty'>"
225
- },
226
- "position": "bottom",
227
- "name": "knowledge_base"
228
  }
229
  },
230
  "params": {
231
- "model": {
232
- "default": "gpt-4o-mini",
 
233
  "type": {
234
  "type": "<class 'str'>"
235
- },
236
- "name": "model"
237
- }
238
- }
239
- }
240
- },
241
- "position": {
242
- "x": -22.866663363810787,
243
- "y": 258.20943122219336
244
- },
245
- "parentId": null,
246
- "width": 200.0,
247
- "height": 200.0
248
- },
249
- {
250
- "id": "Knowledge base 1",
251
- "type": "basic",
252
- "data": {
253
- "title": "Knowledge base",
254
- "params": {
255
- "template_cluster_path": "uploads/chat_api/data/lynx/tempclusters.pickle",
256
- "edges_path": "uploads/chat_api/data/lynx/edges.pickle",
257
- "nodes_path": "uploads/chat_api/data/lynx/nodes.pickle"
258
- },
259
- "display": null,
260
- "error": null,
261
- "meta": {
262
- "name": "Knowledge base",
263
- "type": "basic",
264
- "params": {
265
- "nodes_path": {
266
- "name": "nodes_path",
267
- "default": "nodes.pickle",
268
  "type": {
269
  "type": "<class 'str'>"
270
  }
271
  },
272
- "template_cluster_path": {
 
 
273
  "type": {
274
  "type": "<class 'str'>"
275
- },
276
- "name": "template_cluster_path",
277
- "default": "tempclusters.pickle"
278
  },
279
- "edges_path": {
280
- "name": "edges_path",
281
- "default": "edges.pickle",
282
  "type": {
283
  "type": "<class 'str'>"
284
  }
285
  }
286
  },
287
- "inputs": {},
288
- "outputs": {
289
- "output": {
290
- "position": "top",
291
- "name": "output",
292
- "type": {
293
- "type": "None"
294
- }
295
- }
296
- }
297
- }
298
  },
 
 
 
 
299
  "position": {
300
- "x": 598.8683124946176,
301
- "y": 609.9499973808545
302
  },
303
- "width": 336.0,
304
- "height": 320.0,
305
- "parentId": null
306
  },
307
  {
308
- "id": "RAG chatbot 1",
309
- "type": "basic",
310
  "data": {
311
- "title": "RAG chatbot",
312
- "params": {
313
- "limits_by_type": "{\"information\": [2, 3], \"summary\": [2, 3]}",
314
- "max_results": "5",
315
- "negative_answer": "I'm sorry, but the data I've been trained on does not contain any information related to your question.",
316
- "strict_limits": true
317
- },
318
  "display": null,
319
  "error": null,
320
  "meta": {
 
 
321
  "outputs": {
322
  "output": {
323
- "position": "top",
324
  "name": "output",
 
325
  "type": {
326
  "type": "None"
327
  }
328
  }
329
  },
330
  "params": {
331
- "max_results": {
332
- "default": 5.0,
 
333
  "type": {
334
- "type": "<class 'int'>"
335
- },
336
- "name": "max_results"
337
  },
338
- "strict_limits": {
339
- "name": "strict_limits",
340
- "default": true,
341
  "type": {
342
- "type": "<class 'bool'>"
343
  }
344
  },
345
- "negative_answer": {
346
- "default": "I'm sorry, but the data I've been trained on does not contain any information related to your question.",
347
- "name": "negative_answer",
348
  "type": {
349
  "type": "<class 'str'>"
350
  }
351
  },
352
- "limits_by_type": {
353
- "default": "{}",
354
- "name": "limits_by_type",
355
  "type": {
356
  "type": "<class 'str'>"
357
  }
358
  }
359
  },
360
- "name": "RAG chatbot",
361
- "type": "basic",
362
- "inputs": {
363
- "rag_graph": {
364
- "type": {
365
- "type": "<class 'inspect._empty'>"
366
- },
367
- "name": "rag_graph",
368
- "position": "bottom"
369
- },
370
- "llm": {
371
- "name": "llm",
372
- "position": "bottom",
373
- "type": {
374
- "type": "<class 'inspect._empty'>"
375
- }
376
- },
377
- "scenario_selector": {
378
- "type": {
379
- "type": "<class 'inspect._empty'>"
380
- },
381
- "name": "scenario_selector",
382
- "position": "bottom"
383
- }
384
- }
385
  },
386
- "beingResized": false
 
387
  },
 
 
 
 
388
  "position": {
389
- "x": -533.1301830766971,
390
- "y": 547.294980747757
391
  },
392
- "parentId": null,
393
- "height": 399.0,
394
- "width": 339.0
395
  },
396
  {
397
- "id": "RAG graph 1",
398
- "type": "basic",
399
  "data": {
400
- "title": "RAG graph",
401
- "params": {},
402
  "display": null,
403
  "error": null,
404
  "meta": {
405
- "type": "basic",
406
  "inputs": {
407
- "text_embedder": {
408
- "type": {
409
- "type": "<class 'inspect._empty'>"
410
- },
411
- "position": "bottom",
412
- "name": "text_embedder"
413
- },
414
- "vector_store": {
415
  "position": "bottom",
416
  "type": {
417
  "type": "<class 'inspect._empty'>"
418
- },
419
- "name": "vector_store"
420
- }
421
- },
422
- "name": "RAG graph",
423
- "params": {},
424
- "outputs": {
425
- "output": {
426
- "position": "top",
427
- "type": {
428
- "type": "None"
429
- },
430
- "name": "output"
431
- }
432
- }
433
- }
434
- },
435
- "position": {
436
- "x": -817.8208895639339,
437
- "y": 1014.836542916127
438
- },
439
- "parentId": null,
440
- "width": 200.0,
441
- "height": 200.0
442
- },
443
- {
444
- "id": "Vector store 1",
445
- "type": "basic",
446
- "data": {
447
- "title": "Vector store",
448
- "params": {
449
- "name": "chromadb",
450
- "collection_name": "lynx"
451
- },
452
- "display": null,
453
- "error": null,
454
- "beingResized": false,
455
- "meta": {
456
- "params": {
457
- "collection_name": {
458
- "type": {
459
- "type": "<class 'str'>"
460
- },
461
- "default": "lynx",
462
- "name": "collection_name"
463
  },
464
- "name": {
465
- "default": "chromadb",
 
466
  "type": {
467
- "type": "<class 'str'>"
468
- },
469
- "name": "name"
470
  }
471
  },
472
- "type": "basic",
473
- "name": "Vector store",
474
  "outputs": {
475
  "output": {
 
 
476
  "type": {
477
  "type": "None"
478
- },
479
- "position": "top",
480
- "name": "output"
481
  }
482
  },
483
- "inputs": {}
484
- }
485
- },
486
- "position": {
487
- "x": -1053.794625339574,
488
- "y": 1347.7711940497127
489
- },
490
- "height": 227.0,
491
- "parentId": null,
492
- "width": 275.0
493
- },
494
- {
495
- "id": "Text embedder 2",
496
- "type": "basic",
497
- "data": {
498
- "title": "Text embedder",
499
- "params": {
500
- "model": "text-embedding-ada-002"
501
- },
502
- "display": null,
503
- "error": null,
504
- "meta": {
505
  "params": {
506
- "model": {
507
- "default": "text-embedding-ada-002",
508
- "type": {
509
- "type": "<class 'str'>"
510
- },
511
- "name": "model"
512
- }
513
- },
514
- "name": "Text embedder",
515
- "outputs": {
516
- "output": {
517
  "type": {
518
- "type": "None"
519
- },
520
- "position": "top",
521
- "name": "output"
522
  }
523
  },
524
- "type": "basic",
525
- "inputs": {
526
- "llm": {
527
- "type": {
528
- "type": "<class 'inspect._empty'>"
529
- },
530
- "name": "llm",
531
- "position": "bottom"
532
- }
533
- }
534
- }
535
  },
 
 
 
 
536
  "position": {
537
- "x": -719.98604638686,
538
- "y": 1343.5978526690794
539
  },
540
- "width": 200.0,
541
- "height": 200.0,
542
- "parentId": null
543
  },
544
  {
545
- "id": "LLM 2",
546
- "type": "basic",
547
  "data": {
548
- "title": "LLM",
549
- "params": {
550
- "name": "openai"
551
- },
552
  "display": null,
553
  "error": null,
554
  "meta": {
 
 
555
  "outputs": {
556
  "output": {
557
- "position": "top",
558
  "name": "output",
 
559
  "type": {
560
  "type": "None"
561
  }
562
  }
563
  },
564
- "name": "LLM",
565
- "type": "basic",
566
- "inputs": {},
567
  "params": {
 
 
 
 
 
 
 
568
  "name": {
569
- "default": "openai",
570
  "name": "name",
571
  "type": {
572
  "type": "<class 'str'>"
573
  }
574
- }
575
- }
576
- }
577
- },
578
- "position": {
579
- "x": -727.6171373682814,
580
- "y": 1649.7242636905507
581
- },
582
- "width": 200.0,
583
- "parentId": null,
584
- "height": 200.0
585
- },
586
- {
587
- "id": "Truncate history 1",
588
- "type": "basic",
589
- "data": {
590
- "title": "Truncate history",
591
- "params": {
592
- "max_tokens": 10000.0
593
- },
594
- "display": null,
595
- "error": null,
596
- "meta": {
597
- "outputs": {
598
- "output": {
599
- "type": {
600
- "type": "None"
601
- },
602
- "name": "output",
603
- "position": "top"
604
- }
605
- },
606
- "type": "basic",
607
- "params": {
608
- "max_tokens": {
609
- "default": 10000.0,
610
- "name": "max_tokens",
611
  "type": {
612
  "type": "<class 'int'>"
613
  }
614
- }
615
- },
616
- "name": "Truncate history",
617
- "inputs": {}
618
- }
619
- },
620
- "position": {
621
- "x": 0.08889822620079713,
622
- "y": 1044.7639853229612
623
- },
624
- "height": 200.0,
625
- "width": 200.0,
626
- "parentId": null
627
- },
628
- {
629
- "id": "Chat processor 1",
630
- "type": "basic",
631
- "data": {
632
- "title": "Chat processor",
633
- "params": {},
634
- "display": null,
635
- "error": null,
636
- "__execution_delay": null,
637
- "collapsed": true,
638
- "meta": {
639
- "name": "Chat processor",
640
- "inputs": {
641
- "processor": {
642
- "name": "processor",
643
- "position": "bottom",
644
  "type": {
645
- "type": "<class 'inspect._empty'>"
646
  }
647
- }
648
- },
649
- "params": {},
650
- "type": "basic",
651
- "outputs": {
652
- "output": {
653
  "type": {
654
- "type": "None"
655
- },
656
- "position": "top",
657
- "name": "output"
658
  }
659
- }
660
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
661
  },
 
 
 
662
  "position": {
663
- "x": 182.89729246405872,
664
- "y": 778.546274223181
665
  },
666
- "parentId": null,
667
- "width": 200.0,
668
- "height": 200.0
669
  },
670
  {
671
- "id": "Mask 1",
672
- "type": "basic",
673
  "data": {
674
- "title": "Mask",
675
- "params": {
676
- "mask_pattern": "masked_email_address_{}",
677
- "name": "email",
678
679
- "regex": "([a-z0-9!#$%&'*+\\/=?^_`{|.}~-]+@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)"
680
- },
681
  "display": null,
682
  "error": null,
683
  "meta": {
684
  "inputs": {},
 
685
  "outputs": {
686
  "output": {
687
- "position": "top",
688
  "name": "output",
 
689
  "type": {
690
  "type": "None"
691
  }
692
  }
693
  },
694
- "type": "basic",
695
- "name": "Mask",
696
  "params": {
697
- "name": {
698
- "default": "",
699
- "name": "name",
700
  "type": {
701
  "type": "<class 'str'>"
702
  }
703
  },
704
- "exceptions": {
705
- "name": "exceptions",
706
- "type": {
707
- "type": "<class 'str'>"
708
- },
709
- "default": ""
710
- },
711
- "regex": {
712
  "type": {
713
  "type": "<class 'str'>"
714
- },
715
- "name": "regex",
716
- "default": ""
717
  },
718
- "mask_pattern": {
719
- "default": "",
 
720
  "type": {
721
  "type": "<class 'str'>"
722
- },
723
- "name": "mask_pattern"
724
  }
725
- }
726
- }
 
 
 
 
 
 
 
 
 
 
 
 
727
  },
 
 
 
728
  "position": {
729
- "x": 233.69759202223884,
730
- "y": 1041.6145468043276
731
  },
732
- "height": 200.0,
733
- "parentId": null,
734
- "width": 200.0
735
  },
736
  {
737
- "id": "Mask 2",
738
- "type": "basic",
739
  "data": {
740
- "title": "Mask",
741
- "params": {
742
- "regex": "((?:(?:\\\\d{4}[- ]?){3}\\\\d{4}|\\\\d{15,16}))(?![\\\\d])",
743
- "exceptions": "",
744
- "name": "credit_card",
745
- "mask_pattern": "masked_credit_card_number_{}"
746
- },
747
  "display": null,
748
  "error": null,
749
  "meta": {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
750
  "outputs": {
751
  "output": {
752
  "name": "output",
@@ -756,214 +628,170 @@
756
  }
757
  }
758
  },
759
- "inputs": {},
760
- "name": "Mask",
761
- "type": "basic",
762
  "params": {
763
- "exceptions": {
 
 
764
  "type": {
765
  "type": "<class 'str'>"
766
- },
767
- "default": "",
768
- "name": "exceptions"
769
  },
770
- "regex": {
771
- "default": "",
 
772
  "type": {
773
  "type": "<class 'str'>"
774
- },
775
- "name": "regex"
776
  },
777
- "mask_pattern": {
778
- "name": "mask_pattern",
 
779
  "type": {
780
  "type": "<class 'str'>"
781
- },
782
- "default": ""
783
  },
784
- "name": {
785
- "name": "name",
786
- "default": "",
787
  "type": {
788
  "type": "<class 'str'>"
789
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
790
  }
791
- }
792
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
793
  },
 
 
 
794
  "position": {
795
- "x": 513.2761671440603,
796
- "y": 1034.8547191984255
797
  },
798
- "width": 200.0,
799
- "parentId": null,
800
- "height": 200.0
801
  },
802
  {
803
- "id": "Test Chat API 2",
804
- "type": "basic",
805
  "data": {
806
- "title": "Test Chat API",
807
- "params": {
808
- "show_details": false
809
- },
810
  "display": null,
811
  "error": null,
812
- "collapsed": false,
813
- "__execution_delay": 0.0,
814
  "meta": {
815
- "params": {
816
- "show_details": {
817
- "default": false,
818
- "type": {
819
- "type": "<class 'bool'>"
820
- },
821
- "name": "show_details"
822
- }
823
- },
824
  "inputs": {
825
- "message": {
826
- "name": "message",
827
  "position": "left",
828
  "type": {
829
  "type": "<class 'inspect._empty'>"
830
  }
831
  },
832
- "chat_api": {
 
833
  "position": "bottom",
834
  "type": {
835
  "type": "<class 'inspect._empty'>"
836
- },
837
- "name": "chat_api"
838
  }
839
  },
 
840
  "outputs": {
841
  "output": {
842
- "position": "right",
 
843
  "type": {
844
  "type": "None"
845
- },
846
- "name": "output"
847
  }
848
  },
849
- "name": "Test Chat API",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
850
  "type": "basic"
851
- }
 
 
 
 
 
 
 
852
  },
 
 
 
853
  "position": {
854
- "x": -57.377776548056346,
855
- "y": -16.924593985348814
856
  },
857
- "width": 376.0,
858
- "parentId": null,
859
- "height": 225.0
860
- }
861
- ],
862
- "edges": [
863
- {
864
- "id": "xy-edge__Knowledge base 1output-Chat API 1knowledge_base",
865
- "source": "Knowledge base 1",
866
- "target": "Chat API 1",
867
- "sourceHandle": "output",
868
- "targetHandle": "knowledge_base"
869
- },
870
- {
871
- "id": "xy-edge__RAG chatbot 1output-Chat API 1chatbot",
872
- "source": "RAG chatbot 1",
873
- "target": "Chat API 1",
874
- "sourceHandle": "output",
875
- "targetHandle": "chatbot"
876
- },
877
- {
878
- "id": "xy-edge__LLM 1output-RAG chatbot 1llm",
879
- "source": "LLM 1",
880
- "target": "RAG chatbot 1",
881
- "sourceHandle": "output",
882
- "targetHandle": "llm"
883
- },
884
- {
885
- "id": "xy-edge__Scenario selector 1output-RAG chatbot 1scenario_selector",
886
- "source": "Scenario selector 1",
887
- "target": "RAG chatbot 1",
888
- "sourceHandle": "output",
889
- "targetHandle": "scenario_selector"
890
- },
891
- {
892
- "id": "xy-edge__RAG graph 1output-RAG chatbot 1rag_graph",
893
- "source": "RAG graph 1",
894
- "target": "RAG chatbot 1",
895
- "sourceHandle": "output",
896
- "targetHandle": "rag_graph"
897
- },
898
- {
899
- "id": "xy-edge__Vector store 1output-RAG graph 1vector_store",
900
- "source": "Vector store 1",
901
- "target": "RAG graph 1",
902
- "sourceHandle": "output",
903
- "targetHandle": "vector_store"
904
- },
905
- {
906
- "id": "xy-edge__Text embedder 2output-RAG graph 1text_embedder",
907
- "source": "Text embedder 2",
908
- "target": "RAG graph 1",
909
- "sourceHandle": "output",
910
- "targetHandle": "text_embedder"
911
- },
912
- {
913
- "id": "xy-edge__LLM 2output-Text embedder 2llm",
914
- "source": "LLM 2",
915
- "target": "Text embedder 2",
916
- "sourceHandle": "output",
917
- "targetHandle": "llm"
918
- },
919
- {
920
- "id": "xy-edge__Truncate history 1output-Chat processor 1processor",
921
- "source": "Truncate history 1",
922
- "target": "Chat processor 1",
923
- "sourceHandle": "output",
924
- "targetHandle": "processor"
925
- },
926
- {
927
- "id": "xy-edge__Chat processor 1output-Chat API 1chat_processor",
928
- "source": "Chat processor 1",
929
- "target": "Chat API 1",
930
- "sourceHandle": "output",
931
- "targetHandle": "chat_processor"
932
- },
933
- {
934
- "id": "xy-edge__Mask 1output-Chat processor 1processor",
935
- "source": "Mask 1",
936
- "target": "Chat processor 1",
937
- "sourceHandle": "output",
938
- "targetHandle": "processor"
939
- },
940
- {
941
- "id": "xy-edge__Mask 2output-Chat processor 1processor",
942
- "source": "Mask 2",
943
- "target": "Chat processor 1",
944
- "sourceHandle": "output",
945
- "targetHandle": "processor"
946
- },
947
- {
948
- "id": "xy-edge__Input chat 1output-Test Chat API 2message",
949
- "source": "Input chat 1",
950
- "target": "Test Chat API 2",
951
- "sourceHandle": "output",
952
- "targetHandle": "message"
953
- },
954
- {
955
- "id": "xy-edge__Test Chat API 2output-View 1input",
956
- "source": "Test Chat API 2",
957
- "target": "View 1",
958
- "sourceHandle": "output",
959
- "targetHandle": "input"
960
- },
961
- {
962
- "id": "xy-edge__Chat API 1output-Test Chat API 2chat_api",
963
- "source": "Chat API 1",
964
- "target": "Test Chat API 2",
965
- "sourceHandle": "output",
966
- "targetHandle": "chat_api"
967
  }
968
  ]
969
  }
 
1
  {
2
+ "edges": [
3
+ {
4
+ "id": "xy-edge__Truncate history 1output-Chat processor 1processor",
5
+ "source": "Truncate history 1",
6
+ "sourceHandle": "output",
7
+ "target": "Chat processor 1",
8
+ "targetHandle": "processor"
9
+ },
10
+ {
11
+ "id": "xy-edge__Mask 1output-Chat processor 1processor",
12
+ "source": "Mask 1",
13
+ "sourceHandle": "output",
14
+ "target": "Chat processor 1",
15
+ "targetHandle": "processor"
16
+ },
17
+ {
18
+ "id": "xy-edge__Mask 2output-Chat processor 1processor",
19
+ "source": "Mask 2",
20
+ "sourceHandle": "output",
21
+ "target": "Chat processor 1",
22
+ "targetHandle": "processor"
23
+ },
24
+ {
25
+ "id": "xy-edge__Input chat 1output-Test Chat API 2message",
26
+ "source": "Input chat 1",
27
+ "sourceHandle": "output",
28
+ "target": "Test Chat API 2",
29
+ "targetHandle": "message"
30
+ },
31
+ {
32
+ "id": "xy-edge__Test Chat API 2output-View 1input",
33
+ "source": "Test Chat API 2",
34
+ "sourceHandle": "output",
35
+ "target": "View 1",
36
+ "targetHandle": "input"
37
+ },
38
+ {
39
+ "id": "LynxScribe RAG Graph Chatbot Backend 1 Test Chat API 2",
40
+ "source": "LynxScribe RAG Graph Chatbot Backend 1",
41
+ "sourceHandle": "output",
42
+ "target": "Test Chat API 2",
43
+ "targetHandle": "chat_api"
44
+ },
45
+ {
46
+ "id": "Chat processor 1 LynxScribe RAG Graph Chatbot Backend 1",
47
+ "source": "Chat processor 1",
48
+ "sourceHandle": "output",
49
+ "target": "LynxScribe RAG Graph Chatbot Backend 1",
50
+ "targetHandle": "chat_processor"
51
+ },
52
+ {
53
+ "id": "Cloud-sourced File Loader 1 LynxScribe RAG Graph Chatbot Builder 1",
54
+ "source": "Cloud-sourced File Loader 1",
55
+ "sourceHandle": "output",
56
+ "target": "LynxScribe RAG Graph Chatbot Builder 1",
57
+ "targetHandle": "file_urls"
58
+ },
59
+ {
60
+ "id": "LynxScribe RAG Graph Chatbot Builder 1 LynxScribe RAG Graph Chatbot Backend 1",
61
+ "source": "LynxScribe RAG Graph Chatbot Builder 1",
62
+ "sourceHandle": "output",
63
+ "target": "LynxScribe RAG Graph Chatbot Backend 1",
64
+ "targetHandle": "knowledge_base"
65
+ },
66
+ {
67
+ "id": "LynxScribe RAG Graph Vector Store 1 LynxScribe RAG Graph Chatbot Builder 1",
68
+ "source": "LynxScribe RAG Graph Vector Store 1",
69
+ "sourceHandle": "output",
70
+ "target": "LynxScribe RAG Graph Chatbot Builder 1",
71
+ "targetHandle": "rag_graph"
72
+ }
73
+ ],
74
  "env": "LynxScribe",
75
  "nodes": [
76
  {
 
 
77
  "data": {
78
+ "__execution_delay": 0.0,
79
+ "collapsed": null,
 
 
80
  "display": null,
81
  "error": null,
 
82
  "meta": {
83
  "inputs": {},
84
+ "name": "Input chat",
 
 
 
 
 
 
 
 
85
  "outputs": {
86
  "output": {
87
  "name": "output",
88
+ "position": "right",
89
  "type": {
90
  "type": "None"
91
+ }
92
+ }
93
+ },
94
+ "params": {
95
+ "chat": {
96
+ "default": null,
97
+ "name": "chat",
98
+ "type": {
99
+ "type": "<class 'str'>"
100
+ }
101
  }
102
  },
 
103
  "type": "basic"
104
  },
105
+ "params": {
106
+ "chat": "What products does Lynx have?"
107
+ },
108
+ "status": "done",
109
+ "title": "Input chat"
110
  },
111
+ "dragHandle": ".bg-primary",
112
+ "height": 186.0,
113
+ "id": "Input chat 1",
114
+ "parentId": null,
115
  "position": {
116
+ "x": -2606.8829929570456,
117
+ "y": -648.2654341415332
118
  },
119
+ "type": "basic",
120
+ "width": 259.0
 
121
  },
122
  {
 
 
123
  "data": {
 
 
124
  "display": {
125
  "dataframes": {
126
  "df": {
 
129
  ],
130
  "data": [
131
  [
132
+ "Lynx Analytics offers a range of data analytics products and solutions tailored for various industries. Here are some of our key offerings:\n\n- **Generative AI**: We provide innovative solutions such as chatbots specifically designed for the pharmaceutical and service provider sectors.\n \n- **Graph AI**: Our advanced graph reasoning tools help in areas like transport scenario planning and predicting patient outcomes with graph representation learning.\n\n- **Pharma and Life Sciences**: We focus on marketing support, including Next Best Action predictions and Brand Adoption Ladder analysis, as well as supporting drug discovery and medical analytics.\n\n- **Retail Solutions**: Our products include Price AI, Assort AI, and Promo AI to optimize pricing, assortment, and promotion strategies for retailers.\n\n- **Financial Services**: We offer digital banking analytics solutions and a Customer Happiness Index to enhance customer experience and retention.\n\n- **Telecommunications**: While briefly mentioned, our telecom solutions help optimize operations, including fibre CAPEX optimization and churn prevention management.\n\nThese products are complemented by consulting services to help businesses make data-driven decisions. If you need more specific information or a demo, feel free to get in touch.\n\nCould you share what prompted your visit to our website today? Or may I know which specific domain or industry you are interested in or work in?"
133
  ]
134
  ]
135
  }
 
137
  },
138
  "error": null,
139
  "meta": {
 
 
140
  "inputs": {
141
  "input": {
142
  "name": "input",
143
+ "position": "left",
144
  "type": {
145
  "type": "<class 'inspect._empty'>"
146
+ }
 
147
  }
148
  },
149
+ "name": "View",
150
  "outputs": {},
151
+ "params": {},
152
+ "type": "table_view"
153
+ },
154
+ "params": {},
155
+ "status": "done",
156
+ "title": "View"
157
  },
158
+ "dragHandle": ".bg-primary",
159
+ "height": 950.0,
160
+ "id": "View 1",
161
+ "parentId": null,
162
  "position": {
163
+ "x": -754.9225960536905,
164
+ "y": -643.161064357758
165
  },
166
+ "type": "table_view",
167
+ "width": 1256.0
 
168
  },
169
  {
 
 
170
  "data": {
 
 
 
 
171
  "display": null,
172
  "error": null,
173
  "meta": {
174
  "inputs": {},
175
+ "name": "Truncate history",
176
  "outputs": {
177
  "output": {
178
+ "name": "output",
179
+ "position": "top",
180
  "type": {
181
  "type": "None"
182
+ }
 
 
183
  }
184
  },
 
185
  "params": {
186
+ "max_tokens": {
187
+ "default": 10000.0,
188
+ "name": "max_tokens",
189
  "type": {
190
+ "type": "<class 'int'>"
191
  }
192
  }
193
  },
194
+ "type": "basic"
195
+ },
196
+ "params": {
197
+ "max_tokens": 10000.0
198
+ },
199
+ "status": "done",
200
+ "title": "Truncate history"
201
  },
202
+ "dragHandle": ".bg-primary",
203
+ "height": 200.0,
204
+ "id": "Truncate history 1",
205
+ "parentId": null,
206
  "position": {
207
+ "x": -1536.508533731351,
208
+ "y": 728.1204075546109
209
  },
210
+ "type": "basic",
211
+ "width": 200.0
 
212
  },
213
  {
 
 
214
  "data": {
215
+ "__execution_delay": null,
216
+ "collapsed": false,
 
 
 
217
  "display": null,
218
  "error": null,
219
  "meta": {
220
+ "inputs": {
221
+ "processor": {
222
+ "name": "processor",
223
+ "position": "bottom",
 
 
 
 
 
 
224
  "type": {
225
+ "type": "<class 'inspect._empty'>"
226
+ }
 
227
  }
228
  },
229
+ "name": "Chat processor",
230
  "outputs": {
231
  "output": {
 
232
  "name": "output",
233
+ "position": "top",
234
  "type": {
235
  "type": "None"
236
  }
237
  }
238
  },
239
+ "params": {},
240
+ "type": "basic"
241
+ },
242
+ "params": {},
243
+ "status": "done",
244
+ "title": "Chat processor"
245
  },
246
+ "dragHandle": ".bg-primary",
247
+ "height": 89.0,
248
+ "id": "Chat processor 1",
249
+ "parentId": null,
250
  "position": {
251
+ "x": -1527.1027075359414,
252
+ "y": 605.2129408898476
253
  },
254
+ "type": "basic",
255
+ "width": 416.0
 
256
  },
257
  {
 
 
258
  "data": {
 
 
 
 
259
  "display": null,
260
  "error": null,
261
  "meta": {
262
+ "inputs": {},
263
+ "name": "Mask",
264
  "outputs": {
265
  "output": {
266
+ "name": "output",
 
 
267
  "position": "top",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
268
  "type": {
269
+ "type": "None"
270
  }
 
 
 
 
 
 
 
271
  }
272
  },
273
  "params": {
274
+ "exceptions": {
275
+ "default": "",
276
+ "name": "exceptions",
277
  "type": {
278
  "type": "<class 'str'>"
279
+ }
280
+ },
281
+ "mask_pattern": {
282
+ "default": "",
283
+ "name": "mask_pattern",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
284
  "type": {
285
  "type": "<class 'str'>"
286
  }
287
  },
288
+ "name": {
289
+ "default": "",
290
+ "name": "name",
291
  "type": {
292
  "type": "<class 'str'>"
293
+ }
 
 
294
  },
295
+ "regex": {
296
+ "default": "",
297
+ "name": "regex",
298
  "type": {
299
  "type": "<class 'str'>"
300
  }
301
  }
302
  },
303
+ "type": "basic"
304
+ },
305
+ "params": {
306
307
+ "mask_pattern": "masked_email_address_{}",
308
+ "name": "email",
309
+ "regex": "([a-z0-9!#$%&'*+\\/=?^_`{|.}~-]+@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)"
310
+ },
311
+ "status": "done",
312
+ "title": "Mask"
 
313
  },
314
+ "dragHandle": ".bg-primary",
315
+ "height": 358.0,
316
+ "id": "Mask 1",
317
+ "parentId": null,
318
  "position": {
319
+ "x": -1309.5065330408577,
320
+ "y": 731.6791509394458
321
  },
322
+ "type": "basic",
323
+ "width": 313.0
 
324
  },
325
  {
 
 
326
  "data": {
 
 
 
 
 
 
 
327
  "display": null,
328
  "error": null,
329
  "meta": {
330
+ "inputs": {},
331
+ "name": "Mask",
332
  "outputs": {
333
  "output": {
 
334
  "name": "output",
335
+ "position": "top",
336
  "type": {
337
  "type": "None"
338
  }
339
  }
340
  },
341
  "params": {
342
+ "exceptions": {
343
+ "default": "",
344
+ "name": "exceptions",
345
  "type": {
346
+ "type": "<class 'str'>"
347
+ }
 
348
  },
349
+ "mask_pattern": {
350
+ "default": "",
351
+ "name": "mask_pattern",
352
  "type": {
353
+ "type": "<class 'str'>"
354
  }
355
  },
356
+ "name": {
357
+ "default": "",
358
+ "name": "name",
359
  "type": {
360
  "type": "<class 'str'>"
361
  }
362
  },
363
+ "regex": {
364
+ "default": "",
365
+ "name": "regex",
366
  "type": {
367
  "type": "<class 'str'>"
368
  }
369
  }
370
  },
371
+ "type": "basic"
372
+ },
373
+ "params": {
374
+ "exceptions": "",
375
+ "mask_pattern": "masked_credit_card_number_{}",
376
+ "name": "credit_card",
377
+ "regex": "((?:(?:\\d{4}[- ]?){3}\\d{4}|\\d{15,16}))(?![\\d])"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
378
  },
379
+ "status": "done",
380
+ "title": "Mask"
381
  },
382
+ "dragHandle": ".bg-primary",
383
+ "height": 358.0,
384
+ "id": "Mask 2",
385
+ "parentId": null,
386
  "position": {
387
+ "x": -983.2612912523697,
388
+ "y": 731.5859900002104
389
  },
390
+ "type": "basic",
391
+ "width": 315.0
 
392
  },
393
  {
 
 
394
  "data": {
395
+ "__execution_delay": 0.0,
396
+ "collapsed": false,
397
  "display": null,
398
  "error": null,
399
  "meta": {
 
400
  "inputs": {
401
+ "chat_api": {
402
+ "name": "chat_api",
 
 
 
 
 
 
403
  "position": "bottom",
404
  "type": {
405
  "type": "<class 'inspect._empty'>"
406
+ }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
407
  },
408
+ "message": {
409
+ "name": "message",
410
+ "position": "left",
411
  "type": {
412
+ "type": "<class 'inspect._empty'>"
413
+ }
 
414
  }
415
  },
416
+ "name": "Test Chat API",
 
417
  "outputs": {
418
  "output": {
419
+ "name": "output",
420
+ "position": "right",
421
  "type": {
422
  "type": "None"
423
+ }
 
 
424
  }
425
  },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
426
  "params": {
427
+ "show_details": {
428
+ "default": false,
429
+ "name": "show_details",
 
 
 
 
 
 
 
 
430
  "type": {
431
+ "type": "<class 'bool'>"
432
+ }
 
 
433
  }
434
  },
435
+ "type": "basic"
436
+ },
437
+ "params": {},
438
+ "status": "done",
439
+ "title": "Test Chat API"
 
 
 
 
 
 
440
  },
441
+ "dragHandle": ".bg-primary",
442
+ "height": 201.0,
443
+ "id": "Test Chat API 2",
444
+ "parentId": null,
445
  "position": {
446
+ "x": -2024.044443214723,
447
+ "y": -654.8412606520155
448
  },
449
+ "type": "basic",
450
+ "width": 906.0
 
451
  },
452
  {
 
 
453
  "data": {
454
+ "__execution_delay": 0.0,
455
+ "collapsed": null,
 
 
456
  "display": null,
457
  "error": null,
458
  "meta": {
459
+ "inputs": {},
460
+ "name": "LynxScribe RAG Graph Vector Store",
461
  "outputs": {
462
  "output": {
 
463
  "name": "output",
464
+ "position": "top",
465
  "type": {
466
  "type": "None"
467
  }
468
  }
469
  },
 
 
 
470
  "params": {
471
+ "collection_name": {
472
+ "default": "lynx",
473
+ "name": "collection_name",
474
+ "type": {
475
+ "type": "<class 'str'>"
476
+ }
477
+ },
478
  "name": {
479
+ "default": "faiss",
480
  "name": "name",
481
  "type": {
482
  "type": "<class 'str'>"
483
  }
484
+ },
485
+ "num_dimensions": {
486
+ "default": 3072.0,
487
+ "name": "num_dimensions",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
488
  "type": {
489
  "type": "<class 'int'>"
490
  }
491
+ },
492
+ "text_embedder_interface": {
493
+ "default": "openai",
494
+ "name": "text_embedder_interface",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
495
  "type": {
496
+ "type": "<class 'str'>"
497
  }
498
+ },
499
+ "text_embedder_model_name_or_path": {
500
+ "default": "text-embedding-3-large",
501
+ "name": "text_embedder_model_name_or_path",
 
 
502
  "type": {
503
+ "type": "<class 'str'>"
504
+ }
 
 
505
  }
506
+ },
507
+ "position": {
508
+ "x": 189.0,
509
+ "y": 489.0
510
+ },
511
+ "type": "basic"
512
+ },
513
+ "params": {
514
+ "collection_name": "lynx",
515
+ "name": "faiss",
516
+ "num_dimensions": "1536",
517
+ "text_embedder_interface": "openai",
518
+ "text_embedder_model_name_or_path": "text-embedding-ada-002"
519
+ },
520
+ "status": "done",
521
+ "title": "LynxScribe RAG Graph Vector Store"
522
  },
523
+ "dragHandle": ".bg-primary",
524
+ "height": 443.0,
525
+ "id": "LynxScribe RAG Graph Vector Store 1",
526
  "position": {
527
+ "x": -2019.279230344727,
528
+ "y": 1080.1955856484205
529
  },
530
+ "type": "basic",
531
+ "width": 336.0
 
532
  },
533
  {
 
 
534
  "data": {
535
+ "__execution_delay": 0.0,
536
+ "collapsed": null,
 
 
 
 
 
537
  "display": null,
538
  "error": null,
539
  "meta": {
540
  "inputs": {},
541
+ "name": "Cloud-sourced File Loader",
542
  "outputs": {
543
  "output": {
 
544
  "name": "output",
545
+ "position": "right",
546
  "type": {
547
  "type": "None"
548
  }
549
  }
550
  },
 
 
551
  "params": {
552
+ "accepted_file_types": {
553
+ "default": ".jpg, .jpeg, .png",
554
+ "name": "accepted_file_types",
555
  "type": {
556
  "type": "<class 'str'>"
557
  }
558
  },
559
+ "cloud_provider": {
560
+ "default": "gcp",
561
+ "name": "cloud_provider",
 
 
 
 
 
562
  "type": {
563
  "type": "<class 'str'>"
564
+ }
 
 
565
  },
566
+ "folder_URL": {
567
+ "default": "https://storage.googleapis.com/lynxkite_public_data/lynxscribe-images/image-rag-test",
568
+ "name": "folder_URL",
569
  "type": {
570
  "type": "<class 'str'>"
571
+ }
 
572
  }
573
+ },
574
+ "position": {
575
+ "x": 189.0,
576
+ "y": 412.0
577
+ },
578
+ "type": "basic"
579
+ },
580
+ "params": {
581
+ "accepted_file_types": ".pickle",
582
+ "cloud_provider": "gcp",
583
+ "folder_URL": "https://storage.googleapis.com/lynxkite_public_data/lynxscribe-knowledge-graphs/lynx-chatbot"
584
+ },
585
+ "status": "done",
586
+ "title": "Cloud-sourced File Loader"
587
  },
588
+ "dragHandle": ".bg-primary",
589
+ "height": 325.0,
590
+ "id": "Cloud-sourced File Loader 1",
591
  "position": {
592
+ "x": -2884.195823214815,
593
+ "y": 630.6408986626046
594
  },
595
+ "type": "basic",
596
+ "width": 622.0
 
597
  },
598
  {
 
 
599
  "data": {
600
+ "__execution_delay": 0.0,
601
+ "collapsed": null,
 
 
 
 
 
602
  "display": null,
603
  "error": null,
604
  "meta": {
605
+ "inputs": {
606
+ "chat_processor": {
607
+ "name": "chat_processor",
608
+ "position": "bottom",
609
+ "type": {
610
+ "type": "<class 'inspect._empty'>"
611
+ }
612
+ },
613
+ "knowledge_base": {
614
+ "name": "knowledge_base",
615
+ "position": "bottom",
616
+ "type": {
617
+ "type": "<class 'inspect._empty'>"
618
+ }
619
+ }
620
+ },
621
+ "name": "LynxScribe RAG Graph Chatbot Backend",
622
  "outputs": {
623
  "output": {
624
  "name": "output",
 
628
  }
629
  }
630
  },
 
 
 
631
  "params": {
632
+ "llm_interface": {
633
+ "default": "openai",
634
+ "name": "llm_interface",
635
  "type": {
636
  "type": "<class 'str'>"
637
+ }
 
 
638
  },
639
+ "llm_model_name": {
640
+ "default": "gpt-4o",
641
+ "name": "llm_model_name",
642
  "type": {
643
  "type": "<class 'str'>"
644
+ }
 
645
  },
646
+ "negative_answer": {
647
+ "default": "I'm sorry, but the data I've been trained on does not contain any information related to your question.",
648
+ "name": "negative_answer",
649
  "type": {
650
  "type": "<class 'str'>"
651
+ }
 
652
  },
653
+ "retriever_limits_by_type": {
654
+ "default": "{}",
655
+ "name": "retriever_limits_by_type",
656
  "type": {
657
  "type": "<class 'str'>"
658
  }
659
+ },
660
+ "retriever_max_iterations": {
661
+ "default": 3.0,
662
+ "name": "retriever_max_iterations",
663
+ "type": {
664
+ "type": "<class 'int'>"
665
+ }
666
+ },
667
+ "retriever_overall_chunk_limit": {
668
+ "default": 20.0,
669
+ "name": "retriever_overall_chunk_limit",
670
+ "type": {
671
+ "type": "<class 'int'>"
672
+ }
673
+ },
674
+ "retriever_overall_token_limit": {
675
+ "default": 3000.0,
676
+ "name": "retriever_overall_token_limit",
677
+ "type": {
678
+ "type": "<class 'int'>"
679
+ }
680
+ },
681
+ "retriever_strict_limits": {
682
+ "default": true,
683
+ "name": "retriever_strict_limits",
684
+ "type": {
685
+ "type": "<class 'bool'>"
686
+ }
687
  }
688
+ },
689
+ "position": {
690
+ "x": 543.0,
691
+ "y": 256.0
692
+ },
693
+ "type": "basic"
694
+ },
695
+ "params": {
696
+ "llm_interface": "openai",
697
+ "llm_model_name": "gpt-4o",
698
+ "negative_answer": "I'm sorry, but the data I've been trained on does not contain any information related to your question.",
699
+ "retriever_limits_by_type": "{\"information\": [1, 5], \"summary\": [0, 2], \"template_qna\": [1, 3], \"QnA question\": [0, 0]}",
700
+ "retriever_max_iterations": 3.0,
701
+ "retriever_overall_chunk_limit": "20",
702
+ "retriever_overall_token_limit": 3000.0,
703
+ "retriever_strict_limits": true
704
+ },
705
+ "status": "done",
706
+ "title": "LynxScribe RAG Graph Chatbot Backend"
707
  },
708
+ "dragHandle": ".bg-primary",
709
+ "height": 556.0,
710
+ "id": "LynxScribe RAG Graph Chatbot Backend 1",
711
  "position": {
712
+ "x": -2020.0,
713
+ "y": -188.33333333333334
714
  },
715
+ "type": "basic",
716
+ "width": 903.0
 
717
  },
718
  {
 
 
719
  "data": {
 
 
 
 
720
  "display": null,
721
  "error": null,
 
 
722
  "meta": {
 
 
 
 
 
 
 
 
 
723
  "inputs": {
724
+ "file_urls": {
725
+ "name": "file_urls",
726
  "position": "left",
727
  "type": {
728
  "type": "<class 'inspect._empty'>"
729
  }
730
  },
731
+ "rag_graph": {
732
+ "name": "rag_graph",
733
  "position": "bottom",
734
  "type": {
735
  "type": "<class 'inspect._empty'>"
736
+ }
 
737
  }
738
  },
739
+ "name": "LynxScribe RAG Graph Chatbot Builder",
740
  "outputs": {
741
  "output": {
742
+ "name": "output",
743
+ "position": "top",
744
  "type": {
745
  "type": "None"
746
+ }
 
747
  }
748
  },
749
+ "params": {
750
+ "input_type": {
751
+ "default": "v1",
752
+ "name": "input_type",
753
+ "type": {
754
+ "type": "<class 'str'>"
755
+ }
756
+ },
757
+ "node_types": {
758
+ "default": "intent_cluster",
759
+ "name": "node_types",
760
+ "type": {
761
+ "type": "<class 'str'>"
762
+ }
763
+ },
764
+ "scenario_file": {
765
+ "default": "uploads/lynx_chatbot_scenario_selector.yaml",
766
+ "name": "scenario_file",
767
+ "type": {
768
+ "type": "<class 'str'>"
769
+ }
770
+ }
771
+ },
772
+ "position": {
773
+ "x": 466.0,
774
+ "y": 478.0
775
+ },
776
  "type": "basic"
777
+ },
778
+ "params": {
779
+ "input_type": "v1",
780
+ "node_types": "intent_cluster",
781
+ "scenario_file": "uploads/lynx_chatbot_scenario_selector.yaml"
782
+ },
783
+ "status": "done",
784
+ "title": "LynxScribe RAG Graph Chatbot Builder"
785
  },
786
+ "dragHandle": ".bg-primary",
787
+ "height": 364.0,
788
+ "id": "LynxScribe RAG Graph Chatbot Builder 1",
789
  "position": {
790
+ "x": -2101.666666666667,
791
+ "y": 611.6666666666666
792
  },
793
+ "type": "basic",
794
+ "width": 500.0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
795
  }
796
  ]
797
  }
examples/uploads/lynx_chatbot_scenario_selector.yaml ADDED
@@ -0,0 +1,302 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ - name: general_interest
2
+ mode: retrieve_llm # Literal[retrieve_llm, retrieve_only, llm_only, fixed_answer, sticky_answer]
3
+ prompt_messages: # Answer prompts in [role, content] format, should contain {context}
4
+ - role: system # Literal[system, assistant, user, tool]
5
+ content: &role >
6
+ You are LynxScribe, a chatbot representing Lynx Analytics, a leading Singaporean analytics
7
+ company specializing in pharma, life sciences, generative AI, and graph AI. Your role is to
8
+ respond to inquiries on the Lynx Analytics website. To better understand the visitors'
9
+ needs, you may ask follow-up questions as detailed in subsequent instructions.
10
+ - role: system
11
+ content: &preferences >
12
+ Lynx Analytics specializes in a range of areas including pharma (with a focus on marketing
13
+ support), life sciences, graph AI, and generative AI solutions. When responding to inquiries
14
+ about our solutions or products, give priority to those related to generative AI (chatbots
15
+ for pharma and service providers), graph AI (graph reasoning), and pharma (research, key
16
+ opinion leaders, brand adoption ladder). Also, briefly touch upon our offerings in retail
17
+ (price AI, assort AI, promo AI) and finance (digital banking, Customer Happiness Index), as
18
+ these are areas of secondary priority. Additionally, although telecommunication is worth
19
+ mentioning briefly to highlight our comprehensive range of expertise and solutions.
20
+ - role: system
21
+ content: &context >
22
+ Respond to questions solely based on the context outlined below:\n\n{context}
23
+ - role: system
24
+ content: &instr_prices >
25
+ If inquiries about pricing arise, suggest contacting Lynx Analytics for detailed
26
+ information. Additionally, emphasize that Lynx Analytics offers solutions at competitive
27
+ prices without compromising on quality.
28
+ - role: system
29
+ content: &ask_industry >
30
+ If it's not mentioned in the chat history, include a question at the end of your response
31
+ to inquire about their industry interest or employment. For example: 'May I know which
32
+ specific domain or industry you are interested in or work in?'
33
+ - role: system
34
+ content: &ask_visit_reason >
35
+ If the chat history does not reveal it, ask about their reason for visiting the website. For
36
+ instance, you might say: 'Could you share what prompted your visit to our website today?'
37
+ - role: system
38
+ content: &final_instr >
39
+ Carefully answer questions based on the provided context. Refrain from introducing new
40
+ names; use only those within your context. Respond in the language of the question. If
41
+ necessary, ask follow-up questions. Ensure your answers are clear, utilizing bullet points
42
+ where appropriate. Avoid phrases like 'According to this article' to maintain a natural
43
+ tone.
44
+ link_answer: &link # When present, formatted node link appends to answer, should contain {link}
45
+ "\n\nPlease visit <a href='{link}' target='_blank'>{link}</a> for further information."
46
+ min_similarity_score: -1 # Only need to specify if > -1 and in RETRIEVE_LLM or RETRIEVE_ONLY mode
47
+ - name: life_sciences_interest
48
+ mode: retrieve_llm
49
+ prompt_messages:
50
+ - role: system
51
+ content: *role
52
+ - role: system
53
+ content: *preferences
54
+ - role: system
55
+ content: *context
56
+ - role: system
57
+ content: *instr_prices
58
+ - role: system
59
+ content: &ask_profession >
60
+ If their job is not mentioned in the chat history, add a question at the end of your answer
61
+ about their profession. For example: 'Could you please tell me about your current profession
62
+ or occupation?'
63
+ - role: system
64
+ content: *ask_visit_reason
65
+ - role: system
66
+ content: &ask_email >
67
+ If their email is not already in the chat history, suggest that they can provide their email
68
+ address for further contact. For instance: 'Should you wish for further communication
69
+ regarding your queries, feel free to provide your email address.'
70
+ - role: system
71
+ content: *final_instr
72
+ link_answer: *link
73
+ min_similarity_score: -1
74
+ - name: finance_interest
75
+ mode: retrieve_llm
76
+ prompt_messages:
77
+ - role: system
78
+ content: *role
79
+ - role: system
80
+ content: *context
81
+ - role: system
82
+ content: *instr_prices
83
+ - role: system
84
+ content: &ask_responsibilities >
85
+ If their job or responsibilities are not detailed in the chat history, include a question
86
+ at the end of your response. For example: 'Would you mind sharing some details about the
87
+ specific responsibilities you manage in your role?'
88
+ - role: system
89
+ content: *ask_visit_reason
90
+ - role: system
91
+ content: *ask_email
92
+ - role: system
93
+ content: *final_instr
94
+ link_answer: *link
95
+ min_similarity_score: -1
96
+ - name: telco_interest
97
+ mode: retrieve_llm
98
+ prompt_messages:
99
+ - role: system
100
+ content: *role
101
+ - role: system
102
+ content: *context
103
+ - role: system
104
+ content: *instr_prices
105
+ - role: system
106
+ content: *ask_responsibilities
107
+ - role: system
108
+ content: *ask_visit_reason
109
+ - role: system
110
+ content: *ask_email
111
+ - role: system
112
+ content: *final_instr
113
+ link_answer: *link
114
+ min_similarity_score: -1
115
+ - name: retail_interest
116
+ mode: retrieve_llm
117
+ prompt_messages:
118
+ - role: system
119
+ content: *role
120
+ - role: system
121
+ content: *context
122
+ - role: system
123
+ content: *instr_prices
124
+ - role: system
125
+ content: *ask_responsibilities
126
+ - role: system
127
+ content: *ask_visit_reason
128
+ - role: system
129
+ content: *ask_email
130
+ - role: system
131
+ content: *final_instr
132
+ link_answer: *link
133
+ min_similarity_score: -1
134
+ - name: lynx_kite
135
+ mode: retrieve_llm
136
+ prompt_messages:
137
+ - role: system
138
+ content: *role
139
+ - role: system
140
+ content: *preferences
141
+ - role: system
142
+ content: *context
143
+ - role: system
144
+ content: *instr_prices
145
+ - role: system
146
+ content: *ask_industry
147
+ - role: system
148
+ content: &ask_graph >
149
+ If it's not mentioned in the chat history, include a question at the end of your response to
150
+ inquire about their specific needs related to graph analytics. For example: 'May I know
151
+ which particular graph-related problem you are looking to solve with graph analytics?'
152
+ - role: system
153
+ content: *ask_email
154
+ - role: system
155
+ content: *final_instr
156
+ link_answer: *link
157
+ min_similarity_score: -1
158
+ - name: lynx_team
159
+ mode: retrieve_llm
160
+ prompt_messages:
161
+ - role: system
162
+ content: *role
163
+ - role: system
164
+ content: *context
165
+ - role: system
166
+ content: *instr_prices
167
+ - role: system
168
+ content: *ask_visit_reason
169
+ - role: system
170
+ content: >
171
+ When they inquire about names that could refer to multiple individuals, provide the names
172
+ along with a brief description of each. Then, ask for clarification on which specific
173
+ individual they are referring to.
174
+ - role: system
175
+ content: *final_instr
176
+ link_answer: *link
177
+ min_similarity_score: -1
178
+ - name: lynx_career
179
+ mode: retrieve_llm
180
+ prompt_messages:
181
+ - role: system
182
+ content: *role
183
+ - role: system
184
+ content: *context
185
+ - role: system
186
+ content: *instr_prices
187
+ - role: system
188
+ content: *ask_responsibilities
189
+ - role: system
190
+ content: >
191
+ If it's not already mentioned in the chat history, include a question at the end of your
192
+ response to inquire about their motivation for wanting to work with us. For example: 'Could
193
+ you share what motivates you to seek a position with our team?'
194
+ - role: system
195
+ content: *ask_email
196
+ - role: system
197
+ content: *final_instr
198
+ link_answer: *link
199
+ min_similarity_score: -1
200
+ - name: lynxscribe
201
+ mode: retrieve_llm
202
+ prompt_messages:
203
+ - role: system
204
+ content: *role
205
+ - role: system
206
+ content: *preferences
207
+ - role: system
208
+ content: *context
209
+ - role: system
210
+ content: *instr_prices
211
+ - role: system
212
+ content: *ask_industry
213
+ - role: system
214
+ content: >
215
+ If the chat history does not already include this information, add a question at the end of
216
+ your response to identify their specific needs in generative AI. For example: 'Could you
217
+ please specify the problem you are aiming to address using generative AI?'
218
+ - role: system
219
+ content: *ask_email
220
+ - role: system
221
+ content: *final_instr
222
+ link_answer: *link
223
+ min_similarity_score: -1
224
+ - name: general_ds
225
+ mode: retrieve_llm
226
+ prompt_messages:
227
+ - role: system
228
+ content: *role
229
+ - role: system
230
+ content: *context
231
+ - role: system
232
+ content: *instr_prices
233
+ - role: system
234
+ content: *ask_industry
235
+ - role: system
236
+ content: *ask_visit_reason
237
+ - role: system
238
+ content: *ask_email
239
+ - role: system
240
+ content: *final_instr
241
+ link_answer: *link
242
+ min_similarity_score: -1
243
+ - name: general_graph
244
+ mode: retrieve_llm
245
+ prompt_messages:
246
+ - role: system
247
+ content: *role
248
+ - role: system
249
+ content: *preferences
250
+ - role: system
251
+ content: *context
252
+ - role: system
253
+ content: *instr_prices
254
+ - role: system
255
+ content: *ask_graph
256
+ - role: system
257
+ content: *ask_industry
258
+ - role: system
259
+ content: *ask_email
260
+ - role: system
261
+ content: *final_instr
262
+ link_answer: *link
263
+ min_similarity_score: -1
264
+ - name: other_okay
265
+ mode: retrieve_llm
266
+ prompt_messages:
267
+ - role: system
268
+ content: *role
269
+ - role: system
270
+ content: *preferences
271
+ - role: system
272
+ content: *context
273
+ - role: system
274
+ content: *instr_prices
275
+ - role: system
276
+ content: *ask_industry
277
+ - role: system
278
+ content: *ask_visit_reason
279
+ - role: system
280
+ content: *final_instr
281
+ link_answer: *link
282
+ min_similarity_score: -1
283
+ - name: contact_us
284
+ mode: retrieve_llm
285
+ prompt_messages:
286
+ - role: system
287
+ content: *role
288
+ - role: system
289
+ content: *context
290
+ - role: system
291
+ content: *instr_prices
292
+ - role: system
293
+ content: *ask_email
294
+ - role: system
295
+ content: *final_instr
296
+ link_answer: *link
297
+ min_similarity_score: -1
298
+ - name: malicious
299
+ mode: fixed_answer # Could be sticky, but if we want the user to rephrase, let's give 2nd chance
300
+ fixed_answer: >
301
+ I am sorry, but it seems you are trying to use me in an inappropriate way. If I have
302
+ misunderstood, please try to rephrase your question.
lynxkite-lynxscribe/src/lynxkite_lynxscribe/lynxscribe_ops.py CHANGED
@@ -30,6 +30,8 @@ from lynxkite.core import ops
30
  import json
31
  from lynxkite.core.executors import one_by_one
32
 
 
 
33
  # logger
34
  # import logging
35
  # logging.basicConfig(level=logging.INFO)
@@ -42,11 +44,12 @@ op = ops.op_registration(ENV)
42
  output_on_top = ops.output_position(output="top")
43
 
44
 
45
- @op("Cloud-sourced Image Loader")
46
- def cloud_image_loader(
47
  *,
48
  cloud_provider: str = "gcp",
49
  folder_URL: str = "https://storage.googleapis.com/lynxkite_public_data/lynxscribe-images/image-rag-test",
 
50
  ):
51
  """
52
  Gives back the list of URLs of all the images from a cloud-based folder.
@@ -55,6 +58,8 @@ def cloud_image_loader(
55
  if folder_URL[-1].endswith("/"):
56
  folder_URL = folder_URL[:-1]
57
 
 
 
58
  if cloud_provider == "gcp":
59
  client = storage.Client()
60
  url_useful_part = folder_URL.split(".com/")[-1]
@@ -66,18 +71,16 @@ def cloud_image_loader(
66
 
67
  bucket = client.bucket(bucket_name)
68
  blobs = bucket.list_blobs(prefix=prefix)
69
- image_urls = [
70
- blob.public_url
71
- for blob in blobs
72
- if blob.name.endswith((".jpg", ".jpeg", ".png"))
73
  ]
74
- return {"image_urls": image_urls}
75
  else:
76
  raise ValueError(f"Cloud provider '{cloud_provider}' is not supported.")
77
 
78
 
79
  @output_on_top
80
- @op("LynxScribe RAG Vector Store")
81
  @mem.cache
82
  def ls_rag_graph(
83
  *,
@@ -151,7 +154,7 @@ def ls_image_describer(
151
  @op("LynxScribe Image RAG Builder")
152
  @mem.cache
153
  async def ls_image_rag_builder(
154
- image_urls,
155
  image_describer,
156
  rag_graph,
157
  ):
@@ -166,7 +169,7 @@ async def ls_image_rag_builder(
166
 
167
  # handling inputs
168
  image_describer = image_describer[0]["image_describer"]
169
- image_urls = image_urls["image_urls"]
170
  rag_graph = rag_graph[0]["rag_graph"]
171
 
172
  # generate prompts from inputs
@@ -300,83 +303,207 @@ def view_image(embedding_similarities):
300
  return embedding_similarities[0]["image_url"]
301
 
302
 
303
- @output_on_top
304
- @op("Vector store")
305
- def vector_store(*, name="chromadb", collection_name="lynx"):
306
- vector_store = get_vector_store(name=name, collection_name=collection_name)
307
- return {"vector_store": vector_store}
308
 
309
 
310
- @output_on_top
311
- @op("LLM")
312
- def llm(*, name="openai"):
313
- llm = get_llm_engine(name=name)
314
- return {"llm": llm}
315
 
316
 
317
- @output_on_top
318
- @ops.input_position(llm="bottom")
319
- @op("Text embedder")
320
- def text_embedder(llm, *, model="text-embedding-ada-002"):
321
- llm = llm[0]["llm"]
322
- text_embedder = TextEmbedder(llm=llm, model=model)
323
- return {"text_embedder": text_embedder}
324
 
325
 
326
- @output_on_top
327
- @ops.input_position(vector_store="bottom", text_embedder="bottom")
328
- @op("RAG graph")
329
- def rag_graph(vector_store, text_embedder):
330
- vector_store = vector_store[0]["vector_store"]
331
- text_embedder = text_embedder[0]["text_embedder"]
332
- rag_graph = RAGGraph(
333
- PandasKnowledgeBaseGraph(vector_store=vector_store, text_embedder=text_embedder)
334
- )
335
- return {"rag_graph": rag_graph}
336
 
337
 
338
  @output_on_top
339
- @op("Scenario selector")
340
- def scenario_selector(*, scenario_file: str, node_types="intent_cluster"):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
341
  scenarios = load_config(scenario_file)
342
  node_types = [t.strip() for t in node_types.split(",")]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
343
  scenario_selector = ScenarioSelector(
344
  scenarios=[Scenario(**scenario) for scenario in scenarios],
345
  node_types=node_types,
346
  )
347
- return {"scenario_selector": scenario_selector}
348
 
 
 
 
 
 
 
 
 
349
 
350
- DEFAULT_NEGATIVE_ANSWER = "I'm sorry, but the data I've been trained on does not contain any information related to your question."
 
 
 
 
 
 
 
 
 
 
351
 
352
 
353
  @output_on_top
354
- @ops.input_position(rag_graph="bottom", scenario_selector="bottom", llm="bottom")
355
- @op("RAG chatbot")
356
- def rag_chatbot(
357
- rag_graph,
358
- scenario_selector,
359
- llm,
360
  *,
361
  negative_answer=DEFAULT_NEGATIVE_ANSWER,
362
- limits_by_type="{}",
363
- strict_limits=True,
364
- max_results=5,
 
 
 
 
 
365
  ):
366
- rag_graph = rag_graph[0]["rag_graph"]
367
- scenario_selector = scenario_selector[0]["scenario_selector"]
368
- llm = llm[0]["llm"]
369
- limits_by_type = json.loads(limits_by_type)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
370
  rag_chatbot = RAGChatbot(
371
  rag_graph=rag_graph,
372
  scenario_selector=scenario_selector,
373
  llm=llm,
374
  negative_answer=negative_answer,
375
- limits_by_type=limits_by_type,
376
- strict_limits=strict_limits,
377
- max_results=max_results,
 
 
 
 
 
378
  )
379
- return {"chatbot": rag_chatbot}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
380
 
381
 
382
  @output_on_top
@@ -446,37 +573,37 @@ def input_chat(*, chat: str):
446
  return {"text": chat}
447
 
448
 
449
- @output_on_top
450
- @ops.input_position(chatbot="bottom", chat_processor="bottom", knowledge_base="bottom")
451
- @op("Chat API")
452
- def chat_api(chatbot, chat_processor, knowledge_base, *, model="gpt-4o-mini"):
453
- chatbot = chatbot[0]["chatbot"]
454
- chat_processor = chat_processor[0]["chat_processor"]
455
- knowledge_base = knowledge_base[0]
456
- c = ChatAPI(
457
- chatbot=chatbot,
458
- chat_processor=chat_processor,
459
- model=model,
460
- )
461
- if knowledge_base:
462
- c.chatbot.rag_graph.kg_base.load_v1_knowledge_base(**knowledge_base)
463
- c.chatbot.scenario_selector.check_compatibility(c.chatbot.rag_graph)
464
- return {"chat_api": c}
465
-
466
-
467
- @output_on_top
468
- @op("Knowledge base")
469
- def knowledge_base(
470
- *,
471
- nodes_path="nodes.pickle",
472
- edges_path="edges.pickle",
473
- template_cluster_path="tempclusters.pickle",
474
- ):
475
- return {
476
- "nodes_path": nodes_path,
477
- "edges_path": edges_path,
478
- "template_cluster_path": template_cluster_path,
479
- }
480
 
481
 
482
  @op("View", view="table_view")
 
30
  import json
31
  from lynxkite.core.executors import one_by_one
32
 
33
+ DEFAULT_NEGATIVE_ANSWER = "I'm sorry, but the data I've been trained on does not contain any information related to your question."
34
+
35
  # logger
36
  # import logging
37
  # logging.basicConfig(level=logging.INFO)
 
44
  output_on_top = ops.output_position(output="top")
45
 
46
 
47
+ @op("Cloud-sourced File Loader")
48
+ def cloud_file_loader(
49
  *,
50
  cloud_provider: str = "gcp",
51
  folder_URL: str = "https://storage.googleapis.com/lynxkite_public_data/lynxscribe-images/image-rag-test",
52
+ accepted_file_types: str = ".jpg, .jpeg, .png",
53
  ):
54
  """
55
  Gives back the list of URLs of all the accepted-type files from a cloud-based folder.
 
58
  if folder_URL[-1].endswith("/"):
59
  folder_URL = folder_URL[:-1]
60
 
61
+ accepted_file_types = tuple([t.strip() for t in accepted_file_types.split(",")])
62
+
63
  if cloud_provider == "gcp":
64
  client = storage.Client()
65
  url_useful_part = folder_URL.split(".com/")[-1]
 
71
 
72
  bucket = client.bucket(bucket_name)
73
  blobs = bucket.list_blobs(prefix=prefix)
74
+ file_urls = [
75
+ blob.public_url for blob in blobs if blob.name.endswith(accepted_file_types)
 
 
76
  ]
77
+ return {"file_urls": file_urls}
78
  else:
79
  raise ValueError(f"Cloud provider '{cloud_provider}' is not supported.")
80
 
81
 
82
  @output_on_top
83
+ @op("LynxScribe RAG Graph Vector Store")
84
  @mem.cache
85
  def ls_rag_graph(
86
  *,
 
154
  @op("LynxScribe Image RAG Builder")
155
  @mem.cache
156
  async def ls_image_rag_builder(
157
+ file_urls,
158
  image_describer,
159
  rag_graph,
160
  ):
 
169
 
170
  # handling inputs
171
  image_describer = image_describer[0]["image_describer"]
172
+ image_urls = file_urls["file_urls"]
173
  rag_graph = rag_graph[0]["rag_graph"]
174
 
175
  # generate prompts from inputs
 
303
  return embedding_similarities[0]["image_url"]
304
 
305
 
306
+ # @output_on_top
307
+ # @op("Vector store")
308
+ # def vector_store(*, name="chromadb", collection_name="lynx"):
309
+ # vector_store = get_vector_store(name=name, collection_name=collection_name)
310
+ # return {"vector_store": vector_store}
311
 
312
 
313
+ # @output_on_top
314
+ # @op("LLM")
315
+ # def llm(*, name="openai"):
316
+ # llm = get_llm_engine(name=name)
317
+ # return {"llm": llm}
318
 
319
 
320
+ # @output_on_top
321
+ # @ops.input_position(llm="bottom")
322
+ # @op("Text embedder")
323
+ # def text_embedder(llm, *, model="text-embedding-ada-002"):
324
+ # llm = llm[0]["llm"]
325
+ # text_embedder = TextEmbedder(llm=llm, model=model)
326
+ # return {"text_embedder": text_embedder}
327
 
328
 
329
+ # @output_on_top
330
+ # @ops.input_position(vector_store="bottom", text_embedder="bottom")
331
+ # @op("RAG graph")
332
+ # def rag_graph(vector_store, text_embedder):
333
+ # vector_store = vector_store[0]["vector_store"]
334
+ # text_embedder = text_embedder[0]["text_embedder"]
335
+ # rag_graph = RAGGraph(
336
+ # PandasKnowledgeBaseGraph(vector_store=vector_store, text_embedder=text_embedder)
337
+ # )
338
+ # return {"rag_graph": rag_graph}
339
 
340
 
341
@output_on_top
@ops.input_position(rag_graph="bottom")
@op("LynxScribe RAG Graph Chatbot Builder")
@mem.cache
def ls_rag_chatbot_builder(
    file_urls,
    rag_graph,
    *,
    scenario_file: str = "uploads/lynx_chatbot_scenario_selector.yaml",
    node_types: str = "intent_cluster",
    input_type: str = "v1",
):
    """
    Builds up a RAG Graph-based chatbot knowledge base.

    It loads the chatbot from an existing folder of pre-saved files
    (only the "v1" layout is supported for now).

    Args:
        file_urls: Dict with a "file_urls" list of knowledge-base file URLs
            (output of the cloud file loader op).
        rag_graph: One-element input list whose first item holds the
            "rag_graph" to populate.
        scenario_file: Path of the YAML file describing the chat scenarios.
        node_types: Comma-separated node types used by the scenario selector.
        input_type: Knowledge-base format version; only "v1" is supported.

    Returns:
        Dict with a "knowledge_base" entry bundling the populated RAG graph
        and the scenario selector.

    TODO: Later, we should not use these saved files, but we should build
    up the chatbot from scratch (will be added soon). That time we will
    add the summarizer-related parameters (LLM interface and model).
    TODO: Later, the scenario selector can be built up synthetically from
    the input documents - or semi-automated.
    TODO: Currently, we are not affected by the embedder, as the files are
    pre-loaded, so the text embedder should have the same model as the
    one used in the files.
    """

    scenarios = load_config(scenario_file)
    node_types = [t.strip() for t in node_types.split(",")]

    # handling inputs
    file_urls = file_urls["file_urls"]
    rag_graph = rag_graph[0]["rag_graph"]

    # loading the v1 knowledge base (temporary solution: pre-saved files)
    if input_type == "v1":

        def _required_file(name_part: str) -> str:
            """Return the first URL containing `name_part`, or fail loudly."""
            matches = [f for f in file_urls if name_part in f]
            if not matches:
                # An explicit error beats the opaque IndexError that a bare
                # `[...][0]` lookup raises when a required file is missing.
                raise ValueError(
                    f"Knowledge base file matching '{name_part}' not found "
                    f"among the loaded file URLs."
                )
            return matches[0]

        rag_graph.kg_base.load_v1_knowledge_base(
            nodes_path=_required_file("nodes.p"),
            edges_path=_required_file("edges.p"),
            template_cluster_path=_required_file("clusters.p"),
        )
    elif input_type == "v2":
        raise ValueError("Currently only v1 input type is supported.")
    else:
        raise ValueError(f"Input type '{input_type}' is not supported.")

    # loading the scenarios
    scenario_selector = ScenarioSelector(
        scenarios=[Scenario(**scenario) for scenario in scenarios],
        node_types=node_types,
    )

    # TODO: later we should unify this "knowledge base" object across the
    # functions; it could always be an input of a RAG Chatbot and other apps.
    return {
        "knowledge_base": {
            "rag_graph": rag_graph,
            "scenario_selector": scenario_selector,
        }
    }
405
 
406
+
407
+ # @output_on_top
408
+ # @op("Scenario selector")
409
+ # def scenario_selector(*, scenario_file: str, node_types="intent_cluster"):
410
+ # scenarios = load_config(scenario_file)
411
+ # node_types = [t.strip() for t in node_types.split(",")]
412
+ # scenario_selector = ScenarioSelector(
413
+ # scenarios=[Scenario(**scenario) for scenario in scenarios],
414
+ # node_types=node_types,
415
+ # )
416
+ # return {"scenario_selector": scenario_selector}
417
 
418
 
419
@output_on_top
@ops.input_position(knowledge_base="bottom", chat_processor="bottom")
@op("LynxScribe RAG Graph Chatbot Backend")
def ls_rag_chatbot_backend(
    knowledge_base,
    chat_processor,
    *,
    negative_answer=DEFAULT_NEGATIVE_ANSWER,
    retriever_limits_by_type="{}",
    retriever_strict_limits=True,
    retriever_overall_chunk_limit=20,
    retriever_overall_token_limit=3000,
    retriever_max_iterations=3,
    llm_interface: str = "openai",
    llm_model_name: str = "gpt-4o",
    # api_key_name: str = "OPENAI_API_KEY",
):
    """
    Returns a chatbot back-end (ChatAPI) instance.

    Wires the knowledge base (RAG graph + scenario selector) and the chat
    processor into a RAGChatbot, then wraps it in a ChatAPI using the
    configured LLM interface and model.
    """

    # handling inputs
    kb = knowledge_base[0]["knowledge_base"]
    processor = chat_processor[0]["chat_processor"]

    # connecting to the LLM
    # NOTE(review): API-key selection is deferred for now:
    # llm_params["api_key"] = os.getenv(api_key_name)
    llm = get_llm_engine(name=llm_interface)

    # generating the RAG Chatbot with the retriever settings
    chatbot = RAGChatbot(
        rag_graph=kb["rag_graph"],
        scenario_selector=kb["scenario_selector"],
        llm=llm,
        negative_answer=negative_answer,
        limits_by_type=json.loads(retriever_limits_by_type),
        strict_limits=retriever_strict_limits,
        max_results=retriever_overall_chunk_limit,
        token_limit=retriever_overall_token_limit,
        max_iterations=retriever_max_iterations,
    )

    # generating the chatbot back-end
    backend = ChatAPI(
        chatbot=chatbot,
        chat_processor=processor,
        model=llm_model_name,
    )

    return {"chat_api": backend}
478
+
479
+
480
+ # @output_on_top
481
+ # @ops.input_position(rag_graph="bottom", scenario_selector="bottom", llm="bottom")
482
+ # @op("RAG chatbot")
483
+ # def rag_chatbot(
484
+ # rag_graph,
485
+ # scenario_selector,
486
+ # llm,
487
+ # *,
488
+ # negative_answer=DEFAULT_NEGATIVE_ANSWER,
489
+ # limits_by_type="{}",
490
+ # strict_limits=True,
491
+ # max_results=5,
492
+ # ):
493
+ # rag_graph = rag_graph[0]["rag_graph"]
494
+ # scenario_selector = scenario_selector[0]["scenario_selector"]
495
+ # llm = llm[0]["llm"]
496
+ # limits_by_type = json.loads(limits_by_type)
497
+ # rag_chatbot = RAGChatbot(
498
+ # rag_graph=rag_graph,
499
+ # scenario_selector=scenario_selector,
500
+ # llm=llm,
501
+ # negative_answer=negative_answer,
502
+ # limits_by_type=limits_by_type,
503
+ # strict_limits=strict_limits,
504
+ # max_results=max_results,
505
+ # )
506
+ # return {"chatbot": rag_chatbot}
507
 
508
 
509
  @output_on_top
 
573
  return {"text": chat}
574
 
575
 
576
+ # @output_on_top
577
+ # @ops.input_position(chatbot="bottom", chat_processor="bottom", knowledge_base="bottom")
578
+ # @op("Chat API")
579
+ # def chat_api(chatbot, chat_processor, knowledge_base, *, model="gpt-4o-mini"):
580
+ # chatbot = chatbot[0]["chatbot"]
581
+ # chat_processor = chat_processor[0]["chat_processor"]
582
+ # knowledge_base = knowledge_base[0]
583
+ # c = ChatAPI(
584
+ # chatbot=chatbot,
585
+ # chat_processor=chat_processor,
586
+ # model=model,
587
+ # )
588
+ # if knowledge_base:
589
+ # c.chatbot.rag_graph.kg_base.load_v1_knowledge_base(**knowledge_base)
590
+ # c.chatbot.scenario_selector.check_compatibility(c.chatbot.rag_graph)
591
+ # return {"chat_api": c}
592
+
593
+
594
+ # @output_on_top
595
+ # @op("Knowledge base")
596
+ # def knowledge_base(
597
+ # *,
598
+ # nodes_path="nodes.pickle",
599
+ # edges_path="edges.pickle",
600
+ # template_cluster_path="tempclusters.pickle",
601
+ # ):
602
+ # return {
603
+ # "nodes_path": nodes_path,
604
+ # "edges_path": edges_path,
605
+ # "template_cluster_path": template_cluster_path,
606
+ # }
607
 
608
 
609
  @op("View", view="table_view")