johaunh committed
Commit · 049a574
1 Parent(s): bf917cb
Resolve merge error
main.py
CHANGED
@@ -3,7 +3,6 @@ import re
 import secrets
 import string
 import yaml
-import yaml
 from datetime import datetime
 from zipfile import ZipFile
 
@@ -11,7 +10,6 @@ import gradio as gr
 import nltk
 import pandas as pd
 from langchain.embeddings import OpenAIEmbeddings
-from langchain.embeddings import OpenAIEmbeddings
 from langchain.chains import SimpleSequentialChain
 from langchain.chat_models import ChatOpenAI
 from nltk.tokenize import sent_tokenize
@@ -158,13 +156,10 @@ def extract_knowledge_graph(api_key: str, batch_size: int, modules: list[str], t
 
     pipeline.init(steps)
 
-    # split text into batches
     # split text into batches
     sentences = sent_tokenize(text)
     batches = [" ".join(sentences[i:i+batch_size])
                for i in range(0, len(sentences), batch_size)]
-    batches = [" ".join(sentences[i:i+batch_size])
-               for i in range(0, len(sentences), batch_size)]
 
     # create KG
     knowledge_graph = []
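The batching logic kept by this hunk splits the input text into sentences and joins consecutive sentences back into fixed-size batches. A minimal standalone sketch of that behavior, assuming NLTK is installed; the sample text and batch_size value are invented for illustration, while the sentences/batches names match the diff:

import nltk
from nltk.tokenize import sent_tokenize

nltk.download("punkt", quiet=True)  # sent_tokenize needs the punkt model

text = "One. Two. Three. Four. Five."  # hypothetical input text
batch_size = 2                         # hypothetical batch size

# split text into batches, as in main.py
sentences = sent_tokenize(text)
batches = [" ".join(sentences[i:i+batch_size])
           for i in range(0, len(sentences), batch_size)]

print(batches)  # ['One. Two.', 'Three. Four.', 'Five.']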
@@ -192,9 +187,6 @@ def extract_knowledge_graph(api_key: str, batch_size: int, modules: list[str], t
         "_timestamp": now,
         "batch_size": batch_size,
         "modules": steps
-        "_timestamp": now,
-        "batch_size": batch_size,
-        "modules": steps
     }
 
     # unique identifier for local saving
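For reference, the corrected block builds a plain dict of run parameters. A minimal sketch with placeholder values; the variable name metadata and the sample values are assumptions, while now, batch_size, and steps are the names used in main.py:

from datetime import datetime

# Placeholder inputs; in main.py these come from the surrounding function.
now = datetime.now().isoformat()
batch_size = 10
steps = ["module_a", "module_b"]  # hypothetical module names

metadata = {  # assumed variable name; the diff shows only the entries
    "_timestamp": now,
    "batch_size": batch_size,
    "modules": steps
}
print(metadata)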