import time
import requests
def load_data():
    # the datasets library is required: pip install datasets
    import argilla as rg
    from datasets import load_dataset

    rg.init(api_key="admin.apikey")
    # load the dataset from the Hugging Face Hub
    dataset = load_dataset("argilla/gutenberg_spacy-ner", split="train")
    # read in the dataset, assuming it's a dataset for token classification
    dataset_rg = rg.read_datasets(dataset, task="TokenClassification")
    # log the dataset to the Argilla server
    rg.log(dataset_rg, "gutenberg_spacy-ner")
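# Poll the Argilla server until it responds, then load the dataset once.
# Connection errors are expected while the server is still starting up,
# so failed requests are retried rather than treated as fatal.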
if __name__ == '__main__':
    while True:
        try:
            response = requests.get("http://localhost:6900/")
            if response.status_code == 200:
                load_data()
                print("Data Loaded!")
                break
            else:
                time.sleep(5)
        except Exception as e:
            print(e)
            time.sleep(10)
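# A minimal sketch (an assumption, not part of the original script) of how the
# logged records could be checked afterwards with the same legacy Argilla client:
#
#   import argilla as rg
#   rg.init(api_key="admin.apikey")
#   records = rg.load("gutenberg_spacy-ner")
#   print(f"loaded {len(records)} records")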