import logging
from datetime import datetime, timedelta

import duckdb
import gradio as gr
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns

def get_logger():
    """Configure and return a module-level logger with a stream handler."""
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    # stream handler and formatter
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)
    return logger

logger = get_logger()
def prepare_data():
    """
    Get all data from the parquet files
    """
    logger.info("Getting all data")
    con = duckdb.connect(":memory:")
    # Query to fetch the live markets data
    query = """
        SELECT *
        FROM read_parquet('./live_data/markets_live_data.parquet')
    """
    df = con.execute(query).fetchdf()
    con.close()
    return df
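
# A minimal sketch of the plotting helper referenced (and still commented out) in the
# UI below. It is only an assumption of how such a plot could look: the column names
# "open_timestamp" and "yes_probability" are hypothetical and would need to be replaced
# with the actual schema of markets_live_data.parquet.
def plot_daily_market_distributions(markets_df: pd.DataFrame) -> plt.Figure:
    """Return a figure with the daily distribution of market probabilities (sketch)."""
    daily = markets_df.copy()
    # group markets by the calendar day they were opened (assumed timestamp column)
    daily["day"] = pd.to_datetime(daily["open_timestamp"]).dt.date
    fig, ax = plt.subplots(figsize=(10, 5))
    # one violin per day showing the spread of the assumed probability column
    sns.violinplot(data=daily, x="day", y="yes_probability", ax=ax)
    ax.set_xlabel("Day")
    ax.set_ylabel("Predicted probability")
    ax.tick_params(axis="x", rotation=45)
    fig.tight_layout()
    return fig
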
demo = gr.Blocks()
markets_data = prepare_data()
with demo:
    gr.HTML("<h1>Olas Predict Live Markets</h1>")
    gr.Markdown("This app shows the distributions of predictions on the live markets.")

    with gr.Tabs():
        with gr.TabItem("💹 Probability distributions"):
            with gr.Row():
                gr.Markdown("# Daily probability distribution of live markets")
            with gr.Row():
                # TODO: plot the daily distributions once the helper is ready
                gr.Markdown("Under construction (WIP)")
                # daily_distributions = plot_daily_market_distributions(markets_data)

demo.queue(default_concurrency_limit=40).launch()