{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Transform the RAW data (Binance BTCUSDT, all trades) -> OHLC bars"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Directory with the raw Binance trade dumps (Parquet) and the output root\n",
    "raw_dataset_path = '../datasets/BTCUSDT-Trades/'\n",
    "output_path = '../output'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "from loguru import logger\n",
    "import dask.dataframe as dd\n",
    "from dask.diagnostics import ProgressBar\n",
    "import pandas as pd\n",
    "\n",
    "logger.remove()\n",
    "logger.add(lambda msg: print(msg, end=\"\"), level=\"INFO\")\n",
    "\n",
    "from utils import create_dollar_bars\n",
    "\n",
    "def dataSampler(barType=\"standard\", samplerType=\"time\", samplerAmount=100, maxRecords=None):\n",
    "    barTypeDictionary = {\n",
    "        \"standard\": \"Standard\",\n",
    "        \"imbalance\": \"Imbalance\",\n",
    "        \"run\": \"Run (Iceberg Order)\"\n",
    "    }\n",
    "    samplerDictionary = {\n",
    "        \"time\": \"Time\",\n",
    "        \"ticks\": \"Tick\",\n",
    "        \"volume\": \"Volume\",\n",
    "        \"dollar\": \"Dollar\"\n",
    "    }\n",
    "\n",
    "    output_directory = os.path.join(output_path, f\"{samplerType}-bars-[{samplerAmount}]\")\n",
    "    if not os.path.exists(output_directory):\n",
    "        os.makedirs(output_directory)\n",
    "        logger.info(f\"Directory created: {output_directory}\")\n",
    "\n",
    "    print(f\"Creating {barTypeDictionary[barType]} {samplerDictionary[samplerType]} bars, grouped every {samplerAmount}...\")\n",
    "\n",
    "    # Check whether the output directory already holds .parquet files (cache hit)\n",
    "    parquet_files_output = [f for f in os.listdir(output_directory) if f.endswith('.parquet')]\n",
    "    if parquet_files_output:\n",
    "        logger.info(f\"'{output_directory}' already exists and contains .parquet files. Loading from {output_directory}...\")\n",
    "\n",
    "        # Load all cached .parquet files with Dask\n",
    "        try:\n",
    "            with ProgressBar():\n",
    "                bars = dd.read_parquet(os.path.join(output_directory, '*.parquet')).compute()\n",
    "            logger.info(\"'dollar_bars' loaded successfully.\")\n",
    "            return bars\n",
    "        except Exception as e:\n",
    "            logger.error(f\"Error loading .parquet files: {e}\")\n",
    "\n",
    "    logger.info(\"Creating 'dollar_bars'...\")\n",
    "\n",
    "    dollar_bars_path = os.path.join(output_directory, 'dollar_bars.parquet')\n",
    "\n",
    "    # List all Parquet files in raw_dataset_path, sorted so trades stay in chronological order\n",
    "    parquet_files = [os.path.join(raw_dataset_path, f) for f in os.listdir(raw_dataset_path) if f.endswith('.parquet')]\n",
    "    parquet_files.sort()\n",
    "\n",
    "    if not parquet_files:\n",
    "        logger.warning(f\"No .parquet files found in '{raw_dataset_path}'.\")\n",
    "        return pd.DataFrame()\n",
    "\n",
    "    logger.info(f\"Total .parquet files to process: {len(parquet_files)}\")\n",
    "\n",
    "    # Load the raw .parquet files lazily with Dask, preserving the sorted order\n",
    "    try:\n",
    "        df_dask = dd.read_parquet(parquet_files)\n",
    "        logger.info(\"All .parquet files loaded successfully.\")\n",
    "    except Exception as e:\n",
    "        logger.error(f\"Error loading .parquet files: {e}\")\n",
    "        return pd.DataFrame()\n",
    "\n",
    "    # If maxRecords is set, limit the (still lazy) DataFrame\n",
    "    if maxRecords is not None:\n",
    "        df_dask = df_dask.head(maxRecords, compute=False)\n",
    "        logger.info(f\"Record limit set to {maxRecords}.\")\n",
    "\n",
    "    # Create and save 'dollar_bars'\n",
    "    # NOTE: only dollar bars are implemented; barType/samplerType currently only\n",
    "    # affect the log messages and the name of the output directory.\n",
    "    try:\n",
    "        dollar_bars = create_dollar_bars(df_dask, samplerAmount, dollar_bars_path)\n",
    "        logger.info(\"'dollar_bars' created and saved successfully.\")\n",
    "        return dollar_bars\n",
    "    except Exception as e:\n",
    "        logger.error(f\"Error creating 'dollar_bars': {e}\")\n",
    "        return pd.DataFrame()\n"
   ]
  },
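  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The helper `create_dollar_bars` is imported from the local `utils` module, which is not shown in this notebook. The cell below is a minimal, self-contained sketch of the dollar-bar idea it presumably implements, assuming the raw trades expose `price`, `qty` and `time` columns: a bar is closed each time the cumulative traded dollar value (price × quantity) crosses the configured threshold, and the OHLC of the trades inside that bar is recorded. The function and column names here are illustrative assumptions; `dataSampler` does not use this sketch."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Minimal sketch of dollar-bar construction (illustrative only).\n",
    "# Assumptions: raw trades carry 'price', 'qty' and 'time' columns; the real\n",
    "# implementation lives in utils.create_dollar_bars and may differ.\n",
    "import pandas as pd\n",
    "\n",
    "def dollar_bars_sketch(trades: pd.DataFrame, dollar_threshold: float) -> pd.DataFrame:\n",
    "    \"\"\"Group raw trades into OHLC bars of roughly equal traded dollar value.\"\"\"\n",
    "    dollar_value = trades['price'] * trades['qty']\n",
    "    # A new bar starts whenever the running dollar value crosses another\n",
    "    # multiple of the threshold.\n",
    "    bar_id = (dollar_value.cumsum() // dollar_threshold).astype(int)\n",
    "    grouped = trades.groupby(bar_id)\n",
    "    bars = pd.DataFrame({\n",
    "        'open_time': grouped['time'].first(),\n",
    "        'close_time': grouped['time'].last(),\n",
    "        'open': grouped['price'].first(),\n",
    "        'high': grouped['price'].max(),\n",
    "        'low': grouped['price'].min(),\n",
    "        'close': grouped['price'].last(),\n",
    "        'volume': grouped['qty'].sum(),\n",
    "        'dollar_volume': dollar_value.groupby(bar_id).sum(),\n",
    "    })\n",
    "    return bars.reset_index(drop=True)\n",
    "\n",
    "# Tiny synthetic example (not real Binance data).\n",
    "demo_trades = pd.DataFrame({\n",
    "    'time': pd.date_range('2024-01-01', periods=8, freq='s'),\n",
    "    'price': [100.0, 101.0, 99.5, 100.5, 102.0, 101.5, 103.0, 102.5],\n",
    "    'qty': [0.5, 1.0, 0.8, 1.2, 0.4, 0.9, 1.1, 0.7],\n",
    "})\n",
    "dollar_bars_sketch(demo_trades, dollar_threshold=150.0)"
   ]
  },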
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Perform Sampling"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Build dollar bars, each aggregating roughly 10 million USDT of traded value\n",
    "serieBars = dataSampler(\n",
    "    barType=\"standard\",\n",
    "    samplerType=\"dollar\",\n",
    "    samplerAmount=10_000_000\n",
    ")\n",
    "\n",
    "sample_bars = serieBars.head()\n",
    "display(sample_bars)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "base",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}