File size: 3,151 Bytes
81f890a
c26932b
 
 
 
 
f7c63a7
0921933
72d7898
f7c63a7
c26932b
 
0921933
 
c26932b
119eea2
 
 
 
 
 
c26932b
119eea2
 
c26932b
0921933
f7c63a7
c26932b
119eea2
c26932b
 
119eea2
6d9bc02
c26932b
0921933
c26932b
0921933
 
c26932b
 
0921933
 
 
c26932b
 
0921933
c26932b
6a1288a
119eea2
 
 
6a1288a
 
119eea2
0921933
c26932b
6a1288a
 
 
 
 
 
 
 
 
c26932b
6a1288a
c26932b
 
72d7898
0921933
c26932b
0921933
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75

# Import the necessary libraries:
# - `gradio` is a library for creating interactive web interfaces
# - `typing` provides type annotations for Python
# - `data_loader` is a custom module that contains functions to read different types of data

import gradio as gr
from typing import Dict, Any
from data_loader import read_dask_data, read_polars_data, read_another_dask_data

def load_and_process_data(dataset_choice: str, num_rows: int) -> Dict[str, Any]:
    """Load the selected dataset and return its first rows as a JSON-safe dict.

    Args:
        dataset_choice: Human-readable dataset name; must be one of
            "Dask Data", "Polars Data" or "Another Dask Data".
        num_rows: Number of preview rows to return. Gradio sliders can
            deliver floats, so the value is coerced to ``int`` before use.

    Returns:
        ``{"processed_data": <dict>}`` on success, or ``{"error": <message>}``
        when the choice is invalid or loading/processing fails.
    """
    # Map each UI choice to the loader function that produces the dataset.
    # NOTE: the original local was named `data_loader`, shadowing the
    # `data_loader` module this file imports from; renamed to `loader`.
    dataset_mapping = {
        "Dask Data": read_dask_data,
        "Polars Data": read_polars_data,
        "Another Dask Data": read_another_dask_data,
    }

    # Validate the choice up front, outside the try block, so a bad
    # selection yields its specific message instead of being conflated
    # with a loader failure by the generic handler below.
    loader = dataset_mapping.get(dataset_choice)
    if loader is None:
        return {"error": "Invalid dataset choice."}

    try:
        # Load the data and keep only the requested preview size.
        # int(): Gradio sliders may emit floats, and .head() wants an int.
        data = loader()
        processed_data = data.head(int(num_rows))

        # .to_dict() makes the frame JSON-serializable for the gr.JSON output.
        return {
            "processed_data": processed_data.to_dict()
        }
    except Exception as e:
        # Boundary handler for a UI callback: log the failure and return a
        # safe message rather than letting the interface crash.
        print(f"Error processing data: {str(e)}")
        return {"error": "Unable to process data. Please check the logs for details."}

def create_interface():
    """Build and launch the Gradio demo for previewing datasets.

    The interface exposes a dataset dropdown and a row-count slider; pressing
    the button runs ``load_and_process_data`` and shows the result in a JSON
    viewer.
    """
    # Build all components inside the Blocks context so they are rendered
    # automatically in layout order. The original code created them outside
    # the context and then called .render() on an already-rendered gr.JSON,
    # which raises a duplicate-render error.
    with gr.Blocks() as demo:
        gr.Markdown("# Enhanced Dataset Loader Demo")
        gr.Markdown("Interact with various datasets and select the amount of data to display.")
        with gr.Row():
            dataset_choice = gr.Dropdown(
                choices=["Dask Data", "Polars Data", "Another Dask Data"],
                label="Select Dataset"
            )
            num_rows = gr.Slider(
                minimum=1, maximum=100, value=5, label="Number of Rows to Display"
            )
        load_button = gr.Button("Load Data")
        processed_data_output = gr.JSON(label="Processed Data")

        # Wire the callback through a component event. The original called
        # `demo.add(...)`, which is not a gr.Blocks API (AttributeError at
        # runtime); Blocks connects functions via events such as Button.click.
        load_button.click(
            load_and_process_data,
            inputs=[dataset_choice, num_rows],
            outputs=processed_data_output,
        )

    # Launch the Gradio interface (blocks until the server is stopped).
    demo.launch()

# Build and launch the UI only when this file is executed as a script,
# not when it is imported as a module (e.g. by tests or other apps).
if __name__ == "__main__":
    create_interface()