Upload 284 files
This view is limited to 50 files because it contains too many changes.
- .gitignore +18 -0
- LICENSE +19 -0
- Makefile +70 -0
- README.md +48 -3
- __init__.py +0 -0
- common_utils.py +70 -0
- dask_queries/__init__.py +0 -0
- dask_queries/executor.py +4 -0
- dask_queries/q1.py +73 -0
- dask_queries/q2.py +157 -0
- dask_queries/q3.py +63 -0
- dask_queries/q4.py +47 -0
- dask_queries/q5.py +64 -0
- dask_queries/q6.py +46 -0
- dask_queries/q7.py +139 -0
- dask_queries/utils.py +119 -0
- duckdb_queries/__init__.py +0 -0
- duckdb_queries/executor.py +4 -0
- duckdb_queries/q1.py +41 -0
- duckdb_queries/q10.py +55 -0
- duckdb_queries/q11.py +49 -0
- duckdb_queries/q12.py +48 -0
- duckdb_queries/q13.py +42 -0
- duckdb_queries/q14.py +34 -0
- duckdb_queries/q15.py +57 -0
- duckdb_queries/q16.py +52 -0
- duckdb_queries/q17.py +38 -0
- duckdb_queries/q18.py +55 -0
- duckdb_queries/q19.py +56 -0
- duckdb_queries/q2.py +68 -0
- duckdb_queries/q20.py +61 -0
- duckdb_queries/q21.py +63 -0
- duckdb_queries/q22.py +57 -0
- duckdb_queries/q3.py +45 -0
- duckdb_queries/q4.py +42 -0
- duckdb_queries/q5.py +49 -0
- duckdb_queries/q6.py +29 -0
- duckdb_queries/q7.py +63 -0
- duckdb_queries/q8.py +65 -0
- duckdb_queries/q9.py +57 -0
- duckdb_queries/utils.py +115 -0
- fireducks_queries/__init__.py +0 -0
- fireducks_queries/executor.py +4 -0
- fireducks_queries/q1.py +71 -0
- fireducks_queries/q2.py +159 -0
- fireducks_queries/q3.py +62 -0
- fireducks_queries/q4.py +42 -0
- fireducks_queries/q5.py +64 -0
- fireducks_queries/q6.py +44 -0
- fireducks_queries/q7.py +137 -0
.gitignore
ADDED
@@ -0,0 +1,18 @@
+.idea/
+.swp
+tables_scale_1*/
+*.tbl
+*.o
+dbgen
+.venv/
+__pycache__/
+qgen
+dask_user_space/
+dask-worker-space/
+timings*.csv
+plots/
+**/spark-warehouse
+perf.*
+target/
+**/flamegraph.svg
+Cargo.lock
LICENSE
ADDED
@@ -0,0 +1,19 @@
+Copyright (c) 2020 Ritchie Vink
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
Makefile
ADDED
@@ -0,0 +1,70 @@
+SHELL=/bin/bash
+PYTHON=.venv/bin/python
+
+.venv:
+	@python -m venv .venv
+	@.venv/bin/pip install -U pip
+	@.venv/bin/pip install --no-cache-dir -r requirements.txt
+
+.fireducks:
+	@python -m venv .fireducks
+	@.fireducks/bin/pip install -U pip
+	@.fireducks/bin/pip install --no-cache-dir -r requirements-fireducks.txt
+
+clean-tpch-dbgen:
+	$(MAKE) -C tpch-dbgen clean
+
+clean-venv:
+	rm -r .venv
+
+clean-tables:
+	rm -r tables_scale_*
+
+clean: clean-tpch-dbgen clean-venv
+
+tables_scale_1: .venv
+	$(MAKE) -C tpch-dbgen all
+	cd tpch-dbgen && ./dbgen -vf -s 1 && cd ..
+	mkdir -p "tables_scale_1"
+	mv tpch-dbgen/*.tbl tables_scale_1/
+	.venv/bin/python prepare_files.py 1
+
+tables_scale_10: .venv
+	$(MAKE) -C tpch-dbgen all
+	cd tpch-dbgen && ./dbgen -vf -s 10 && cd ..
+	mkdir -p "tables_scale_10"
+	mv tpch-dbgen/*.tbl tables_scale_10/
+	.venv/bin/python prepare_files.py 10
+
+run_polars: .venv
+	.venv/bin/python -m polars_queries.executor
+
+run_pandas: .venv
+	.venv/bin/python -m pandas_queries.executor
+
+run_fireducks: .fireducks
+	.fireducks/bin/python -m fireducks_queries.executor
+
+run_dask: .venv
+	.venv/bin/python -m dask_queries.executor
+
+run_modin: .venv
+	.venv/bin/python -m modin_queries.executor
+
+run_vaex: .venv
+	.venv/bin/python -m vaex_queries.executor
+
+run_spark: .venv
+	.venv/bin/python -m spark_queries.executor
+
+run_duckdb: .venv
+	.venv/bin/python -m duckdb_queries.executor
+
+plot_results: .venv
+	.venv/bin/python -m scripts.plot_results
+
+run_all: run_polars run_pandas run_fireducks
+
+pre-commit:
+	.venv/bin/python -m isort .
+	.venv/bin/python -m black .
README.md
CHANGED
@@ -1,3 +1,48 @@
-
-
-
+polars-tpch
+===========
+
+This repo contains the code used for performance evaluation of polars. The benchmarks are TPC-standardised queries and data designed to test the performance of "real" workflows.
+
+From the [TPC website](https://www.tpc.org/tpch/):
+> TPC-H is a decision support benchmark. It consists of a suite of business-oriented ad hoc queries and concurrent data modifications. The queries and the data populating the database have been chosen to have broad industry-wide relevance. This benchmark illustrates decision support systems that examine large volumes of data, execute queries with a high degree of complexity, and give answers to critical business questions.
+
+## Generating TPC-H Data
+
+### Project setup
+
+```shell
+# clone this repository
+git clone https://github.com/pola-rs/tpch.git
+cd tpch/tpch-dbgen
+
+# build tpch-dbgen
+make
+```
+
+Notes:
+
+- On macOS, the above `make` command fails while compiling with an error like the one below:
+
+  ```shell
+  bm_utils.c:71:10: fatal error: 'malloc.h' file not found
+  #include <malloc.h>
+           ^~~~~~~~~~
+  1 error generated.
+  make: *** [bm_utils.o] Error 1
+  ```
+  To fix this, change the include `#include <malloc.h>` to `#include <sys/malloc.h>` in the files where the error
+  is reported (`bm_utils.c` and `varsub.c`), then re-run `make`.
+
+### Execute
+
+```shell
+# change directory to the root of the repository
+cd ../
+./run.sh
+```
+
+This will do the following:
+
+- Create a new virtual environment with all required dependencies.
+- Generate data for the benchmarks.
+- Run the benchmark suite.
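The Makefile targets and the environment switches defined in `common_utils.py` below are what `run.sh` drives; a single backend can also be run by hand. A minimal sketch, assuming scale-factor-1 tables have already been generated:

```python
# hypothetical manual run, mirroring what execute_all() in common_utils.py does;
# LOG_TIMINGS and SCALE_FACTOR are the environment switches defined there
import os
import subprocess
import sys

os.environ["LOG_TIMINGS"] = "1"   # append results to timings.csv instead of validating them
os.environ["SCALE_FACTOR"] = "1"  # read tables from tables_scale_1/
subprocess.run([sys.executable, "-m", "duckdb_queries.q1"], check=True)
```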
__init__.py
ADDED
File without changes
common_utils.py
ADDED
@@ -0,0 +1,70 @@
+import os
+import re
+import sys
+from subprocess import run
+
+from linetimer import CodeTimer
+
+INCLUDE_IO = bool(os.environ.get("INCLUDE_IO", False))
+SHOW_RESULTS = bool(os.environ.get("SHOW_RESULTS", False))
+LOG_TIMINGS = bool(os.environ.get("LOG_TIMINGS", False))
+SCALE_FACTOR = os.environ.get("SCALE_FACTOR", "1")
+WRITE_PLOT = bool(os.environ.get("WRITE_PLOT", False))
+FILE_TYPE = os.environ.get("FILE_TYPE", "parquet")
+SPARK_LOG_LEVEL = os.environ.get("SPARK_LOG_LEVEL", "ERROR")
+print("include io:", INCLUDE_IO)
+print("show results:", SHOW_RESULTS)
+print("log timings:", LOG_TIMINGS)
+print("file type:", FILE_TYPE)
+
+CWD = os.path.dirname(os.path.realpath(__file__))
+DATASET_BASE_DIR = os.path.join(CWD, f"tables_scale_{SCALE_FACTOR}")
+ANSWERS_BASE_DIR = os.path.join(CWD, "tpch-dbgen/answers")
+TIMINGS_FILE = os.path.join(CWD, os.environ.get("TIMINGS_FILE", "timings.csv"))
+DEFAULT_PLOTS_DIR = os.path.join(CWD, "plots")
+
+
+def append_row(solution: str, q: str, secs: float, version: str, success=True):
+    with open(TIMINGS_FILE, "a") as f:
+        if f.tell() == 0:
+            f.write("solution,version,query_no,duration[s],include_io,success\n")
+        f.write(f"{solution},{version},{q},{secs},{INCLUDE_IO},{success}\n")
+
+
+def on_second_call(func):
+    def helper(*args, **kwargs):
+        helper.calls += 1
+
+        # first call is outside the function
+        # this call must set the result
+        if helper.calls == 1:
+            # include IO will compute the result on the 2nd call
+            if not INCLUDE_IO:
+                helper.result = func(*args, **kwargs)
+            return helper.result
+
+        # second call is in the query, now we set the result
+        if INCLUDE_IO and helper.calls == 2:
+            helper.result = func(*args, **kwargs)
+
+        return helper.result
+
+    helper.calls = 0
+    helper.result = None
+
+    return helper
+
+
+def execute_all(solution: str):
+    package_name = f"{solution}_queries"
+
+    expr = re.compile(r"^q(\d+).py")
+    num_queries = 0
+    for file in os.listdir(package_name):
+        g = expr.search(file)
+        if g is not None:
+            num_queries = max(int(g.group(1)), num_queries)
+
+    with CodeTimer(name=f"Overall execution of ALL {solution} queries", unit="s"):
+        for i in range(1, num_queries + 1):
+            run([sys.executable, "-m", f"{package_name}.q{i}"])
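The `on_second_call` decorator above is how the per-backend query modules keep IO out of the timed section by default: the first call (made during setup) loads and caches, and the second call (made inside the timed query) reuses the cache unless `INCLUDE_IO` is set. A minimal sketch of that contract, with a toy loader standing in for the real parquet readers:

```python
# toy illustration of the on_second_call contract; get_toy_ds stands in for the dataset loaders
from common_utils import on_second_call


@on_second_call
def get_toy_ds():
    print("expensive load")  # the real loaders read parquet files here
    return [1, 2, 3]


get_toy_ds()         # call 1, during setup: loads and caches, unless INCLUDE_IO is set
data = get_toy_ds()  # call 2, inside the timed query: returns the cache, or loads now if INCLUDE_IO
```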
dask_queries/__init__.py
ADDED
File without changes
dask_queries/executor.py
ADDED
@@ -0,0 +1,4 @@
+from common_utils import execute_all
+
+if __name__ == "__main__":
+    execute_all("dask")
dask_queries/q1.py
ADDED
@@ -0,0 +1,73 @@
+from datetime import date, datetime
+
+from dask_queries import utils
+
+Q_NUM = 1
+
+
+def q():
+    VAR1 = datetime(1998, 9, 2)
+
+    lineitem = utils.get_line_item_ds
+    # first call one time to cache in case we don't include the IO times
+    lineitem()
+
+    def query():
+        nonlocal lineitem
+        lineitem = lineitem()
+
+        lineitem_filtered = lineitem.loc[
+            :,
+            [
+                "l_quantity",
+                "l_extendedprice",
+                "l_discount",
+                "l_tax",
+                "l_returnflag",
+                "l_linestatus",
+                "l_shipdate",
+                "l_orderkey",
+            ],
+        ]
+        sel = lineitem_filtered.l_shipdate <= VAR1
+        lineitem_filtered = lineitem_filtered[sel].copy()
+        lineitem_filtered["sum_qty"] = lineitem_filtered.l_quantity
+        lineitem_filtered["sum_base_price"] = lineitem_filtered.l_extendedprice
+        lineitem_filtered["avg_qty"] = lineitem_filtered.l_quantity
+        lineitem_filtered["avg_price"] = lineitem_filtered.l_extendedprice
+        lineitem_filtered["sum_disc_price"] = lineitem_filtered.l_extendedprice * (
+            1 - lineitem_filtered.l_discount
+        )
+        lineitem_filtered["sum_charge"] = (
+            lineitem_filtered.l_extendedprice
+            * (1 - lineitem_filtered.l_discount)
+            * (1 + lineitem_filtered.l_tax)
+        )
+        lineitem_filtered["avg_disc"] = lineitem_filtered.l_discount
+        lineitem_filtered["count_order"] = lineitem_filtered.l_orderkey
+        gb = lineitem_filtered.groupby(["l_returnflag", "l_linestatus"])
+
+        total = gb.agg(
+            {
+                "sum_qty": "sum",
+                "sum_base_price": "sum",
+                "sum_disc_price": "sum",
+                "sum_charge": "sum",
+                "avg_qty": "mean",
+                "avg_price": "mean",
+                "avg_disc": "mean",
+                "count_order": "count",
+            }
+        )
+
+        result_df = (
+            total.compute().reset_index().sort_values(["l_returnflag", "l_linestatus"])
+        )
+
+        return result_df
+
+    utils.run_query(Q_NUM, query)
+
+
+if __name__ == "__main__":
+    q()
dask_queries/q2.py
ADDED
@@ -0,0 +1,157 @@
+from dask_queries import utils
+
+Q_NUM = 2
+
+
+def q():
+    var1 = 15
+    var2 = "BRASS"
+    var3 = "EUROPE"
+
+    region_ds = utils.get_region_ds
+    nation_ds = utils.get_nation_ds
+    supplier_ds = utils.get_supplier_ds
+    part_ds = utils.get_part_ds
+    part_supp_ds = utils.get_part_supp_ds
+
+    # first call one time to cache in case we don't include the IO times
+    region_ds()
+    nation_ds()
+    supplier_ds()
+    part_ds()
+    part_supp_ds()
+
+    def query():
+        nonlocal region_ds
+        nonlocal nation_ds
+        nonlocal supplier_ds
+        nonlocal part_ds
+        nonlocal part_supp_ds
+        region_ds = region_ds()
+        nation_ds = nation_ds()
+        supplier_ds = supplier_ds()
+        part_ds = part_ds()
+        part_supp_ds = part_supp_ds()
+
+        nation_filtered = nation_ds[["n_nationkey", "n_name", "n_regionkey"]]
+        region_filtered = region_ds[(region_ds["r_name"] == var3)]
+        region_filtered = region_filtered[["r_regionkey"]]
+        r_n_merged = nation_filtered.merge(
+            region_filtered, left_on="n_regionkey", right_on="r_regionkey", how="inner"
+        )
+        r_n_merged = r_n_merged.loc[:, ["n_nationkey", "n_name"]]
+        supplier_filtered = supplier_ds.loc[
+            :,
+            [
+                "s_suppkey",
+                "s_name",
+                "s_address",
+                "s_nationkey",
+                "s_phone",
+                "s_acctbal",
+                "s_comment",
+            ],
+        ]
+        s_r_n_merged = r_n_merged.merge(
+            supplier_filtered,
+            left_on="n_nationkey",
+            right_on="s_nationkey",
+            how="inner",
+        )
+        s_r_n_merged = s_r_n_merged.loc[
+            :,
+            [
+                "n_name",
+                "s_suppkey",
+                "s_name",
+                "s_address",
+                "s_phone",
+                "s_acctbal",
+                "s_comment",
+            ],
+        ]
+        partsupp_filtered = part_supp_ds.loc[
+            :, ["ps_partkey", "ps_suppkey", "ps_supplycost"]
+        ]
+        ps_s_r_n_merged = s_r_n_merged.merge(
+            partsupp_filtered, left_on="s_suppkey", right_on="ps_suppkey", how="inner"
+        )
+        ps_s_r_n_merged = ps_s_r_n_merged.loc[
+            :,
+            [
+                "n_name",
+                "s_name",
+                "s_address",
+                "s_phone",
+                "s_acctbal",
+                "s_comment",
+                "ps_partkey",
+                "ps_supplycost",
+            ],
+        ]
+        part_filtered = part_ds.loc[:, ["p_partkey", "p_mfgr", "p_size", "p_type"]]
+        part_filtered = part_filtered[
+            (part_filtered["p_size"] == var1)
+            & (part_filtered["p_type"].astype(str).str.endswith(var2))
+        ]
+        part_filtered = part_filtered.loc[:, ["p_partkey", "p_mfgr"]]
+        merged_df = part_filtered.merge(
+            ps_s_r_n_merged, left_on="p_partkey", right_on="ps_partkey", how="inner"
+        )
+        merged_df = merged_df.loc[
+            :,
+            [
+                "n_name",
+                "s_name",
+                "s_address",
+                "s_phone",
+                "s_acctbal",
+                "s_comment",
+                "ps_supplycost",
+                "p_partkey",
+                "p_mfgr",
+            ],
+        ]
+        min_values = merged_df.groupby("p_partkey")["ps_supplycost"].min().reset_index()
+        min_values.columns = ["P_PARTKEY_CPY", "MIN_SUPPLYCOST"]
+        merged_df = merged_df.merge(
+            min_values,
+            left_on=["p_partkey", "ps_supplycost"],
+            right_on=["P_PARTKEY_CPY", "MIN_SUPPLYCOST"],
+            how="inner",
+        )
+        result_df = merged_df.loc[
+            :,
+            [
+                "s_acctbal",
+                "s_name",
+                "n_name",
+                "p_partkey",
+                "p_mfgr",
+                "s_address",
+                "s_phone",
+                "s_comment",
+            ],
+        ].compute()
+        result_df = result_df.sort_values(
+            by=[
+                "s_acctbal",
+                "n_name",
+                "s_name",
+                "p_partkey",
+            ],
+            ascending=[
+                False,
+                True,
+                True,
+                True,
+            ],
+        )[:100]
+
+        return result_df
+
+    utils.run_query(Q_NUM, query)
+
+
+if __name__ == "__main__":
+    q()
dask_queries/q3.py
ADDED
@@ -0,0 +1,63 @@
+import datetime
+
+from dask_queries import utils
+
+Q_NUM = 3
+
+
+def q():
+    var1 = datetime.datetime.strptime("1995-03-15", "%Y-%m-%d")
+    var2 = "BUILDING"
+
+    line_item_ds = utils.get_line_item_ds
+    orders_ds = utils.get_orders_ds
+    customer_ds = utils.get_customer_ds
+
+    # first call one time to cache in case we don't include the IO times
+    line_item_ds()
+    orders_ds()
+    customer_ds()
+
+    def query():
+        nonlocal line_item_ds
+        nonlocal orders_ds
+        nonlocal customer_ds
+        line_item_ds = line_item_ds()
+        orders_ds = orders_ds()
+        customer_ds = customer_ds()
+
+        lineitem_filtered = line_item_ds.loc[
+            :, ["l_orderkey", "l_extendedprice", "l_discount", "l_shipdate"]
+        ]
+        orders_filtered = orders_ds.loc[
+            :, ["o_orderkey", "o_custkey", "o_orderdate", "o_shippriority"]
+        ]
+        customer_filtered = customer_ds.loc[:, ["c_mktsegment", "c_custkey"]]
+        lsel = lineitem_filtered.l_shipdate > var1
+        osel = orders_filtered.o_orderdate < var1
+        csel = customer_filtered.c_mktsegment == var2
+        flineitem = lineitem_filtered[lsel]
+        forders = orders_filtered[osel]
+        fcustomer = customer_filtered[csel]
+        jn1 = fcustomer.merge(forders, left_on="c_custkey", right_on="o_custkey")
+        jn2 = jn1.merge(flineitem, left_on="o_orderkey", right_on="l_orderkey")
+        jn2["revenue"] = jn2.l_extendedprice * (1 - jn2.l_discount)
+        total = (
+            jn2.groupby(["l_orderkey", "o_orderdate", "o_shippriority"])["revenue"]
+            .sum()
+            .compute()
+            .reset_index()
+            .sort_values(["revenue"], ascending=False)
+        )
+
+        result_df = total[:10].loc[
+            :, ["l_orderkey", "revenue", "o_orderdate", "o_shippriority"]
+        ]
+
+        return result_df
+
+    utils.run_query(Q_NUM, query)
+
+
+if __name__ == "__main__":
+    q()
dask_queries/q4.py
ADDED
@@ -0,0 +1,47 @@
+from datetime import datetime
+
+from dask_queries import utils
+
+Q_NUM = 4
+
+
+def q():
+    date1 = datetime.strptime("1993-10-01", "%Y-%m-%d")
+    date2 = datetime.strptime("1993-07-01", "%Y-%m-%d")
+
+    line_item_ds = utils.get_line_item_ds
+    orders_ds = utils.get_orders_ds
+
+    # first call one time to cache in case we don't include the IO times
+    line_item_ds()
+    orders_ds()
+
+    def query():
+        nonlocal line_item_ds
+        nonlocal orders_ds
+        line_item_ds = line_item_ds()
+        orders_ds = orders_ds()
+
+        lsel = line_item_ds.l_commitdate < line_item_ds.l_receiptdate
+        osel = (orders_ds.o_orderdate < date1) & (orders_ds.o_orderdate >= date2)
+        flineitem = line_item_ds[lsel]
+        forders = orders_ds[osel]
+        forders = forders[["o_orderkey", "o_orderpriority"]]
+        # jn = forders[forders["o_orderkey"].compute().isin(flineitem["l_orderkey"])]  # doesn't support isin
+        jn = forders.merge(
+            flineitem, left_on="o_orderkey", right_on="l_orderkey"
+        ).drop_duplicates(subset=["o_orderkey"])[["o_orderpriority", "o_orderkey"]]
+        result_df = (
+            jn.groupby("o_orderpriority")["o_orderkey"]
+            .count()
+            .reset_index()
+            .sort_values(["o_orderpriority"])
+        )
+        result_df = result_df.compute()
+        return result_df.rename({"o_orderkey": "order_count"}, axis=1)
+
+    utils.run_query(Q_NUM, query)
+
+
+if __name__ == "__main__":
+    q()
dask_queries/q5.py
ADDED
@@ -0,0 +1,64 @@
+import datetime
+
+from dask_queries import utils
+
+Q_NUM = 5
+
+
+def q():
+    date1 = datetime.datetime.strptime("1994-01-01", "%Y-%m-%d")
+    date2 = datetime.datetime.strptime("1995-01-01", "%Y-%m-%d")
+
+    region_ds = utils.get_region_ds
+    nation_ds = utils.get_nation_ds
+    customer_ds = utils.get_customer_ds
+    line_item_ds = utils.get_line_item_ds
+    orders_ds = utils.get_orders_ds
+    supplier_ds = utils.get_supplier_ds
+
+    # first call one time to cache in case we don't include the IO times
+    region_ds()
+    nation_ds()
+    customer_ds()
+    line_item_ds()
+    orders_ds()
+    supplier_ds()
+
+    def query():
+        nonlocal region_ds
+        nonlocal nation_ds
+        nonlocal customer_ds
+        nonlocal line_item_ds
+        nonlocal orders_ds
+        nonlocal supplier_ds
+
+        region_ds = region_ds()
+        nation_ds = nation_ds()
+        customer_ds = customer_ds()
+        line_item_ds = line_item_ds()
+        orders_ds = orders_ds()
+        supplier_ds = supplier_ds()
+
+        rsel = region_ds.r_name == "ASIA"
+        osel = (orders_ds.o_orderdate >= date1) & (orders_ds.o_orderdate < date2)
+        forders = orders_ds[osel]
+        fregion = region_ds[rsel]
+        jn1 = fregion.merge(nation_ds, left_on="r_regionkey", right_on="n_regionkey")
+        jn2 = jn1.merge(customer_ds, left_on="n_nationkey", right_on="c_nationkey")
+        jn3 = jn2.merge(forders, left_on="c_custkey", right_on="o_custkey")
+        jn4 = jn3.merge(line_item_ds, left_on="o_orderkey", right_on="l_orderkey")
+        jn5 = supplier_ds.merge(
+            jn4,
+            left_on=["s_suppkey", "s_nationkey"],
+            right_on=["l_suppkey", "n_nationkey"],
+        )
+        jn5["revenue"] = jn5.l_extendedprice * (1.0 - jn5.l_discount)
+        gb = jn5.groupby("n_name")["revenue"].sum()
+        result_df = gb.compute().reset_index().sort_values("revenue", ascending=False)
+        return result_df
+
+    utils.run_query(Q_NUM, query)
+
+
+if __name__ == "__main__":
+    q()
dask_queries/q6.py
ADDED
@@ -0,0 +1,46 @@
+import datetime
+
+import pandas as pd
+
+from dask_queries import utils
+
+Q_NUM = 6
+
+
+def q():
+    date1 = datetime.datetime.strptime("1994-01-01", "%Y-%m-%d")
+    date2 = datetime.datetime.strptime("1995-01-01", "%Y-%m-%d")
+    var3 = 24
+
+    line_item_ds = utils.get_line_item_ds
+
+    # first call one time to cache in case we don't include the IO times
+    line_item_ds()
+
+    def query():
+        nonlocal line_item_ds
+        line_item_ds = line_item_ds()
+
+        lineitem_filtered = line_item_ds.loc[
+            :, ["l_quantity", "l_extendedprice", "l_discount", "l_shipdate"]
+        ]
+        sel = (
+            (lineitem_filtered.l_shipdate >= date1)
+            & (lineitem_filtered.l_shipdate < date2)
+            & (lineitem_filtered.l_discount >= 0.05)
+            & (lineitem_filtered.l_discount <= 0.07)
+            & (lineitem_filtered.l_quantity < var3)
+        )
+
+        flineitem = lineitem_filtered[sel]
+        result_value = (
+            (flineitem.l_extendedprice * flineitem.l_discount).sum().compute()
+        )
+        result_df = pd.DataFrame({"revenue": [result_value]})
+        return result_df
+
+    utils.run_query(Q_NUM, query)
+
+
+if __name__ == "__main__":
+    q()
dask_queries/q7.py
ADDED
@@ -0,0 +1,139 @@
+import datetime
+from datetime import datetime
+
+import dask.dataframe as dd
+
+from dask_queries import utils
+
+Q_NUM = 7
+
+
+def q():
+    var1 = datetime.strptime("1995-01-01", "%Y-%m-%d")
+    var2 = datetime.strptime("1997-01-01", "%Y-%m-%d")
+    nation_ds = utils.get_nation_ds
+    customer_ds = utils.get_customer_ds
+    line_item_ds = utils.get_line_item_ds
+    orders_ds = utils.get_orders_ds
+    supplier_ds = utils.get_supplier_ds
+
+    # first call one time to cache in case we don't include the IO times
+    nation_ds()
+    customer_ds()
+    line_item_ds()
+    orders_ds()
+    supplier_ds()
+
+    def query():
+        nonlocal nation_ds
+        nonlocal customer_ds
+        nonlocal line_item_ds
+        nonlocal orders_ds
+        nonlocal supplier_ds
+
+        nation_ds = nation_ds()
+        customer_ds = customer_ds()
+        line_item_ds = line_item_ds()
+        orders_ds = orders_ds()
+        supplier_ds = supplier_ds()
+
+        lineitem_filtered = line_item_ds[
+            (line_item_ds["l_shipdate"] >= var1) & (line_item_ds["l_shipdate"] < var2)
+        ]
+        lineitem_filtered["l_year"] = lineitem_filtered["l_shipdate"].dt.year
+        lineitem_filtered["revenue"] = lineitem_filtered["l_extendedprice"] * (
+            1.0 - lineitem_filtered["l_discount"]
+        )
+        lineitem_filtered = lineitem_filtered.loc[
+            :, ["l_orderkey", "l_suppkey", "l_year", "revenue"]
+        ]
+        supplier_filtered = supplier_ds.loc[:, ["s_suppkey", "s_nationkey"]]
+        orders_filtered = orders_ds.loc[:, ["o_orderkey", "o_custkey"]]
+        customer_filtered = customer_ds.loc[:, ["c_custkey", "c_nationkey"]]
+        n1 = nation_ds[(nation_ds["n_name"] == "FRANCE")].loc[
+            :, ["n_nationkey", "n_name"]
+        ]
+        n2 = nation_ds[(nation_ds["n_name"] == "GERMANY")].loc[
+            :, ["n_nationkey", "n_name"]
+        ]
+
+        # ----- do nation 1 -----
+        N1_C = customer_filtered.merge(
+            n1, left_on="c_nationkey", right_on="n_nationkey", how="inner"
+        )
+        N1_C = N1_C.drop(columns=["c_nationkey", "n_nationkey"]).rename(
+            columns={"n_name": "cust_nation"}
+        )
+        N1_C_O = N1_C.merge(
+            orders_filtered, left_on="c_custkey", right_on="o_custkey", how="inner"
+        )
+        N1_C_O = N1_C_O.drop(columns=["c_custkey", "o_custkey"])
+
+        N2_S = supplier_filtered.merge(
+            n2, left_on="s_nationkey", right_on="n_nationkey", how="inner"
+        )
+        N2_S = N2_S.drop(columns=["s_nationkey", "n_nationkey"]).rename(
+            columns={"n_name": "supp_nation"}
+        )
+        N2_S_L = N2_S.merge(
+            lineitem_filtered, left_on="s_suppkey", right_on="l_suppkey", how="inner"
+        )
+        N2_S_L = N2_S_L.drop(columns=["s_suppkey", "l_suppkey"])
+
+        total1 = N1_C_O.merge(
+            N2_S_L, left_on="o_orderkey", right_on="l_orderkey", how="inner"
+        )
+        total1 = total1.drop(columns=["o_orderkey", "l_orderkey"])
+
+        # ----- do nation 2 ----- (same as nation 1 section but with nation 2)
+        N2_C = customer_filtered.merge(
+            n2, left_on="c_nationkey", right_on="n_nationkey", how="inner"
+        )
+        N2_C = N2_C.drop(columns=["c_nationkey", "n_nationkey"]).rename(
+            columns={"n_name": "cust_nation"}
+        )
+        N2_C_O = N2_C.merge(
+            orders_filtered, left_on="c_custkey", right_on="o_custkey", how="inner"
+        )
+        N2_C_O = N2_C_O.drop(columns=["c_custkey", "o_custkey"])
+
+        N1_S = supplier_filtered.merge(
+            n1, left_on="s_nationkey", right_on="n_nationkey", how="inner"
+        )
+        N1_S = N1_S.drop(columns=["s_nationkey", "n_nationkey"]).rename(
+            columns={"n_name": "supp_nation"}
+        )
+        N1_S_L = N1_S.merge(
+            lineitem_filtered, left_on="s_suppkey", right_on="l_suppkey", how="inner"
+        )
+        N1_S_L = N1_S_L.drop(columns=["s_suppkey", "l_suppkey"])
+
+        total2 = N2_C_O.merge(
+            N1_S_L, left_on="o_orderkey", right_on="l_orderkey", how="inner"
+        )
+        total2 = total2.drop(columns=["o_orderkey", "l_orderkey"])
+
+        # concat results
+        total = dd.concat([total1, total2])
+        result_df = (
+            total.groupby(["supp_nation", "cust_nation", "l_year"])
+            .revenue.agg("sum")
+            .reset_index()
+        )
+        result_df.columns = ["supp_nation", "cust_nation", "l_year", "revenue"]
+
+        result_df = result_df.compute().sort_values(
+            by=["supp_nation", "cust_nation", "l_year"],
+            ascending=[
+                True,
+                True,
+                True,
+            ],
+        )
+        return result_df
+
+    utils.run_query(Q_NUM, query)
+
+
+if __name__ == "__main__":
+    q()
dask_queries/utils.py
ADDED
@@ -0,0 +1,119 @@
+import os
+import timeit
+from os.path import join
+from typing import Callable, Union
+
+import dask.dataframe as dd
+import pandas as pd
+from linetimer import CodeTimer, linetimer
+
+from common_utils import (
+    ANSWERS_BASE_DIR,
+    DATASET_BASE_DIR,
+    FILE_TYPE,
+    INCLUDE_IO,
+    LOG_TIMINGS,
+    SHOW_RESULTS,
+    append_row,
+    on_second_call,
+)
+
+
+def read_ds(path: str) -> dd.DataFrame:
+    if INCLUDE_IO:
+        return dd.read_parquet(path)
+    if FILE_TYPE == "feather":
+        raise ValueError("file type feather not supported for dask queries")
+
+    return dd.from_pandas(pd.read_parquet(path), npartitions=os.cpu_count())
+
+
+def get_query_answer(query: int, base_dir: str = ANSWERS_BASE_DIR) -> pd.DataFrame:
+    answer_df = pd.read_csv(
+        join(base_dir, f"q{query}.out"),
+        sep="|",
+        parse_dates=True,
+        infer_datetime_format=True,
+    )
+    return answer_df.rename(columns=lambda x: x.strip())
+
+
+def test_results(q_num: int, result_df: pd.DataFrame):
+    with CodeTimer(name=f"Testing result of dask Query {q_num}", unit="s"):
+        answer = get_query_answer(q_num)
+
+        for c, t in answer.dtypes.items():
+            s1 = result_df[c]
+            s2 = answer[c]
+
+            if t.name == "object":
+                s1 = s1.astype("string").apply(lambda x: x.strip())
+                s2 = s2.astype("string").apply(lambda x: x.strip())
+
+            pd.testing.assert_series_equal(left=s1, right=s2, check_index=False)
+
+
+@on_second_call
+def get_line_item_ds(base_dir: str = DATASET_BASE_DIR) -> dd.DataFrame:
+    return read_ds(join(base_dir, "lineitem.parquet"))
+
+
+@on_second_call
+def get_orders_ds(base_dir: str = DATASET_BASE_DIR) -> dd.DataFrame:
+    return read_ds(join(base_dir, "orders.parquet"))
+
+
+@on_second_call
+def get_customer_ds(base_dir: str = DATASET_BASE_DIR) -> dd.DataFrame:
+    return read_ds(join(base_dir, "customer.parquet"))
+
+
+@on_second_call
+def get_region_ds(base_dir: str = DATASET_BASE_DIR) -> dd.DataFrame:
+    return read_ds(join(base_dir, "region.parquet"))
+
+
+@on_second_call
+def get_nation_ds(base_dir: str = DATASET_BASE_DIR) -> dd.DataFrame:
+    return read_ds(join(base_dir, "nation.parquet"))
+
+
+@on_second_call
+def get_supplier_ds(base_dir: str = DATASET_BASE_DIR) -> dd.DataFrame:
+    return read_ds(join(base_dir, "supplier.parquet"))
+
+
+@on_second_call
+def get_part_ds(base_dir: str = DATASET_BASE_DIR) -> dd.DataFrame:
+    return read_ds(join(base_dir, "part.parquet"))
+
+
+@on_second_call
+def get_part_supp_ds(base_dir: str = DATASET_BASE_DIR) -> dd.DataFrame:
+    return read_ds(join(base_dir, "partsupp.parquet"))
+
+
+def run_query(q_num: int, query: Callable):
+    @linetimer(name=f"Overall execution of dask Query {q_num}", unit="s")
+    def run():
+        import dask
+
+        dask.config.set(scheduler="threads")
+
+        with CodeTimer(name=f"Get result of dask Query {q_num}", unit="s"):
+            t0 = timeit.default_timer()
+
+            result = query()
+            secs = timeit.default_timer() - t0
+
+        if LOG_TIMINGS:
+            append_row(
+                solution="dask", version=dask.__version__, q=f"q{q_num}", secs=secs
+            )
+        else:
+            test_results(q_num, result)
+
+        if SHOW_RESULTS:
+            print(result)
+
+    run()
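Every dask query module wires itself to `run_query` the same way; a minimal skeleton for a hypothetical extra query following the q1-q7 pattern above (the query number and body are placeholders, not part of this commit):

```python
# hypothetical dask_queries/q8.py skeleton, following the caching pattern of q1-q7
from dask_queries import utils

Q_NUM = 8  # placeholder


def q():
    line_item_ds = utils.get_line_item_ds
    # first call one time to cache in case we don't include the IO times
    line_item_ds()

    def query():
        nonlocal line_item_ds
        line_item_ds = line_item_ds()  # second call returns the cached dask DataFrame
        return line_item_ds.head(10)   # placeholder transformation

    utils.run_query(Q_NUM, query)


if __name__ == "__main__":
    q()
```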
duckdb_queries/__init__.py
ADDED
File without changes
duckdb_queries/executor.py
ADDED
@@ -0,0 +1,4 @@
+from common_utils import execute_all
+
+if __name__ == "__main__":
+    execute_all("duckdb")
duckdb_queries/q1.py
ADDED
@@ -0,0 +1,41 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 1
+
+
+def q():
+    lineitem = utils.get_line_item_ds()
+
+    query_str = f"""
+    select
+        l_returnflag,
+        l_linestatus,
+        sum(l_quantity) as sum_qty,
+        sum(l_extendedprice) as sum_base_price,
+        sum(l_extendedprice * (1 - l_discount)) as sum_disc_price,
+        sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge,
+        avg(l_quantity) as avg_qty,
+        avg(l_extendedprice) as avg_price,
+        avg(l_discount) as avg_disc,
+        count(*) as count_order
+    from
+        {lineitem}
+    where
+        l_shipdate <= '1998-09-02'
+    group by
+        l_returnflag,
+        l_linestatus
+    order by
+        l_returnflag,
+        l_linestatus
+    """
+
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
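The `{lineitem}` placeholder above is supplied by `duckdb_queries/utils.py`, whose body falls outside this 50-file view; the interpolation suggests each `get_*_ds()` returns a SQL-ready table expression. A self-contained sketch of the same pattern, where the `read_parquet(...)` expression is an assumption rather than the module's actual return value:

```python
# standalone sketch of the f-string table interpolation used by the duckdb queries;
# the read_parquet(...) string is an assumed stand-in for what utils returns
import duckdb

lineitem = "read_parquet('tables_scale_1/lineitem.parquet')"  # assumed table expression
rel = duckdb.sql(f"select count(*) as n_rows from {lineitem}")
print(rel.fetchall())
```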
duckdb_queries/q10.py
ADDED
@@ -0,0 +1,55 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 10
+
+
+def q():
+    customer_ds = utils.get_customer_ds()
+    orders_ds = utils.get_orders_ds()
+    line_item_ds = utils.get_line_item_ds()
+    nation_ds = utils.get_nation_ds()
+
+    query_str = f"""
+    select
+        c_custkey,
+        c_name,
+        round(sum(l_extendedprice * (1 - l_discount)), 2) as revenue,
+        c_acctbal,
+        n_name,
+        trim(c_address) as c_address,
+        c_phone,
+        trim(c_comment) as c_comment
+    from
+        {customer_ds},
+        {orders_ds},
+        {line_item_ds},
+        {nation_ds}
+    where
+        c_custkey = o_custkey
+        and l_orderkey = o_orderkey
+        and o_orderdate >= date '1993-10-01'
+        and o_orderdate < date '1993-10-01' + interval '3' month
+        and l_returnflag = 'R'
+        and c_nationkey = n_nationkey
+    group by
+        c_custkey,
+        c_name,
+        c_acctbal,
+        c_phone,
+        n_name,
+        c_address,
+        c_comment
+    order by
+        revenue desc
+    limit 20
+    """
+
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q11.py
ADDED
@@ -0,0 +1,49 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 11
+
+
+def q():
+    supplier_ds = utils.get_supplier_ds()
+    part_supp_ds = utils.get_part_supp_ds()
+    nation_ds = utils.get_nation_ds()
+
+    query_str = f"""
+    select
+        ps_partkey,
+        round(sum(ps_supplycost * ps_availqty), 2) as value
+    from
+        {part_supp_ds},
+        {supplier_ds},
+        {nation_ds}
+    where
+        ps_suppkey = s_suppkey
+        and s_nationkey = n_nationkey
+        and n_name = 'GERMANY'
+    group by
+        ps_partkey having
+            sum(ps_supplycost * ps_availqty) > (
+                select
+                    sum(ps_supplycost * ps_availqty) * 0.0001
+                from
+                    {part_supp_ds},
+                    {supplier_ds},
+                    {nation_ds}
+                where
+                    ps_suppkey = s_suppkey
+                    and s_nationkey = n_nationkey
+                    and n_name = 'GERMANY'
+            )
+    order by
+        value desc
+    """
+
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q12.py
ADDED
@@ -0,0 +1,48 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 12
+
+
+def q():
+    line_item_ds = utils.get_line_item_ds()
+    orders_ds = utils.get_orders_ds()
+
+    query_str = f"""
+    select
+        l_shipmode,
+        sum(case
+            when o_orderpriority = '1-URGENT'
+                or o_orderpriority = '2-HIGH'
+                then 1
+            else 0
+        end) as high_line_count,
+        sum(case
+            when o_orderpriority <> '1-URGENT'
+                and o_orderpriority <> '2-HIGH'
+                then 1
+            else 0
+        end) as low_line_count
+    from
+        {orders_ds},
+        {line_item_ds}
+    where
+        o_orderkey = l_orderkey
+        and l_shipmode in ('MAIL', 'SHIP')
+        and l_commitdate < l_receiptdate
+        and l_shipdate < l_commitdate
+        and l_receiptdate >= date '1994-01-01'
+        and l_receiptdate < date '1994-01-01' + interval '1' year
+    group by
+        l_shipmode
+    order by
+        l_shipmode
+    """
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q13.py
ADDED
@@ -0,0 +1,42 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 13
+
+
+def q():
+    orders_ds = utils.get_orders_ds()
+    customer_ds = utils.get_customer_ds()
+
+    query_str = f"""
+    select
+        c_count, count(*) as custdist
+    from (
+        select
+            c_custkey,
+            count(o_orderkey)
+        from
+            {customer_ds} left outer join {orders_ds} on
+                c_custkey = o_custkey
+                and o_comment not like '%special%requests%'
+        group by
+            c_custkey
+    ) as c_orders (c_custkey, c_count)
+    group by
+        c_count
+    order by
+        custdist desc,
+        c_count desc
+    """
+
+    utils.get_customer_ds()
+    utils.get_orders_ds()
+
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q14.py
ADDED
@@ -0,0 +1,34 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 14
+
+
+def q():
+    part_ds = utils.get_part_ds()
+    line_item_ds = utils.get_line_item_ds()
+
+    query_str = f"""
+    select
+        round(100.00 * sum(case
+            when p_type like 'PROMO%'
+                then l_extendedprice * (1 - l_discount)
+            else 0
+        end) / sum(l_extendedprice * (1 - l_discount)), 2) as promo_revenue
+    from
+        {line_item_ds},
+        {part_ds}
+    where
+        l_partkey = p_partkey
+        and l_shipdate >= date '1995-09-01'
+        and l_shipdate < date '1995-09-01' + interval '1' month
+    """
+
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q15.py
ADDED
@@ -0,0 +1,57 @@
+import duckdb
+from duckdb import DuckDBPyConnection
+
+from duckdb_queries import utils
+
+Q_NUM = 15
+
+
+def q():
+    line_item_ds = utils.get_line_item_ds()
+    supplier_ds = utils.get_supplier_ds()
+
+    ddl = f"""
+    create or replace temporary view revenue (supplier_no, total_revenue) as
+    select
+        l_suppkey,
+        sum(l_extendedprice * (1 - l_discount))
+    from
+        {line_item_ds}
+    where
+        l_shipdate >= date '1996-01-01'
+        and l_shipdate < date '1996-01-01' + interval '3' month
+    group by
+        l_suppkey
+    """
+
+    query_str = f"""
+    select
+        s_suppkey,
+        s_name,
+        s_address,
+        s_phone,
+        total_revenue
+    from
+        {supplier_ds},
+        revenue
+    where
+        s_suppkey = supplier_no
+        and total_revenue = (
+            select
+                max(total_revenue)
+            from
+                revenue
+        )
+    order by
+        s_suppkey
+    """
+
+    _ = duckdb.execute(ddl)
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+    duckdb.execute("DROP VIEW IF EXISTS revenue")
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q16.py
ADDED
@@ -0,0 +1,52 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 16
+
+
+def q():
+    part_ds = utils.get_part_ds()
+    supplier_ds = utils.get_supplier_ds()
+    part_supp_ds = utils.get_part_supp_ds()
+
+    query_str = f"""
+    select
+        p_brand,
+        p_type,
+        p_size,
+        count(distinct ps_suppkey) as supplier_cnt
+    from
+        {part_supp_ds},
+        {part_ds}
+    where
+        p_partkey = ps_partkey
+        and p_brand <> 'Brand#45'
+        and p_type not like 'MEDIUM POLISHED%'
+        and p_size in (49, 14, 23, 45, 19, 3, 36, 9)
+        and ps_suppkey not in (
+            select
+                s_suppkey
+            from
+                {supplier_ds}
+            where
+                s_comment like '%Customer%Complaints%'
+        )
+    group by
+        p_brand,
+        p_type,
+        p_size
+    order by
+        supplier_cnt desc,
+        p_brand,
+        p_type,
+        p_size
+    """
+
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q17.py
ADDED
@@ -0,0 +1,38 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 17
+
+
+def q():
+    part_ds = utils.get_part_ds()
+    line_item_ds = utils.get_line_item_ds()
+
+    query_str = f"""
+    select
+        round(sum(l_extendedprice) / 7.0, 2) as avg_yearly
+    from
+        {line_item_ds},
+        {part_ds}
+    where
+        p_partkey = l_partkey
+        and p_brand = 'Brand#23'
+        and p_container = 'MED BOX'
+        and l_quantity < (
+            select
+                0.2 * avg(l_quantity)
+            from
+                {line_item_ds}
+            where
+                l_partkey = p_partkey
+        )
+    """
+
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q18.py
ADDED
@@ -0,0 +1,55 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 18
+
+
+def q():
+    line_item_ds = utils.get_line_item_ds()
+    orders_ds = utils.get_orders_ds()
+    customer_ds = utils.get_customer_ds()
+
+    query_str = f"""
+    select
+        c_name,
+        c_custkey,
+        o_orderkey,
+        o_orderdate as o_orderdat,
+        o_totalprice,
+        sum(l_quantity) as col6
+    from
+        {customer_ds},
+        {orders_ds},
+        {line_item_ds}
+    where
+        o_orderkey in (
+            select
+                l_orderkey
+            from
+                {line_item_ds}
+            group by
+                l_orderkey having
+                    sum(l_quantity) > 300
+        )
+        and c_custkey = o_custkey
+        and o_orderkey = l_orderkey
+    group by
+        c_name,
+        c_custkey,
+        o_orderkey,
+        o_orderdate,
+        o_totalprice
+    order by
+        o_totalprice desc,
+        o_orderdate
+    limit 100
+    """
+
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q19.py
ADDED
@@ -0,0 +1,56 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 19
+
+
+def q():
+    part_ds = utils.get_part_ds()
+    line_item_ds = utils.get_line_item_ds()
+
+    query_str = f"""
+    select
+        round(sum(l_extendedprice * (1 - l_discount)), 2) as revenue
+    from
+        {line_item_ds},
+        {part_ds}
+    where
+        (
+            p_partkey = l_partkey
+            and p_brand = 'Brand#12'
+            and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG')
+            and l_quantity >= 1 and l_quantity <= 1 + 10
+            and p_size between 1 and 5
+            and l_shipmode in ('AIR', 'AIR REG')
+            and l_shipinstruct = 'DELIVER IN PERSON'
+        )
+        or
+        (
+            p_partkey = l_partkey
+            and p_brand = 'Brand#23'
+            and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK')
+            and l_quantity >= 10 and l_quantity <= 20
+            and p_size between 1 and 10
+            and l_shipmode in ('AIR', 'AIR REG')
+            and l_shipinstruct = 'DELIVER IN PERSON'
+        )
+        or
+        (
+            p_partkey = l_partkey
+            and p_brand = 'Brand#34'
+            and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG')
+            and l_quantity >= 20 and l_quantity <= 30
+            and p_size between 1 and 15
+            and l_shipmode in ('AIR', 'AIR REG')
+            and l_shipinstruct = 'DELIVER IN PERSON'
+        )
+    """
+
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q2.py
ADDED
@@ -0,0 +1,68 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 2
+
+
+def q():
+    region_ds = utils.get_region_ds()
+    nation_ds = utils.get_nation_ds()
+    supplier_ds = utils.get_supplier_ds()
+    part_ds = utils.get_part_ds()
+    part_supp_ds = utils.get_part_supp_ds()
+
+    query_str = f"""
+    select
+        s_acctbal,
+        s_name,
+        n_name,
+        p_partkey,
+        p_mfgr,
+        trim(s_address) as s_address,
+        s_phone,
+        trim(s_comment) as s_comment
+    from
+        {part_ds},
+        {supplier_ds},
+        {part_supp_ds},
+        {nation_ds},
+        {region_ds}
+    where
+        p_partkey = ps_partkey
+        and s_suppkey = ps_suppkey
+        and p_size = 15
+        and p_type like '%BRASS'
+        and s_nationkey = n_nationkey
+        and n_regionkey = r_regionkey
+        and r_name = 'EUROPE'
+        and ps_supplycost = (
+            select
+                min(ps_supplycost)
+            from
+                {part_supp_ds},
+                {supplier_ds},
+                {nation_ds},
+                {region_ds}
+            where
+                p_partkey = ps_partkey
+                and s_suppkey = ps_suppkey
+                and s_nationkey = n_nationkey
+                and n_regionkey = r_regionkey
+                and r_name = 'EUROPE'
+        )
+    order by
+        s_acctbal desc,
+        n_name,
+        s_name,
+        p_partkey
+    limit 100
+    """
+
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q20.py
ADDED
@@ -0,0 +1,61 @@
+import duckdb
+
+from duckdb_queries import utils
+
+Q_NUM = 20
+
+
+def q():
+    line_item_ds = utils.get_line_item_ds()
+    nation_ds = utils.get_nation_ds()
+    supplier_ds = utils.get_supplier_ds()
+    part_ds = utils.get_part_ds()
+    part_supp_ds = utils.get_part_supp_ds()
+
+    query_str = f"""
+    select
+        s_name,
+        trim(s_address) as s_address
+    from
+        {supplier_ds},
+        {nation_ds}
+    where
+        s_suppkey in (
+            select
+                ps_suppkey
+            from
+                {part_supp_ds}
+            where
+                ps_partkey in (
+                    select
+                        p_partkey
+                    from
+                        {part_ds}
+                    where
+                        p_name like 'forest%'
+                )
+                and ps_availqty > (
+                    select
+                        0.5 * sum(l_quantity)
+                    from
+                        {line_item_ds}
+                    where
+                        l_partkey = ps_partkey
+                        and l_suppkey = ps_suppkey
+                        and l_shipdate >= date '1994-01-01'
+                        and l_shipdate < date '1994-01-01' + interval '1' year
+                )
+        )
+        and s_nationkey = n_nationkey
+        and n_name = 'CANADA'
+    order by
+        s_name
+    """
+
+    q_final = duckdb.sql(query_str)
+
+    utils.run_query(Q_NUM, q_final)
+
+
+if __name__ == "__main__":
+    q()
duckdb_queries/q21.py
ADDED
@@ -0,0 +1,63 @@
import duckdb

from duckdb_queries import utils

Q_NUM = 21


def q():
    line_item_ds = utils.get_line_item_ds()
    supplier_ds = utils.get_supplier_ds()
    nation_ds = utils.get_nation_ds()
    orders_ds = utils.get_orders_ds()

    query_str = f"""
    select
        s_name,
        count(*) as numwait
    from
        {supplier_ds},
        {line_item_ds} l1,
        {orders_ds},
        {nation_ds}
    where
        s_suppkey = l1.l_suppkey
        and o_orderkey = l1.l_orderkey
        and o_orderstatus = 'F'
        and l1.l_receiptdate > l1.l_commitdate
        and exists (
            select
                *
            from
                {line_item_ds} l2
            where
                l2.l_orderkey = l1.l_orderkey
                and l2.l_suppkey <> l1.l_suppkey
        )
        and not exists (
            select
                *
            from
                {line_item_ds} l3
            where
                l3.l_orderkey = l1.l_orderkey
                and l3.l_suppkey <> l1.l_suppkey
                and l3.l_receiptdate > l3.l_commitdate
        )
        and s_nationkey = n_nationkey
        and n_name = 'SAUDI ARABIA'
    group by
        s_name
    order by
        numwait desc,
        s_name
    limit 100
    """

    q_final = duckdb.sql(query_str)

    utils.run_query(Q_NUM, q_final)


if __name__ == "__main__":
    q()
duckdb_queries/q22.py
ADDED
@@ -0,0 +1,57 @@
import duckdb

from duckdb_queries import utils

Q_NUM = 22


def q():
    orders_ds = utils.get_orders_ds()
    customer_ds = utils.get_customer_ds()

    query_str = f"""
    select
        cntrycode,
        count(*) as numcust,
        sum(c_acctbal) as totacctbal
    from (
        select
            substring(c_phone from 1 for 2) as cntrycode,
            c_acctbal
        from
            {customer_ds}
        where
            substring(c_phone from 1 for 2) in
                (13, 31, 23, 29, 30, 18, 17)
            and c_acctbal > (
                select
                    avg(c_acctbal)
                from
                    {customer_ds}
                where
                    c_acctbal > 0.00
                    and substring(c_phone from 1 for 2) in
                        (13, 31, 23, 29, 30, 18, 17)
            )
            and not exists (
                select
                    *
                from
                    {orders_ds}
                where
                    o_custkey = c_custkey
            )
    ) as custsale
    group by
        cntrycode
    order by
        cntrycode
    """

    q_final = duckdb.sql(query_str)

    utils.run_query(Q_NUM, q_final)


if __name__ == "__main__":
    q()
duckdb_queries/q3.py
ADDED
@@ -0,0 +1,45 @@
import duckdb

from duckdb_queries import utils

Q_NUM = 3


def q():
    customer_ds = utils.get_customer_ds()
    line_item_ds = utils.get_line_item_ds()
    orders_ds = utils.get_orders_ds()

    query_str = f"""
    select
        l_orderkey,
        sum(l_extendedprice * (1 - l_discount)) as revenue,
        o_orderdate,
        o_shippriority
    from
        {customer_ds},
        {orders_ds},
        {line_item_ds}
    where
        c_mktsegment = 'BUILDING'
        and c_custkey = o_custkey
        and l_orderkey = o_orderkey
        and o_orderdate < '1995-03-15'
        and l_shipdate > '1995-03-15'
    group by
        l_orderkey,
        o_orderdate,
        o_shippriority
    order by
        revenue desc,
        o_orderdate
    limit 10
    """

    q_final = duckdb.sql(query_str)

    utils.run_query(Q_NUM, q_final)


if __name__ == "__main__":
    q()
duckdb_queries/q4.py
ADDED
@@ -0,0 +1,42 @@
import duckdb

from duckdb_queries import utils

Q_NUM = 4


def q():
    line_item_ds = utils.get_line_item_ds()
    orders_ds = utils.get_orders_ds()

    query_str = f"""
    select
        o_orderpriority,
        count(*) as order_count
    from
        {orders_ds}
    where
        o_orderdate >= timestamp '1993-07-01'
        and o_orderdate < timestamp '1993-07-01' + interval '3' month
        and exists (
            select
                *
            from
                {line_item_ds}
            where
                l_orderkey = o_orderkey
                and l_commitdate < l_receiptdate
        )
    group by
        o_orderpriority
    order by
        o_orderpriority
    """

    q_final = duckdb.sql(query_str)

    utils.run_query(Q_NUM, q_final)


if __name__ == "__main__":
    q()
duckdb_queries/q5.py
ADDED
@@ -0,0 +1,49 @@
import duckdb

from duckdb_queries import utils

Q_NUM = 5


def q():
    region_ds = utils.get_region_ds()
    nation_ds = utils.get_nation_ds()
    customer_ds = utils.get_customer_ds()
    line_item_ds = utils.get_line_item_ds()
    orders_ds = utils.get_orders_ds()
    supplier_ds = utils.get_supplier_ds()

    query_str = f"""
    select
        n_name,
        sum(l_extendedprice * (1 - l_discount)) as revenue
    from
        {customer_ds},
        {orders_ds},
        {line_item_ds},
        {supplier_ds},
        {nation_ds},
        {region_ds}
    where
        c_custkey = o_custkey
        and l_orderkey = o_orderkey
        and l_suppkey = s_suppkey
        and c_nationkey = s_nationkey
        and s_nationkey = n_nationkey
        and n_regionkey = r_regionkey
        and r_name = 'ASIA'
        and o_orderdate >= timestamp '1994-01-01'
        and o_orderdate < timestamp '1994-01-01' + interval '1' year
    group by
        n_name
    order by
        revenue desc
    """

    q_final = duckdb.sql(query_str)

    utils.run_query(Q_NUM, q_final)


if __name__ == "__main__":
    q()
duckdb_queries/q6.py
ADDED
@@ -0,0 +1,29 @@
import duckdb

from duckdb_queries import utils

Q_NUM = 6


def q():
    line_item_ds = utils.get_line_item_ds()

    query_str = f"""
    select
        sum(l_extendedprice * l_discount) as revenue
    from
        {line_item_ds}
    where
        l_shipdate >= timestamp '1994-01-01'
        and l_shipdate < timestamp '1994-01-01' + interval '1' year
        and l_discount between .06 - 0.01 and .06 + 0.01
        and l_quantity < 24
    """

    q_final = duckdb.sql(query_str)

    utils.run_query(Q_NUM, q_final)


if __name__ == "__main__":
    q()
duckdb_queries/q7.py
ADDED
@@ -0,0 +1,63 @@
import duckdb

from duckdb_queries import utils

Q_NUM = 7


def q():
    nation_ds = utils.get_nation_ds()
    customer_ds = utils.get_customer_ds()
    line_item_ds = utils.get_line_item_ds()
    orders_ds = utils.get_orders_ds()
    supplier_ds = utils.get_supplier_ds()

    query_str = f"""
    select
        supp_nation,
        cust_nation,
        l_year,
        sum(volume) as revenue
    from
        (
            select
                n1.n_name as supp_nation,
                n2.n_name as cust_nation,
                year(l_shipdate) as l_year,
                l_extendedprice * (1 - l_discount) as volume
            from
                {supplier_ds},
                {line_item_ds},
                {orders_ds},
                {customer_ds},
                {nation_ds} n1,
                {nation_ds} n2
            where
                s_suppkey = l_suppkey
                and o_orderkey = l_orderkey
                and c_custkey = o_custkey
                and s_nationkey = n1.n_nationkey
                and c_nationkey = n2.n_nationkey
                and (
                    (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY')
                    or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE')
                )
                and l_shipdate between timestamp '1995-01-01' and timestamp '1996-12-31'
        ) as shipping
    group by
        supp_nation,
        cust_nation,
        l_year
    order by
        supp_nation,
        cust_nation,
        l_year
    """

    q_final = duckdb.sql(query_str)

    utils.run_query(Q_NUM, q_final)


if __name__ == "__main__":
    q()
duckdb_queries/q8.py
ADDED
@@ -0,0 +1,65 @@
import duckdb

from duckdb_queries import utils

Q_NUM = 8


def q():
    part_ds = utils.get_part_ds()
    supplier_ds = utils.get_supplier_ds()
    line_item_ds = utils.get_line_item_ds()
    orders_ds = utils.get_orders_ds()
    customer_ds = utils.get_customer_ds()
    nation_ds = utils.get_nation_ds()
    region_ds = utils.get_region_ds()

    query_str = f"""
    select
        o_year,
        round(
            sum(case
                when nation = 'BRAZIL' then volume
                else 0
            end) / sum(volume)
        , 2) as mkt_share
    from
        (
            select
                extract(year from o_orderdate) as o_year,
                l_extendedprice * (1 - l_discount) as volume,
                n2.n_name as nation
            from
                {part_ds},
                {supplier_ds},
                {line_item_ds},
                {orders_ds},
                {customer_ds},
                {nation_ds} n1,
                {nation_ds} n2,
                {region_ds}
            where
                p_partkey = l_partkey
                and s_suppkey = l_suppkey
                and l_orderkey = o_orderkey
                and o_custkey = c_custkey
                and c_nationkey = n1.n_nationkey
                and n1.n_regionkey = r_regionkey
                and r_name = 'AMERICA'
                and s_nationkey = n2.n_nationkey
                and o_orderdate between timestamp '1995-01-01' and timestamp '1996-12-31'
                and p_type = 'ECONOMY ANODIZED STEEL'
        ) as all_nations
    group by
        o_year
    order by
        o_year
    """

    q_final = duckdb.sql(query_str)

    utils.run_query(Q_NUM, q_final)


if __name__ == "__main__":
    q()
duckdb_queries/q9.py
ADDED
@@ -0,0 +1,57 @@
import duckdb

from duckdb_queries import utils

Q_NUM = 9


def q():
    part_ds = utils.get_part_ds()
    supplier_ds = utils.get_supplier_ds()
    line_item_ds = utils.get_line_item_ds()
    orders_ds = utils.get_orders_ds()
    part_supp_ds = utils.get_part_supp_ds()
    nation_ds = utils.get_nation_ds()

    query_str = f"""
    select
        nation,
        o_year,
        round(sum(amount), 2) as sum_profit
    from
        (
            select
                n_name as nation,
                year(o_orderdate) as o_year,
                l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount
            from
                {part_ds},
                {supplier_ds},
                {line_item_ds},
                {part_supp_ds},
                {orders_ds},
                {nation_ds}
            where
                s_suppkey = l_suppkey
                and ps_suppkey = l_suppkey
                and ps_partkey = l_partkey
                and p_partkey = l_partkey
                and o_orderkey = l_orderkey
                and s_nationkey = n_nationkey
                and p_name like '%green%'
        ) as profit
    group by
        nation,
        o_year
    order by
        nation,
        o_year desc
    """

    q_final = duckdb.sql(query_str)

    utils.run_query(Q_NUM, q_final)


if __name__ == "__main__":
    q()
duckdb_queries/utils.py
ADDED
@@ -0,0 +1,115 @@
import timeit
from importlib.metadata import version
from os.path import join

import duckdb
import polars as pl
from duckdb import DuckDBPyRelation
from linetimer import CodeTimer, linetimer
from polars import testing as pl_test

from common_utils import (
    ANSWERS_BASE_DIR,
    DATASET_BASE_DIR,
    FILE_TYPE,
    INCLUDE_IO,
    LOG_TIMINGS,
    SHOW_RESULTS,
    append_row,
)


def _scan_ds(path: str):
    path = f"{path}.{FILE_TYPE}"
    if FILE_TYPE == "parquet":
        if INCLUDE_IO:
            # scan the file inside the timed query: return a quoted path
            duckdb.read_parquet(path)
            return f"'{path}'"
        else:
            # exclude IO: materialize once into a temp table, return its name
            name = path.replace("/", "_").replace(".", "_")
            duckdb.sql(
                f"create temp table if not exists {name} as select * from read_parquet('{path}');"
            )
            return name
    elif FILE_TYPE == "feather":
        raise ValueError("duckdb does not support feather for now")
    else:
        raise ValueError(f"file type: {FILE_TYPE} not expected")


def get_query_answer(query: int, base_dir: str = ANSWERS_BASE_DIR) -> pl.LazyFrame:
    answer_ldf = pl.scan_csv(
        join(base_dir, f"q{query}.out"),
        separator="|",
        has_header=True,
        try_parse_dates=True,
    )
    cols = answer_ldf.columns
    answer_ldf = answer_ldf.select(
        [pl.col(c).alias(c.strip()) for c in cols]
    ).with_columns([pl.col(pl.datatypes.Utf8).str.strip().keep_name()])

    return answer_ldf


def test_results(q_num: int, result_df: pl.DataFrame):
    with CodeTimer(name=f"Testing result of duckdb Query {q_num}", unit="s"):
        answer = get_query_answer(q_num).collect()
        pl_test.assert_frame_equal(left=result_df, right=answer, check_dtype=False)


def get_line_item_ds(base_dir: str = DATASET_BASE_DIR) -> str:
    return _scan_ds(join(base_dir, "lineitem"))


def get_orders_ds(base_dir: str = DATASET_BASE_DIR) -> str:
    return _scan_ds(join(base_dir, "orders"))


def get_customer_ds(base_dir: str = DATASET_BASE_DIR) -> str:
    return _scan_ds(join(base_dir, "customer"))


def get_region_ds(base_dir: str = DATASET_BASE_DIR) -> str:
    return _scan_ds(join(base_dir, "region"))


def get_nation_ds(base_dir: str = DATASET_BASE_DIR) -> str:
    return _scan_ds(join(base_dir, "nation"))


def get_supplier_ds(base_dir: str = DATASET_BASE_DIR) -> str:
    return _scan_ds(join(base_dir, "supplier"))


def get_part_ds(base_dir: str = DATASET_BASE_DIR) -> str:
    return _scan_ds(join(base_dir, "part"))


def get_part_supp_ds(base_dir: str = DATASET_BASE_DIR) -> str:
    return _scan_ds(join(base_dir, "partsupp"))


def run_query(q_num: int, context: DuckDBPyRelation):
    @linetimer(name=f"Overall execution of duckdb Query {q_num}", unit="s")
    def query():
        with CodeTimer(name=f"Get result of duckdb Query {q_num}", unit="s"):
            t0 = timeit.default_timer()
            # force duckdb to materialize by converting to a polars DataFrame
            result = context.pl()

            secs = timeit.default_timer() - t0

        if LOG_TIMINGS:
            append_row(
                solution="duckdb", version=version("duckdb"), q=f"q{q_num}", secs=secs
            )
        else:
            test_results(q_num, result)

        if SHOW_RESULTS:
            print(result)

    query()
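For reference, a minimal sketch of driving `run_query` with an ad-hoc relation. The `q_num=0` is hypothetical; with `LOG_TIMINGS` unset, `run_query` would look for an answers file `q0.out`, so this sketch assumes timings logging is enabled:

import duckdb
from duckdb_queries import utils

rel = duckdb.sql("select 1 as x")  # any DuckDBPyRelation works
utils.run_query(0, rel)            # times .pl() materialization, then logs the timing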
fireducks_queries/__init__.py
ADDED
File without changes
fireducks_queries/executor.py
ADDED
@@ -0,0 +1,4 @@
from common_utils import execute_all

if __name__ == "__main__":
    execute_all("fireducks")
fireducks_queries/q1.py
ADDED
@@ -0,0 +1,71 @@
from datetime import datetime

from fireducks_queries import utils

Q_NUM = 1


def q():
    VAR1 = datetime(1998, 9, 2)

    lineitem = utils.get_line_item_ds
    # first call one time to cache in case we don't include the IO times
    lineitem()

    def query():
        nonlocal lineitem
        lineitem = lineitem()

        lineitem_filtered = lineitem.loc[
            :,
            [
                "l_quantity",
                "l_extendedprice",
                "l_discount",
                "l_tax",
                "l_returnflag",
                "l_linestatus",
                "l_shipdate",
                "l_orderkey",
            ],
        ]
        sel = lineitem_filtered.l_shipdate <= VAR1
        lineitem_filtered = lineitem_filtered[sel]
        lineitem_filtered["sum_qty"] = lineitem_filtered.l_quantity
        lineitem_filtered["sum_base_price"] = lineitem_filtered.l_extendedprice
        lineitem_filtered["avg_qty"] = lineitem_filtered.l_quantity
        lineitem_filtered["avg_price"] = lineitem_filtered.l_extendedprice
        lineitem_filtered["sum_disc_price"] = lineitem_filtered.l_extendedprice * (
            1 - lineitem_filtered.l_discount
        )
        lineitem_filtered["sum_charge"] = (
            lineitem_filtered.l_extendedprice
            * (1 - lineitem_filtered.l_discount)
            * (1 + lineitem_filtered.l_tax)
        )
        lineitem_filtered["avg_disc"] = lineitem_filtered.l_discount
        lineitem_filtered["count_order"] = lineitem_filtered.l_orderkey
        gb = lineitem_filtered.groupby(["l_returnflag", "l_linestatus"])

        total = gb.agg(
            {
                "sum_qty": "sum",
                "sum_base_price": "sum",
                "sum_disc_price": "sum",
                "sum_charge": "sum",
                "avg_qty": "mean",
                "avg_price": "mean",
                "avg_disc": "mean",
                "count_order": "count",
            }
        )

        result_df = total.reset_index().sort_values(["l_returnflag", "l_linestatus"])

        return result_df

    utils.run_query(Q_NUM, query)


if __name__ == "__main__":
    q()
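The `nonlocal` dance above is a deliberate pattern used throughout these fireducks modules: each dataset getter is invoked once up front to warm any cache, and inside `query()` the same name is rebound from getter to DataFrame, so the timed body includes or excludes IO depending on the caching behavior in fireducks_queries/utils.py. A stripped-down sketch of the pattern, where `load_table` is a hypothetical stand-in for the utils getters:

def make_query(load_table):
    load_table()  # warm the cache once, outside the timed region

    def query():
        nonlocal load_table
        load_table = load_table()  # rebind: the name now holds the DataFrame
        return load_table.head()   # query() runs a single time under run_query

    return query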
fireducks_queries/q2.py
ADDED
@@ -0,0 +1,159 @@
from fireducks_queries import utils

Q_NUM = 2


def q():
    var1 = 15
    var2 = "BRASS"
    var3 = "EUROPE"

    region_ds = utils.get_region_ds
    nation_ds = utils.get_nation_ds
    supplier_ds = utils.get_supplier_ds
    part_ds = utils.get_part_ds
    part_supp_ds = utils.get_part_supp_ds

    # first call one time to cache in case we don't include the IO times
    region_ds()
    nation_ds()
    supplier_ds()
    part_ds()
    part_supp_ds()

    def query():
        nonlocal region_ds
        nonlocal nation_ds
        nonlocal supplier_ds
        nonlocal part_ds
        nonlocal part_supp_ds
        region_ds = region_ds()
        nation_ds = nation_ds()
        supplier_ds = supplier_ds()
        part_ds = part_ds()
        part_supp_ds = part_supp_ds()

        nation_filtered = nation_ds.loc[:, ["n_nationkey", "n_name", "n_regionkey"]]
        region_filtered = region_ds[(region_ds["r_name"] == var3)]
        region_filtered = region_filtered.loc[:, ["r_regionkey"]]
        r_n_merged = nation_filtered.merge(
            region_filtered, left_on="n_regionkey", right_on="r_regionkey", how="inner"
        )
        r_n_merged = r_n_merged.loc[:, ["n_nationkey", "n_name"]]
        supplier_filtered = supplier_ds.loc[
            :,
            [
                "s_suppkey",
                "s_name",
                "s_address",
                "s_nationkey",
                "s_phone",
                "s_acctbal",
                "s_comment",
            ],
        ]
        s_r_n_merged = r_n_merged.merge(
            supplier_filtered,
            left_on="n_nationkey",
            right_on="s_nationkey",
            how="inner",
        )
        s_r_n_merged = s_r_n_merged.loc[
            :,
            [
                "n_name",
                "s_suppkey",
                "s_name",
                "s_address",
                "s_phone",
                "s_acctbal",
                "s_comment",
            ],
        ]
        partsupp_filtered = part_supp_ds.loc[
            :, ["ps_partkey", "ps_suppkey", "ps_supplycost"]
        ]
        ps_s_r_n_merged = s_r_n_merged.merge(
            partsupp_filtered, left_on="s_suppkey", right_on="ps_suppkey", how="inner"
        )
        ps_s_r_n_merged = ps_s_r_n_merged.loc[
            :,
            [
                "n_name",
                "s_name",
                "s_address",
                "s_phone",
                "s_acctbal",
                "s_comment",
                "ps_partkey",
                "ps_supplycost",
            ],
        ]
        part_filtered = part_ds.loc[:, ["p_partkey", "p_mfgr", "p_size", "p_type"]]
        part_filtered = part_filtered[
            (part_filtered["p_size"] == var1)
            & (part_filtered["p_type"].str.endswith(var2))
        ]
        part_filtered = part_filtered.loc[:, ["p_partkey", "p_mfgr"]]
        merged_df = part_filtered.merge(
            ps_s_r_n_merged, left_on="p_partkey", right_on="ps_partkey", how="inner"
        )
        merged_df = merged_df.loc[
            :,
            [
                "n_name",
                "s_name",
                "s_address",
                "s_phone",
                "s_acctbal",
                "s_comment",
                "ps_supplycost",
                "p_partkey",
                "p_mfgr",
            ],
        ]
        min_values = merged_df.groupby("p_partkey", as_index=False)[
            "ps_supplycost"
        ].min()
        min_values.columns = ["P_PARTKEY_CPY", "MIN_SUPPLYCOST"]
        merged_df = merged_df.merge(
            min_values,
            left_on=["p_partkey", "ps_supplycost"],
            right_on=["P_PARTKEY_CPY", "MIN_SUPPLYCOST"],
            how="inner",
        )
        result_df = merged_df.loc[
            :,
            [
                "s_acctbal",
                "s_name",
                "n_name",
                "p_partkey",
                "p_mfgr",
                "s_address",
                "s_phone",
                "s_comment",
            ],
        ]
        result_df = result_df.sort_values(
            by=[
                "s_acctbal",
                "n_name",
                "s_name",
                "p_partkey",
            ],
            ascending=[
                False,
                True,
                True,
                True,
            ],
        )[:100]

        return result_df

    utils.run_query(Q_NUM, query)


if __name__ == "__main__":
    q()
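The `min_values` merge above is the dataframe counterpart of the correlated `min(ps_supplycost)` subquery in duckdb_queries/q2.py: compute per-part minima, then inner-join back on (key, cost) so only minimum-cost rows survive. A self-contained sketch with toy data (plain pandas here; fireducks.pandas exposes the same API):

import pandas as pd

df = pd.DataFrame(
    {"p_partkey": [1, 1, 2], "ps_supplycost": [10.0, 7.5, 3.0], "s_name": ["a", "b", "c"]}
)
min_values = df.groupby("p_partkey", as_index=False)["ps_supplycost"].min()
min_values.columns = ["P_PARTKEY_CPY", "MIN_SUPPLYCOST"]
out = df.merge(
    min_values,
    left_on=["p_partkey", "ps_supplycost"],
    right_on=["P_PARTKEY_CPY", "MIN_SUPPLYCOST"],
    how="inner",
)
# out keeps only (1, 7.5, "b") and (2, 3.0, "c"): the cheapest supplier per part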
fireducks_queries/q3.py
ADDED
@@ -0,0 +1,62 @@
from datetime import datetime

from fireducks_queries import utils

Q_NUM = 3


def q():
    var1 = var2 = datetime(1995, 3, 15)
    var3 = "BUILDING"

    customer_ds = utils.get_customer_ds
    line_item_ds = utils.get_line_item_ds
    orders_ds = utils.get_orders_ds

    # first call one time to cache in case we don't include the IO times
    customer_ds()
    line_item_ds()
    orders_ds()

    def query():
        nonlocal customer_ds
        nonlocal line_item_ds
        nonlocal orders_ds
        customer_ds = customer_ds()
        line_item_ds = line_item_ds()
        orders_ds = orders_ds()

        lineitem_filtered = line_item_ds.loc[
            :, ["l_orderkey", "l_extendedprice", "l_discount", "l_shipdate"]
        ]
        orders_filtered = orders_ds.loc[
            :, ["o_orderkey", "o_custkey", "o_orderdate", "o_shippriority"]
        ]
        customer_filtered = customer_ds.loc[:, ["c_mktsegment", "c_custkey"]]
        lsel = lineitem_filtered.l_shipdate > var1
        osel = orders_filtered.o_orderdate < var2
        csel = customer_filtered.c_mktsegment == var3
        flineitem = lineitem_filtered[lsel]
        forders = orders_filtered[osel]
        fcustomer = customer_filtered[csel]
        jn1 = fcustomer.merge(forders, left_on="c_custkey", right_on="o_custkey")
        jn2 = jn1.merge(flineitem, left_on="o_orderkey", right_on="l_orderkey")
        jn2["revenue"] = jn2.l_extendedprice * (1 - jn2.l_discount)

        total = (
            jn2.groupby(
                ["l_orderkey", "o_orderdate", "o_shippriority"], as_index=False
            )["revenue"]
            .sum()
            .sort_values(["revenue"], ascending=False)
        )
        result_df = total[:10].loc[
            :, ["l_orderkey", "revenue", "o_orderdate", "o_shippriority"]
        ]
        return result_df

    utils.run_query(Q_NUM, query)


if __name__ == "__main__":
    q()
fireducks_queries/q4.py
ADDED
@@ -0,0 +1,42 @@
from datetime import datetime

from fireducks_queries import utils

Q_NUM = 4


def q():
    date1 = datetime(1993, 10, 1)
    date2 = datetime(1993, 7, 1)

    line_item_ds = utils.get_line_item_ds
    orders_ds = utils.get_orders_ds

    # first call one time to cache in case we don't include the IO times
    line_item_ds()
    orders_ds()

    def query():
        nonlocal line_item_ds
        nonlocal orders_ds
        line_item_ds = line_item_ds()
        orders_ds = orders_ds()

        lsel = line_item_ds.l_commitdate < line_item_ds.l_receiptdate
        osel = (orders_ds.o_orderdate < date1) & (orders_ds.o_orderdate >= date2)
        flineitem = line_item_ds[lsel]
        forders = orders_ds[osel]
        jn = forders[forders["o_orderkey"].isin(flineitem["l_orderkey"])]
        result_df = (
            jn.groupby("o_orderpriority", as_index=False)["o_orderkey"]
            .count()
            .sort_values(["o_orderpriority"])
            .rename(columns={"o_orderkey": "order_count"})
        )
        return result_df

    utils.run_query(Q_NUM, query)


if __name__ == "__main__":
    q()
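The `isin` call above acts as a semi-join, the dataframe analogue of the SQL `exists` subquery in duckdb_queries/q4.py: each order is kept at most once, however many late line items match it. Toy example:

import pandas as pd

orders = pd.DataFrame({"o_orderkey": [1, 2, 3], "o_orderpriority": ["1-URGENT"] * 3})
late = pd.DataFrame({"l_orderkey": [2, 3, 3]})  # duplicates on purpose
jn = orders[orders["o_orderkey"].isin(late["l_orderkey"])]
# jn contains orders 2 and 3 exactly once each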
fireducks_queries/q5.py
ADDED
@@ -0,0 +1,64 @@
import datetime

from fireducks_queries import utils

Q_NUM = 5


def q():
    date1 = datetime.datetime.strptime("1994-01-01", "%Y-%m-%d")
    date2 = datetime.datetime.strptime("1995-01-01", "%Y-%m-%d")

    region_ds = utils.get_region_ds
    nation_ds = utils.get_nation_ds
    customer_ds = utils.get_customer_ds
    line_item_ds = utils.get_line_item_ds
    orders_ds = utils.get_orders_ds
    supplier_ds = utils.get_supplier_ds

    # first call one time to cache in case we don't include the IO times
    region_ds()
    nation_ds()
    customer_ds()
    line_item_ds()
    orders_ds()
    supplier_ds()

    def query():
        nonlocal region_ds
        nonlocal nation_ds
        nonlocal customer_ds
        nonlocal line_item_ds
        nonlocal orders_ds
        nonlocal supplier_ds

        region_ds = region_ds()
        nation_ds = nation_ds()
        customer_ds = customer_ds()
        line_item_ds = line_item_ds()
        orders_ds = orders_ds()
        supplier_ds = supplier_ds()

        rsel = region_ds.r_name == "ASIA"
        osel = (orders_ds.o_orderdate >= date1) & (orders_ds.o_orderdate < date2)
        forders = orders_ds[osel]
        fregion = region_ds[rsel]
        jn1 = fregion.merge(nation_ds, left_on="r_regionkey", right_on="n_regionkey")
        jn2 = jn1.merge(customer_ds, left_on="n_nationkey", right_on="c_nationkey")
        jn3 = jn2.merge(forders, left_on="c_custkey", right_on="o_custkey")
        jn4 = jn3.merge(line_item_ds, left_on="o_orderkey", right_on="l_orderkey")
        jn5 = supplier_ds.merge(
            jn4,
            left_on=["s_suppkey", "s_nationkey"],
            right_on=["l_suppkey", "n_nationkey"],
        )
        jn5["revenue"] = jn5.l_extendedprice * (1.0 - jn5.l_discount)
        gb = jn5.groupby("n_name", as_index=False)["revenue"].sum()
        result_df = gb.sort_values("revenue", ascending=False)
        return result_df

    utils.run_query(Q_NUM, query)


if __name__ == "__main__":
    q()
fireducks_queries/q6.py
ADDED
@@ -0,0 +1,44 @@
from datetime import datetime

import fireducks.pandas as pd

from fireducks_queries import utils

Q_NUM = 6


def q():
    date1 = datetime(1994, 1, 1)
    date2 = datetime(1995, 1, 1)
    var3 = 24

    line_item_ds = utils.get_line_item_ds

    # first call one time to cache in case we don't include the IO times
    line_item_ds()

    def query():
        nonlocal line_item_ds
        line_item_ds = line_item_ds()

        lineitem_filtered = line_item_ds.loc[
            :, ["l_quantity", "l_extendedprice", "l_discount", "l_shipdate"]
        ]
        sel = (
            (lineitem_filtered.l_shipdate >= date1)
            & (lineitem_filtered.l_shipdate < date2)
            & (lineitem_filtered.l_discount >= 0.05)
            & (lineitem_filtered.l_discount <= 0.07)
            & (lineitem_filtered.l_quantity < var3)
        )

        flineitem = lineitem_filtered[sel]
        result_value = (flineitem.l_extendedprice * flineitem.l_discount).sum()
        result_df = pd.DataFrame({"revenue": [result_value]})
        return result_df

    utils.run_query(Q_NUM, query)


if __name__ == "__main__":
    q()
fireducks_queries/q7.py
ADDED
@@ -0,0 +1,137 @@
from datetime import datetime

import fireducks.pandas as pd

from fireducks_queries import utils

Q_NUM = 7


def q():
    nation_ds = utils.get_nation_ds
    customer_ds = utils.get_customer_ds
    line_item_ds = utils.get_line_item_ds
    orders_ds = utils.get_orders_ds
    supplier_ds = utils.get_supplier_ds

    # first call one time to cache in case we don't include the IO times
    nation_ds()
    customer_ds()
    line_item_ds()
    orders_ds()
    supplier_ds()

    def query():
        nonlocal nation_ds
        nonlocal customer_ds
        nonlocal line_item_ds
        nonlocal orders_ds
        nonlocal supplier_ds

        nation_ds = nation_ds()
        customer_ds = customer_ds()
        line_item_ds = line_item_ds()
        orders_ds = orders_ds()
        supplier_ds = supplier_ds()

        lineitem_filtered = line_item_ds[
            (line_item_ds["l_shipdate"] >= datetime(1995, 1, 1))
            & (line_item_ds["l_shipdate"] < datetime(1997, 1, 1))
        ]
        lineitem_filtered["l_year"] = lineitem_filtered["l_shipdate"].dt.year
        lineitem_filtered["revenue"] = lineitem_filtered["l_extendedprice"] * (
            1.0 - lineitem_filtered["l_discount"]
        )
        lineitem_filtered = lineitem_filtered.loc[
            :, ["l_orderkey", "l_suppkey", "l_year", "revenue"]
        ]
        supplier_filtered = supplier_ds.loc[:, ["s_suppkey", "s_nationkey"]]
        orders_filtered = orders_ds.loc[:, ["o_orderkey", "o_custkey"]]
        customer_filtered = customer_ds.loc[:, ["c_custkey", "c_nationkey"]]
        n1 = nation_ds[(nation_ds["n_name"] == "FRANCE")].loc[
            :, ["n_nationkey", "n_name"]
        ]
        n2 = nation_ds[(nation_ds["n_name"] == "GERMANY")].loc[
            :, ["n_nationkey", "n_name"]
        ]

        # ----- do nation 1 -----
        N1_C = customer_filtered.merge(
            n1, left_on="c_nationkey", right_on="n_nationkey", how="inner"
        )
        N1_C = N1_C.drop(columns=["c_nationkey", "n_nationkey"]).rename(
            columns={"n_name": "cust_nation"}
        )
        N1_C_O = N1_C.merge(
            orders_filtered, left_on="c_custkey", right_on="o_custkey", how="inner"
        )
        N1_C_O = N1_C_O.drop(columns=["c_custkey", "o_custkey"])

        N2_S = supplier_filtered.merge(
            n2, left_on="s_nationkey", right_on="n_nationkey", how="inner"
        )
        N2_S = N2_S.drop(columns=["s_nationkey", "n_nationkey"]).rename(
            columns={"n_name": "supp_nation"}
        )
        N2_S_L = N2_S.merge(
            lineitem_filtered, left_on="s_suppkey", right_on="l_suppkey", how="inner"
        )
        N2_S_L = N2_S_L.drop(columns=["s_suppkey", "l_suppkey"])

        total1 = N1_C_O.merge(
            N2_S_L, left_on="o_orderkey", right_on="l_orderkey", how="inner"
        )
        total1 = total1.drop(columns=["o_orderkey", "l_orderkey"])

        # ----- do nation 2 ----- (same as nation 1 section but with nation 2)
        N2_C = customer_filtered.merge(
            n2, left_on="c_nationkey", right_on="n_nationkey", how="inner"
        )
        N2_C = N2_C.drop(columns=["c_nationkey", "n_nationkey"]).rename(
            columns={"n_name": "cust_nation"}
        )
        N2_C_O = N2_C.merge(
            orders_filtered, left_on="c_custkey", right_on="o_custkey", how="inner"
        )
        N2_C_O = N2_C_O.drop(columns=["c_custkey", "o_custkey"])

        N1_S = supplier_filtered.merge(
            n1, left_on="s_nationkey", right_on="n_nationkey", how="inner"
        )
        N1_S = N1_S.drop(columns=["s_nationkey", "n_nationkey"]).rename(
            columns={"n_name": "supp_nation"}
        )
        N1_S_L = N1_S.merge(
            lineitem_filtered, left_on="s_suppkey", right_on="l_suppkey", how="inner"
        )
        N1_S_L = N1_S_L.drop(columns=["s_suppkey", "l_suppkey"])

        total2 = N2_C_O.merge(
            N1_S_L, left_on="o_orderkey", right_on="l_orderkey", how="inner"
        )
        total2 = total2.drop(columns=["o_orderkey", "l_orderkey"])

        # concat results
        total = pd.concat([total1, total2])
        result_df = (
            total.groupby(["supp_nation", "cust_nation", "l_year"])
            .revenue.agg("sum")
            .reset_index()
        )
        result_df.columns = ["supp_nation", "cust_nation", "l_year", "revenue"]

        result_df = result_df.sort_values(
            by=["supp_nation", "cust_nation", "l_year"],
            ascending=[
                True,
                True,
                True,
            ],
        )
        return result_df

    utils.run_query(Q_NUM, query)


if __name__ == "__main__":
    q()
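The two symmetric merge chains in this q7 implement the OR in the SQL predicate ((FRANCE, GERMANY) or (GERMANY, FRANCE)): concatenating the two directional results and re-aggregating is the dataframe equivalent of a union feeding a single group-by. A minimal sketch of that final step, with toy values:

import pandas as pd

total1 = pd.DataFrame({"supp_nation": ["FRANCE"], "cust_nation": ["GERMANY"],
                       "l_year": [1995], "revenue": [10.0]})
total2 = pd.DataFrame({"supp_nation": ["GERMANY"], "cust_nation": ["FRANCE"],
                       "l_year": [1995], "revenue": [7.0]})
total = pd.concat([total1, total2])  # union of the two join directions
result = (
    total.groupby(["supp_nation", "cust_nation", "l_year"])
    .revenue.agg("sum")
    .reset_index()
)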