Jon Solow
committed on
Commit
·
dd64f21
1
Parent(s):
537e6f6
Add everything from yfdashboard
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- .devcontainer/devcontainer.json +49 -0
- .devcontainer/docker-compose.yml +27 -0
- .gitignore +166 -0
- Dockerfile +37 -0
- README.md +48 -5
- dev-requirements.txt +463 -0
- docker-compose.yml +11 -0
- pyproject.toml +48 -0
- regenerate_requirements.sh +19 -0
- requirements.txt +167 -0
- src/.streamlit/config.toml +2 -0
- src/Home.py +27 -0
- src/config.py +3 -0
- src/domain/__init__.py +0 -0
- src/domain/conferences.py +17 -0
- src/domain/divisions.py +24 -0
- src/domain/teams.py +286 -0
- src/login_component.py +53 -0
- src/maximum_roster_strategy/__init__.py +0 -0
- src/maximum_roster_strategy/data_loader.py +19 -0
- src/page_selector.py +40 -0
- src/pages/10_Player_News.py +53 -0
- src/pages/11_Next_Gen_Stats.py +70 -0
- src/pages/1_Keepers.py +178 -0
- src/pages/3_ECR.py +88 -0
- src/pages/4_Practice_Reports.py +66 -0
- src/pages/50_League_Simulation.py +26 -0
- src/pages/5_Targets.py +56 -0
- src/pages/6_Redzone_Opportunities.py +56 -0
- src/pages/7_Snap_Counts.py +69 -0
- src/pages/80_Maximum_Roster_Strategy.py +165 -0
- src/pages/8_FTN_Charting.py +40 -0
- src/pages/98_Load_Data.py +35 -0
- src/pages/99_Keeper_Rules.py +37 -0
- src/pages/9_Team_Formations.py +83 -0
- src/queries/__init__.py +0 -0
- src/queries/footballguys/__init__.py +0 -0
- src/queries/footballguys/constants.py +34 -0
- src/queries/footballguys/helpers.py +130 -0
- src/queries/footballguys/refresh.py +46 -0
- src/queries/nbcsports/player_news.py +52 -0
- src/queries/nfl_teams/__init__.py +0 -0
- src/queries/nfl_teams/practice_reports.py +123 -0
- src/queries/nflverse/__init__.py +0 -0
- src/queries/nflverse/github_data.py +123 -0
- src/shared_page.py +22 -0
- src/start.sh +3 -0
- src/streamlit_filter.py +102 -0
- src/style.css +150 -0
- tests/contract/test_nbcsports_player_news.py +13 -0
.devcontainer/devcontainer.json
ADDED
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
|
2 |
+
// README at: https://github.com/devcontainers/templates/tree/main/src/docker-existing-docker-compose
|
3 |
+
{
|
4 |
+
"name": "Existing Docker Compose (Extend)",
|
5 |
+
|
6 |
+
// Update the 'dockerComposeFile' list if you have more compose files or use different names.
|
7 |
+
// The .devcontainer/docker-compose.yml file contains any overrides you need/want to make.
|
8 |
+
"dockerComposeFile": [
|
9 |
+
"../docker-compose.yml",
|
10 |
+
"docker-compose.yml"
|
11 |
+
],
|
12 |
+
|
13 |
+
// The 'service' property is the name of the service for the container that VS Code should
|
14 |
+
// use. Update this value and .devcontainer/docker-compose.yml to the real service name.
|
15 |
+
"service": "streamlit",
|
16 |
+
|
17 |
+
// The optional 'workspaceFolder' property is the path VS Code should open by default when
|
18 |
+
// connected. This is typically a file mount in .devcontainer/docker-compose.yml
|
19 |
+
"workspaceFolder": "/app",
|
20 |
+
"customizations": {
|
21 |
+
"vscode": {
|
22 |
+
"extensions": [
|
23 |
+
"ms-python.python",
|
24 |
+
"ms-toolsai.jupyter"
|
25 |
+
]
|
26 |
+
}
|
27 |
+
}
|
28 |
+
|
29 |
+
// Features to add to the dev container. More info: https://containers.dev/features.
|
30 |
+
// "features": {},
|
31 |
+
|
32 |
+
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
33 |
+
// "forwardPorts": [],
|
34 |
+
|
35 |
+
// Uncomment the next line if you want start specific services in your Docker Compose config.
|
36 |
+
// "runServices": [],
|
37 |
+
|
38 |
+
// Uncomment the next line if you want to keep your containers running after VS Code shuts down.
|
39 |
+
// "shutdownAction": "none",
|
40 |
+
|
41 |
+
// Uncomment the next line to run commands after the container is created.
|
42 |
+
// "postCreateCommand": "cat /etc/os-release",
|
43 |
+
|
44 |
+
// Configure tool-specific properties.
|
45 |
+
// "customizations": {},
|
46 |
+
|
47 |
+
// Uncomment to connect as an existing user other than the container default. More info: https://aka.ms/dev-containers-non-root.
|
48 |
+
// "remoteUser": "devcontainer"
|
49 |
+
}
|
.devcontainer/docker-compose.yml
ADDED
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
version: '3.8'
|
2 |
+
services:
|
3 |
+
# Update this to the name of the service you want to work with in your docker-compose.yml file
|
4 |
+
streamlit:
|
5 |
+
# Uncomment if you want to override the service's Dockerfile to one in the .devcontainer
|
6 |
+
# folder. Note that the path of the Dockerfile and context is relative to the *primary*
|
7 |
+
# docker-compose.yml file (the first in the devcontainer.json "dockerComposeFile"
|
8 |
+
# array). The sample below assumes your primary file is in the root of your project.
|
9 |
+
#
|
10 |
+
build:
|
11 |
+
dockerfile: ./Dockerfile
|
12 |
+
context: ./
|
13 |
+
target: development
|
14 |
+
|
15 |
+
volumes:
|
16 |
+
# Update this to wherever you want VS Code to mount the folder of your project
|
17 |
+
- .:/app:cached
|
18 |
+
|
19 |
+
# Uncomment the next four lines if you will use a ptrace-based debugger like C++, Go, and Rust.
|
20 |
+
# cap_add:
|
21 |
+
# - SYS_PTRACE
|
22 |
+
# security_opt:
|
23 |
+
# - seccomp:unconfined
|
24 |
+
|
25 |
+
# Overrides default command so things don't shut down after the process ends.
|
26 |
+
command: ["/bin/sh -c \"while sleep 1000; do :; done\""]
|
27 |
+
|
.gitignore
ADDED
@@ -0,0 +1,166 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Byte-compiled / optimized / DLL files
|
2 |
+
__pycache__/
|
3 |
+
*.py[cod]
|
4 |
+
*$py.class
|
5 |
+
|
6 |
+
# C extensions
|
7 |
+
*.so
|
8 |
+
|
9 |
+
# Distribution / packaging
|
10 |
+
.Python
|
11 |
+
build/
|
12 |
+
develop-eggs/
|
13 |
+
dist/
|
14 |
+
downloads/
|
15 |
+
eggs/
|
16 |
+
.eggs/
|
17 |
+
lib/
|
18 |
+
lib64/
|
19 |
+
parts/
|
20 |
+
sdist/
|
21 |
+
var/
|
22 |
+
wheels/
|
23 |
+
share/python-wheels/
|
24 |
+
*.egg-info/
|
25 |
+
.installed.cfg
|
26 |
+
*.egg
|
27 |
+
MANIFEST
|
28 |
+
|
29 |
+
# PyInstaller
|
30 |
+
# Usually these files are written by a python script from a template
|
31 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
32 |
+
*.manifest
|
33 |
+
*.spec
|
34 |
+
|
35 |
+
# Installer logs
|
36 |
+
pip-log.txt
|
37 |
+
pip-delete-this-directory.txt
|
38 |
+
|
39 |
+
# Unit test / coverage reports
|
40 |
+
htmlcov/
|
41 |
+
.tox/
|
42 |
+
.nox/
|
43 |
+
.coverage
|
44 |
+
.coverage.*
|
45 |
+
.cache
|
46 |
+
nosetests.xml
|
47 |
+
coverage.xml
|
48 |
+
*.cover
|
49 |
+
*.py,cover
|
50 |
+
.hypothesis/
|
51 |
+
.pytest_cache/
|
52 |
+
cover/
|
53 |
+
|
54 |
+
# Translations
|
55 |
+
*.mo
|
56 |
+
*.pot
|
57 |
+
|
58 |
+
# Django stuff:
|
59 |
+
*.log
|
60 |
+
local_settings.py
|
61 |
+
db.sqlite3
|
62 |
+
db.sqlite3-journal
|
63 |
+
|
64 |
+
# Flask stuff:
|
65 |
+
instance/
|
66 |
+
.webassets-cache
|
67 |
+
|
68 |
+
# Scrapy stuff:
|
69 |
+
.scrapy
|
70 |
+
|
71 |
+
# Sphinx documentation
|
72 |
+
docs/_build/
|
73 |
+
|
74 |
+
# PyBuilder
|
75 |
+
.pybuilder/
|
76 |
+
target/
|
77 |
+
|
78 |
+
# Jupyter Notebook
|
79 |
+
.ipynb_checkpoints
|
80 |
+
|
81 |
+
# IPython
|
82 |
+
profile_default/
|
83 |
+
ipython_config.py
|
84 |
+
|
85 |
+
# pyenv
|
86 |
+
# For a library or package, you might want to ignore these files since the code is
|
87 |
+
# intended to run in multiple environments; otherwise, check them in:
|
88 |
+
# .python-version
|
89 |
+
|
90 |
+
# pipenv
|
91 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
92 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
93 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
94 |
+
# install all needed dependencies.
|
95 |
+
#Pipfile.lock
|
96 |
+
|
97 |
+
# poetry
|
98 |
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
99 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
100 |
+
# commonly ignored for libraries.
|
101 |
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
102 |
+
#poetry.lock
|
103 |
+
|
104 |
+
# pdm
|
105 |
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
106 |
+
#pdm.lock
|
107 |
+
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
108 |
+
# in version control.
|
109 |
+
# https://pdm.fming.dev/#use-with-ide
|
110 |
+
.pdm.toml
|
111 |
+
|
112 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
113 |
+
__pypackages__/
|
114 |
+
|
115 |
+
# Celery stuff
|
116 |
+
celerybeat-schedule
|
117 |
+
celerybeat.pid
|
118 |
+
|
119 |
+
# SageMath parsed files
|
120 |
+
*.sage.py
|
121 |
+
|
122 |
+
# Environments
|
123 |
+
.env
|
124 |
+
.venv
|
125 |
+
env/
|
126 |
+
venv/
|
127 |
+
ENV/
|
128 |
+
env.bak/
|
129 |
+
venv.bak/
|
130 |
+
|
131 |
+
# Spyder project settings
|
132 |
+
.spyderproject
|
133 |
+
.spyproject
|
134 |
+
|
135 |
+
# Rope project settings
|
136 |
+
.ropeproject
|
137 |
+
|
138 |
+
# mkdocs documentation
|
139 |
+
/site
|
140 |
+
|
141 |
+
# mypy
|
142 |
+
.mypy_cache/
|
143 |
+
.dmypy.json
|
144 |
+
dmypy.json
|
145 |
+
|
146 |
+
# Pyre type checker
|
147 |
+
.pyre/
|
148 |
+
|
149 |
+
# pytype static type analyzer
|
150 |
+
.pytype/
|
151 |
+
|
152 |
+
# Cython debug symbols
|
153 |
+
cython_debug/
|
154 |
+
|
155 |
+
# PyCharm
|
156 |
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
157 |
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
158 |
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
159 |
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
160 |
+
#.idea/
|
161 |
+
|
162 |
+
# Streamlit
|
163 |
+
secrets.toml
|
164 |
+
|
165 |
+
.ipynb_checkpoints
|
166 |
+
*.ipynb
|
Dockerfile
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
FROM python:3.11-slim as base
|
2 |
+
|
3 |
+
ENV APP_BASE_PATH="/app"
|
4 |
+
ENV APP_SRC_PATH=${APP_BASE_PATH}/src
|
5 |
+
ENV PYTHONPATH="${APP_SRC_PATH}:${PYTHONPATH}"
|
6 |
+
WORKDIR $APP_BASE_PATH
|
7 |
+
|
8 |
+
RUN apt-get update && apt-get install -y \
|
9 |
+
build-essential \
|
10 |
+
curl \
|
11 |
+
software-properties-common \
|
12 |
+
git \
|
13 |
+
&& rm -rf /var/lib/apt/lists/*
|
14 |
+
|
15 |
+
RUN pip3 install pip-tools
|
16 |
+
COPY ./pyproject.toml .
|
17 |
+
|
18 |
+
FROM base as pip-service
|
19 |
+
|
20 |
+
COPY ./requirements.txt .
|
21 |
+
RUN pip3 install -r requirements.txt
|
22 |
+
|
23 |
+
FROM pip-service as service-setup
|
24 |
+
EXPOSE 8501
|
25 |
+
HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
|
26 |
+
ENTRYPOINT ["./start.sh"]
|
27 |
+
|
28 |
+
FROM pip-service as service
|
29 |
+
WORKDIR $APP_SRC_PATH
|
30 |
+
COPY ./src .
|
31 |
+
|
32 |
+
FROM service-setup as development
|
33 |
+
WORKDIR $APP_BASE_PATH
|
34 |
+
COPY ./dev-requirements.txt .
|
35 |
+
RUN pip3 install -r dev-requirements.txt
|
36 |
+
|
37 |
+
WORKDIR $APP_SRC_PATH
|
README.md
CHANGED
@@ -1,12 +1,55 @@
|
|
1 |
---
|
2 |
-
title:
|
3 |
-
emoji:
|
4 |
-
colorFrom:
|
5 |
-
colorTo:
|
6 |
sdk: streamlit
|
7 |
sdk_version: 1.29.0
|
8 |
-
app_file:
|
9 |
pinned: false
|
10 |
---
|
11 |
|
12 |
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
---
|
2 |
+
title: YFDash
|
3 |
+
emoji: 🏃
|
4 |
+
colorFrom: indigo
|
5 |
+
colorTo: blue
|
6 |
sdk: streamlit
|
7 |
sdk_version: 1.29.0
|
8 |
+
app_file: src/Home.py
|
9 |
pinned: false
|
10 |
---
|
11 |
|
12 |
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
13 |
+
|
14 |
+
# nfl-playoff-challenge-steamlit Streamlit Application
|
15 |
+
|
16 |
+
Template Repo for Streamlit Application - https://github.com/JonSolow/streamlit-template
|
17 |
+
|
18 |
+
Here are some of the features of the template:
|
19 |
+
|
20 |
+
- VSCode .devcontainer for local development: [Documentation](https://code.visualstudio.com/docs/devcontainers/containers)
|
21 |
+
- docker-compose.yml
|
22 |
+
- Dockerfile
|
23 |
+
- Linting Configuration [TO-DO] (allows for clean code and quicker detection of possible bugs as one of the first steps in Shift-left testing)
|
24 |
+
- [Black](https://black.readthedocs.io/en/stable/index.html)
|
25 |
+
- [ruff](https://beta.ruff.rs/docs/)
|
26 |
+
- [mypy](https://mypy.readthedocs.io/en/stable/index.html)
|
27 |
+
- Unit Tests
|
28 |
+
- [pytest](https://docs.pytest.org/)
|
29 |
+
|
30 |
+
# Start Here to Develop
|
31 |
+
|
32 |
+
1. Prerequisites
|
33 |
+
|
34 |
+
- Install [Visual Studio Code](https://code.visualstudio.com/)
|
35 |
+
- Install [Visual Studio Code Extension - Dev containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)
|
36 |
+
- Install [Docker](https://www.docker.com/)
|
37 |
+
|
38 |
+
2. Confirm Docker is installed by executing command `docker -v`
|
39 |
+
3. Open VSCode to a New Window and open this repository's directory
|
40 |
+
4. You may see a notification that the Folder contains a Dev Container configuration file. If so, click on "Reopen in Container"
|
41 |
+
|
42 |
+
- If you do not see this notification, press `F1` key and begin typing the following until you can see the option "Dev Containers: Rebuild and reopen in Container".
|
43 |
+
- This action will reopen the VSCode within a Docker container suitable to develop and locally run the application.
|
44 |
+
|
45 |
+
5. The dev container will start up the Streamlit application.
|
46 |
+
|
47 |
+
- To access the application, navigate to http://localhost:8501
|
48 |
+
- The container forwards the port 8501 where the Streamlit application is hosted
|
49 |
+
- Any changes made to the code will be reflected in the Streamlit application when you refresh.
|
50 |
+
|
51 |
+
6. Now inside the VSCode dev container, to run tests, execute `./tests/run_tests.sh`
|
52 |
+
|
53 |
+
- This script has an optional argument `-f` for "fix mode" which allows for configuration of black and ruff to automatically apply fixes.
|
54 |
+
|
55 |
+
7. As functions are added to the application, unit tests can/should be added in `tests/unit`, with existing support utilizing the `pytest` library.
|
dev-requirements.txt
ADDED
@@ -0,0 +1,463 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#
|
2 |
+
# This file is autogenerated by pip-compile with Python 3.11
|
3 |
+
# by the following command:
|
4 |
+
#
|
5 |
+
# pip-compile --extra=dev --output-file=dev-requirements.txt pyproject.toml
|
6 |
+
#
|
7 |
+
altair==5.2.0
|
8 |
+
# via streamlit
|
9 |
+
annotated-types==0.6.0
|
10 |
+
# via pydantic
|
11 |
+
anyio==4.1.0
|
12 |
+
# via
|
13 |
+
# httpcore
|
14 |
+
# jupyter-server
|
15 |
+
argon2-cffi==23.1.0
|
16 |
+
# via jupyter-server
|
17 |
+
argon2-cffi-bindings==21.2.0
|
18 |
+
# via argon2-cffi
|
19 |
+
arrow==1.3.0
|
20 |
+
# via isoduration
|
21 |
+
asttokens==2.4.1
|
22 |
+
# via stack-data
|
23 |
+
async-lru==2.0.4
|
24 |
+
# via jupyterlab
|
25 |
+
attrs==23.1.0
|
26 |
+
# via
|
27 |
+
# jsonschema
|
28 |
+
# referencing
|
29 |
+
babel==2.13.1
|
30 |
+
# via jupyterlab-server
|
31 |
+
beautifulsoup4==4.12.2
|
32 |
+
# via
|
33 |
+
# nfl-playoff-challenge-steamlit (pyproject.toml)
|
34 |
+
# nbconvert
|
35 |
+
black==23.11.0
|
36 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
37 |
+
bleach==6.1.0
|
38 |
+
# via nbconvert
|
39 |
+
blinker==1.7.0
|
40 |
+
# via streamlit
|
41 |
+
cachetools==5.3.2
|
42 |
+
# via streamlit
|
43 |
+
certifi==2023.11.17
|
44 |
+
# via
|
45 |
+
# httpcore
|
46 |
+
# httpx
|
47 |
+
# requests
|
48 |
+
cffi==1.16.0
|
49 |
+
# via argon2-cffi-bindings
|
50 |
+
charset-normalizer==3.3.2
|
51 |
+
# via requests
|
52 |
+
click==8.1.7
|
53 |
+
# via
|
54 |
+
# black
|
55 |
+
# streamlit
|
56 |
+
comm==0.2.0
|
57 |
+
# via
|
58 |
+
# ipykernel
|
59 |
+
# ipywidgets
|
60 |
+
debugpy==1.8.0
|
61 |
+
# via ipykernel
|
62 |
+
decorator==5.1.1
|
63 |
+
# via ipython
|
64 |
+
defusedxml==0.7.1
|
65 |
+
# via nbconvert
|
66 |
+
duckdb==0.9.2
|
67 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
68 |
+
executing==2.0.1
|
69 |
+
# via stack-data
|
70 |
+
fastjsonschema==2.19.0
|
71 |
+
# via nbformat
|
72 |
+
fqdn==1.5.1
|
73 |
+
# via jsonschema
|
74 |
+
gitdb==4.0.11
|
75 |
+
# via gitpython
|
76 |
+
gitpython==3.1.40
|
77 |
+
# via streamlit
|
78 |
+
h11==0.14.0
|
79 |
+
# via httpcore
|
80 |
+
html5lib==1.1
|
81 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
82 |
+
httpcore==0.17.3
|
83 |
+
# via httpx
|
84 |
+
httpx==0.24.1
|
85 |
+
# via httpx-oauth
|
86 |
+
httpx-oauth==0.13.0
|
87 |
+
# via streamlit-oauth
|
88 |
+
idna==3.6
|
89 |
+
# via
|
90 |
+
# anyio
|
91 |
+
# httpx
|
92 |
+
# jsonschema
|
93 |
+
# requests
|
94 |
+
importlib-metadata==6.9.0
|
95 |
+
# via streamlit
|
96 |
+
iniconfig==2.0.0
|
97 |
+
# via pytest
|
98 |
+
ipykernel==6.27.1
|
99 |
+
# via
|
100 |
+
# jupyter
|
101 |
+
# jupyter-console
|
102 |
+
# jupyterlab
|
103 |
+
# qtconsole
|
104 |
+
ipython==8.18.1
|
105 |
+
# via
|
106 |
+
# ipykernel
|
107 |
+
# ipywidgets
|
108 |
+
# jupyter-console
|
109 |
+
ipywidgets==8.1.1
|
110 |
+
# via jupyter
|
111 |
+
isoduration==20.11.0
|
112 |
+
# via jsonschema
|
113 |
+
jedi==0.19.1
|
114 |
+
# via ipython
|
115 |
+
jinja2==3.1.2
|
116 |
+
# via
|
117 |
+
# altair
|
118 |
+
# jupyter-server
|
119 |
+
# jupyterlab
|
120 |
+
# jupyterlab-server
|
121 |
+
# nbconvert
|
122 |
+
# pydeck
|
123 |
+
json5==0.9.14
|
124 |
+
# via jupyterlab-server
|
125 |
+
jsonpointer==2.4
|
126 |
+
# via jsonschema
|
127 |
+
jsonschema[format-nongpl]==4.20.0
|
128 |
+
# via
|
129 |
+
# altair
|
130 |
+
# jupyter-events
|
131 |
+
# jupyterlab-server
|
132 |
+
# nbformat
|
133 |
+
jsonschema-specifications==2023.11.2
|
134 |
+
# via jsonschema
|
135 |
+
jupyter==1.0.0
|
136 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
137 |
+
jupyter-client==8.6.0
|
138 |
+
# via
|
139 |
+
# ipykernel
|
140 |
+
# jupyter-console
|
141 |
+
# jupyter-server
|
142 |
+
# nbclient
|
143 |
+
# qtconsole
|
144 |
+
jupyter-console==6.6.3
|
145 |
+
# via jupyter
|
146 |
+
jupyter-core==5.5.0
|
147 |
+
# via
|
148 |
+
# ipykernel
|
149 |
+
# jupyter-client
|
150 |
+
# jupyter-console
|
151 |
+
# jupyter-server
|
152 |
+
# jupyterlab
|
153 |
+
# nbclient
|
154 |
+
# nbconvert
|
155 |
+
# nbformat
|
156 |
+
# qtconsole
|
157 |
+
jupyter-events==0.9.0
|
158 |
+
# via jupyter-server
|
159 |
+
jupyter-lsp==2.2.1
|
160 |
+
# via jupyterlab
|
161 |
+
jupyter-server==2.11.1
|
162 |
+
# via
|
163 |
+
# jupyter-lsp
|
164 |
+
# jupyterlab
|
165 |
+
# jupyterlab-server
|
166 |
+
# notebook
|
167 |
+
# notebook-shim
|
168 |
+
jupyter-server-terminals==0.4.4
|
169 |
+
# via jupyter-server
|
170 |
+
jupyterlab==4.0.9
|
171 |
+
# via notebook
|
172 |
+
jupyterlab-pygments==0.3.0
|
173 |
+
# via nbconvert
|
174 |
+
jupyterlab-server==2.25.2
|
175 |
+
# via
|
176 |
+
# jupyterlab
|
177 |
+
# notebook
|
178 |
+
jupyterlab-widgets==3.0.9
|
179 |
+
# via ipywidgets
|
180 |
+
lxml==4.9.3
|
181 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
182 |
+
lxml-stubs==0.4.0
|
183 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
184 |
+
markdown-it-py==3.0.0
|
185 |
+
# via rich
|
186 |
+
markupsafe==2.1.3
|
187 |
+
# via
|
188 |
+
# jinja2
|
189 |
+
# nbconvert
|
190 |
+
matplotlib-inline==0.1.6
|
191 |
+
# via
|
192 |
+
# ipykernel
|
193 |
+
# ipython
|
194 |
+
mdurl==0.1.2
|
195 |
+
# via markdown-it-py
|
196 |
+
mistune==3.0.2
|
197 |
+
# via nbconvert
|
198 |
+
mypy==1.7.1
|
199 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
200 |
+
mypy-extensions==1.0.0
|
201 |
+
# via
|
202 |
+
# black
|
203 |
+
# mypy
|
204 |
+
nbclient==0.9.0
|
205 |
+
# via nbconvert
|
206 |
+
nbconvert==7.11.0
|
207 |
+
# via
|
208 |
+
# jupyter
|
209 |
+
# jupyter-server
|
210 |
+
nbformat==5.9.2
|
211 |
+
# via
|
212 |
+
# jupyter-server
|
213 |
+
# nbclient
|
214 |
+
# nbconvert
|
215 |
+
nest-asyncio==1.5.8
|
216 |
+
# via ipykernel
|
217 |
+
notebook==7.0.6
|
218 |
+
# via jupyter
|
219 |
+
notebook-shim==0.2.3
|
220 |
+
# via
|
221 |
+
# jupyterlab
|
222 |
+
# notebook
|
223 |
+
numpy==1.26.2
|
224 |
+
# via
|
225 |
+
# nfl-playoff-challenge-steamlit (pyproject.toml)
|
226 |
+
# altair
|
227 |
+
# pandas
|
228 |
+
# pandas-stubs
|
229 |
+
# pyarrow
|
230 |
+
# pydeck
|
231 |
+
# streamlit
|
232 |
+
overrides==7.4.0
|
233 |
+
# via jupyter-server
|
234 |
+
packaging==23.2
|
235 |
+
# via
|
236 |
+
# altair
|
237 |
+
# black
|
238 |
+
# ipykernel
|
239 |
+
# jupyter-server
|
240 |
+
# jupyterlab
|
241 |
+
# jupyterlab-server
|
242 |
+
# nbconvert
|
243 |
+
# pytest
|
244 |
+
# qtconsole
|
245 |
+
# qtpy
|
246 |
+
# streamlit
|
247 |
+
pandas==2.1.3
|
248 |
+
# via
|
249 |
+
# nfl-playoff-challenge-steamlit (pyproject.toml)
|
250 |
+
# altair
|
251 |
+
# streamlit
|
252 |
+
pandas-stubs==2.1.1.230928
|
253 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
254 |
+
pandocfilters==1.5.0
|
255 |
+
# via nbconvert
|
256 |
+
parso==0.8.3
|
257 |
+
# via jedi
|
258 |
+
pathspec==0.11.2
|
259 |
+
# via black
|
260 |
+
pexpect==4.9.0
|
261 |
+
# via ipython
|
262 |
+
pillow==10.1.0
|
263 |
+
# via streamlit
|
264 |
+
platformdirs==4.0.0
|
265 |
+
# via
|
266 |
+
# black
|
267 |
+
# jupyter-core
|
268 |
+
pluggy==1.3.0
|
269 |
+
# via pytest
|
270 |
+
prometheus-client==0.19.0
|
271 |
+
# via jupyter-server
|
272 |
+
prompt-toolkit==3.0.41
|
273 |
+
# via
|
274 |
+
# ipython
|
275 |
+
# jupyter-console
|
276 |
+
protobuf==4.25.1
|
277 |
+
# via streamlit
|
278 |
+
psutil==5.9.6
|
279 |
+
# via ipykernel
|
280 |
+
ptyprocess==0.7.0
|
281 |
+
# via
|
282 |
+
# pexpect
|
283 |
+
# terminado
|
284 |
+
pure-eval==0.2.2
|
285 |
+
# via stack-data
|
286 |
+
pyarrow==14.0.1
|
287 |
+
# via streamlit
|
288 |
+
pycparser==2.21
|
289 |
+
# via cffi
|
290 |
+
pydantic==2.5.2
|
291 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
292 |
+
pydantic-core==2.14.5
|
293 |
+
# via pydantic
|
294 |
+
pydeck==0.8.1b0
|
295 |
+
# via streamlit
|
296 |
+
pygments==2.17.2
|
297 |
+
# via
|
298 |
+
# ipython
|
299 |
+
# jupyter-console
|
300 |
+
# nbconvert
|
301 |
+
# qtconsole
|
302 |
+
# rich
|
303 |
+
pytest==7.4.3
|
304 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
305 |
+
python-dateutil==2.8.2
|
306 |
+
# via
|
307 |
+
# arrow
|
308 |
+
# jupyter-client
|
309 |
+
# pandas
|
310 |
+
# streamlit
|
311 |
+
python-dotenv==1.0.0
|
312 |
+
# via streamlit-oauth
|
313 |
+
python-json-logger==2.0.7
|
314 |
+
# via jupyter-events
|
315 |
+
pytz==2023.3.post1
|
316 |
+
# via pandas
|
317 |
+
pyyaml==6.0.1
|
318 |
+
# via jupyter-events
|
319 |
+
pyzmq==25.1.1
|
320 |
+
# via
|
321 |
+
# ipykernel
|
322 |
+
# jupyter-client
|
323 |
+
# jupyter-console
|
324 |
+
# jupyter-server
|
325 |
+
# qtconsole
|
326 |
+
qtconsole==5.5.1
|
327 |
+
# via jupyter
|
328 |
+
qtpy==2.4.1
|
329 |
+
# via qtconsole
|
330 |
+
referencing==0.31.1
|
331 |
+
# via
|
332 |
+
# jsonschema
|
333 |
+
# jsonschema-specifications
|
334 |
+
# jupyter-events
|
335 |
+
requests==2.31.0
|
336 |
+
# via
|
337 |
+
# jupyterlab-server
|
338 |
+
# streamlit
|
339 |
+
rfc3339-validator==0.1.4
|
340 |
+
# via
|
341 |
+
# jsonschema
|
342 |
+
# jupyter-events
|
343 |
+
rfc3986-validator==0.1.1
|
344 |
+
# via
|
345 |
+
# jsonschema
|
346 |
+
# jupyter-events
|
347 |
+
rich==13.7.0
|
348 |
+
# via streamlit
|
349 |
+
rpds-py==0.13.2
|
350 |
+
# via
|
351 |
+
# jsonschema
|
352 |
+
# referencing
|
353 |
+
ruff==0.1.6
|
354 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
355 |
+
send2trash==1.8.2
|
356 |
+
# via jupyter-server
|
357 |
+
six==1.16.0
|
358 |
+
# via
|
359 |
+
# asttokens
|
360 |
+
# bleach
|
361 |
+
# html5lib
|
362 |
+
# python-dateutil
|
363 |
+
# rfc3339-validator
|
364 |
+
smmap==5.0.1
|
365 |
+
# via gitdb
|
366 |
+
sniffio==1.3.0
|
367 |
+
# via
|
368 |
+
# anyio
|
369 |
+
# httpcore
|
370 |
+
# httpx
|
371 |
+
soupsieve==2.5
|
372 |
+
# via beautifulsoup4
|
373 |
+
stack-data==0.6.3
|
374 |
+
# via ipython
|
375 |
+
streamlit==1.29.0
|
376 |
+
# via
|
377 |
+
# nfl-playoff-challenge-steamlit (pyproject.toml)
|
378 |
+
# streamlit-oauth
|
379 |
+
streamlit-oauth==0.1.5
|
380 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
381 |
+
tenacity==8.2.3
|
382 |
+
# via streamlit
|
383 |
+
terminado==0.18.0
|
384 |
+
# via
|
385 |
+
# jupyter-server
|
386 |
+
# jupyter-server-terminals
|
387 |
+
tinycss2==1.2.1
|
388 |
+
# via nbconvert
|
389 |
+
toml==0.10.2
|
390 |
+
# via streamlit
|
391 |
+
toolz==0.12.0
|
392 |
+
# via altair
|
393 |
+
tornado==6.4
|
394 |
+
# via
|
395 |
+
# ipykernel
|
396 |
+
# jupyter-client
|
397 |
+
# jupyter-server
|
398 |
+
# jupyterlab
|
399 |
+
# notebook
|
400 |
+
# streamlit
|
401 |
+
# terminado
|
402 |
+
traitlets==5.14.0
|
403 |
+
# via
|
404 |
+
# comm
|
405 |
+
# ipykernel
|
406 |
+
# ipython
|
407 |
+
# ipywidgets
|
408 |
+
# jupyter-client
|
409 |
+
# jupyter-console
|
410 |
+
# jupyter-core
|
411 |
+
# jupyter-events
|
412 |
+
# jupyter-server
|
413 |
+
# jupyterlab
|
414 |
+
# matplotlib-inline
|
415 |
+
# nbclient
|
416 |
+
# nbconvert
|
417 |
+
# nbformat
|
418 |
+
# qtconsole
|
419 |
+
types-beautifulsoup4==4.12.0.7
|
420 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
421 |
+
types-html5lib==1.1.11.15
|
422 |
+
# via types-beautifulsoup4
|
423 |
+
types-python-dateutil==2.8.19.14
|
424 |
+
# via arrow
|
425 |
+
types-pytz==2023.3.1.1
|
426 |
+
# via pandas-stubs
|
427 |
+
types-requests==2.31.0.10
|
428 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
429 |
+
typing-extensions==4.8.0
|
430 |
+
# via
|
431 |
+
# mypy
|
432 |
+
# pydantic
|
433 |
+
# pydantic-core
|
434 |
+
# streamlit
|
435 |
+
tzdata==2023.3
|
436 |
+
# via pandas
|
437 |
+
tzlocal==5.2
|
438 |
+
# via streamlit
|
439 |
+
uri-template==1.3.0
|
440 |
+
# via jsonschema
|
441 |
+
urllib3==2.1.0
|
442 |
+
# via
|
443 |
+
# requests
|
444 |
+
# types-requests
|
445 |
+
validators==0.22.0
|
446 |
+
# via streamlit
|
447 |
+
watchdog==3.0.0
|
448 |
+
# via streamlit
|
449 |
+
wcwidth==0.2.12
|
450 |
+
# via prompt-toolkit
|
451 |
+
webcolors==1.13
|
452 |
+
# via jsonschema
|
453 |
+
webencodings==0.5.1
|
454 |
+
# via
|
455 |
+
# bleach
|
456 |
+
# html5lib
|
457 |
+
# tinycss2
|
458 |
+
websocket-client==1.6.4
|
459 |
+
# via jupyter-server
|
460 |
+
widgetsnbextension==4.0.9
|
461 |
+
# via ipywidgets
|
462 |
+
zipp==3.17.0
|
463 |
+
# via importlib-metadata
|
docker-compose.yml
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
services:
|
2 |
+
streamlit:
|
3 |
+
build:
|
4 |
+
dockerfile: ./Dockerfile
|
5 |
+
context: ./
|
6 |
+
target: service
|
7 |
+
ports:
|
8 |
+
- '8501:8501'
|
9 |
+
environment:
|
10 |
+
- USER_ID=1000
|
11 |
+
- GROUP_ID=1000
|
pyproject.toml
ADDED
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[build-system]
|
2 |
+
requires = ["setuptools"]
|
3 |
+
|
4 |
+
[project]
|
5 |
+
requires-python = ">=3.9"
|
6 |
+
version = "1"
|
7 |
+
name = "nfl-playoff-challenge-steamlit"
|
8 |
+
dependencies = [
|
9 |
+
"streamlit",
|
10 |
+
"beautifulsoup4",
|
11 |
+
"duckdb",
|
12 |
+
"html5lib",
|
13 |
+
"lxml",
|
14 |
+
"numpy",
|
15 |
+
"pandas",
|
16 |
+
"pydantic",
|
17 |
+
"streamlit-oauth",
|
18 |
+
]
|
19 |
+
|
20 |
+
[project.optional-dependencies]
|
21 |
+
dev = [
|
22 |
+
"black",
|
23 |
+
"jupyter",
|
24 |
+
"lxml-stubs",
|
25 |
+
"mypy",
|
26 |
+
"pytest",
|
27 |
+
"pandas-stubs",
|
28 |
+
"ruff",
|
29 |
+
"types-beautifulsoup4",
|
30 |
+
"types-requests",
|
31 |
+
]
|
32 |
+
|
33 |
+
[tool.black]
|
34 |
+
line-length = 120
|
35 |
+
target-version = ["py311"]
|
36 |
+
|
37 |
+
[tool.ruff]
|
38 |
+
line-length = 120
|
39 |
+
src = ["src"]
|
40 |
+
|
41 |
+
[tool.mypy]
|
42 |
+
python_version = "3.11"
|
43 |
+
|
44 |
+
[[tool.mypy.overrides]]
|
45 |
+
module = [
|
46 |
+
'streamlit_oauth'
|
47 |
+
]
|
48 |
+
ignore_missing_imports = true
|
regenerate_requirements.sh
ADDED
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
#!/bin/bash
|
3 |
+
|
4 |
+
set -ex
|
5 |
+
|
6 |
+
ADDITIONAL_ARGS=$@
|
7 |
+
|
8 |
+
pip-compile \
|
9 |
+
-o requirements.txt \
|
10 |
+
$ADDITIONAL_ARGS \
|
11 |
+
pyproject.toml
|
12 |
+
|
13 |
+
pip-compile \
|
14 |
+
--extra=dev \
|
15 |
+
-o dev-requirements.txt \
|
16 |
+
$ADDITIONAL_ARGS \
|
17 |
+
pyproject.toml
|
18 |
+
|
19 |
+
python update_streamlit_version.py
|
requirements.txt
ADDED
@@ -0,0 +1,167 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#
|
2 |
+
# This file is autogenerated by pip-compile with Python 3.11
|
3 |
+
# by the following command:
|
4 |
+
#
|
5 |
+
# pip-compile --output-file=requirements.txt pyproject.toml
|
6 |
+
#
|
7 |
+
altair==5.2.0
|
8 |
+
# via streamlit
|
9 |
+
annotated-types==0.6.0
|
10 |
+
# via pydantic
|
11 |
+
anyio==4.1.0
|
12 |
+
# via httpcore
|
13 |
+
attrs==23.1.0
|
14 |
+
# via
|
15 |
+
# jsonschema
|
16 |
+
# referencing
|
17 |
+
beautifulsoup4==4.12.2
|
18 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
19 |
+
blinker==1.7.0
|
20 |
+
# via streamlit
|
21 |
+
cachetools==5.3.2
|
22 |
+
# via streamlit
|
23 |
+
certifi==2023.11.17
|
24 |
+
# via
|
25 |
+
# httpcore
|
26 |
+
# httpx
|
27 |
+
# requests
|
28 |
+
charset-normalizer==3.3.2
|
29 |
+
# via requests
|
30 |
+
click==8.1.7
|
31 |
+
# via streamlit
|
32 |
+
duckdb==0.9.2
|
33 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
34 |
+
gitdb==4.0.11
|
35 |
+
# via gitpython
|
36 |
+
gitpython==3.1.40
|
37 |
+
# via streamlit
|
38 |
+
h11==0.14.0
|
39 |
+
# via httpcore
|
40 |
+
html5lib==1.1
|
41 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
42 |
+
httpcore==0.17.3
|
43 |
+
# via httpx
|
44 |
+
httpx==0.24.1
|
45 |
+
# via httpx-oauth
|
46 |
+
httpx-oauth==0.13.0
|
47 |
+
# via streamlit-oauth
|
48 |
+
idna==3.6
|
49 |
+
# via
|
50 |
+
# anyio
|
51 |
+
# httpx
|
52 |
+
# requests
|
53 |
+
importlib-metadata==6.9.0
|
54 |
+
# via streamlit
|
55 |
+
jinja2==3.1.2
|
56 |
+
# via
|
57 |
+
# altair
|
58 |
+
# pydeck
|
59 |
+
jsonschema==4.20.0
|
60 |
+
# via altair
|
61 |
+
jsonschema-specifications==2023.11.2
|
62 |
+
# via jsonschema
|
63 |
+
lxml==4.9.3
|
64 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
65 |
+
markdown-it-py==3.0.0
|
66 |
+
# via rich
|
67 |
+
markupsafe==2.1.3
|
68 |
+
# via jinja2
|
69 |
+
mdurl==0.1.2
|
70 |
+
# via markdown-it-py
|
71 |
+
numpy==1.26.2
|
72 |
+
# via
|
73 |
+
# nfl-playoff-challenge-steamlit (pyproject.toml)
|
74 |
+
# altair
|
75 |
+
# pandas
|
76 |
+
# pyarrow
|
77 |
+
# pydeck
|
78 |
+
# streamlit
|
79 |
+
packaging==23.2
|
80 |
+
# via
|
81 |
+
# altair
|
82 |
+
# streamlit
|
83 |
+
pandas==2.1.3
|
84 |
+
# via
|
85 |
+
# nfl-playoff-challenge-steamlit (pyproject.toml)
|
86 |
+
# altair
|
87 |
+
# streamlit
|
88 |
+
pillow==10.1.0
|
89 |
+
# via streamlit
|
90 |
+
protobuf==4.25.1
|
91 |
+
# via streamlit
|
92 |
+
pyarrow==14.0.1
|
93 |
+
# via streamlit
|
94 |
+
pydantic==2.5.2
|
95 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
96 |
+
pydantic-core==2.14.5
|
97 |
+
# via pydantic
|
98 |
+
pydeck==0.8.1b0
|
99 |
+
# via streamlit
|
100 |
+
pygments==2.17.2
|
101 |
+
# via rich
|
102 |
+
python-dateutil==2.8.2
|
103 |
+
# via
|
104 |
+
# pandas
|
105 |
+
# streamlit
|
106 |
+
python-dotenv==1.0.0
|
107 |
+
# via streamlit-oauth
|
108 |
+
pytz==2023.3.post1
|
109 |
+
# via pandas
|
110 |
+
referencing==0.31.1
|
111 |
+
# via
|
112 |
+
# jsonschema
|
113 |
+
# jsonschema-specifications
|
114 |
+
requests==2.31.0
|
115 |
+
# via streamlit
|
116 |
+
rich==13.7.0
|
117 |
+
# via streamlit
|
118 |
+
rpds-py==0.13.2
|
119 |
+
# via
|
120 |
+
# jsonschema
|
121 |
+
# referencing
|
122 |
+
six==1.16.0
|
123 |
+
# via
|
124 |
+
# html5lib
|
125 |
+
# python-dateutil
|
126 |
+
smmap==5.0.1
|
127 |
+
# via gitdb
|
128 |
+
sniffio==1.3.0
|
129 |
+
# via
|
130 |
+
# anyio
|
131 |
+
# httpcore
|
132 |
+
# httpx
|
133 |
+
soupsieve==2.5
|
134 |
+
# via beautifulsoup4
|
135 |
+
streamlit==1.29.0
|
136 |
+
# via
|
137 |
+
# nfl-playoff-challenge-steamlit (pyproject.toml)
|
138 |
+
# streamlit-oauth
|
139 |
+
streamlit-oauth==0.1.5
|
140 |
+
# via nfl-playoff-challenge-steamlit (pyproject.toml)
|
141 |
+
tenacity==8.2.3
|
142 |
+
# via streamlit
|
143 |
+
toml==0.10.2
|
144 |
+
# via streamlit
|
145 |
+
toolz==0.12.0
|
146 |
+
# via altair
|
147 |
+
tornado==6.4
|
148 |
+
# via streamlit
|
149 |
+
typing-extensions==4.8.0
|
150 |
+
# via
|
151 |
+
# pydantic
|
152 |
+
# pydantic-core
|
153 |
+
# streamlit
|
154 |
+
tzdata==2023.3
|
155 |
+
# via pandas
|
156 |
+
tzlocal==5.2
|
157 |
+
# via streamlit
|
158 |
+
urllib3==2.1.0
|
159 |
+
# via requests
|
160 |
+
validators==0.22.0
|
161 |
+
# via streamlit
|
162 |
+
watchdog==3.0.0
|
163 |
+
# via streamlit
|
164 |
+
webencodings==0.5.1
|
165 |
+
# via html5lib
|
166 |
+
zipp==3.17.0
|
167 |
+
# via importlib-metadata
|
src/.streamlit/config.toml
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
[theme]
|
2 |
+
base="dark"
|
src/Home.py
ADDED
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
|
3 |
+
from config import DEFAULT_ICON, LEAGUE_NAME
|
4 |
+
from login_component import get_authorization_button
|
5 |
+
from page_selector import remove_seasonal_pages
|
6 |
+
|
7 |
+
|
8 |
+
def get_app():
    """Render the Home (landing) page.

    Sets the page config, shows the OAuth login control (when enabled via
    environment), prints a welcome blurb, and prunes pages that are out of
    season from the sidebar.
    """
    keeper_title = f"{LEAGUE_NAME}"
    st.set_page_config(page_title=keeper_title, page_icon=DEFAULT_ICON)
    # No-op unless ENABLE_LOGIN is set in the environment.
    get_authorization_button()
    st.markdown(
        f"""
    Welcome {LEAGUE_NAME}!

    Navigate between pages using the left sidebar.

    If the sidebar is not visible, click the **>** in the upper left corner to open.

    """
    )

    # Hide pages excluded by the current (season/offseason) mode.
    remove_seasonal_pages()


if __name__ == "__main__":
    get_app()
|
src/config.py
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
# Display name of the fantasy league; shown in page titles and copy.
LEAGUE_NAME = "LOFG"
# Default emoji icon for all Streamlit pages.
DEFAULT_ICON = "🏉"
# Number of teams in the league; used to convert overall ECR picks to rounds.
LEAGUE_NUMBER_TEAMS = 12
|
src/domain/__init__.py
ADDED
File without changes
|
src/domain/conferences.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from dataclasses import dataclass
|
2 |
+
from typing import List
|
3 |
+
|
4 |
+
|
5 |
+
@dataclass
class NFLConference:
    """An NFL conference (NFC or AFC).

    Each instance self-registers into the module-level ALL_CONFERENCES list
    on construction.
    """

    name: str  # full name, e.g. "National Football Conference"
    short_name: str  # abbreviation, e.g. "NFC"

    def __post_init__(self):
        # ALL_CONFERENCES is defined below the class; the name is resolved at
        # call time, so the ordering is safe.
        ALL_CONFERENCES.append(self)
|
12 |
+
|
13 |
+
|
14 |
+
# Registry of every NFLConference created (populated via __post_init__).
ALL_CONFERENCES: List[NFLConference] = []

NFC = NFLConference(name="National Football Conference", short_name="NFC")
AFC = NFLConference(name="American Football Conference", short_name="AFC")
|
src/domain/divisions.py
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from dataclasses import dataclass
|
2 |
+
from typing import List
|
3 |
+
from domain import conferences
|
4 |
+
|
5 |
+
|
6 |
+
@dataclass
class NFLDivision:
    """One of the eight NFL divisions.

    Each instance self-registers into the module-level ALL_DIVISIONS list
    on construction.
    """

    name: str  # e.g. "NFC West"
    conference: conferences.NFLConference

    def __post_init__(self):
        # ALL_DIVISIONS is defined below the class; resolved at call time.
        ALL_DIVISIONS.append(self)
|
13 |
+
|
14 |
+
|
15 |
+
# Registry of every NFLDivision created (populated via __post_init__).
ALL_DIVISIONS: List[NFLDivision] = []

NFCWest = NFLDivision(name="NFC West", conference=conferences.NFC)
NFCNorth = NFLDivision(name="NFC North", conference=conferences.NFC)
NFCSouth = NFLDivision(name="NFC South", conference=conferences.NFC)
NFCEast = NFLDivision(name="NFC East", conference=conferences.NFC)
AFCWest = NFLDivision(name="AFC West", conference=conferences.AFC)
AFCNorth = NFLDivision(name="AFC North", conference=conferences.AFC)
AFCSouth = NFLDivision(name="AFC South", conference=conferences.AFC)
AFCEast = NFLDivision(name="AFC East", conference=conferences.AFC)
|
src/domain/teams.py
ADDED
@@ -0,0 +1,286 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from dataclasses import dataclass
|
2 |
+
from urllib.parse import urljoin
|
3 |
+
from typing import List
|
4 |
+
from domain.conferences import NFLConference
|
5 |
+
from domain import divisions
|
6 |
+
|
7 |
+
|
8 |
+
@dataclass
class NFLTeam:
    """An NFL team, with derived lookup fields computed in __post_init__.

    Each instance self-registers into the module-level ALL_TEAMS list on
    construction, so the 32 definitions below build the registry.
    """

    team_name: str  # nickname, e.g. "Cardinals"
    team_short_name: str  # common abbreviation, e.g. "ARI"
    city: str  # e.g. "Arizona"
    division: divisions.NFLDivision
    footballguys_short_name: str = ""  # set only when footballguys differs from team_short_name
    url: str = ""  # official team site, e.g. "https://www.azcardinals.com/"
    injury_report_suffix: str = "/team/injury-report/"

    def __post_init__(self):
        # Default the footballguys abbreviation to the standard abbreviation.
        self.footballguys_short_name = (
            self.team_short_name if self.footballguys_short_name == "" else self.footballguys_short_name
        )
        # Derived attributes (assigned here, so not dataclass fields).
        self.conference: NFLConference = self.division.conference
        # NOTE: because the suffix starts with "/", urljoin replaces any path
        # on `url` rather than appending to it.
        self.injury_report_url = urljoin(self.url, self.injury_report_suffix)
        self.team_full_name = " ".join([self.city, self.team_name])

        ALL_TEAMS.append(self)
|
27 |
+
|
28 |
+
|
29 |
+
# Registry of all 32 teams, populated as each NFLTeam below is constructed.
ALL_TEAMS: List[NFLTeam] = []
|
30 |
+
|
31 |
+
|
32 |
+
arizona_cardinals = NFLTeam(
|
33 |
+
team_name="Cardinals",
|
34 |
+
team_short_name="ARI",
|
35 |
+
city="Arizona",
|
36 |
+
division=divisions.NFCWest,
|
37 |
+
url="https://www.azcardinals.com/",
|
38 |
+
)
|
39 |
+
|
40 |
+
atlanta_falcons = NFLTeam(
|
41 |
+
team_name="Falcons",
|
42 |
+
team_short_name="ATL",
|
43 |
+
city="Atlanta",
|
44 |
+
division=divisions.NFCSouth,
|
45 |
+
url="https://www.atlantafalcons.com/",
|
46 |
+
)
|
47 |
+
|
48 |
+
baltimore_ravens = NFLTeam(
|
49 |
+
team_name="Ravens",
|
50 |
+
team_short_name="BAL",
|
51 |
+
city="Baltimore",
|
52 |
+
division=divisions.AFCNorth,
|
53 |
+
url="https://www.baltimoreravens.com/",
|
54 |
+
)
|
55 |
+
|
56 |
+
buffalo_bills = NFLTeam(
|
57 |
+
team_name="Bills",
|
58 |
+
team_short_name="BUF",
|
59 |
+
city="Buffalo",
|
60 |
+
division=divisions.AFCEast,
|
61 |
+
url="https://www.buffalobills.com/",
|
62 |
+
)
|
63 |
+
|
64 |
+
carolina_panthers = NFLTeam(
|
65 |
+
team_name="Panthers",
|
66 |
+
team_short_name="CAR",
|
67 |
+
city="Carolina",
|
68 |
+
division=divisions.NFCSouth,
|
69 |
+
url="https://www.panthers.com/",
|
70 |
+
)
|
71 |
+
|
72 |
+
chicago_bears = NFLTeam(
|
73 |
+
team_name="Bears",
|
74 |
+
team_short_name="CHI",
|
75 |
+
city="Chicago",
|
76 |
+
division=divisions.NFCNorth,
|
77 |
+
url="https://www.chicagobears.com/",
|
78 |
+
)
|
79 |
+
|
80 |
+
cincinnati_bengals = NFLTeam(
|
81 |
+
team_name="Bengals",
|
82 |
+
team_short_name="CIN",
|
83 |
+
city="Cincinnati",
|
84 |
+
division=divisions.AFCNorth,
|
85 |
+
url="https://www.bengals.com/",
|
86 |
+
)
|
87 |
+
|
88 |
+
cleveland_browns = NFLTeam(
|
89 |
+
team_name="Browns",
|
90 |
+
team_short_name="CLE",
|
91 |
+
city="Cleveland",
|
92 |
+
division=divisions.AFCNorth,
|
93 |
+
url="https://www.clevelandbrowns.com/",
|
94 |
+
)
|
95 |
+
|
96 |
+
dallas_cowboys = NFLTeam(
|
97 |
+
team_name="Cowboys",
|
98 |
+
team_short_name="DAL",
|
99 |
+
city="Dallas",
|
100 |
+
division=divisions.NFCEast,
|
101 |
+
url="https://www.dallascowboys.com/",
|
102 |
+
)
|
103 |
+
|
104 |
+
denver_broncos = NFLTeam(
|
105 |
+
team_name="Broncos",
|
106 |
+
team_short_name="DEN",
|
107 |
+
city="Denver",
|
108 |
+
division=divisions.AFCWest,
|
109 |
+
url="https://www.denverbroncos.com/",
|
110 |
+
)
|
111 |
+
|
112 |
+
detroit_lions = NFLTeam(
|
113 |
+
team_name="Lions",
|
114 |
+
team_short_name="DET",
|
115 |
+
city="Detroit",
|
116 |
+
division=divisions.NFCNorth,
|
117 |
+
url="https://www.detroitlions.com/",
|
118 |
+
)
|
119 |
+
|
120 |
+
green_bay_packers = NFLTeam(
|
121 |
+
team_name="Packers",
|
122 |
+
team_short_name="GB",
|
123 |
+
city="Green Bay",
|
124 |
+
division=divisions.NFCNorth,
|
125 |
+
url="https://www.packers.com/",
|
126 |
+
)
|
127 |
+
|
128 |
+
houston_texans = NFLTeam(
|
129 |
+
team_name="Texans",
|
130 |
+
team_short_name="HOU",
|
131 |
+
city="Houston",
|
132 |
+
division=divisions.AFCSouth,
|
133 |
+
url="https://www.houstontexans.com/",
|
134 |
+
)
|
135 |
+
|
136 |
+
indianapolis_colts = NFLTeam(
|
137 |
+
city="Indianapolis",
|
138 |
+
team_name="Colts",
|
139 |
+
team_short_name="IND",
|
140 |
+
division=divisions.AFCSouth,
|
141 |
+
url="https://www.colts.com/",
|
142 |
+
)
|
143 |
+
|
144 |
+
jacksonville_jaguars = NFLTeam(
|
145 |
+
city="Jacksonville",
|
146 |
+
team_name="Jaguars",
|
147 |
+
team_short_name="JAX",
|
148 |
+
division=divisions.AFCSouth,
|
149 |
+
url="https://www.jaguars.com/",
|
150 |
+
)
|
151 |
+
|
152 |
+
kansas_city_chiefs = NFLTeam(
|
153 |
+
city="Kansas City",
|
154 |
+
team_name="Chiefs",
|
155 |
+
team_short_name="KC",
|
156 |
+
division=divisions.AFCWest,
|
157 |
+
url="https://www.chiefs.com/",
|
158 |
+
)
|
159 |
+
|
160 |
+
las_vegas_raiders = NFLTeam(
|
161 |
+
city="Las Vegas",
|
162 |
+
team_name="Raiders",
|
163 |
+
team_short_name="LV",
|
164 |
+
division=divisions.AFCWest,
|
165 |
+
url="https://www.raiders.com/",
|
166 |
+
)
|
167 |
+
|
168 |
+
los_angeles_chargers = NFLTeam(
|
169 |
+
city="Los Angeles",
|
170 |
+
team_name="Chargers",
|
171 |
+
team_short_name="LAC",
|
172 |
+
division=divisions.AFCWest,
|
173 |
+
url="https://www.chargers.com/",
|
174 |
+
)
|
175 |
+
|
176 |
+
los_angeles_rams = NFLTeam(
|
177 |
+
city="Los Angeles",
|
178 |
+
team_name="Rams",
|
179 |
+
team_short_name="LAR",
|
180 |
+
division=divisions.NFCWest,
|
181 |
+
url="https://www.therams.com/",
|
182 |
+
)
|
183 |
+
|
184 |
+
miami_dolphins = NFLTeam(
|
185 |
+
city="Miami",
|
186 |
+
team_name="Dolphins",
|
187 |
+
team_short_name="MIA",
|
188 |
+
division=divisions.AFCEast,
|
189 |
+
url="https://www.miamidolphins.com/",
|
190 |
+
)
|
191 |
+
|
192 |
+
minnesota_vikings = NFLTeam(
|
193 |
+
city="Minnesota",
|
194 |
+
team_name="Vikings",
|
195 |
+
team_short_name="MIN",
|
196 |
+
division=divisions.NFCNorth,
|
197 |
+
url="https://www.vikings.com/",
|
198 |
+
)
|
199 |
+
|
200 |
+
new_england_patriots = NFLTeam(
|
201 |
+
city="New England",
|
202 |
+
team_name="Patriots",
|
203 |
+
team_short_name="NE",
|
204 |
+
division=divisions.AFCEast,
|
205 |
+
url="https://www.patriots.com/",
|
206 |
+
)
|
207 |
+
|
208 |
+
new_orleans_saints = NFLTeam(
|
209 |
+
city="New Orleans",
|
210 |
+
team_name="Saints",
|
211 |
+
team_short_name="NO",
|
212 |
+
division=divisions.NFCSouth,
|
213 |
+
url="https://www.neworleanssaints.com/",
|
214 |
+
)
|
215 |
+
|
216 |
+
new_york_giants = NFLTeam(
|
217 |
+
city="New York",
|
218 |
+
team_name="Giants",
|
219 |
+
team_short_name="NYG",
|
220 |
+
division=divisions.NFCEast,
|
221 |
+
url="https://www.giants.com/",
|
222 |
+
)
|
223 |
+
|
224 |
+
new_york_jets = NFLTeam(
|
225 |
+
city="New York",
|
226 |
+
team_name="Jets",
|
227 |
+
team_short_name="NYJ",
|
228 |
+
division=divisions.AFCEast,
|
229 |
+
url="https://www.newyorkjets.com/",
|
230 |
+
)
|
231 |
+
|
232 |
+
philadelphia_eagles = NFLTeam(
|
233 |
+
city="Philadelphia",
|
234 |
+
team_name="Eagles",
|
235 |
+
team_short_name="PHI",
|
236 |
+
division=divisions.NFCEast,
|
237 |
+
url="https://www.philadelphiaeagles.com/",
|
238 |
+
)
|
239 |
+
|
240 |
+
pittsburgh_steelers = NFLTeam(
|
241 |
+
city="Pittsburgh",
|
242 |
+
team_name="Steelers",
|
243 |
+
team_short_name="PIT",
|
244 |
+
division=divisions.AFCNorth,
|
245 |
+
url="https://www.steelers.com/",
|
246 |
+
)
|
247 |
+
|
248 |
+
san_francisco_49ers = NFLTeam(
|
249 |
+
city="San Francisco",
|
250 |
+
team_name="49ers",
|
251 |
+
team_short_name="SF",
|
252 |
+
division=divisions.NFCWest,
|
253 |
+
url="https://www.49ers.com/",
|
254 |
+
)
|
255 |
+
|
256 |
+
seattle_seahawks = NFLTeam(
|
257 |
+
city="Seattle",
|
258 |
+
team_name="Seahawks",
|
259 |
+
team_short_name="SEA",
|
260 |
+
division=divisions.NFCWest,
|
261 |
+
url="https://www.seahawks.com/",
|
262 |
+
)
|
263 |
+
|
264 |
+
tampa_bay_buccaneers = NFLTeam(
|
265 |
+
city="Tampa Bay",
|
266 |
+
team_name="Buccaneers",
|
267 |
+
team_short_name="TB",
|
268 |
+
division=divisions.NFCSouth,
|
269 |
+
url="https://www.buccaneers.com/",
|
270 |
+
)
|
271 |
+
|
272 |
+
tennessee_titans = NFLTeam(
|
273 |
+
city="Tennessee",
|
274 |
+
team_name="Titans",
|
275 |
+
team_short_name="TEN",
|
276 |
+
division=divisions.AFCSouth,
|
277 |
+
url="https://www.tennesseetitans.com/",
|
278 |
+
)
|
279 |
+
|
280 |
+
# NOTE: the variable name predates the 2022 rebrand to "Commanders".
washington_football_team = NFLTeam(
    city="Washington",
    team_name="Commanders",
    team_short_name="WAS",
    division=divisions.NFCEast,
    url="https://www.commanders.com/",
)
|
src/login_component.py
ADDED
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
|
3 |
+
from streamlit_oauth import OAuth2Component
|
4 |
+
import os
|
5 |
+
|
6 |
+
# # Load environment variables from .env file
|
7 |
+
# from dotenv import load_dotenv
|
8 |
+
# load_dotenv()
|
9 |
+
|
10 |
+
# OAuth configuration, read from the environment at import time.
AUTHORIZE_URL = os.environ.get("AUTHORIZE_URL")
TOKEN_URL = os.environ.get("TOKEN_URL")
REFRESH_TOKEN_URL = os.environ.get("REFRESH_TOKEN_URL")
REVOKE_TOKEN_URL = os.environ.get("REVOKE_TOKEN_URL")
CLIENT_ID = os.environ.get("CLIENT_ID")
CLIENT_SECRET = os.environ.get("CLIENT_SECRET")
REDIRECT_URI = os.environ.get("REDIRECT_URI")
SCOPE = os.environ.get("SCOPE")

# FIX: environment variables are strings, so the previous
# `os.environ.get("ENABLE_LOGIN", False)` treated ANY non-empty value —
# including "false" and "0" — as truthy. Parse common boolean spellings
# explicitly; unset or unrecognized values disable login.
ENABLE_LOGIN = os.environ.get("ENABLE_LOGIN", "").strip().lower() in {"1", "true", "yes", "on"}

# Create OAuth2Component instance
oauth2 = OAuth2Component(CLIENT_ID, CLIENT_SECRET, AUTHORIZE_URL, TOKEN_URL, REFRESH_TOKEN_URL, REVOKE_TOKEN_URL)
|
24 |
+
|
25 |
+
|
26 |
+
def is_token_in_session() -> bool:
    """Return True if an OAuth token is stored in the Streamlit session state."""
    return "token" in st.session_state
|
28 |
+
|
29 |
+
|
30 |
+
def get_authorization_button():
    """Render the login/logout control when login is enabled.

    When no token is in the session, shows an OAuth2 "Login" button; on a
    successful authorization the token is stored in session state and the app
    reruns. When a token is already present, shows a "Logout" button that
    clears the token and reruns. Does nothing when ENABLE_LOGIN is false.

    FIX: removed a bare expression statement (`st.session_state["token"]`)
    that evaluated the token and discarded it, and dropped the dead
    commented-out refresh-token code.
    """
    if not ENABLE_LOGIN:
        return
    if not is_token_in_session():
        # Not logged in: show the authorize button.
        result = oauth2.authorize_button("Login", REDIRECT_URI, SCOPE)
        if result and "token" in result:
            # Authorization successful: persist token and refresh the page.
            st.session_state.token = result.get("token")
            st.rerun()
    else:
        # Logged in: allow logout.
        if st.button("Logout"):
            del st.session_state.token
            st.rerun()
|
src/maximum_roster_strategy/__init__.py
ADDED
File without changes
|
src/maximum_roster_strategy/data_loader.py
ADDED
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import pandas as pd
|
3 |
+
|
4 |
+
|
5 |
+
MRS_SHEET_ID = os.environ.get("MRS_SHEET_ID")
|
6 |
+
|
7 |
+
|
8 |
+
def get_google_sheet_data() -> pd.DataFrame:
    """Return the main (gid=0) worksheet of the MRS Google Sheet."""
    return get_sheet_data(0)


def get_sheet_data(sheet_id: int = 0) -> pd.DataFrame:
    """Download one worksheet of the MRS Google Sheet as a DataFrame.

    Args:
        sheet_id: the worksheet's gid (its URL fragment id, not its position).
    """
    # CSV export endpoint; the sheet must be link-readable for this to work.
    sheet_url = f"https://docs.google.com/spreadsheet/ccc?key={MRS_SHEET_ID}&output=csv&gid={sheet_id}"
    df = pd.read_csv(sheet_url)
    return df


def get_timeslot_labels() -> pd.DataFrame:
    """Return the worksheet holding time-slot labels (fixed gid)."""
    return get_sheet_data(1875906423)
|
src/page_selector.py
ADDED
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from streamlit.source_util import _on_pages_changed, get_pages
|
2 |
+
|
3 |
+
# Adapted from https://discuss.streamlit.io/t/how-to-hide-all-pages-before-login/32508
|
4 |
+
# Note this code is intended to remove pages at app load time, not based on login
|
5 |
+
|
6 |
+
# App modes: each mode hides a different set of pages from the sidebar.
SEASON_MODE = "season"
OFFSEASON_MODE = "offseason"

# Toggle manually when the league flips between season and offseason.
CURRENT_MODE = SEASON_MODE

# Streamlit entry-point file; used as the key for get_pages().
MAIN_PAGE_FILE = "Home.py"

# Page names (file name minus numeric prefix and ".py") to hide per mode.
MODE_PAGE_EXCLUSION_MAP = {
    SEASON_MODE: [
        "Keepers",
        "ECR",
        "League_Simulation",
        "Keeper_Rules",
        "Maximum_Roster_Strategy",
    ],
    OFFSEASON_MODE: [
        "Practice_Reports",
        "League_Simulation",
        "Maximum_Roster_Strategy",
    ],
}
|
27 |
+
|
28 |
+
|
29 |
+
def remove_seasonal_pages():
    """Drop the current mode's excluded pages from Streamlit's page registry.

    Mutates the registry returned by get_pages() in place, then notifies
    Streamlit (via its private _on_pages_changed signal) so the sidebar
    refreshes.
    """
    registered_pages = get_pages(MAIN_PAGE_FILE)
    excluded_names = MODE_PAGE_EXCLUSION_MAP[CURRENT_MODE]

    # Collect keys first so we never mutate the dict while iterating it.
    keys_to_drop = [key for key, page in registered_pages.items() if page["page_name"] in excluded_names]
    for key in keys_to_drop:
        del registered_pages[key]

    _on_pages_changed.send()
|
src/pages/10_Player_News.py
ADDED
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import datetime
|
2 |
+
import streamlit as st
|
3 |
+
|
4 |
+
from config import DEFAULT_ICON
|
5 |
+
from shared_page import common_page_config
|
6 |
+
|
7 |
+
from queries.nbcsports.player_news import get_player_news_window_hours
|
8 |
+
|
9 |
+
|
10 |
+
@st.cache_data(ttl=60 * 60 * 24)
def load_data():
    """Fetch the last 24 hours of player news plus filter options. Cached daily.

    Returns:
        tuple: (data, teams_list, position_list, data_load_time_str) — the news
        DataFrame, sorted non-empty team abbreviations, the unique positions,
        and the UTC load timestamp formatted for display.
    """
    data = get_player_news_window_hours(24)
    # filter(None, ...) drops empty/NaN-as-falsy team values before sorting.
    teams_list = sorted(filter(None, data.Team.unique()))
    position_list = data.Position.unique()
    # FIX: datetime.utcnow() is deprecated (and naive); use an aware UTC "now".
    # The rendered string is unchanged.
    data_load_time_str = datetime.datetime.now(datetime.timezone.utc).strftime("%m/%d/%Y %I:%M %p")
    return data, teams_list, position_list, data_load_time_str
|
17 |
+
|
18 |
+
|
19 |
+
def get_page():
    """Render the Player News page: last-24h news with a team filter."""
    page_title = "Player News - Last 24 Hours"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)
    if st.button("Refresh Data"):
        # NOTE: clears ALL st.cache_data caches app-wide, not just this page's.
        st.cache_data.clear()
    # NOTE(review): position_list is unpacked but never used below — a
    # position filter appears to be missing.
    data, teams_list, position_list, data_load_time_str = load_data()
    st.write(f"Data loaded as of: {data_load_time_str} UTC")

    # Empty selection means "no filter": fall back to every team.
    teams_selected = st.multiselect("Team:", teams_list, placeholder="Select a team to filter") or teams_list

    with st.container():
        filtered_data = data[(data.Team.isin(teams_selected))]
        st.dataframe(
            filtered_data,
            hide_index=True,
            # Height sized so all rows show without an inner scrollbar.
            height=35 * (len(filtered_data) + 1) + 12,
            use_container_width=True,
            column_order=[
                "Date/Time",
                "Name",
                "Headline",
                "Team",
                "Position",
            ],
            column_config={
                "Date/Time": st.column_config.DatetimeColumn(format="MM-DD HH:mm"),
                "Team": st.column_config.TextColumn(width="small"),
            },
        )


if __name__ == "__main__":
    get_page()
|
src/pages/11_Next_Gen_Stats.py
ADDED
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
|
3 |
+
from config import DEFAULT_ICON
|
4 |
+
from shared_page import common_page_config
|
5 |
+
|
6 |
+
from streamlit_filter import get_multiselect_for_df_column
|
7 |
+
from queries.nflverse.github_data import get_nextgen_stats, get_current_tables, SEASON
|
8 |
+
|
9 |
+
|
10 |
+
# Identifier/metadata columns dropped from the displayed Next Gen Stats table.
hide_columns = [
    "season",
    "season_type",
    "player_gsis_id",
    "player_first_name",
    "player_last_name",
    "player_jersey_number",
    "player_short_name",
]
|
19 |
+
|
20 |
+
|
21 |
+
def get_page():
    """Render the Next Gen Stats page for the configured SEASON."""
    page_title = f"Next Gen Stats - {SEASON}"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)

    stat_category = st.selectbox("Stat Category", ["Passing", "Rushing", "Receiving"])
    ngs_table_name = f"nextgen_stats_ngs_{stat_category.lower()}"
    current_tables_list = get_current_tables()

    # Bail out with a pointer to the loader page if the table isn't loaded yet.
    if ngs_table_name not in current_tables_list:
        st.write("Data not loaded.")
        st.write("Check loaded data [here](./Load_Data)")
        return
    data = get_nextgen_stats(SEASON, stat_category)

    # week == 0 rows hold season aggregates; week > 0 rows are weekly stats.
    season_or_week = st.selectbox("Season or Weekly Stats", ["Season", "Week"])
    if season_or_week == "Season":
        data = data[data["week"] == 0]
        # NOTE(review): inplace drop on a filtered slice can trigger pandas'
        # SettingWithCopyWarning — confirm get_nextgen_stats returns a copy.
        data.drop(columns=["week"], inplace=True)
    else:
        data = data[data["week"] > 0]
        week_selection = st.slider(
            "Filter Week Range:",
            min_value=data["week"].min(),
            max_value=data["week"].max(),
            value=(data["week"].min(), data["week"].max()),
            step=1,
        )
        data = data[data["week"].between(*week_selection)]

    data.drop(columns=hide_columns, inplace=True)
    positions_selected = get_multiselect_for_df_column(data, "player_position")
    teams_selected = get_multiselect_for_df_column(data, "team_abbr")

    data = data[(data["player_position"].isin(positions_selected) & data["team_abbr"].isin(teams_selected))]

    with st.container():
        filtered_data = data
        st.dataframe(
            filtered_data,
            hide_index=True,
            # height=35 * (len(filtered_data) + 1) + 12,
            use_container_width=False,
            column_config={},
        )


if __name__ == "__main__":
    get_page()
|
src/pages/1_Keepers.py
ADDED
@@ -0,0 +1,178 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import numpy as np
|
3 |
+
import pandas as pd
|
4 |
+
import streamlit as st
|
5 |
+
|
6 |
+
from config import DEFAULT_ICON, LEAGUE_NAME, LEAGUE_NUMBER_TEAMS
|
7 |
+
from shared_page import common_page_config
|
8 |
+
from streamlit_filter import filter_dataframe
|
9 |
+
|
10 |
+
|
11 |
+
# Mock data paths, relative to this file (resolved with os.path in load_data).
KEEPER_DATA_URL = "../../tests/mocks/2023_keepers.csv"
HEADSHOT_DATA_URL = "../../tests/mocks/2023_player_headshots.csv"
|
13 |
+
|
14 |
+
|
15 |
+
def load_player_ids() -> pd.DataFrame:
    """Load the DynastyProcess player-id crosswalk and add a merge_id column."""
    id_table = pd.read_csv(r"https://raw.githubusercontent.com/dynastyprocess/data/master/files/db_playerids.csv")
    # Prefer the Yahoo id; fall back to stats_id when Yahoo's is missing.
    id_table["merge_id"] = id_table["yahoo_id"].combine_first(id_table["stats_id"])
    return id_table
|
19 |
+
|
20 |
+
|
21 |
+
def load_adp() -> pd.DataFrame:
    """Load the latest FantasyPros ECR data, restricted to PPR superflex rankings.

    Returns only the yahoo_id, ecr, and sd columns.
    """
    ecr_table = pd.read_csv(r"https://raw.githubusercontent.com/dynastyprocess/data/master/files/db_fpecr_latest.csv")
    is_superflex_page = ecr_table.fp_page == "/nfl/rankings/ppr-superflex-cheatsheets.php"
    return ecr_table.loc[is_superflex_page, ["yahoo_id", "ecr", "sd"]]
|
32 |
+
|
33 |
+
|
34 |
+
def convert_ecr_to_round_val(ecr_float: float, round_offset: float = 1.0, pick_offset: float = -1.0) -> float:
    """Convert an overall ECR pick number to a fractional round value.

    With the default offsets, pick 1 of round 1 maps to 1.0.
    """
    picks_into_draft = ecr_float + pick_offset
    return round_offset + picks_into_draft / LEAGUE_NUMBER_TEAMS
|
37 |
+
|
38 |
+
|
39 |
+
def add_opinionated_keeper_value(df: pd.DataFrame):
    """Mutate df in place: rescale ecr/sd to round units and add value_keeper.

    Expects df to carry name, ecr, sd, and keeper_cost columns.
    """
    # Manual Hack for overranking of backup QBs: clear their ECR so they fall
    # through to the unranked (-99) value below.
    df.loc[
        df["name"].isin(
            [
                "Teddy Bridgewater",
                "Davis Mills",
                "Andy Dalton",
                "Tyler Huntley",
                "Mike White",
                "Gardner Minshew",
                "Colt McCoy",
                "Sam Darnold",
                "Carson Wentz",
                "Trey Lance",
                "Taylor Heinicke",
            ]
        ),
        ["ecr"],
    ] = np.nan

    # Convert overall ECR picks to fractional round numbers.
    df["ecr"] = df["ecr"].apply(convert_ecr_to_round_val)
    # Convert sd without offset to show as pure pick diff
    df["sd"] = df["sd"].apply(lambda x: convert_ecr_to_round_val(x, 0, 0))
    # assumes midround keeper
    # fill -99 for players that are not ranked in ecr
    df["value_keeper"] = (df["keeper_cost"] + 0.5 - df["ecr"]).fillna(-99)
|
66 |
+
|
67 |
+
|
68 |
+
@st.cache_data(ttl=60 * 60 * 24)
def load_data():
    """Load keeper data and enrich it with player ids and ADP/ECR. Cached daily.

    Returns:
        tuple: (data, teams_list) — the enriched keeper DataFrame and the
        sorted list of fantasy team names.
    """
    data = pd.read_csv(os.path.join(os.path.dirname(__file__), KEEPER_DATA_URL), index_col=0)
    # Hack to get position, replace with better position from yahoo api in future
    # SECURITY NOTE(review): eval() on CSV cell contents executes arbitrary
    # code if the file is untrusted; ast.literal_eval would be safer.
    data["position"] = data["eligible_positions"].apply(lambda x: eval(x)[0])
    data.columns = data.columns.str.lower()
    teams_list = sorted(list(data["team_name"].unique()))

    # Merge player ids
    df_player_ids = load_player_ids()
    data = data.merge(df_player_ids, how="left", left_on="player_id", right_on="merge_id", suffixes=("", "_ids"))

    # Merge ADP
    df_adp = load_adp()
    data = data.merge(df_adp, how="left", left_on="player_id", right_on="yahoo_id", suffixes=("", "_adp"))
    add_opinionated_keeper_value(data)
    return data, teams_list
|
85 |
+
|
86 |
+
|
87 |
+
def filtered_keeper_dataframe(data: pd.DataFrame, teams_list: list[str]):
    """Render the interactive keeper table with team/eligibility filters and a basic/advanced column toggle."""
    # An empty multiselect means "no filter": fall back to matching every team.
    teams_selected = st.multiselect("Team:", teams_list, placeholder="Select a user team to filter")
    teams_filter = data["team_name"].isin(teams_selected) if teams_selected else data["team_name"].isin(teams_list)

    eligible_options = [True, False]
    is_eligible_selected = st.multiselect(
        "Keeper Eligible:", eligible_options, placeholder="Select True to filter eligible only"
    )
    # Same "empty means all" convention as the team filter above.
    eligible_filter = (
        data["eligible"].isin(is_eligible_selected) if is_eligible_selected else data["eligible"].isin(eligible_options)
    )
    is_advanced = st.checkbox("Show Advanced View")

    # Column groups; the advanced view appends each *_advanced list after its base list.
    id_cols = [
        "team_name",
        "headshot_url",
        "name",
    ]

    id_cols_advanced = [
        "team",
        "position",
    ]

    cost_cols = [
        "keeper_cost",
        "eligible",
    ]

    cost_cols_advanced = [
        "years_eligible",
    ]

    adp_cols: list[str] = []

    adp_cols_advanced = [
        "ecr",
        "value_keeper",
    ]

    if is_advanced:
        show_columns = id_cols + id_cols_advanced + cost_cols + cost_cols_advanced + adp_cols + adp_cols_advanced
    else:
        show_columns = id_cols + cost_cols + adp_cols

    data_with_filters_applied = data.loc[teams_filter & eligible_filter, show_columns]

    # filter_dataframe layers the generic ad-hoc per-column filter UI on top of the selections above.
    filtered_data = filter_dataframe(data_with_filters_applied)
    st.dataframe(
        filtered_data,
        hide_index=True,
        # ~35px per row plus header/padding so the table renders without an inner scrollbar.
        height=35 * (len(filtered_data) + 1) + 12,
        use_container_width=True,
        column_config={
            "team_name": st.column_config.TextColumn(label="League Team", help="Name of fantasy League team."),
            "headshot_url": st.column_config.ImageColumn(label="", help="Player image"),
            "name": st.column_config.TextColumn(label="Name", help="Player's name"),
            "team": st.column_config.TextColumn(label="NFL Team"),
            "position": st.column_config.TextColumn(label="Position", help="Player's position"),
            "keeper_cost": st.column_config.NumberColumn(
                label="Keeper Cost", help="Draft Round Cost to keep player. See Rules for details."
            ),
            "eligible": st.column_config.CheckboxColumn(label="Eligible", help="Is player eligible to be keeper?"),
            "years_eligible": st.column_config.NumberColumn(
                label="Years Eligible",
                help="Number of further consecutive seasons player can be kept (subject to maximum of 2)",
            ),
            "ecr": st.column_config.NumberColumn(
                label="ECR",
                help="Player's average draft round.pick Expert Consensus Rank (ECR) for PPR - Superflex League",
            ),
            "value_keeper": st.column_config.NumberColumn(
                label="Value Keeper",
                help="Approx. number of draft rounds of keeper value vs ECR PPR - Superflex League",
            ),
        },
    )
def get_keeper_app():
    """Entry point for the league Keeper Options page."""
    keeper_title = f"{LEAGUE_NAME} Keeper Options"
    st.set_page_config(page_title=keeper_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(keeper_title)

    keeper_data, league_teams = load_data()
    with st.container():
        filtered_keeper_dataframe(keeper_data, league_teams)


if __name__ == "__main__":
    get_keeper_app()
|
src/pages/3_ECR.py
ADDED
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import pandas as pd
|
3 |
+
import streamlit as st
|
4 |
+
|
5 |
+
from config import DEFAULT_ICON
|
6 |
+
from shared_page import common_page_config
|
7 |
+
from streamlit_filter import filter_dataframe
|
8 |
+
|
9 |
+
|
10 |
+
KEEPER_DATA_URL = "../../tests/mocks/2023_keepers.csv"
|
11 |
+
HEADSHOT_DATA_URL = "../../tests/mocks/2023_player_headshots.csv"
|
12 |
+
|
13 |
+
|
14 |
+
def load_adp() -> pd.DataFrame:
    """Download the latest FantasyPros ECR snapshot from the DynastyProcess data repo."""
    ecr_url = "https://raw.githubusercontent.com/dynastyprocess/data/master/files/db_fpecr_latest.csv"
    df = pd.read_csv(ecr_url)
    # Derive the ranking format (e.g. "ppr-superflex-cheatsheets") from the scraped page path.
    df["ranking_type"] = df["fp_page"].apply(lambda page: os.path.split(page)[-1].replace(".php", ""))
    return df
@st.cache_data(ttl=60 * 60 * 24)
def load_data():
    """Return the full ECR table plus the sorted list of available ranking formats (cached daily)."""
    # Merge ADP
    data = load_adp()
    ranking_type_list = sorted(data.ranking_type.unique().tolist())
    return data, ranking_type_list
+
def filtered_ecr_dataframe(data: pd.DataFrame, ranking_type_list: list[str]):
    """Render the ECR table for one ranking format, with a basic/advanced column toggle.

    Args:
        data: Full ECR table, one row per player per ranking format.
        ranking_type_list: Sorted ranking-format names to offer in the selectbox.
    """
    default_ix = ranking_type_list.index("ppr-superflex-cheatsheets")
    ranking_type_selected = st.selectbox("ECR Format:", ranking_type_list, index=default_ix)
    ranking_type_filter = data["ranking_type"] == ranking_type_selected

    is_advanced = st.checkbox("Show Advanced View")

    id_cols = [
        # "player_square_image_url",
        "player",
        "pos",
        "team",
    ]

    id_cols_advanced = [
        "bye",
        "player_owned_yahoo",
    ]

    adp_cols: list[str] = [
        "ecr",
    ]

    adp_cols_advanced = ["sd", "best", "worst"]

    if is_advanced:
        show_columns = id_cols + id_cols_advanced + adp_cols + adp_cols_advanced
    else:
        show_columns = id_cols + adp_cols

    data_filtered_by_ranking_type = data.loc[ranking_type_filter]
    latest_scrape_date = data_filtered_by_ranking_type.scrape_date.max()
    st.write(f"Scraped data as of: {latest_scrape_date}")

    # Reuse the already-filtered frame rather than re-applying the boolean mask to `data`.
    filtered_data = filter_dataframe(data_filtered_by_ranking_type[show_columns])
    st.dataframe(
        filtered_data,
        hide_index=True,
        # ~35px per row plus header/padding so the table renders without an inner scrollbar.
        height=35 * (len(filtered_data) + 1) + 12,
        use_container_width=True,
        column_config={
            # "player_square_image_url": st.column_config.ImageColumn(label="", help="Player image"),
        },
    )

    st.write("Source: https://github.com/dynastyprocess/data")
def get_keeper_app():
    """Entry point for the Expert Consensus Rankings page."""
    page_title = "Expert Consensus Rankings"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)

    ecr_data, ecr_format_options = load_data()
    with st.container():
        filtered_ecr_dataframe(ecr_data, ecr_format_options)


if __name__ == "__main__":
    get_keeper_app()
|
src/pages/4_Practice_Reports.py
ADDED
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import datetime
|
2 |
+
import streamlit as st
|
3 |
+
|
4 |
+
from config import DEFAULT_ICON
|
5 |
+
from shared_page import common_page_config
|
6 |
+
|
7 |
+
from queries.nfl_teams.practice_reports import scrape_all_team_injury_report, CURRENT_SEASON, CURRENT_WEEK
|
8 |
+
from streamlit_filter import filter_dataframe
|
9 |
+
|
10 |
+
|
11 |
+
@st.cache_data(ttl=60 * 60 * 1)
def load_data():
    """Scrape all team injury reports and precompute the filter option lists (cached hourly).

    Returns:
        tuple: ``(data, teams_list, position_list, status_list,
        last_practice_day_list, data_load_time_str)``.
    """
    data = scrape_all_team_injury_report()
    teams_list = list(data.Team.unique())
    position_list = list(data.Position.unique())
    status_list = list(data.game_status.unique())
    last_practice_day_list = list(data["Last Practice Day"].unique())
    # utcnow() is deprecated (Python 3.12+); an aware UTC datetime formats identically here.
    data_load_time_str = datetime.datetime.now(datetime.timezone.utc).strftime("%m/%d/%Y %I:%M %p")
    return data, teams_list, position_list, status_list, last_practice_day_list, data_load_time_str
|
22 |
+
def get_page():
    """Render the Team Practice Reports page with team/position/status/practice-day filters."""
    page_title = f"Team Practice Reports - {CURRENT_SEASON} Week {CURRENT_WEEK}"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)
    if st.button("Refresh Data"):
        st.cache_data.clear()
    data, teams_list, position_list, status_list, last_practice_day_list, data_load_time_str = load_data()
    st.write(f"Data loaded as of: {data_load_time_str} UTC")
    # Each widget treats an empty selection as "no filter" by falling back to the full list.
    teams_selected = st.multiselect("Team:", teams_list, placeholder="Select a team to filter") or teams_list
    positions_selected = (
        st.multiselect("Position:", position_list, placeholder="Select a position to filter") or position_list
    )
    status_selected = (
        st.multiselect("Game Status:", status_list, placeholder="Select a game status to filter") or status_list
    )
    last_practice_day_selected = (
        st.multiselect(
            "Last Practice Day:", last_practice_day_list, placeholder="Select a day of last team practice to filter"
        )
        or last_practice_day_list
    )

    # The combined mask is pure pandas (no widgets), so it can be built up front.
    row_mask = (
        data.Team.isin(teams_selected)
        & data.Position.isin(positions_selected)
        & data.game_status.isin(status_selected)
        & data["Last Practice Day"].isin(last_practice_day_selected)
    )

    with st.container():
        filtered_data = filter_dataframe(data[row_mask])
        st.dataframe(
            filtered_data,
            hide_index=True,
            height=35 * (len(filtered_data) + 1) + 12,
            use_container_width=False,
            column_config={},
        )


if __name__ == "__main__":
    get_page()
|
src/pages/50_League_Simulation.py
ADDED
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
|
3 |
+
from config import DEFAULT_ICON
|
4 |
+
from shared_page import common_page_config
|
5 |
+
|
6 |
+
from login_component import is_token_in_session
|
7 |
+
|
8 |
+
|
9 |
+
def get_page():
    """League-simulation placeholder page; requires a Yahoo OAuth token in the session."""
    page_title = "Yahoo FF League Simulation"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)

    if is_token_in_session():
        st.write("Logged in. Feature to go here")
    else:
        # Not authorized yet: prompt the user toward the Login button.
        st.write(
            "You must authorize the application to access your account in order to use this feature."
            " Please click Login button above."
        )


if __name__ == "__main__":
    get_page()
|
src/pages/5_Targets.py
ADDED
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import datetime
|
2 |
+
import numpy as np
|
3 |
+
import streamlit as st
|
4 |
+
|
5 |
+
from config import DEFAULT_ICON
|
6 |
+
from shared_page import common_page_config
|
7 |
+
|
8 |
+
from queries.footballguys.constants import YEAR
|
9 |
+
from queries.footballguys.refresh import request_stat
|
10 |
+
from streamlit_filter import filter_dataframe
|
11 |
+
|
12 |
+
|
13 |
+
@st.cache_data(ttl=60 * 60 * 24)
def load_data():
    """Fetch the footballguys targets table (cached daily).

    Returns:
        tuple: ``(data, data_load_time_str)`` — the stat DataFrame and a UTC
        load timestamp string.
    """
    stat_name = "targets"
    data = request_stat(stat_name)
    # utcnow() is deprecated (Python 3.12+); an aware UTC datetime formats identically here.
    data_load_time_str = datetime.datetime.now(datetime.timezone.utc).strftime("%m/%d/%Y %I:%M %p")
    return data, data_load_time_str
+
|
21 |
+
def get_page():
    """Render the Player Targets page with totals/percent views and ad-hoc filtering."""
    page_title = f"Player Targets - {YEAR}"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)
    if st.button("Refresh Data"):
        st.cache_data.clear()
    data, data_load_time_str = load_data()
    st.write(f"Data loaded as of: {data_load_time_str} UTC")

    # "... Totals" rows are per-position subtotal rows embedded in the scraped table.
    selected_subtotals = st.selectbox("Show:", ["Player Totals", "Position Totals"], index=0)
    if selected_subtotals == "Player Totals":
        data = data[~data.name.str.contains(" Totals")]
    elif selected_subtotals == "Position Totals":
        data = data[data.name.str.contains(" Totals")]

    value_types = st.selectbox("Counts / Percent:", ["Counts", "Percent"], index=0)
    if value_types == "Percent":
        numerical_data = data.select_dtypes(include=np.number)
        numerical_cols = numerical_data.columns
        # "sum" (string) instead of the builtin `sum`: pandas deprecated builtin
        # callables in groupby and dispatches the named op to its fast path.
        df_percent_values = numerical_data / data.groupby("TEAM").transform("sum").select_dtypes(include=np.number)
        data.loc[:, numerical_cols] = df_percent_values

    with st.container():
        filtered_data = filter_dataframe(data)
        st.dataframe(
            filtered_data,
            hide_index=True,
            height=35 * (len(filtered_data) + 1) + 12,
            use_container_width=False,
            column_config={},
        )


if __name__ == "__main__":
    get_page()
|
src/pages/6_Redzone_Opportunities.py
ADDED
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import datetime
|
2 |
+
import numpy as np
|
3 |
+
import streamlit as st
|
4 |
+
|
5 |
+
from config import DEFAULT_ICON
|
6 |
+
from shared_page import common_page_config
|
7 |
+
|
8 |
+
from queries.footballguys.constants import YEAR
|
9 |
+
from queries.footballguys.refresh import request_stat
|
10 |
+
from streamlit_filter import filter_dataframe
|
11 |
+
|
12 |
+
|
13 |
+
@st.cache_data(ttl=60 * 60 * 24)
def load_data():
    """Fetch the footballguys redzone-opportunities table (cached daily).

    Returns:
        tuple: ``(data, data_load_time_str)`` — the stat DataFrame and a UTC
        load timestamp string.
    """
    stat_name = "redzone"
    data = request_stat(stat_name)
    # utcnow() is deprecated (Python 3.12+); an aware UTC datetime formats identically here.
    data_load_time_str = datetime.datetime.now(datetime.timezone.utc).strftime("%m/%d/%Y %I:%M %p")
    return data, data_load_time_str
+
|
21 |
+
def get_page():
    """Render the Player Redzone Opportunities page with totals/percent views and filtering."""
    page_title = f"Player Redzone Opportunities - {YEAR}"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)
    if st.button("Refresh Data"):
        st.cache_data.clear()
    data, data_load_time_str = load_data()
    st.write(f"Data loaded as of: {data_load_time_str} UTC")

    # "... Totals" rows are per-position subtotal rows embedded in the scraped table.
    selected_subtotals = st.selectbox("Show:", ["Player Totals", "Position Totals"], index=0)
    if selected_subtotals == "Player Totals":
        data = data[~data.name.str.contains(" Totals")]
    elif selected_subtotals == "Position Totals":
        data = data[data.name.str.contains(" Totals")]

    value_types = st.selectbox("Counts / Percent:", ["Counts", "Percent"], index=0)
    if value_types == "Percent":
        numerical_data = data.select_dtypes(include=np.number)
        numerical_cols = numerical_data.columns
        # "sum" (string) instead of the builtin `sum`: pandas deprecated builtin
        # callables in groupby and dispatches the named op to its fast path.
        df_percent_values = numerical_data / data.groupby("TEAM").transform("sum").select_dtypes(include=np.number)
        data.loc[:, numerical_cols] = df_percent_values

    with st.container():
        filtered_data = filter_dataframe(data)
        st.dataframe(
            filtered_data,
            hide_index=True,
            height=35 * (len(filtered_data) + 1) + 12,
            use_container_width=False,
            column_config={},
        )


if __name__ == "__main__":
    get_page()
|
src/pages/7_Snap_Counts.py
ADDED
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import datetime
|
2 |
+
import streamlit as st
|
3 |
+
|
4 |
+
from config import DEFAULT_ICON
|
5 |
+
from shared_page import common_page_config
|
6 |
+
|
7 |
+
from queries.footballguys.constants import YEAR
|
8 |
+
from queries.nflverse.github_data import get_snap_counts, get_current_tables, SEASON
|
9 |
+
|
10 |
+
|
11 |
+
def load_data():
    """Load snap counts for fantasy positions and precompute the filter option lists.

    Returns:
        tuple: ``(data, teams_list, position_list, weeks_list, data_load_time_str)``.
    """
    data = get_snap_counts(YEAR)
    # Keep only rows flagged as fantasy-relevant positions.
    data = data[data.fantasy_position]
    teams_list = sorted(data.team.unique())
    position_list = data.position.unique()
    weeks_list = sorted(data.week.unique())
    # utcnow() is deprecated (Python 3.12+); an aware UTC datetime formats identically here.
    data_load_time_str = datetime.datetime.now(datetime.timezone.utc).strftime("%m/%d/%Y %I:%M %p")
    return data, teams_list, position_list, weeks_list, data_load_time_str
|
21 |
+
def get_page():
    """Snap-counts page: guard on loaded data, then filter by team/position/week."""
    page_title = f"Snap Counts and Percentages - {YEAR}"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)
    # Bail out early when the nflverse asset has not been loaded yet.
    if f"snap_counts_snap_counts_{SEASON}" not in get_current_tables():
        st.write("Data not loaded.")
        st.write("Check loaded data [here](./Load_Data)")
        return
    data, teams_list, position_list, weeks_list, data_load_time_str = load_data()
    st.write(f"Data loaded as of: {data_load_time_str} UTC")
    # Empty selections fall back to the full option lists ("no filter").
    teams_selected = st.multiselect("Team:", teams_list, placeholder="Select a team to filter") or teams_list
    positions_selected = (
        st.multiselect("Position:", position_list, placeholder="Select a position to filter") or position_list
    )
    weeks_selected = st.multiselect("Week:", weeks_list, placeholder="Select a week to filter") or weeks_list

    with st.container():
        row_mask = (
            data.team.isin(teams_selected) & data.position.isin(positions_selected) & data.week.isin(weeks_selected)
        )
        st.dataframe(
            data[row_mask],
            hide_index=True,
            use_container_width=False,
            column_order=[
                "season",
                "game_type",
                "week",
                "player",
                "position",
                "team",
                "opponent",
                "offense_snaps",
                "offense_pct",
                "defense_snaps",
                "defense_pct",
                "st_snaps",
                "st_pct",
            ],
            column_config={
                "season": st.column_config.TextColumn(help="Year of NFL Season"),
            },
        )


if __name__ == "__main__":
    get_page()
|
src/pages/80_Maximum_Roster_Strategy.py
ADDED
@@ -0,0 +1,165 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pandas as pd
|
2 |
+
import streamlit as st
|
3 |
+
import streamlit.components.v1 as components
|
4 |
+
|
5 |
+
|
6 |
+
from config import DEFAULT_ICON
|
7 |
+
from shared_page import common_page_config, get_local_style
|
8 |
+
from maximum_roster_strategy import data_loader
|
9 |
+
|
10 |
+
|
11 |
+
# Inclusive range of NFL weeks selectable on this page.
MINIMUM_WEEK = 6
MAXIMUM_WEEK = 7

# Inclusive range of player tiers rendered as grid rows.
MIN_TIER = 1
MAX_TIER = 4

# Positions offered in the selector, in display order.
POSITION_OPTIONS = ["RB", "WR", "TE", "QB"]

# Maps position abbreviation to the heading shown above its grid.
POSITION_ABBR_FULL_NAME_MAP = {
    "RB": "Running Backs",
    "WR": "Wide Receivers",
    "TE": "Tight Ends",
    "QB": "Quarterbacks (Superflex / 2QB Leagues Only)",
}
@st.cache_data(ttl=5 * 60)
def load_data():
    """Fetch the MRS google-sheet data and the timeslot labels (cached for 5 minutes)."""
    sheet_data = data_loader.get_google_sheet_data()
    timeslot_labels = data_loader.get_timeslot_labels()
    return sheet_data, timeslot_labels
|
32 |
+
def get_player_grid_div(player_series: pd.Series) -> str:
    """Build the collapsible ``<details>`` HTML cell for one player row.

    The summary carries a CSS class derived from the game outcome; the body
    shows the hold condition plus any weekly article note.
    """
    # Dispatch table replaces the if/elif chain; unknown outcomes stay "undetermined".
    outcome_to_class = {
        "Drop": "drop-player",
        "Light Hold": "light-hold-player",
        "Hold": "hold-player",
    }
    player_class = outcome_to_class.get(player_series["Outcome"], "undetermined-player")

    player_notes = player_series["Hold Condition"]
    weekly_note = player_series["Article Notes"]
    # Missing notes come through as NaN (float), so only append real strings.
    if isinstance(weekly_note, str):
        player_notes += "<br><br>" + weekly_note
    return f"""
    <details class="mrs-grid-player content">
    <summary class="{player_class}">
    {player_series["Formatted"]}
    </summary>
    <p>
    {player_notes}
    </p>
    </details>
    """
|
57 |
+
def get_time_slot_div(time_slot_list: list[str]) -> str:
    """Render one header div per time slot, numbered 1..N to match the CSS grid classes."""
    divs = [
        f"""<div class="timeslot{position} timeslot">{label}</div>\n"""
        for position, label in enumerate(time_slot_list, start=1)
    ]
    return "".join(divs)
|
64 |
+
def get_tier_div(tier_str: str | int, tier_num: str | int) -> str:
|
65 |
+
return f"""<div class="tier{tier_num} tier">Tier {tier_str}</div>"""
|
66 |
+
|
67 |
+
|
68 |
+
def get_player_container(df_players: pd.DataFrame, slot_number: int | str) -> str:
|
69 |
+
if len(df_players) == 0:
|
70 |
+
player_code_str = "<br>"
|
71 |
+
else:
|
72 |
+
player_code_str = "\n".join(df_players.apply(get_player_grid_div, axis=1).tolist())
|
73 |
+
return f"""<div class="playerslot{slot_number} playerslot">{player_code_str}</div>"""
|
74 |
+
|
75 |
+
|
76 |
+
def get_position_breakdown(df: pd.DataFrame, position_abbr: str, position_full_str: str, time_slots: list[str]):
    """Render one position's tier-by-timeslot HTML grid inside a Streamlit component."""
    with st.container():
        st.header(position_full_str)
        df_pos = df[df["Position"] == position_abbr]

        grid_code_str = ""
        grid_code_str += get_time_slot_div(time_slots)

        tier_list = list(range(MIN_TIER, MAX_TIER + 1))
        # Row-major fill: one tier label per row, then one player cell per time slot;
        # slot_number keeps a running index matching the playerslotN CSS classes.
        slot_number = 0
        for tier_idx, tier in enumerate(tier_list):
            grid_code_str += get_tier_div(tier, tier_idx + 1)
            for time_slot in time_slots:
                df_tier_slot = df_pos[(df_pos["TimeSlotName"] == time_slot) & (df_pos["Tier"] == tier)]
                slot_number += 1
                grid_code_str += get_player_container(df_tier_slot, slot_number)

        # components.html renders in a sandboxed iframe, so the page stylesheet
        # must be inlined here rather than relying on the outer page CSS.
        components.html(
            f"""
            {get_local_style()}
            <div class="grid-container-{len(time_slots)}">
            {grid_code_str}
            </div>
            <br>
            <div class="grid-legend">Colors Legend:
            <div class="drop-player">Drop Player</div> |
            <div class="light-hold-player">Light Hold Player</div> |
            <div class="hold-player">Strong Hold Player</div>
            </div>
            """,
            height=1000,
            scrolling=True,
        )
110 |
+
|
111 |
+
def get_page():
    """Maximum Roster Strategy page: URL-synced position/week selectors plus the MRS grid."""
    page_title = "Maximum Roster Strategy"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)

    with st.expander(label="Instructions"):
        st.write(
            """
To get started with MRS: https://solowfantasyfootball.wordpress.com/2023/09/07/maximum-roster-strategy-explained/

Players are organized by game time slot, position, and tier.

Pick up a player during their game's time slot for potential upside if particular circumstances are met.

After the game, players will be colored by outcome: Drop (Red), Light Hold (Yellow), or Strong Hold (Green)."""
        )
    col_select, week_select = st.columns(2, gap="small")
    # Seed widget defaults from the query string so views are shareable links.
    url_params = st.experimental_get_query_params()
    initial_position_index = 0
    if url_position := url_params.get("position"):
        selected_position = url_position[0]
        # Ignore unknown position values silently; keep the default index.
        if selected_position in POSITION_OPTIONS:
            initial_position_index = POSITION_OPTIONS.index(selected_position)

    # Weeks listed newest-first, so index 0 (the default) is the latest week.
    week_options = list(range(MAXIMUM_WEEK, MINIMUM_WEEK - 1, -1))
    initial_week_index = 0
    if url_week := url_params.get("week"):
        try:
            selected_week = int(url_week[0])
        except Exception:
            st.warning("Week parameter must be integer value", icon="⚠️")
            selected_week = MAXIMUM_WEEK
        if selected_week in week_options:
            initial_week_index = week_options.index(selected_week)

    with col_select:
        position = st.selectbox(label="Position", options=POSITION_OPTIONS, index=initial_position_index)
    with week_select:
        week = st.selectbox(label="Week", options=week_options, index=initial_week_index)
    # Reflect the current selections back into the URL.
    url_params.update({"position": position, "week": week})
    st.experimental_set_query_params(**url_params)
    # A ?refresh=... parameter busts the 5-minute sheet cache on demand.
    if st.experimental_get_query_params().get("refresh"):
        st.cache_data.clear()
    df_mrs, all_time_slots_df = load_data()
    df_mrs = df_mrs[df_mrs["Week"] == week]
    current_week_timeslots = (
        all_time_slots_df[all_time_slots_df["Week"] == week].sort_values("WeekTimeSlotIndex").TimeSlotName.tolist()
    )

    get_position_breakdown(df_mrs, position, POSITION_ABBR_FULL_NAME_MAP[position], current_week_timeslots)


if __name__ == "__main__":
    get_page()
|
src/pages/8_FTN_Charting.py
ADDED
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import datetime
|
2 |
+
import streamlit as st
|
3 |
+
|
4 |
+
from config import DEFAULT_ICON
|
5 |
+
from shared_page import common_page_config
|
6 |
+
|
7 |
+
from queries.footballguys.constants import YEAR
|
8 |
+
from queries.nflverse.github_data import get_ftn_charting, get_current_tables, SEASON
|
9 |
+
|
10 |
+
|
11 |
+
def load_data():
    """Load the season's FTN charting table plus a UTC load timestamp string.

    Returns:
        tuple: ``(data, data_load_time_str)``.
    """
    data = get_ftn_charting(YEAR)
    # utcnow() is deprecated (Python 3.12+); an aware UTC datetime formats identically here.
    data_load_time_str = datetime.datetime.now(datetime.timezone.utc).strftime("%m/%d/%Y %I:%M %p")
    return data, data_load_time_str
16 |
+
|
17 |
+
def get_page():
    """FTN charting page: show the season's charting table once the asset is loaded."""
    page_title = f"FTN Charting - {YEAR}"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)
    # Bail out early when the nflverse asset has not been loaded yet.
    if f"ftn_charting_ftn_charting_{SEASON}" not in get_current_tables():
        st.write("Data not loaded.")
        st.write("Check loaded data [here](./Load_Data)")
        return
    data, data_load_time_str = load_data()
    st.write(f"Data loaded as of: {data_load_time_str} UTC")

    with st.container():
        st.dataframe(
            data,
            hide_index=True,
            use_container_width=False,
        )


if __name__ == "__main__":
    get_page()
|
src/pages/98_Load_Data.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import duckdb
|
2 |
+
import streamlit as st
|
3 |
+
|
4 |
+
from config import DEFAULT_ICON
|
5 |
+
from shared_page import common_page_config
|
6 |
+
|
7 |
+
from queries.nflverse.github_data import load_assets, get_current_tables
|
8 |
+
|
9 |
+
|
10 |
+
def get_page():
    """Data-loader admin page: refresh nflverse assets and inspect the loaded duckdb tables."""
    page_title = "Data Loader"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)

    current_tables_list = get_current_tables()

    if st.button("Refresh Data"):
        load_assets()
        st.rerun()

    # Table names come from get_current_tables(), so the f-string SQL below only
    # ever interpolates known table identifiers.
    if selected_table := st.selectbox("Describe a table:", current_tables_list, index=0):
        st.dataframe(
            duckdb.sql(f"DESCRIBE {selected_table}").df(),
            hide_index=True,
            use_container_width=True,
        )

        if st.checkbox("Explore data"):
            st.dataframe(duckdb.sql(f"SELECT * FROM {selected_table} LIMIT 50").df())


if __name__ == "__main__":
    get_page()
|
src/pages/99_Keeper_Rules.py
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st

from config import DEFAULT_ICON, LEAGUE_NAME
from shared_page import common_page_config


# Static rules page: a flat script (no functions) executed top-to-bottom by Streamlit.
page_title = f"{LEAGUE_NAME} Keeper Rules"
st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON)
common_page_config()
st.title(page_title)
st.markdown(
    """
***Note: First LOFG keeper season was 2019***

1. Teams can keep up to 3 players from the prior season.
   - Note: changed from 2 to 3 beginning with 2022 season
2. Players are kept at the expense of a draft pick.
   - The round of that draft pick will depend on when the player was drafted in the prior season.
3. Players drafted in the 1st round of the prior season will not be eligible to keep.
4. Players drafted in rounds 2 and after can be kept at the expense of one higher round
   - Ex: player drafted in round 2 can be kept for your 1st round pick.
5. Players who were not drafted and were picked up during the season can be kept for 10th round picks (FA Keeper cost)
   - Changed beginning 2022 from 9th round
6. Post-Draft Acquired Keeper cost Minimum -
   - Players drafted by other teams have a keeper cost of minimum of round player drafted and FA Keeper Cost.
   - This rewards people who drafted the players, versus those who picked them up off waivers.
7. If you select 2 players with the same keeper cost, one of the players would count as one round higher.
   - Ex: Two players both have keeper cost of 10th round.
   - One of those players would instead be counted as your 9th round pick if both are kept.
8. Players can not be kept for more than 2 consecutive seasons
   - Ex: player drafted in 2019 in 6th round, 2020 for 5th round, and 2021 for 4th round - can't be kept in 2022
   - Exception to the above rule is that players originally drafted in 2nd round can only be kept for one season.
9. Players traded in a season are not subject to the FA Keeper cost minimum cost
   In other words, keeper rights transfer with the trade

"""
)
|
src/pages/9_Team_Formations.py
ADDED
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import datetime
|
2 |
+
import pandas as pd
|
3 |
+
import streamlit as st
|
4 |
+
|
5 |
+
from config import DEFAULT_ICON
|
6 |
+
from shared_page import common_page_config
|
7 |
+
|
8 |
+
from queries.footballguys.constants import YEAR
|
9 |
+
from queries.nflverse.github_data import get_pbp_participation, get_current_tables, SEASON
|
10 |
+
|
11 |
+
|
12 |
+
def load_data():
    """Load the season's play-by-play participation data for this page.

    Returns:
        tuple: (data, teams_list, data_load_time_str) — the joined
        participation/play-by-play DataFrame, the sorted list of non-null
        possession teams, and the UTC load time formatted for display.
    """
    data = get_pbp_participation(YEAR)
    teams_list = sorted(filter(None, data.possession_team.unique()))
    # datetime.utcnow() is deprecated (3.12+); use an aware UTC timestamp,
    # which formats to the same displayed string.
    data_load_time_str = datetime.datetime.now(datetime.timezone.utc).strftime("%m/%d/%Y %I:%M %p")
    return data, teams_list, data_load_time_str
|
19 |
+
|
20 |
+
|
21 |
+
def get_page():
    """Render the Team Formations page: a pivot of play counts by week for one team."""
    page_title = f"Team Formations - {YEAR}"
    st.set_page_config(page_title=page_title, page_icon=DEFAULT_ICON, layout="wide")
    common_page_config()
    st.title(page_title)
    # BUG FIX: this page queries pbp participation joined to play-by-play,
    # not FTN charting — guard on the tables it actually reads.
    required_tables = {
        f"pbp_participation_pbp_participation_{SEASON}",
        f"pbp_play_by_play_{SEASON}",
    }
    if not required_tables.issubset(get_current_tables()):
        st.write("Data not loaded.")
        st.write("Check loaded data [here](./Load_Data)")
        return
    data, teams_list, data_load_time_str = load_data()
    st.write(f"Data loaded as of: {data_load_time_str} UTC")
    default_groups = [
        "down",
        "play_type",
        "offense_personnel",
    ]
    group_options = [
        "week",
        "down",
        "qtr",
        "ydstogo",
        "play_type",
        "pass_length",
        "pass_location",
        "possession_team",
        "offense_formation",
        "offense_personnel",
        "number_of_pass_rushers",
        "defenders_in_box",
        "defense_personnel",
    ]
    # Empty multiselect falls back to the default grouping.
    group_by_selected = st.multiselect("Group by:", group_options) or default_groups
    team_selected = st.selectbox("Team:", teams_list)
    week_selection = st.slider(
        "Filter Week Range:",
        min_value=data["week"].min(),
        max_value=data["week"].max(),
        value=(data["week"].min(), data["week"].max()),
        step=1,
    )

    with st.container():
        # Only pass/run plays for the chosen team inside the week window.
        filtered_data = data[
            (data.possession_team == team_selected)
            & (data.play_type.isin(["pass", "run"]))
            & (data["week"].between(*week_selection))
        ]

        st.dataframe(
            pd.pivot_table(
                filtered_data,
                values="count_col",
                index=group_by_selected,
                columns="week",
                aggfunc={"count_col": "sum"},
            ),
            use_container_width=False,
        )


if __name__ == "__main__":
    get_page()
|
src/queries/__init__.py
ADDED
File without changes
|
src/queries/footballguys/__init__.py
ADDED
File without changes
|
src/queries/footballguys/constants.py
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import List, Mapping
|
2 |
+
|
3 |
+
# constants relevant to parsing from footballguys
|
4 |
+
|
5 |
+
# Order in which the per-position tables appear on the footballguys
# snap-count team page.  (Name keeps the original "POSITON" spelling —
# it is part of the module's public interface.)
SNAP_PAGE_POSITON_ORDER: List[str] = [
    "QB",
    "RB",
    "WR",
    "TE",
    "DT",
    "DE",
    "ILB",
    "OLB",
    "CB",
    "S",
]

# Maps each position abbreviation to its side of the ball.
POSITIONS_TO_OFFENSE_DEFENSE: Mapping[str, str] = {
    "QB": "OFF",
    "RB": "OFF",
    "WR": "OFF",
    "TE": "OFF",
    "DT": "DEF",
    "DE": "DEF",
    "ILB": "DEF",
    "OLB": "DEF",
    "S": "DEF",
    "CB": "DEF",
}


# Root URL for footballguys stat pages.
BASE_URL = "https://www.footballguys.com/stats"

# Season scraped by default.
YEAR = 2023
|
src/queries/footballguys/helpers.py
ADDED
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from io import StringIO
|
2 |
+
import lxml.html
|
3 |
+
import pandas as pd
|
4 |
+
import requests
|
5 |
+
from typing import List
|
6 |
+
from queries.footballguys import constants as fbgc
|
7 |
+
|
8 |
+
|
9 |
+
def url_to_pandas(url) -> List[pd.DataFrame]:
    """Fetch a page and parse all of its HTML tables.

    <br> tags are rewritten to "-" before parsing so multi-line cells
    survive as single "a-b" strings.
    """
    response = requests.get(url)
    cleaned_html = response.text.replace("<br>", "-")
    return pd.read_html(StringIO(cleaned_html))
|
13 |
+
|
14 |
+
|
15 |
+
def create_html_table_from_header_body(header_html_str: str, body_html_str: str):
    # Wrap pre-rendered header/body HTML fragments in a <table> element so
    # pandas.read_html can parse them as one table.  The surrounding
    # whitespace of the f-string is intentional and preserved.
    return f"""
    <table>
    {header_html_str}
    {body_html_str}
    </table>
    """
|
22 |
+
|
23 |
+
|
24 |
+
def extract_snaps_to_pandas(url: str):
    """Parse the footballguys snap-count page into one DataFrame per position.

    The page's table element alternates header/body child pairs (one pair per
    position group); each pair is re-wrapped as a standalone HTML table and
    parsed with pandas.
    """
    root = lxml.html.document_fromstring(requests.get(url).text)
    table_element_list = root.xpath("""//*[@id="stats_snapcounts_data"]/div/table""")
    assert isinstance(table_element_list, list)
    table_element = table_element_list[0]
    assert isinstance(table_element, lxml.html.HtmlElement)
    table_child_list = table_element.getchildren()
    # Children must pair up as (header, body); an odd count means the page
    # layout changed.
    assert len(table_child_list) % 2 == 0  # check is even
    half_len = int(len(table_child_list) / 2)
    df_list = []
    for i in range(half_len):
        table_html = create_html_table_from_header_body(
            lxml.html.tostring(table_child_list[2 * i]), lxml.html.tostring(table_child_list[2 * i + 1])
        ).replace("\\n", "")
        df = pd.read_html(table_html)[0]
        # First column contains name and is initially labeled as each position, example "Quarterback"
        # Insert column at front called POS and fill with current first column label
        position_name = df.columns[0]
        df.insert(0, "POS", position_name)
        df.rename(columns={position_name: "name"}, inplace=True)
        df_list.append(df)
    return df_list
|
46 |
+
|
47 |
+
|
48 |
+
def add_snap_off_def_column(team_snap_df: pd.DataFrame):
    """Prepend an OFF/DEF column derived from each row's POS, in place.

    Raises KeyError for positions missing from the mapping, matching the
    strictness of a direct dict lookup.
    """
    side_of_ball = team_snap_df["POS"].map(lambda pos: fbgc.POSITIONS_TO_OFFENSE_DEFENSE[pos])
    team_snap_df.insert(0, "OFF/DEF", side_of_ball)
|
51 |
+
|
52 |
+
|
53 |
+
def add_snap_position_column(
    team_snap_df_list: List[pd.DataFrame],
    position_name_array: List[str] = fbgc.SNAP_PAGE_POSITON_ORDER,
):
    """Insert a POS column into each per-position frame, following page order.

    NOTE(review): as of 8/22/23 the assert below was failing because the site
    stopped splitting snap counts by position — confirm before relying on it.
    """
    assert len(team_snap_df_list) == len(position_name_array)
    for position_label, position_frame in zip(position_name_array, team_snap_df_list):
        position_frame.insert(0, "POS", position_label)
|
62 |
+
|
63 |
+
|
64 |
+
def set_multilevel_columns(df):
    """Turn flat "a-b" column labels into a two-level MultiIndex, in place.

    Labels without a "-" are duplicated into (label, label) so every column
    has the same depth.
    """
    level_pairs = []
    for label in df.columns:
        if "-" in label:
            level_pairs.append(tuple(label.split("-")))
        else:
            level_pairs.append((label, label))
    df.columns = pd.MultiIndex.from_tuples(level_pairs)
|
67 |
+
|
68 |
+
|
69 |
+
def parse_snaps(team_short_name: str, base_url: str = fbgc.BASE_URL, year: int = fbgc.YEAR) -> pd.DataFrame:
    """Scrape one team's snap-count page into a single DataFrame.

    Weekly "count-percent" cells are split into numeric columns, and rows
    with no player name are dropped.
    """
    print(f"Attempting to parse snaps for {team_short_name}")
    position_frames = parse_team_page(team_short_name, base_url, "snap-counts", year)
    combined = pd.concat(position_frames)
    split_snap_count_percents(combined)
    return combined.dropna(subset=["name"])
|
78 |
+
|
79 |
+
|
80 |
+
def add_targets_position(team_df: pd.DataFrame):
    """Insert a POS column inferred from "<POS> Totals" separator rows, in place.

    Player rows sit above their group's trailing "<POS> Totals" row, so the
    label from each Totals row is back-filled upward over the rows before it.
    """
    totals_labels = team_df.name.apply(lambda x: x.replace(" Totals", "") if " Totals" in x else None)
    team_df.insert(0, "POS", totals_labels.bfill())
|
84 |
+
|
85 |
+
|
86 |
+
def parse_targets(team_short_name: str, base_url: str = fbgc.BASE_URL, year: int = fbgc.YEAR) -> pd.DataFrame:
    """Scrape one team's targets page, label rows with POS, and drop nameless rows."""
    print(f"Attempting to parse targets for {team_short_name}")
    targets_df = parse_team_page(team_short_name, base_url, "targets", year)[0]
    add_targets_position(targets_df)
    return targets_df.loc[targets_df["name"].notna()]
|
92 |
+
|
93 |
+
|
94 |
+
def parse_redzone(team_short_name: str, base_url: str = fbgc.BASE_URL, year: int = fbgc.YEAR) -> pd.DataFrame:
    """Scrape one team's redzone page, label rows with POS, and drop nameless rows."""
    print(f"Attempting to parse redzone for {team_short_name}")
    redzone_df = parse_team_page(team_short_name, base_url, "redzone", year)[0]
    add_targets_position(redzone_df)
    return redzone_df.loc[redzone_df["name"].notna()]
|
100 |
+
|
101 |
+
|
102 |
+
def split_snap_count_percents(team_snap_df: pd.DataFrame):
    """Split weekly "Wk N" cells of the form "<count>-<pct>%" into numeric columns, in place.

    Each "Wk N" column is replaced by "N-count" (int snaps) and "N-%"
    (fraction 0.0-1.0).  Columns that are entirely NaN (and therefore dtype
    float) become constant 0 / 0.0.
    """
    # BUG FIX: the regular season has 18 weeks; range(1, 18) silently
    # skipped week 18.
    for week in range(1, 19):
        week_col = f"Wk {week}"
        if week_col not in team_snap_df.columns:
            continue
        # An all-NaN column carries no "count-pct" strings to split.
        if team_snap_df[week_col].dtype == float:
            team_snap_df[f"{week}-count"] = 0
            team_snap_df[f"{week}-%"] = 0.0
        else:
            week_split = team_snap_df[week_col].astype(str).str.split("-")
            # A single-element split means the cell held no "count-pct" pair.
            week_count = week_split.apply(lambda x: 0 if len(x) == 1 or x[0] == "" else int(x[0]))
            week_pct = week_split.apply(lambda x: 0.0 if len(x) == 1 else float(x[1].strip("%")) / 100.0)
            team_snap_df[f"{week}-count"] = week_count
            team_snap_df[f"{week}-%"] = week_pct
        team_snap_df.drop(columns=week_col, inplace=True)
|
117 |
+
|
118 |
+
|
119 |
+
def parse_team_page(
    team_short_name: str,
    base_url: str,
    stat_name: str,
    year: int,
) -> List[pd.DataFrame]:
    """Fetch and parse one footballguys team stat page into DataFrames.

    Snap counts need custom header/body extraction; every other stat page is
    a plain HTML-table parse.
    """
    url = f"{base_url}/{stat_name}/teams?team={team_short_name}&year={year}"
    if stat_name == "snap-counts":
        return extract_snaps_to_pandas(url)
    return url_to_pandas(url)
|
src/queries/footballguys/refresh.py
ADDED
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from domain.teams import NFLTeam, ALL_TEAMS
|
2 |
+
from queries.footballguys.helpers import parse_snaps, parse_targets, parse_redzone
|
3 |
+
from typing import List, Callable, Optional
|
4 |
+
import pandas as pd
|
5 |
+
|
6 |
+
|
7 |
+
def add_team_name_columns(team_df: pd.DataFrame, team_short_name: str, team_name: str):
    """Prepend TEAM (short name) and TEAM_NAME columns to a team's stat frame, in place."""
    for position, column, value in ((0, "TEAM", team_short_name), (1, "TEAM_NAME", team_name)):
        team_df.insert(position, column, value)
|
10 |
+
|
11 |
+
|
12 |
+
def apply_intended_column_sorting(df: pd.DataFrame, first_columns: List[str]) -> pd.DataFrame:
    """Reorder df so the listed columns (those actually present) lead, keeping the rest in order."""
    leading = [col for col in first_columns if col in df.columns]
    trailing = [col for col in df.columns if col not in leading]
    return df[leading + trailing]
|
16 |
+
|
17 |
+
|
18 |
+
def get_all_teams_stat_type(
    all_teams_list: List[NFLTeam],
    parsing_function: Callable,
    store_key: str,
    intended_first_columns: Optional[List[str]] = None,
):
    """Scrape one stat type for every team and concatenate the results.

    Args:
        all_teams_list: teams to scrape.
        parsing_function: called with each team's footballguys short name;
            must return a DataFrame.
        store_key: label used in the completion log message.
        intended_first_columns: columns to move to the front of the result,
            when present.
    """
    per_team_frames = []
    for team in all_teams_list:
        frame = parsing_function(team.footballguys_short_name)
        add_team_name_columns(frame, team.team_short_name, team.team_name)
        per_team_frames.append(frame)
    combined = pd.concat(per_team_frames)
    if intended_first_columns:
        combined = apply_intended_column_sorting(combined, intended_first_columns)
    print(f"footballguy {store_key} loaded")
    return combined
|
34 |
+
|
35 |
+
|
36 |
+
def request_stat(stat_name: str) -> pd.DataFrame:
    """Scrape one stat type for all teams.

    Args:
        stat_name: one of "targets", "snap-counts", or "redzone".

    Raises:
        ValueError: if stat_name is not a supported stat type.
    """
    intended_col_sort = None
    if stat_name == "targets":
        parse_fxn = parse_targets
        intended_col_sort = ["TEAM", "TEAM_NAME", "POS", "name", "total"]
    elif stat_name == "snap-counts":
        parse_fxn = parse_snaps
    elif stat_name == "redzone":
        parse_fxn = parse_redzone
        intended_col_sort = ["TEAM", "TEAM_NAME", "POS", "name", "total"]
    else:
        # BUG FIX: an unknown stat_name previously fell through to an
        # UnboundLocalError on parse_fxn; fail loudly and clearly instead.
        raise ValueError(f"Unsupported stat_name: {stat_name!r}")
    return get_all_teams_stat_type(ALL_TEAMS, parse_fxn, stat_name, intended_col_sort)
|
src/queries/nbcsports/player_news.py
ADDED
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from bs4 import BeautifulSoup
|
2 |
+
import datetime
|
3 |
+
import pandas as pd
|
4 |
+
import requests
|
5 |
+
from typing import Mapping
|
6 |
+
|
7 |
+
NEWS_URL = "https://www.nbcsports.com/fantasy/football/player-news"
|
8 |
+
|
9 |
+
|
10 |
+
def find_soup_text_with_default(soup, element: str, find_search_map: Mapping[str, str]):
    """Return the stripped text of the first matching element, or "" when none matches."""
    match = soup.find(element, find_search_map)
    return match.text.strip() if match else ""
|
15 |
+
|
16 |
+
|
17 |
+
def parse_player_div(player_div):
    """Flatten one PlayerNewsPost <div> into a dict of news fields."""
    date_node = player_div.find("div", {"class": "PlayerNewsPost-date"})
    fields = {"Date/Time": date_node.get("data-date")}
    fields["Name"] = find_soup_text_with_default(player_div, "div", {"class": "PlayerNewsPost-name"})
    fields["Team"] = find_soup_text_with_default(player_div, "span", {"class": "PlayerNewsPost-team-abbr"}).upper()
    fields["Position"] = find_soup_text_with_default(player_div, "span", {"class": "PlayerNewsPost-position"}).title()
    fields["Headline"] = find_soup_text_with_default(player_div, "div", {"class": "PlayerNewsPost-headline"})
    fields["Analysis"] = find_soup_text_with_default(player_div, "div", {"class": "PlayerNewsPost-analysis"})
    return fields
|
26 |
+
|
27 |
+
|
28 |
+
def get_nfl_player_news(page_number: int = 1) -> pd.DataFrame:
    """Scrape one page of NBC Sports NFL player news.

    Returns an empty DataFrame when the page has no news posts; otherwise a
    frame with a parsed Date/Time column.
    """
    request_page = requests.get(f"{NEWS_URL}?p={page_number}")
    soup = BeautifulSoup(request_page.content)
    player_div_list = soup.find_all("div", {"class": "PlayerNewsPost"})
    if not player_div_list:
        return pd.DataFrame()
    news_df = pd.DataFrame([parse_player_div(d) for d in player_div_list])
    news_df["Date/Time"] = pd.to_datetime(news_df["Date/Time"])
    return news_df
|
39 |
+
|
40 |
+
|
41 |
+
def get_player_news_window_hours(hours: int = 1):
    """Collect player-news pages back to `hours` ago (UTC).

    Pages are fetched in order until one contains an item older than the
    window start, an empty page is returned, or the 20-page safety cap is hit.

    Returns:
        pd.DataFrame: concatenated news rows (empty when nothing was fetched).
    """
    window_start = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(hours=hours)
    max_pages = 20
    df_list = []
    for page in range(1, max_pages):
        last_news = get_nfl_player_news(page)
        if last_news.empty:
            # BUG FIX: an empty page previously crashed min() on an empty
            # column; stop paging instead.
            break
        df_list.append(last_news)
        if min(last_news["Date/Time"]) < window_start:
            break
    if not df_list:
        # BUG FIX: pd.concat([]) raises; return an empty frame instead.
        return pd.DataFrame()
    return pd.concat(df_list)
|
src/queries/nfl_teams/__init__.py
ADDED
File without changes
|
src/queries/nfl_teams/practice_reports.py
ADDED
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from bs4 import BeautifulSoup
|
2 |
+
import datetime
|
3 |
+
from multiprocessing import Pool
|
4 |
+
import numpy as np
|
5 |
+
import pandas as pd
|
6 |
+
from pydantic import BaseModel, Field
|
7 |
+
import requests
|
8 |
+
from typing import Optional
|
9 |
+
from urllib.parse import urljoin
|
10 |
+
|
11 |
+
from domain.teams import ALL_TEAMS, NFLTeam
|
12 |
+
|
13 |
+
|
14 |
+
# Set True to scrape all teams in parallel via multiprocessing.Pool.
MULTIPROCESSING_ENABLED = False

# Day-of-week names (abbreviated and full) mapped to a 0-based weekday index;
# also used as the membership test for "is this column a practice day".
PRACTICE_WEEK = {
    "Mon": 0,
    "Tue": 1,
    "Wed": 2,
    "Thu": 3,
    "Fri": 4,
    "Sat": 5,
    "Sun": 6,
    "Monday": 0,
    "Tuesday": 1,
    "Wednesday": 2,
    "Thursday": 3,
    "Friday": 4,
    "Saturday": 5,
    "Sunday": 6,
}


# Normalizes full day names to the abbreviated field names used by
# PracticeReportRawRow.
DAY_OF_WEEK_STRING_MAPPING = {
    "Monday": "Mon",
    "Tuesday": "Tue",
    "Wednesday": "Wed",
    "Thursday": "Thu",
    "Friday": "Fri",
    "Saturday": "Sat",
    "Sunday": "Sun",
}


# Monday of NFL week 1, 2023; CURRENT_WEEK is the number of whole weeks
# elapsed since then (minimum 1), evaluated once at import time.
WEEK_1_BEGIN_DATE = datetime.datetime(2023, 9, 4)
CURRENT_DATE = datetime.datetime.now()
CURRENT_WEEK = max(1, int(1 + (CURRENT_DATE - WEEK_1_BEGIN_DATE).days / 7))
CURRENT_SEASON = 2023
|
49 |
+
|
50 |
+
|
51 |
+
class PracticeReportRawRow(BaseModel):
    """One validated row of a team's NFL.com practice/injury report.

    Day-of-week fields hold that day's practice status (empty string when the
    source cell was NaN); the "Game Status" source column maps to
    ``game_status`` via its field alias.
    """

    Team: str
    Player: str
    Position: str
    Injury: str
    Sun: Optional[str] = None
    Mon: Optional[str] = None
    Tue: Optional[str] = None
    Wed: Optional[str] = None
    Thu: Optional[str] = None
    Fri: Optional[str] = None
    Sat: Optional[str] = None
    game_status: str = Field(alias="Game Status")

    @classmethod
    def replace_nan(cls, value) -> str:
        # Fix: first parameter of a classmethod is conventionally `cls`, not `self`.
        # pandas represents missing cells as float NaN; normalize them to "".
        if isinstance(value, float):
            if np.isnan(value):
                return ""
        return value

    @classmethod
    def from_raw(cls, input_dict) -> "PracticeReportRawRow":
        """Build a row from a raw pandas record, normalizing full day names to abbreviations."""
        return cls(**{DAY_OF_WEEK_STRING_MAPPING.get(k, k): cls.replace_nan(v) for k, v in input_dict.items()})
|
75 |
+
|
76 |
+
|
77 |
+
def get_injury_report_dataframe(team: NFLTeam):
    """Fetch and parse the current-week injury report table for one team."""
    injury_report_url = urljoin(team.injury_report_url, f"week/REG-{CURRENT_WEEK}")
    report_soup = BeautifulSoup(requests.get(injury_report_url).content)
    team_names_spans = report_soup.find_all("span", {"class": "nfl-o-injury-report__club-name"})
    assert team_names_spans
    # Sanity check: the first club header on the page is the requested team.
    team_names_str = [span.get_text() for span in team_names_spans]
    assert team_names_str[0] == team.team_full_name
    tables = report_soup.find_all("table")
    return pd.read_html(str(tables))[0]
|
88 |
+
|
89 |
+
|
90 |
+
def scrape_team_injury_report(team: NFLTeam) -> pd.DataFrame:
    """Scrape and normalize one team's injury report.

    Returns an empty DataFrame when scraping fails.  Day-of-week columns are
    renamed to sequential practice-day numbers ("1", "2", ...) and the last
    practice day's original name is recorded in "Last Practice Day".
    """
    print(f"Scraping Injury Report for: {team.team_full_name}")
    try:
        team_report = get_injury_report_dataframe(team)
    except Exception:
        # Best effort: log and skip a failing team rather than abort the
        # whole multi-team scrape.
        print(f"Failed to scrape practice report for: {team.team_full_name}")
        return pd.DataFrame()
    validated_row_list = []
    for df_row_dict in team_report.to_dict("records"):
        row_to_add = df_row_dict
        row_to_add["Team"] = team.team_full_name
        validated_row_list.append(PracticeReportRawRow.from_raw(row_to_add))
    validated_df = pd.DataFrame([x.dict() for x in validated_row_list])
    # drop all na columns
    validated_df.dropna(axis=1, how="all", inplace=True)
    # replace day of week with practice day from 1-3
    day_idx = 1
    last_practice_day = None
    for col in validated_df.columns:
        if col in PRACTICE_WEEK:
            validated_df.rename(columns={col: str(day_idx)}, inplace=True)
            day_idx += 1
            last_practice_day = col
    validated_df["Last Practice Day"] = last_practice_day
    return validated_df
|
115 |
+
|
116 |
+
|
117 |
+
def scrape_all_team_injury_report() -> pd.DataFrame:
    """Scrape and concatenate injury reports for every NFL team.

    Uses a process pool when MULTIPROCESSING_ENABLED is set; otherwise scrapes
    teams sequentially.
    """
    if MULTIPROCESSING_ENABLED:
        with Pool() as pool:
            team_reports = pool.map(scrape_team_injury_report, ALL_TEAMS)
    else:
        team_reports = [scrape_team_injury_report(team) for team in ALL_TEAMS]
    return pd.concat(team_reports)
|
src/queries/nflverse/__init__.py
ADDED
File without changes
|
src/queries/nflverse/github_data.py
ADDED
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import duckdb
|
2 |
+
import pandas as pd
|
3 |
+
import os
|
4 |
+
from typing import Callable
|
5 |
+
|
6 |
+
|
7 |
+
# Raise duckdb's pandas sampling size so column types are inferred correctly
# on wide, NaN-heavy frames.
duckdb.default_connection.execute("SET GLOBAL pandas_analyze_sample=100000")

# Root of the nflverse-data GitHub release downloads.
BASE_URL = "https://github.com/nflverse/nflverse-data/releases/download/"


# Positions flagged as fantasy-relevant in snap-count data.
FANTASY_POSITIONS = [
    "QB",
    "RB",
    "WR",
    "TE",
    "FB",
    "K",
]
|
20 |
+
|
21 |
+
|
22 |
+
def get_snap_counts(season_int: int) -> pd.DataFrame:
    """Return the season's snap counts with a boolean fantasy_position flag."""
    snap_df = duckdb.sql(f"SELECT * from snap_counts_snap_counts_{season_int}").df()
    snap_df["fantasy_position"] = snap_df["position"].isin(FANTASY_POSITIONS)
    return snap_df
|
26 |
+
|
27 |
+
|
28 |
+
def get_play_by_play(season_int: int) -> pd.DataFrame:
    """Return the full play-by-play table for one season."""
    return duckdb.sql(f"SELECT * from pbp_play_by_play_{season_int}").df()
|
31 |
+
|
32 |
+
|
33 |
+
def get_player_stats(season_int: int) -> pd.DataFrame:
    """Return the season's offensive player stats.

    BUG FIX: assets are registered with a season suffix
    (player_stats_player_stats_<season>); the previous unsuffixed table name
    never matched anything and season_int was silently ignored.
    """
    df = duckdb.sql(f"SELECT * from player_stats_player_stats_{season_int}").df()
    return df
|
36 |
+
|
37 |
+
|
38 |
+
def get_ftn_charting(season_int: int) -> pd.DataFrame:
    """Return the FTN charting table for one season."""
    return duckdb.sql(f"SELECT * from ftn_charting_ftn_charting_{season_int}").df()
|
41 |
+
|
42 |
+
|
43 |
+
def get_pbp_participation(season_int: int) -> pd.DataFrame:
    """Join per-play participation rows to play-by-play detail for one season.

    Adds a constant count_col (= 1) so callers can pivot/sum play counts.
    Participation rows without a matching play (b.week is null) are dropped.
    """
    df = duckdb.sql(
        f"""
        SELECT
        a.*
        , b.*
        , 1 as count_col
        from pbp_participation_pbp_participation_{season_int} a
        left join pbp_play_by_play_{season_int} b
        on a.play_id = b.play_id
        and a.nflverse_game_id = b.game_id
        where b.week is not null
        """
    ).df()
    return df
|
58 |
+
|
59 |
+
|
60 |
+
def get_nextgen_stats(season_int: int, stat_category: str) -> pd.DataFrame:
    """Return Next Gen Stats rows for one season and category (e.g. "passing")."""
    query = f"SELECT * from nextgen_stats_ngs_{stat_category} where season = {season_int}"
    return duckdb.sql(query).df()
|
63 |
+
|
64 |
+
|
65 |
+
# Season currently loaded; the asset file names below embed it.
SEASON = "2023"

# (release_tag, asset_file) pairs to download from nflverse-data releases.
# Each becomes a duckdb table named "<tag>_<file-stem>".
NFLVERSE_ASSETS = [
    ("ftn_charting", f"ftn_charting_{SEASON}.parquet"),
    ("espn_data", "qbr_season_level.parquet"),
    ("espn_data", "qbr_week_level.parquet"),
    ("players", "players.parquet"),
    ("pbp_participation", f"pbp_participation_{SEASON}.parquet"),
    ("snap_counts", f"snap_counts_{SEASON}.parquet"),
    ("player_stats", f"player_stats_{SEASON}.parquet"),
    ("player_stats", f"player_stats_def_{SEASON}.parquet"),
    ("player_stats", f"player_stats_kicking_{SEASON}.parquet"),
    ("pfr_advstats", "advstats_season_def.parquet"),
    ("pfr_advstats", "advstats_season_pass.parquet"),
    ("pfr_advstats", "advstats_season_rec.parquet"),
    ("pfr_advstats", "advstats_season_rush.parquet"),
    ("pfr_advstats", f"advstats_week_def_{SEASON}.parquet"),
    ("pfr_advstats", f"advstats_week_pass_{SEASON}.parquet"),
    ("pfr_advstats", f"advstats_week_rec_{SEASON}.parquet"),
    ("pfr_advstats", f"advstats_week_rush_{SEASON}.parquet"),
    ("pbp", f"play_by_play_{SEASON}.parquet"),
    ("nextgen_stats", "ngs_passing.parquet"),
    ("nextgen_stats", "ngs_receiving.parquet"),
    ("nextgen_stats", "ngs_rushing.parquet"),
]
|
90 |
+
|
91 |
+
|
92 |
+
class NflVerseDataAsset:
    """One nflverse GitHub release asset and the duckdb table it registers as."""

    def __init__(
        self,
        release_tag: str,
        asset_name: str,
        dataframe_mutation_fxn: Callable[[pd.DataFrame], pd.DataFrame] = lambda x: x,
    ):
        self.release_tag = release_tag
        self.asset_name = asset_name
        # Optional hook applied to the DataFrame before registration.
        self.dataframe_mutation_fxn = dataframe_mutation_fxn
        # e.g. ("ftn_charting", "ftn_charting_2023.parquet")
        #   -> "ftn_charting_ftn_charting_2023"
        self.table_name = f"{release_tag}_{asset_name.rsplit('.', 1)[0]}"

    def load_parquet_asset_to_df(self) -> pd.DataFrame:
        """Download the parquet asset from the GitHub release and return it as a DataFrame."""
        # BUG FIX: os.path.join is not URL-safe (it inserts "\\" on Windows);
        # build the URL with explicit "/" separators instead.
        location = f"{BASE_URL.rstrip('/')}/{self.release_tag}/{self.asset_name}"
        df = pd.read_parquet(location)
        return df

    def register_asset_to_duckdb(self) -> None:
        """Load the asset, apply the mutation hook, and register it as a duckdb table."""
        df = self.dataframe_mutation_fxn(self.load_parquet_asset_to_df())
        duckdb.register(self.table_name, df)
|
113 |
+
|
114 |
+
|
115 |
+
def load_assets():
    """Download and register every configured nflverse asset as a duckdb table."""
    for release_tag, asset_name in NFLVERSE_ASSETS:
        NflVerseDataAsset(release_tag, asset_name).register_asset_to_duckdb()
|
119 |
+
|
120 |
+
|
121 |
+
def get_current_tables() -> list[str]:
    """Return the names of all tables currently registered in duckdb."""
    return duckdb.sql("SHOW TABLES").df()["name"].tolist()
|
src/shared_page.py
ADDED
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import streamlit as st
|
3 |
+
|
4 |
+
from page_selector import remove_seasonal_pages
|
5 |
+
from login_component import get_authorization_button
|
6 |
+
|
7 |
+
|
8 |
+
def get_local_style():
    """Return the contents of the sibling style.css wrapped in a <style> tag."""
    css_path = os.path.join(os.path.dirname(__file__), "style.css")
    with open(css_path) as f:
        return f"<style>{f.read()}</style>"
|
13 |
+
|
14 |
+
|
15 |
+
def local_css():
    # Inject the app's custom stylesheet into the current Streamlit page.
    return st.markdown(get_local_style(), unsafe_allow_html=True)
|
17 |
+
|
18 |
+
|
19 |
+
def common_page_config():
    # Shared per-page setup: inject CSS, render the auth button, and hide
    # pages that are out of season.
    local_css()
    get_authorization_button()
    remove_seasonal_pages()
|
src/start.sh
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
#!/bin/bash
|
2 |
+
|
3 |
+
streamlit run Home.py --server.port=8501 --server.address=0.0.0.0
|
src/streamlit_filter.py
ADDED
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# https://blog.streamlit.io/auto-generate-a-dataframe-filtering-ui-in-streamlit-with-filter_dataframe/
|
2 |
+
|
3 |
+
from pandas.api.types import (
|
4 |
+
is_categorical_dtype,
|
5 |
+
is_datetime64_any_dtype,
|
6 |
+
is_numeric_dtype,
|
7 |
+
is_object_dtype,
|
8 |
+
)
|
9 |
+
import pandas as pd
|
10 |
+
import streamlit as st
|
11 |
+
|
12 |
+
|
13 |
+
def filter_dataframe(df: pd.DataFrame, force_on: bool = False, force_on_columns: "list[str] | None" = None) -> pd.DataFrame:
    """
    Adds a UI on top of a dataframe to let viewers filter columns

    Args:
        df (pd.DataFrame): Original dataframe
        force_on (bool): Skip the "Add more filters" checkbox and always show the filter UI.
        force_on_columns (list[str] | None): Columns whose filter widgets are always shown.

    Returns:
        pd.DataFrame: Filtered dataframe
    """
    # BUG FIX: a mutable default argument ([]) is shared across calls;
    # use None and normalize here instead.
    force_on_columns = force_on_columns or []
    if force_on:
        modify = True
    else:
        modify = st.checkbox("Add more filters")

    if not modify:
        return df

    df = df.copy()

    # Try to convert datetimes into a standard format (datetime, no timezone)
    for col in df.columns:
        if is_object_dtype(df[col]):
            try:
                df[col] = pd.to_datetime(df[col])
            except Exception:
                pass

        if is_datetime64_any_dtype(df[col]):
            df[col] = df[col].dt.tz_localize(None)

    modification_container = st.container()

    with modification_container:
        to_filter_columns = st.multiselect("Filter dataframe on", df.columns) + force_on_columns
        for column in to_filter_columns:
            left, right = st.columns((1, 20))
            # Treat low-cardinality columns (< 50 unique values) as categorical
            if is_categorical_dtype(df[column]) or df[column].nunique() < 50:
                user_cat_input = right.multiselect(
                    f"Values for {column}",
                    df[column].unique(),
                    default=list(df[column].unique()),
                )
                df = df[df[column].isin(user_cat_input)]
            elif is_numeric_dtype(df[column]):
                _min = float(df[column].min())
                _max = float(df[column].max())
                step = (_max - _min) / 100
                user_num_input = right.slider(
                    f"Values for {column}",
                    min_value=_min,
                    max_value=_max,
                    value=(_min, _max),
                    step=step,
                )
                df = df[df[column].between(*user_num_input)]
            elif is_datetime64_any_dtype(df[column]):
                user_date_input = right.date_input(
                    f"Values for {column}",
                    value=(
                        df[column].min(),
                        df[column].max(),
                    ),
                )
                # date_input returns a single date until the user picks a range.
                if isinstance(user_date_input, tuple):
                    if len(user_date_input) == 2:
                        user_date_input_dt = tuple(map(pd.to_datetime, user_date_input))
                        start_date, end_date = user_date_input_dt
                        df = df.loc[df[column].between(start_date, end_date)]
            else:
                user_text_input = right.text_input(
                    f"Substring or regex in {column}",
                )
                if user_text_input:
                    df = df[df[column].astype(str).str.contains(user_text_input)]

    return df
|
91 |
+
|
92 |
+
|
93 |
+
def get_multiselect_for_df_column(df: pd.DataFrame, column_name: str) -> list:
    """Render a multiselect over a column's unique values and return the selection.

    Returns every value when nothing is picked or when there is at most one
    option (in which case no widget is shown).
    """
    options_list = sorted(df[column_name].unique().tolist())
    if len(options_list) <= 1:
        return options_list
    chosen = st.multiselect(column_name.title(), options_list, placeholder=f"Select a {column_name} to filter")
    return chosen or options_list
|
src/style.css
ADDED
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* Player cell inside the Maximum Roster Strategy weekly grid. */
.mrs-grid-player {
    font-size: x-small;
    margin-bottom: 5px;
    padding-left: 4px;
    padding-right: 4px;
}

.tier {
    text-align: center;
}

.timeslot {
    text-align: center;
}

/* Tier borders, color-coded from tier 1 (blue) down to tier 4 (brown). */
.tier1 {
    border: 2px solid blue;
}
.tier2 {
    border: 2px solid gold;
}

.tier3 {
    border: 2px solid silver;
}

.tier4 {
    border: 2px solid brown;
}

/* Player hold/drop status colors. */
.drop-player {
    color: red;
}

.light-hold-player {
    color: yellow;
}

.hold-player {
    color: rgba(68, 235, 62, 0.774);
}

.playerslot {
    border-left: 1px solid white;
    border-right: 1px solid white;
}

.grid-legend {
    color: white;
    border: 1px solid white;
    display: flex;
    gap: 5px;
    justify-content: center;
    font-size: x-small;
}

/* 6-column roster grid: row 1 holds timeslot headers, then tier rows
   alternate with player rows.  NOTE(review): the rules below rely on native
   CSS Nesting, which only modern (2023+) browsers support — confirm the
   target browsers before relying on it. */
.grid-container-6 {
    display: grid;
    grid-template-columns: repeat(6, 1fr);
    grid-template-rows: repeat(9, auto);
    grid-column-gap: 0px;
    grid-row-gap: 10px;
    color: white;

    .timeslot1 { grid-area: 1 / 1 / 2 / 2; }
    .timeslot2 { grid-area: 1 / 2 / 2 / 3; }
    .timeslot3 { grid-area: 1 / 3 / 2 / 4; }
    .timeslot4 { grid-area: 1 / 4 / 2 / 5; }
    .timeslot5 { grid-area: 1 / 5 / 2 / 6; }
    .timeslot6 { grid-area: 1 / 6 / 2 / 7; }
    .tier1 { grid-area: 2 / 1 / 3 / 7; }
    .playerslot1 { grid-area: 3 / 1 / 4 / 2; }
    .playerslot2 { grid-area: 3 / 2 / 4 / 3; }
    .playerslot3 { grid-area: 3 / 3 / 4 / 4; }
    .playerslot4 { grid-area: 3 / 4 / 4 / 5; }
    .playerslot5 { grid-area: 3 / 5 / 4 / 6; }
    .playerslot6 { grid-area: 3 / 6 / 4 / 7; }
    .tier2 { grid-area: 4 / 1 / 5 / 7; }
    .playerslot7 { grid-area: 5 / 1 / 6 / 2; }
    .playerslot8 { grid-area: 5 / 2 / 6 / 3; }
    .playerslot9 { grid-area: 5 / 3 / 6 / 4; }
    .playerslot10 { grid-area: 5 / 4 / 6 / 5; }
    .playerslot11 { grid-area: 5 / 5 / 6 / 6; }
    .playerslot12 { grid-area: 5 / 6 / 6 / 7; }
    .tier3 { grid-area: 6 / 1 / 7 / 7; }
    .playerslot13 { grid-area: 7 / 1 / 8 / 2; }
    .playerslot14 { grid-area: 7 / 2 / 8 / 3; }
    .playerslot15 { grid-area: 7 / 3 / 8 / 4; }
    .playerslot16 { grid-area: 7 / 4 / 8 / 5; }
    .playerslot17 { grid-area: 7 / 5 / 8 / 6; }
    .playerslot18 { grid-area: 7 / 6 / 8 / 7; }
    .tier4 { grid-area: 8 / 1 / 9 / 7; }
    .playerslot19 { grid-area: 9 / 1 / 10 / 2; }
    .playerslot20 { grid-area: 9 / 2 / 10 / 3; }
    .playerslot21 { grid-area: 9 / 3 / 10 / 4; }
    .playerslot22 { grid-area: 9 / 4 / 10 / 5; }
    .playerslot23 { grid-area: 9 / 5 / 10 / 6; }
    .playerslot24 { grid-area: 9 / 6 / 10 / 7; }

}
|
101 |
+
|
102 |
+
.grid-container-7 {
|
103 |
+
display: grid;
|
104 |
+
grid-template-columns: repeat(7, 1fr);
|
105 |
+
grid-template-rows: repeat(9, auto);
|
106 |
+
grid-column-gap: 0px;
|
107 |
+
grid-row-gap: 10px;
|
108 |
+
color: white;
|
109 |
+
|
110 |
+
.timeslot1 { grid-area: 1 / 1 / 2 / 2; }
|
111 |
+
.timeslot2 { grid-area: 1 / 2 / 2 / 3; }
|
112 |
+
.timeslot3 { grid-area: 1 / 3 / 2 / 4; }
|
113 |
+
.timeslot4 { grid-area: 1 / 4 / 2 / 5; }
|
114 |
+
.timeslot5 { grid-area: 1 / 5 / 2 / 6; }
|
115 |
+
.timeslot6 { grid-area: 1 / 6 / 2 / 7; }
|
116 |
+
.timeslot7 { grid-area: 1 / 7 / 2 / 8; }
|
117 |
+
.tier1 { grid-area: 2 / 1 / 3 / 8; }
|
118 |
+
.playerslot1 { grid-area: 3 / 1 / 4 / 2; }
|
119 |
+
.playerslot2 { grid-area: 3 / 2 / 4 / 3; }
|
120 |
+
.playerslot3 { grid-area: 3 / 3 / 4 / 4; }
|
121 |
+
.playerslot4 { grid-area: 3 / 4 / 4 / 5; }
|
122 |
+
.playerslot5 { grid-area: 3 / 5 / 4 / 6; }
|
123 |
+
.playerslot6 { grid-area: 3 / 6 / 4 / 7; }
|
124 |
+
.playerslot7 { grid-area: 3 / 7 / 4 / 8; }
|
125 |
+
.tier2 { grid-area: 4 / 1 / 5 / 8; }
|
126 |
+
.playerslot8 { grid-area: 5 / 1 / 6 / 2; }
|
127 |
+
.playerslot9 { grid-area: 5 / 2 / 6 / 3; }
|
128 |
+
.playerslot10 { grid-area: 5 / 3 / 6 / 4; }
|
129 |
+
.playerslot11 { grid-area: 5 / 4 / 6 / 5; }
|
130 |
+
.playerslot12 { grid-area: 5 / 5 / 6 / 6; }
|
131 |
+
.playerslot13 { grid-area: 5 / 6 / 6 / 7; }
|
132 |
+
.playerslot14 { grid-area: 5 / 7 / 6 / 8; }
|
133 |
+
.tier3 { grid-area: 6 / 1 / 7 / 8; }
|
134 |
+
.playerslot15 { grid-area: 7 / 1 / 8 / 2; }
|
135 |
+
.playerslot16 { grid-area: 7 / 2 / 8 / 3; }
|
136 |
+
.playerslot17 { grid-area: 7 / 3 / 8 / 4; }
|
137 |
+
.playerslot18 { grid-area: 7 / 4 / 8 / 5; }
|
138 |
+
.playerslot19 { grid-area: 7 / 5 / 8 / 6; }
|
139 |
+
.playerslot20 { grid-area: 7 / 6 / 8 / 7; }
|
140 |
+
.playerslot21 { grid-area: 7 / 7 / 8 / 8; }
|
141 |
+
.tier4 { grid-area: 8 / 1 / 9 / 8; }
|
142 |
+
.playerslot22 { grid-area: 9 / 1 / 10 / 2; }
|
143 |
+
.playerslot23 { grid-area: 9 / 2 / 10 / 3; }
|
144 |
+
.playerslot24 { grid-area: 9 / 3 / 10 / 4; }
|
145 |
+
.playerslot25 { grid-area: 9 / 4 / 10 / 5; }
|
146 |
+
.playerslot26 { grid-area: 9 / 5 / 10 / 6; }
|
147 |
+
.playerslot27 { grid-area: 9 / 6 / 10 / 7; }
|
148 |
+
.playerslot28 { grid-area: 9 / 7 / 10 / 8; }
|
149 |
+
|
150 |
+
}
|
tests/contract/test_nbcsports_player_news.py
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pytest
|
2 |
+
|
3 |
+
from queries.nbcsports import player_news
|
4 |
+
|
5 |
+
|
@pytest.mark.parametrize("page_number", [1, 2])
def test_get_nfl_player_news(page_number: int):
    """Contract smoke test: fetching a page of NFL player news must not raise."""
    player_news.get_nfl_player_news(page_number)
10 |
+
|
@pytest.mark.parametrize("hours", [1, 10])
def test_get_player_news_window_hours(hours: int):
    """Contract smoke test: querying a news window of N hours must not raise."""
    player_news.get_player_news_window_hours(hours)