00BER committed
Commit 525ed3a · 1 Parent(s): d91ed7c

Upload 8 files

training/Dockerfile ADDED
@@ -0,0 +1,5 @@
+ FROM jupyter/scipy-notebook
+
+ COPY ./requirements.txt .
+
+ RUN pip install -r requirements.txt
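This is presumably the image tagged `myjupyter:latest` that docker-compose.yml (further down in this commit) expects, built with something like `docker build -t myjupyter:latest training/`. As a rough sketch, the same build can also be driven from Python via the Docker SDK, an assumed extra dependency that is not pinned in requirements.txt:

```python
# Sketch only: build training/Dockerfile and tag it with the image name that
# docker-compose.yml references. Uses the Docker SDK for Python ("docker" on
# PyPI), an assumed extra dependency; `docker build -t myjupyter:latest training/`
# from a shell is equivalent.
import docker

client = docker.from_env()                  # talk to the local Docker daemon
image, build_logs = client.images.build(
    path="training",                        # directory containing the Dockerfile
    tag="myjupyter:latest",
)
print(image.tags)
```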
training/MSML-602-Final-Project-Final-Version.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
training/MSML-602-Final-Project.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
training/README.md ADDED
@@ -0,0 +1 @@
+ MSML 602 Assignments/Projects
training/data/weather-data.tar.gz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b8096982477f4058aee79f5b4b41f8dacf9e5bcda5d97434ccbf2a573189e14
+ size 38169226
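The weather archive is stored as a Git LFS pointer (roughly 38 MB once fetched). One minimal way to unpack it after `git lfs pull`, sketched with the Python standard library (the member paths inside the archive are not listed in this commit):

```python
# Sketch: extract training/data/weather-data.tar.gz next to the archive.
# The archive's internal layout is not shown in this commit, so no member
# names are assumed here; everything is extracted as-is.
import tarfile

with tarfile.open("training/data/weather-data.tar.gz", "r:gz") as tar:
    tar.extractall(path="training/data")
```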
training/docker-compose.yml ADDED
@@ -0,0 +1,42 @@
+ version: "3.5"
+ services:
+   #db:
+   #  hostname: db
+   #  image: postgres:9.6
+   #  environment:
+   #    - POSTGRES_USER=postgres
+   #    - POSTGRES_PASSWORD=password
+   #  ports:
+   #    - "5432:5432"
+   #  volumes:
+   #    - db-volume:/var/lib/postgresql/data:cached
+   #  networks:
+   #    - jupyter
+   #  logging:
+   #    driver: "json-file"
+   #    options:
+   #      max-size: "10m"
+
+   jupyter:
+     image: myjupyter:latest
+     ports:
+       - "8888:8888"
+     volumes:
+       - ./src:/home/jovyan/work
+     networks:
+       - jupyter
+     deploy:
+       resources:
+         reservations:
+           devices:
+             - capabilities: [gpu]
+     logging:
+       driver: "json-file"
+       options:
+         max-size: "10m"
+
+ #volumes:
+ #  db-volume:
+
+ networks:
+   jupyter:
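The `deploy.resources.reservations.devices` entry asks Docker to pass a GPU through to the Jupyter container. A quick check from inside the container (a sketch, assuming the TensorFlow 2.11 pinned in requirements.txt is what gets used):

```python
# Sketch: run inside the Jupyter container, e.g. in a notebook cell, to verify
# that the GPU reserved in docker-compose.yml is actually visible to TensorFlow.
import tensorflow as tf

gpus = tf.config.list_physical_devices("GPU")
print(f"{len(gpus)} GPU(s) visible:", gpus)   # empty list => only CPU is available
```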
training/downloader.py ADDED
@@ -0,0 +1,43 @@
+
+ import datetime
+ from datetime import date, timedelta
+
+ DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
+ API_KEY = "e1f10a1e78da46f5b10a1e78da96f525"
+ BASE_URL = "https://api.weather.com/v1/location/KDCA:9:US/observations/historical.json?apiKey={api_key}&units=e&startDate={start_date}&endDate={end_date}"
+
+ urls = []
+ target_date = date(2000, 1, 1)
+ today = datetime.date.today()
+ while target_date != today:
+     end_date = target_date + timedelta(days=1)
+     start_date_str = target_date.strftime("%Y%m%d")
+     target_url = BASE_URL.format(api_key=API_KEY, start_date=start_date_str, end_date=start_date_str)
+     urls.append(target_url)
+     target_date = end_date
+
+ from concurrent.futures import ThreadPoolExecutor, as_completed
+ from time import perf_counter as time
+
+ from requests_cache import CachedSession
+
+
+ def send_requests():
+     session = CachedSession('./data/weather_api_cache')
+     start = time()
+
+     with ThreadPoolExecutor(max_workers=16) as executor:
+         future_to_url = {executor.submit(session.get, url): url for url in urls}
+
+         for future in as_completed(future_to_url):
+             url = future_to_url[future]
+             response = future.result()
+             from_cache = 'hit' if response.from_cache else 'miss'
+             print(f'{url} is {len(response.content)} bytes (cache {from_cache})')
+
+     print(f'Elapsed: {time() - start:.3f} seconds')
+
+
+ if __name__ == '__main__':
+     send_requests()
+     # send_requests()
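downloader.py builds one URL per day from 2000-01-01 to today and fetches them with 16 worker threads through a requests-cache backed session, so repeated runs are mostly cache hits. A single day can be pulled back out of the same on-disk cache like this (a sketch; the `observations` key is an assumption about the api.weather.com response shape, not something shown in this commit):

```python
# Sketch: reuse the cache that downloader.py populates to inspect one day's payload.
# The "observations" key below is an assumed field of the API response.
from requests_cache import CachedSession

BASE_URL = ("https://api.weather.com/v1/location/KDCA:9:US/observations/historical.json"
            "?apiKey={api_key}&units=e&startDate={start_date}&endDate={end_date}")
API_KEY = "e1f10a1e78da46f5b10a1e78da96f525"   # same key used in downloader.py

session = CachedSession('./data/weather_api_cache')
url = BASE_URL.format(api_key=API_KEY, start_date="20000101", end_date="20000101")
response = session.get(url)                    # served from cache if downloader.py already ran
print('cache hit' if response.from_cache else 'cache miss')
print(response.json().get('observations', [])[:2])
```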
training/requirements.txt ADDED
@@ -0,0 +1,4 @@
+ PuLP==2.6.0
+ networkx==2.8.7
+ tensorflow==2.11.0
+ keras==2.11.0
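A short sanity check, assuming it is run inside the built image, to confirm that pip resolved the four pins above:

```python
# Sketch: print the installed version of each pinned package and compare by eye
# with requirements.txt.
from importlib.metadata import version

for pkg in ("PuLP", "networkx", "tensorflow", "keras"):
    print(f"{pkg}=={version(pkg)}")
```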