Upload 13 files

Changed files:
- data/data.py (+10 -6)
- data/dynamic.py (+3 -1)
- data/static.py (+1 -1)
data/data.py
CHANGED

@@ -2,6 +2,7 @@ import os
 import requests
 import pandas as pd
 from plotly.subplots import make_subplots
+import threading
 import plotly
 import plotly.graph_objects as go
 from tqdm import tqdm

@@ -24,7 +25,7 @@ HEADERS = {
 FILEPATH = "data/data.json"
 FILEURL = f"https://api.github.com/repos/{OWNER}/{REPO}/contents/{FILEPATH}"
 
-def uf(filepath, content):
+def puf(filepath, content):
     fileurl = f"https://api.github.com/repos/{OWNER}/{REPO}/contents/{filepath}"
 
     response = requests.get(fileurl, headers=HEADERS)

@@ -45,9 +46,10 @@ def uf(filepath, content):
     resp = requests.put(fileurl, headers=HEADERS, json=data)
     print("✅ Upload success" if resp.ok else f"❌ Error: {resp.text}")
 
-
+uf = lambda filepath, content: threading.Thread(target=puf, args=(filepath, content)).start()
 
-
+rf = lambda: json.loads(base64.b64decode(response.json()['content']).decode() if (response := requests.get(FILEURL, headers=HEADERS)).status_code == 200 else "{}")
+ir = lambda d=None: (d or rf() != {})
 
 lhs = [7, 15, 30, 60, 90, 120, 240, 365, 1000, 1440]
 

@@ -82,15 +84,17 @@ os.makedirs(data_dir, exist_ok=True)
 
 makeslcy = lambda x: -0.575 - (7 - x) * (0.575 / 7) * 5
 
+data = None
+
 def ambil_data_emas():
-    data_is_ready = ir()  # os.path.exists(data_path)
-    data_is_change = False
     later_day = False
     jam_sudah_cukup = False
 
+    data = data or rf()
+    data_is_ready = ir(data)  # os.path.exists(data_path)
+
     if data_is_ready:
         # with open(data_path, "r") as f: data = json.load(f)
-        data = json.loads(rf())
 
         # Ambil tanggal terakhir dari lastUpdate
         last_updates = [pd.to_datetime(d["lastUpdate"]) for d in data["data"]["priceList"]]
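For context, the new helpers in data/data.py push the blocking GitHub upload into a background thread and read data/data.json back through the contents API. Below is a minimal standalone sketch of the same pattern; `OWNER`, `REPO`, `HEADERS`, and the `json`/`base64` imports are assumed to already exist earlier in data/data.py, and `put_file`/`upload_async`/`read_file` are illustrative names standing in for `puf`, `uf`, and `rf`.

```python
import base64
import json
import threading

import requests

OWNER, REPO = "someuser", "somerepo"            # assumption: defined earlier in data/data.py
HEADERS = {"Authorization": "Bearer <token>"}   # assumption: defined earlier in data/data.py
FILEPATH = "data/data.json"
FILEURL = f"https://api.github.com/repos/{OWNER}/{REPO}/contents/{FILEPATH}"

def put_file(filepath, content):
    """Create or update a file via the GitHub contents API (stand-in for puf)."""
    fileurl = f"https://api.github.com/repos/{OWNER}/{REPO}/contents/{filepath}"
    response = requests.get(fileurl, headers=HEADERS)
    sha = response.json().get("sha") if response.ok else None  # required when updating an existing file
    payload = {
        "message": "update data",
        "content": base64.b64encode(content.encode()).decode(),
    }
    if sha:
        payload["sha"] = sha
    resp = requests.put(fileurl, headers=HEADERS, json=payload)
    print("Upload success" if resp.ok else f"Error: {resp.text}")

# Fire-and-forget wrapper, like the new `uf` lambda: the caller does not wait for the upload.
def upload_async(filepath, content):
    threading.Thread(target=put_file, args=(filepath, content), daemon=True).start()

# Read the stored JSON back, like the new `rf` lambda: decode the base64 `content`
# field, falling back to an empty dict when the request fails.
def read_file():
    response = requests.get(FILEURL, headers=HEADERS)
    if response.status_code == 200:
        return json.loads(base64.b64decode(response.json()["content"]).decode())
    return {}
```

One detail worth double-checking in the committed version: `ambil_data_emas()` now does `data = data or rf()` while `data = None` lives at module level. Unless a `global data` statement appears elsewhere in the function (outside the lines shown in this diff), the assignment makes `data` local to the function and the right-hand read raises UnboundLocalError.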
data/dynamic.py
CHANGED

@@ -1,6 +1,8 @@
 from data.data import *
 
-def generate(d_h=default_hari, is_img=False):
+data = ambil_data_emas()
+
+def generate(d_h=default_hari, is_img=False, data = ambil_data_emas()):
     fig = go.Figure()
 
     df = pd.DataFrame(data['data']['priceList'])[::-1][:max(lhs)]
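Worth noting about the new signature: a default argument such as `data = ambil_data_emas()` is evaluated once, when `generate` is defined at import time, not on every call. If a per-call refresh is ever wanted, the usual Python idiom is a `None` sentinel. A small sketch of the difference, with a dummy `fetch_data` standing in for `ambil_data_emas`:

```python
def fetch_data():
    """Stand-in for ambil_data_emas(); pretend this hits the network."""
    print("fetching...")
    return {"data": {"priceList": []}}

# Pattern used in the commit: the default is evaluated once at definition time,
# so repeated calls without `data=` reuse the same snapshot.
def generate_once(d_h=30, is_img=False, data=fetch_data()):
    return data

# Sentinel alternative: the fetch happens on each call that omits `data=`.
def generate_fresh(d_h=30, is_img=False, data=None):
    data = data if data is not None else fetch_data()
    return data

generate_once()   # nothing printed here; the import-time snapshot is reused
generate_fresh()  # prints "fetching..." again
```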
data/static.py
CHANGED

@@ -1,6 +1,6 @@
 from data.data import *
 
-def generate(lama_hari, is_img=False):
+def generate(lama_hari, is_img=False, data = ambil_data_emas()):
     lama_hari = lama_hari if lama_hari >= lhs[0] else lhs[0]
 
     # Preprocessing DataFrame
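Taken together, the three files now let a caller fetch the price JSON once and hand the same snapshot to both chart generators. A possible call site could look like the sketch below; the importing module, the `30`-day argument, and the keyword values are assumptions for illustration, not part of this commit.

```python
from data import dynamic, static
from data.data import ambil_data_emas

# Fetch the gold-price JSON once (ambil_data_emas reads it via rf/ir)...
snapshot = ambil_data_emas()

# ...then hand the same snapshot to both generators instead of relying on the
# import-time defaults, so each call works from one shared copy of the data.
dynamic.generate(is_img=False, data=snapshot)
static.generate(30, is_img=True, data=snapshot)
```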