Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +3 -0
- cf/trace_processing/clean.py +229 -0
- cf/trace_processing/convertTTLDict.py +55 -0
- cf/trace_processing/simpleAnalysis.py +29 -0
- cf/trace_processing/usedInCF/anonymization.py +93 -0
- cf/trace_processing/usedInCF/parseNginxLog.py +295 -0
- cf2/cloudflare-research-cache-request/clickhouse/colo_0819.coloSIN.clean.zst +3 -0
- cf2/cloudflare-research-cache-request/clickhouse/colo_0820.coloSIN.clean.zst +3 -0
- cf2/cloudflare-research-cache-request/clickhouse/colo_0821.coloSIN.clean.zst +3 -0
- cf2/cloudflare-research-cache-request/clickhouse/colo_0827.coloATL.clean.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0820_137m18.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0820_137m8.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0821_137m18.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0821_137m8.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0822_137m18.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0822_137m8.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0823_137m18.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0823_137m8.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0824_137m18.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0824_137m8.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0825_137m18.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0825_137m8.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0826_137m18.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0826_137m8.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0827_137m18.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo137_0827_137m8.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0820_156m1.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0820_156m2.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0821_156m1.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0821_156m2.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0822_156m1.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0822_156m2.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0823_156m1.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0823_156m2.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0824_156m1.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0824_156m2.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0825_156m1.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0825_156m2.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0826_156m1.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0826_156m2.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0827_156m1.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo156_0827_156m2.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo27_0820_27m144.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo27_0820_27m376.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo27_0821_27m144.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo27_0821_27m376.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo27_0822_27m144.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo27_0822_27m376.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo27_0823_27m144.zst +3 -0
- cf2/cloudflare-research-cache-request/nginxlog/colo27_0823_27m376.zst +3 -0
.gitattributes
CHANGED
@@ -147,3 +147,6 @@ cf/full/colo28.raw.sort.zst.xad filter=lfs diff=lfs merge=lfs -text
|
|
147 |
alibabaBlock/alibabaBlock2020.csv.zst.xac filter=lfs diff=lfs merge=lfs -text
|
148 |
alibabaBlock/alibabaBlock2020.csv.zst.xab filter=lfs diff=lfs merge=lfs -text
|
149 |
alibabaBlock/alibabaBlock2020.csv.zst.xaa filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
147 |
alibabaBlock/alibabaBlock2020.csv.zst.xac filter=lfs diff=lfs merge=lfs -text
|
148 |
alibabaBlock/alibabaBlock2020.csv.zst.xab filter=lfs diff=lfs merge=lfs -text
|
149 |
alibabaBlock/alibabaBlock2020.csv.zst.xaa filter=lfs diff=lfs merge=lfs -text
|
150 |
+
wiki/2019/wiki.upload.2019.short filter=lfs diff=lfs merge=lfs -text
|
151 |
+
wiki/2016/wiki.upload.2016.short filter=lfs diff=lfs merge=lfs -text
|
152 |
+
wiki/2007/wiki.2007.sort.hash.sample100.csv filter=lfs diff=lfs merge=lfs -text
|
cf/trace_processing/clean.py
ADDED
@@ -0,0 +1,229 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
this script takes the raw dataset, cleans it up, fills in TTLs, and generates a cleaned dataset
|
3 |
+
|
4 |
+
"""
|
5 |
+
|
6 |
+
|
7 |
+
import os, sys
|
8 |
+
import time
|
9 |
+
import csv
|
10 |
+
import requests
|
11 |
+
import pickle, json
|
12 |
+
from inspect import currentframe, getframeinfo
|
13 |
+
from collections import defaultdict, Counter
|
14 |
+
import hashlib
|
15 |
+
import struct
|
16 |
+
import socket
|
17 |
+
|
18 |
+
# import geoip2.database
|
19 |
+
# ip_database = geoip2.database.Reader("GeoLite2-Country.mmdb")
|
20 |
+
|
21 |
+
|
22 |
+
# Known content-type and URL-extension vocabularies. Anything outside these is
# collapsed to "" / "unknown" so the cleaned trace has a closed category set.
# NOTE: fixed entries that looked like paste/wrap damage in the original:
# 'si d' -> 'sid', 'zip ' -> 'zip' (extensions are stripped, so the trailing
# space made 'zip' unmatchable), and 'mdr' 'stat' (implicit concatenation
# producing 'mdrstat') -> 'mdr', 'stat'.
CONTENT_TYPE_SET = {'jpeg', 'bin', 'json', 'js', 'png', 'webp', '', 'ts', 'css', 'html', 'm3u8', 'svg', 'gif', 'empty', 'dat', 'mp4', 'xml', 'txt', 'ors', 'woff', 'bundle', 'woff2', 'tgz', 'jpg', 'cer', 'plist', 'mpga', 'ico', 'pbf', 'ccbi', 'qt', 'ttf', 'webm', 'tar', 'zip', 'ics', 'rss', 'aac', 'pdf', 'conda', 'eot', 'debounce', 'crl', 'm4s', 'enc', 'otf', 'wav', 'm4a', 'flat', 'atlas', 'der', 'wys', 'oga', 'gz', 'manifest', 'get', 'mp3', 'php', 'swf', 'skel', 'jar', 'template', 'odft', 'bmp', 'apk', 'rpm', 'xz', 'mpd', 'io', 'btx', 'fnt', 'xhtml', 'pub', 'exe', 'unityweb', 'csv', 'mat', 'events', 'ogx', 'geo', 'docx', 'sprite', 'micro', 'mid', 'jp2', 'doc', 'identity', 'vtt', 'terrain', 'xlsx', 'atom', 'epub', 'chunk', 'sid', 'm3u', '7z', 'invert', 'findkey', 'axd', 'xls', 'db', 'mpf', 'lua', 'webvtt', 'unity3d', 'appcache', 'tiff', 'rtf', 'wasm', 'jpgx', 'gzip', 'md', 'ppt', 'lb', 'ashx', 'm4v', 'py', 'srt', 'psd', 'settings', 'dmg', 'asp', 'sh', 'aspx', 'patch', 'dib', 'mov', 'rar', 'last', 'gcfg', 'yml', 'atf', 'java', 'stl', 'chm', 'crc', 'yaml', 'ai', 'c', 'unity', 'srx', 'scss', 'includes', 'flac', 'caf', 'extra', 'xslt', 'gpg', 'rdf', 'bam'}
EXTENSION_SET = {'jpg', '', 'tgz', 'js', 'png', 'json', 'm3u8', 'ts', 'css', 'webp', 'jpeg', 'svg', 'php', 'gif', 'mp4', 'dat', 'woff2', 'html', 'bundle', 'woff', 'rss', 'axd', 'crt', 'plist', 'mp3', 'txt', 'ico', 'pbf', 'gz', 'mov', 'ttf', 'm4s', 'ccbi', 'bin', 'lb', 'gzip', 'aspx', 'webm', 'jpgx', 'bz2', 'xml', 'zip', 'ics', 'aac', 'pdf', 'conda', 'asp', 'ashx', 'eot', 'do', 'htm', 'atlas', 'crl', 'id', 'otf', 'map', 'bam', 'lua', 'xsd', 'erik', 'fca', 'enc', 'rpoly', 'properties', 'wav', 'h', 'flat', 'ogg', 'wys', 'manifest', 'm4a', 'jar', 'ece', 'gmp', 'dds', 'sig', 'pk2mz', 'swf', 'debounce', 'skel', 'template', 'acz', 'pack', 'cdiff', 'cson', 'jsonp', 'hash', 'dmg', 'sml', 'apk', 'lzma', 'jsp', 'rpm', 'cdf', 'io', 'csv', 'xz', 'tga', 'mpd', 'sha256', 'fnt', 'btx', 'depmap', 'distro', 'unity3d', 'svga', 'omit', 'xhtml', 'vipr', 'uniq', 'res', 'pub', 'b3dm', 'exe', 'fit', 'ln4', 'crop', 'pairs', 'prefab', 'pl', 'mkv', 'mat', 'cef', 'wmt', 'bif', 'dxt', 'vtt', 'deb', 'lm', 'geo', 'py', 'sprite', 'vghddata', 'docx', 'jfif', 'nfs', 'gifv', 'dlt', 'mid', 'data', 'unityweb', 'cms', 'jp2', 'identity', 'gcd', 'img', 'bmp', 'doc', 'cur', 'ect', 'page', 'pic', 'db', 'mjs', 'tif', 'meta', 'image', 'faces', 'prop', 'dll', 'xlsx', 'pfx', 'box', 'ani', 'chunk', 'terrain', 'epub', '7z', 'jnx', 'midi', 'tfl', 'asr', 'act', 'xrf', 'mpf', 'ln3', 'ejs', 'lani', 'avif', 'sh', 'inc', 'vue', 'xaf', 'webvtt', 'pptx', 'aff', 'wasm', 'flow', 'jmm', 'atom', 'ovpn', 'log', 'so', 'xpf', 'xls', 'anm', 'pngx', 'cmfv', 'gaf', 'aiu', 'srt', 'hvm', 'dwg', 'yml', 'mem', 'mobile', 'cvd', '3ds', 'java', 'lmat', 'md', 'sha1', 'm4v', 'tar', 'vbs', 'msi', 'rtf', 'svgz', 'appcache', 'psd', 'tmx', 'eps', 's3d', 'vpk', 'ini', 'stl', 'link', 'shtml', 'ppt', 'pkg', 'br', 'ttc', 'patch', 'dib', 'gcfg', 'yaml', 'atf', 'app', 'heic', 'lyric', 'simple', 'rss2', 'ebr', 'unity', 'rar', 'rgx', 'obj', 'md2', 'chm', 'crc', 'trf', 'gpi', 'lib', 'jpe', 'scss', 'xsl', 'pkmlz', 'cgi', 'srx', 'mdr', 'stat', 'sqlite', 'tiff', 'flac', 'sep', 'caf', 'mps', 'tdb', 'cfm', 'gpg', 'geojson', 'sql', 'scml', 'gtt', 'bat', 'c', 'xmf', 'gsm', 'fsp', 'gcode', 'gifx', 'odt', 'opus', 'rbxm', 'gl', 'apkm', 'pak', 'util', 'cr2', 'conf', 'dylib', 'dict', 'm3u', 'cer', 'cpp', 'md5', 'xlsm', 'tsx', 'javascript'}


def processing(datafile, sample_ratio=1):
    """Clean one raw tab-separated trace file.

    Reads the raw trace, backfills missing TTLs from ``ttl_dict.pickle``
    (keyed by ``(hostname, content_type)`` -- TODO confirm against the dict
    producer), normalizes content type and extension against the closed
    vocabularies above, and writes:

      * ``<name>.clean``                 -- cleaned CSV rows
      * ``<name>.cf.bin``                -- fixed-size packed binary records
      * ``<name>_stat.json``             -- per-field frequency counters
      * ``<name>_*_mapping.json``        -- string -> integer id mappings

    When ``sample_ratio > 1``, only rows with ``obj % sample_ratio == 1`` are
    kept and every output name carries a ``.sample<ratio>`` suffix.
    """
    ifile = open(datafile, errors="ignore")
    reader = csv.reader(ifile, delimiter="\t")

    ofilename = datafile
    if sample_ratio > 1:
        ofilename = "{}.sample{}".format(datafile, sample_ratio)

    ofile = open(ofilename + ".clean", "w")
    ofilebin = open(ofilename + ".cf.bin", "wb")
    # Packed record layout (little-endian, 41 bytes), in pack order:
    # ts (I), obj (Q), sz (Q), ttl (i), age (i), hostname (i),
    # content (h), extension (h), colo (h), n_level (b), n_param (b), method (b)
    s = struct.Struct("<IQQiiihhhbbb")

    ttl_dict = {}
    colo = 28  # this file is colo28 data; colo id is a constant per input file
    content_mapping = defaultdict(int)
    extension_mapping = defaultdict(int)
    hostname_mapping = defaultdict(int)

    content_cnt = Counter()
    extension_cnt = Counter()
    hostname_cnt = Counter()
    method_cnt = Counter()
    n_level_cnt = Counter()
    n_param_cnt = Counter()

    with open("ttl_dict.pickle", "rb") as f:
        # for allcolo data, we do not have content_type,
        # and we use convertTTLDict.py to convert ttl_dict to a different format
        # with open("ttl_dict_new.pickle", "rb") as f:
        ttl_dict = pickle.load(f)

    n = 0
    n_no_ttl, n_use_ttl_dict = 0, 0
    no_age = 0
    n_time_change = 0
    last_ts = 0
    for row in reader:
        # colo28 column layout:
        # ts obj sz content tier ttl age cstatus method hostname path query
        n += 1
        try:
            ts, obj, sz, content_type, tiered_hit, ttl, age, cstatus, method, hostname, path, query_string = row
        except Exception as e:
            # malformed row (wrong column count) -- report and skip
            print(e, row)
            continue

        if sample_ratio > 1 and int(obj) % sample_ratio != 1:
            continue

        ts, ttl, age = int(ts), int(ttl), int(age)
        if ts < last_ts:
            # timestamps are expected to be monotone; count violations only
            n_time_change += 1
        last_ts = ts

        # a TTL beyond one year means "not set" -- try to backfill it
        if ttl > 86400*365 and (hostname, content_type) in ttl_dict:
            ttl = int(ttl_dict[(hostname, content_type)])
            n_use_ttl_dict += 1

        has_query_string = 1 if len(query_string.strip()) > 0 else 0

        n_param = query_string.count("&") + 1
        n_level = path.count("/") - 1

        content_type = content_type.lower()
        extension = path.split(".")[-1].lower()
        # BUG FIX: the split results were previously discarded (bare
        # `extension.split(...)[0]`); keep only the part before any
        # '=', '%' or '@' decoration.
        if '=' in extension:
            extension = extension.split("=")[0]
        if '%' in extension:
            extension = extension.split("%")[0]
        if '@' in extension:
            extension = extension.split("@")[0]

        if "-" in extension or "/" in extension or "_" in extension or len(extension) > 10 or extension == "unknown" or extension == "empty":
            extension = ""

        if extension.isdigit():
            extension = ""

        extension = extension.strip()

        if content_type not in CONTENT_TYPE_SET:
            content_type = ""

        if extension not in EXTENSION_SET:
            extension = "unknown"

        # TTL/age beyond ~30 years is a sentinel for "missing"
        if ttl > 86400*365*30:
            n_no_ttl += 1
            ttl = -1

        if age > 86400*365*30:
            no_age += 1
            age = -1

        ofile.write(",".join([str(ts), obj, str(sz), content_type, extension, str(n_level), str(ttl), str(age), cstatus, method, hostname, str(has_query_string), str(n_param)]) + "\n")

        # periodic progress report (counts printed in millions)
        if n % 20000000 == 0:
            print(sorted(method_cnt.items(), key=lambda x: x[1], reverse=True))
            print("{:.0f} {} req, {} missingTTL, {} replaceTTL, {} noAge, {} timeDiff".format(
                time.time(), n//1e6, n_no_ttl//1e6, n_use_ttl_dict//1e6, no_age//1e6, n_time_change))

        # dense integer ids, assigned on first sight (ids start at 1)
        content_int = content_mapping.get(content_type, len(content_mapping)+1)
        extension_int = extension_mapping.get(extension, len(extension_mapping)+1)
        # BUG FIX: new hostname ids were previously derived from
        # len(content_mapping), producing colliding hostname ids.
        hostname_int = hostname_mapping.get(hostname, len(hostname_mapping)+1)
        content_mapping[content_type] = content_int
        extension_mapping[extension] = extension_int
        hostname_mapping[hostname] = hostname_int

        method_int = 0
        if method == "GET":
            method_int = 1
        elif method == "PURGE":
            method_int = 2
        else:
            print(f"unknown method {method}")

        if has_query_string == 0:
            n_param = 0

        # clamp to the signed-byte range used by the packed record
        if n_level > 127:
            n_level = 127
        if n_param > 127:
            n_param = 127

        try:
            ofilebin.write(s.pack(int(ts), int(obj), int(sz), ttl, age, hostname_int, content_int, extension_int, colo, n_level, n_param, method_int))
        except Exception as e:
            # a field overflowed its packed width -- dump the row for debugging
            print(e)
            print(row)
            print(int(ts), int(obj), int(sz), ttl, age, hostname_int, content_int, extension_int, n_level, n_param, method_int, colo)
            print(",".join([str(ts), obj, str(sz), content_type, extension, str(n_level), str(ttl), str(age), cstatus, method, hostname, str(has_query_string), str(n_param)]) + "\n")

        content_cnt[content_type] += 1
        extension_cnt[extension] += 1
        hostname_cnt[hostname] += 1
        method_cnt[method] += 1
        n_level_cnt[n_level] += 1
        n_param_cnt[n_param] += 1

    ifile.close()
    ofile.close()
    ofilebin.close()

    with open("{}_stat.json".format(ofilename), "w") as ofile:
        json.dump((content_cnt, extension_cnt, hostname_cnt, method_cnt, n_level_cnt, n_param_cnt), ofile)

    with open("{}_content_mapping.json".format(ofilename), "w") as ofile:
        json.dump(content_mapping, ofile)
    with open("{}_extension_mapping.json".format(ofilename), "w") as ofile:
        json.dump(extension_mapping, ofile)
    with open("{}_hostname_mapping.json".format(ofilename), "w") as ofile:
        json.dump(hostname_mapping, ofile)


if __name__ == "__main__":
    processing(sys.argv[1])
cf/trace_processing/convertTTLDict.py
ADDED
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
|
3 |
+
import os, sys
|
4 |
+
import time
|
5 |
+
import csv
|
6 |
+
import requests
|
7 |
+
import pickle, json
|
8 |
+
from inspect import currentframe, getframeinfo
|
9 |
+
from collections import defaultdict, Counter
|
10 |
+
import hashlib
|
11 |
+
import struct
|
12 |
+
import socket
|
13 |
+
|
14 |
+
|
15 |
+
CONTENT_TYPE_SET = set(['jpeg', 'bin', 'json', 'js', 'png', 'webp', '', 'ts', 'css', 'html', 'm3u8', 'svg', 'gif', 'empty', 'dat', 'mp4', 'xml', 'txt', 'ors', 'woff', 'bundle', 'woff2', 'tgz', 'jpg', 'cer', 'plist', 'mpga' , 'ico', 'pbf', 'ccbi', 'qt', 'ttf', 'webm', 'tar', 'zip', 'ics', 'rss', 'aac', 'pdf', 'conda', 'eot', 'debounce', 'crl', 'm4s', 'enc', 'otf', 'wav', 'm4a', 'flat', 'atlas', 'der', 'wys', 'oga', 'gz', 'manifest', 'get', 'mp3', 'php', 'swf', 'skel', 'jar', 'template', 'odft', 'bmp', 'apk', 'rpm', 'xz', 'mpd', 'io', 'btx', 'fnt', 'xhtml', 'pub', 'exe', 'unityweb', 'csv', 'mat', 'events', 'ogx', 'geo', 'docx', 'sprite', 'micro', 'mid', 'jp2', 'doc', 'identity', 'vtt', 'terrain', 'xlsx', 'atom', 'epub', 'chunk', 'si d', 'm3u', '7z', 'invert', 'findkey', 'axd', 'xls', 'db', 'mpf', 'lua', 'webvtt', 'unity3d', 'appcache', 'tiff', 'rtf', 'wasm', 'jpgx', 'gzip', 'md', 'ppt', 'lb', 'ashx', 'm4v', 'py', 'srt', 'psd', 'settings', 'dmg', 'asp', 'sh', 'aspx', 'patch', 'dib', 'mov', 'rar', 'last', 'gcfg', 'yml', 'atf', 'java', 'stl', 'chm', 'crc', 'yaml', 'ai', 'c', 'unity', 'srx', 'scss', 'includes', 'flac', 'caf', 'extra', 'xslt', 'gpg', 'rdf', 'bam', ])
|
16 |
+
EXTENSION_SET = set(['jpg', '', 'tgz', 'js', 'png', 'json', 'm3u8', 'ts', 'css', 'webp', 'jpeg', 'svg', 'php', 'gif', 'mp4', 'dat', 'woff2', 'html', 'bundle', 'woff', 'rss', 'axd', 'crt', 'plist', 'mp3', 'txt', 'ico', 'pbf', 'gz', 'mov', 'ttf', 'm4s', 'ccbi', 'bin', 'lb', 'gzip', 'aspx', 'webm', 'jpgx', 'bz2', 'xml', 'zip ', 'ics', 'aac', 'pdf', 'conda', 'asp', 'ashx', 'eot', 'do', 'htm', 'atlas', 'crl', 'id', 'otf', 'map', 'bam', 'lua', 'xsd', 'erik', 'fca', 'enc', 'rpoly', 'properties', 'wav', 'h', 'flat', 'ogg', 'wys', 'manifest', 'm4a', 'jar', 'ece', 'gmp', 'dds', 'sig', 'pk2mz', 'swf', 'debounce', 'skel', 'template', 'acz', 'pack', 'cdiff', 'cson', 'jsonp', 'hash', 'dmg', 'sml', 'apk', 'lzma', 'jsp', 'rpm', 'cdf', 'io', 'csv', 'xz', 'tga', 'mpd', 'sha256', 'fnt', 'btx', 'depmap', 'distro', 'unity3d', 'svga', 'omit', 'xhtml', 'vipr', 'uniq', 'res', 'pub', 'b3dm', 'exe', 'fit', 'ln4', 'crop', 'pairs', 'prefab', 'pl', 'mkv', 'mat', 'cef', 'wmt', 'bif', 'dxt', 'vtt', 'deb', 'lm', 'geo', 'py', 'sprite', 'vghddata', 'docx', 'jfif', 'nfs', 'js', 'gifv', 'dlt', 'mid', 'data', 'unityweb', 'cms', 'jp2', 'identity', 'gcd', 'img', 'bmp', 'doc', 'cur', 'ect', 'page', 'pic', 'db', 'mjs', 'tif', 'meta', 'image', 'faces', 'prop', 'dll', 'xlsx', 'pfx', 'box', 'ani', 'chunk', 'terrain', 'epub', '7z', 'jnx', 'midi', 'tfl', 'asr', 'act', 'xrf', 'mpf', 'ln3', 'ejs', 'lani', 'avif', 'sh', 'inc', 'vue', 'xaf', 'webvtt', 'pptx', 'aff', 'wasm', 'flow', 'jmm', 'atom', 'ovpn', 'log', 'so', 'xpf', 'xls', 'anm', 'pngx', 'cmfv', 'gaf', 'aiu', 'srt', 'hvm', 'dwg', 'yml', 'mem', 'mobile', 'cvd', '3ds', 'java', 'lmat', 'md', 'sha1', 'm4v', 'tar', 'vbs', 'msi', 'rtf', 'svgz', 'appcache', 'psd', 'tmx', 'eps', 's3d', 'vpk', 'ini', 'stl', 'link', 'shtml', 'ppt', 'pkg', 'br', 'ttc', 'patch', 'dib', 'gcfg', 'yaml', 'atf', 'app', 'heic', 'lyric', 'simple', 'rss2', 'ebr', 'unity', 'rar', 'rgx', 'obj', 'md2', 'chm', 'crc', 'trf', 'gpi', 'lib', 'jpe', 'scss', 'xsl', 'pkmlz', 'cgi', 'srx', 'mdr' 'stat', 
'sqlite', 'tiff', 'flac', 'sep', 'caf', 'mps', 'tdb', 'jpeg', 'cfm', 'gpg', 'geojson', 'sql', 'scml', 'gtt', 'bat', 'c', 'xmf', 'gsm', 'fsp', 'gcode', 'gifx', 'odt', 'opus', 'rbxm', 'gl', 'apkm', 'pak', 'util', 'cr2', 'conf', 'dylib', 'dict', 'm3u', 'cer', 'cpp', 'md5', 'xlsm', 'tsx', 'javascript'])
|
17 |
+
|
18 |
+
|
19 |
+
def convert_ttl_dict():
    """Re-key ttl_dict.pickle from (hostname, content_type) to a nested dict.

    Some datasets (e.g. allcolo.sort) carry no content-type column, so the
    flat (hostname, content_type) -> TTL mapping cannot be used directly.
    This writes ttl_dict_new.pickle shaped hostname -> {content_type: TTL},
    where the extra "unknown" and "" entries let a lookup fall back to a TTL
    from some other content type of the same hostname.
    """
    with open("ttl_dict.pickle", "rb") as src:
        flat_ttl = pickle.load(src)

    nested = defaultdict(dict)
    reported = set()
    for (host, ctype), ttl in flat_ttl.items():
        nested[host][ctype] = ttl
        # fallback keys: any content type's TTL is acceptable when the exact
        # one is unknown (last one written wins)
        nested[host]["unknown"] = ttl
        nested[host][""] = ttl

        # announce each distinct content type exactly once
        if ctype not in reported:
            reported.add(ctype)
            print(ctype)

    with open("ttl_dict_new.pickle", "wb") as dst:
        pickle.dump(nested, dst)


if __name__ == "__main__":
    convert_ttl_dict()
|
cf/trace_processing/simpleAnalysis.py
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
this script performs simple analysis of the stat generated in cleaning
|
3 |
+
|
4 |
+
"""
|
5 |
+
|
6 |
+
|
7 |
+
import os, sys
|
8 |
+
import time
|
9 |
+
import csv
|
10 |
+
import pickle, json
|
11 |
+
from collections import defaultdict, Counter
|
12 |
+
import struct
|
13 |
+
|
14 |
+
|
15 |
+
def analysis1(datafile):
    """Print the most frequent hostnames (counts shown in millions).

    Loads "<datafile>_stat.json" produced by the cleaning step and prints
    hostname/count pairs in descending count order, stopping after roughly
    the top 2000 entries.
    """
    with open("{}_stat.json".format(datafile), "r") as ifile:
        (content_cnt, extension_cnt, hostname_cnt,
         method_cnt, n_level_cnt, n_param_cnt) = json.load(ifile)

    for rank, (host, cnt) in enumerate(
            sorted(hostname_cnt.items(), key=lambda kv: -kv[1]), start=1):
        print(host, cnt // 1e6)
        if rank > 2000:
            break


if __name__ == "__main__":
    analysis1(sys.argv[1])
+
|
cf/trace_processing/usedInCF/anonymization.py
ADDED
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
|
3 |
+
import os, sys
|
4 |
+
import time
|
5 |
+
import csv
|
6 |
+
import requests
|
7 |
+
import pickle, json
|
8 |
+
from inspect import currentframe, getframeinfo
|
9 |
+
from collections import defaultdict
|
10 |
+
import hashlib
|
11 |
+
|
12 |
+
|
13 |
+
def processing(datafile):
    """Anonymize one raw comma-separated CF trace file.

    Reads the raw trace, backfills missing TTLs from ``ttl_dict.pickle``,
    replaces hostname/edgehost/referhost/zone_id with MD5-derived integer
    pseudonyms (MD5 is used for speed, not security), and writes the cleaned
    rows to ``<datafile>.clean``.
    """
    ifile = open(datafile, errors="ignore")
    reader = csv.reader(ifile)
    ofile = open(datafile + ".clean", "w")
    # BUG FIX: removed an unused `csv.writer(ofile)`; rows are written
    # directly with ofile.write below.
    ttl_dict = {}

    with open("ttl_dict.pickle", "rb") as f:
        ttl_dict = pickle.load(f)

    n = 0
    n_no_ttl, n_use_ttl_dict = 0, 0
    no_age = 0
    n_time_change = 0
    last_ts = 0
    for row in reader:
        # ts country obj sz content tier ttl age cstatus method zoneId
        # 1628962150 238 8881329091665286219 1031315 bin 0 2666005146 1628962150 hit GET 438702142 ark-us-static-online.yo-star.com /assetbundle/official/Android/assets/21-07-28-08-08-11-1b23f7/avg_images_avg_6_17.dat
        # 1628962464 238 11714083853013678418 1403 svg 0 2666004832 758240 miss GET 444690466 www.envive.com /Assets/Envive-DTC/img/img-1200x200-mobile-back.svg
        n += 1
        try:
            ts, country, colo, obj, obj2, sz, content_type, tiered_hit, ttl, age, cstatus, method, zone_id, edgehost, referhost, hostname, path, query_string = row
        except Exception as e:
            # malformed row (wrong column count) -- report and skip
            print(e, row)
            continue

        ts, ttl, age = int(ts), int(ttl), int(age)
        if ts < last_ts:
            # timestamps are expected to be monotone; count violations only
            n_time_change += 1
        last_ts = ts

        # a TTL beyond one year means "not set" -- try to backfill it
        if ttl > 86400*365 and (hostname, content_type) in ttl_dict:
            # BUG FIX: cast to int, matching clean.py -- ttl_dict values may
            # be strings, and the comparison below would otherwise TypeError
            ttl = int(ttl_dict[(hostname, content_type)])
            n_use_ttl_dict += 1
        # TTL/age beyond ~30 years is a sentinel for "missing"
        if ttl > 86400*365*30:
            n_no_ttl += 1
            ttl = -1

        if age > 86400*365*30:
            no_age += 1
            age = -1

        # pseudonymize identifying fields (30-bit MD5-derived ids)
        hostname = str(int(hashlib.md5(hostname.encode()).hexdigest(), 16) % (2**30))
        edgehost = str(int(hashlib.md5(edgehost.encode()).hexdigest(), 16) % (2**30))
        referhost = str(int(hashlib.md5(referhost.encode()).hexdigest(), 16) % (2**30))
        zone_id = str(int(hashlib.md5(zone_id.encode()).hexdigest(), 16) % (2**30))

        has_query_string = "1" if len(query_string.strip()) > 0 else "0"

        n_param = str(query_string.count("&") + 1)
        n_level = str(path.count("/") - 1)

        extension = path.split(".")[-1]
        if "-" in extension or "/" in extension or "_" in extension or len(extension) > 10:
            extension = ""

        ofile.write(",".join([str(ts), country, colo, obj, obj2, sz, content_type, extension, n_level, str(ttl), str(age), cstatus, method, zone_id, hostname, edgehost, referhost, has_query_string, n_param]) + "\n")

        # periodic progress report (counts printed in millions)
        if n % 20000000 == 0:
            print("{:.0f} {} req, {} missingTTL, {} replaceTTL, {} noAge, {} timeDiff".format(
                time.time(), n//1e6, n_no_ttl//1e6, n_use_ttl_dict//1e6, no_age//1e6, n_time_change))

    ifile.close()
    ofile.close()


if __name__ == "__main__":
    # processing("colo_0814_17_0")
    processing(sys.argv[1])
cf/trace_processing/usedInCF/parseNginxLog.py
ADDED
@@ -0,0 +1,295 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
|
3 |
+
import os, sys
|
4 |
+
import csv
|
5 |
+
import time
|
6 |
+
import datetime
|
7 |
+
import time, datetime
|
8 |
+
import pickle, json
|
9 |
+
from threading import Thread, Lock
|
10 |
+
from multiprocessing import Process, Queue, Lock
|
11 |
+
from concurrent.futures import ProcessPoolExecutor, as_completed
|
12 |
+
from inspect import currentframe, getframeinfo
|
13 |
+
from collections import defaultdict
|
14 |
+
import hashlib
|
15 |
+
import re
|
16 |
+
|
17 |
+
import IP2Location
|
18 |
+
import geoip2.database
|
19 |
+
ip_database = geoip2.database.Reader("GeoLite2-Country.mmdb")
|
20 |
+
# ip_database_v4 = IP2Location.IP2Location("IP-COUNTRY.BIN", "SHARE_MEMORY")
|
21 |
+
# ip_database_v6 = IP2Location.IP2Location("IPV6-COUNTRY.BIN", "SHARE_MEMORY")
|
22 |
+
|
23 |
+
|
24 |
+
s = '2021-08-19T23:00:39.753 28m480 2021/08/19 23:00:39 [error] 1094334#1094334: *4470570 [lua] log_main.lua:420: extra_logging(): {"resp_size":"1889","length":"39","zone":"103893895","body_size":"39","cstatus":"MISS","plan":"ENT","src":"cache","cc":"no-store, no-cache, must-revalidate, max-age=0","jira":"CACHE-8300","method":"GET","key":"0ec9788cd6532247d8369d483f8b95a6","type":"text\/plain; charset=UTF-8","obody_size":"39","osize":"296","ts":1629414039.753} while logging request, client: 97.113.133.242, id: 6816ffd443380939, server: 28c480, request: "GET /SIE/?Q_BID=tracfone&Q_CLIENTTYPE=MobileAndroid&Q_CLIENTVERSION=1.13&Q_DEVICEOS=9_28&Q_DEVICETYPE=motorola_moto%20g%286%29&Q_LOC=com.tracfone.tracfone.myaccount&Q_PageView=1&Q_SIID=SI_3EkJhpdZLwLyWA5&r=1629414039 HTTP/1.1", upstream: "https://162.247.217.100:443/SIE/?Q_BID=tracfone&Q_CLIENTTYPE=MobileAndroid&Q_CLIENTVERSION=1.13&Q_DEVICEOS=9_28&Q_DEVICETYPE=motorola_moto%20g%286%29&Q_LOC=com.tracfone.tracfone.myaccount&Q_PageView=1&Q_SIID=SI_3EkJhpdZLwLyWA5&r=1629414039", host: "zncvomiueeqmfxynn-tracfone.siteintercept.qualtrics.com", referrer: "com.tracfone.tracfone.myaccount"'
|
25 |
+
s = '2021-08-19T23:53:08.148 137m8 2021/08/19 23:53:08 [error] 3542366#3542366: *735895 [lua] log_main.lua:420: extra_logging(): {"resp_size":"21739","expire":"Fri, 20 Aug 2021 23:53:08 GMT","zone":"103899181","body_size":"18223","cstatus":"MISS","last_modified":["Fri, 25 Jun 2021 23:13:29 GMT","Fri, 25 Jun 2021 23:13:29 GMT"],"plan":"ENT","src":"cache","cc":"public, max-age=86400","jira":"CACHE-8300","method":"GET","key":"5edda60a313d23db437a9e50792855cd","type":"image\/jpeg","obody_size":"18223","osize":"20307","ts":1629417188.148} while logging request, client: 73.214.193.14, id: 68174cb0eec5f176, server: 137c8, request: "GET /media/hms/THUM/4/DSnoJ?_a=ChgyMDIxMDgxOTIzNDUwNjIwNDo1NjU5MTkSBTg0NTg3GgpPTkVfU0VBUkNIIg4xOTIuMjAzLjIzMy4zNSoHMjAzMjMyMDIKMjU0NDkzOTkwODoLSW5saW5lSW1hZ2VCATBSBk9ubGluZVoDSUxJYgRUSFVNagoyMDIxLzAxLzAxcgoyMDIxLzEyLzMxegCCASlQLTEwMDc4NTYtMjgzNjUtQ1VTVE9NRVItMTAwMDAyNTUtNTc2OTM3M5IBBk9ubGluZcoBeU1vemlsbGEvNS4wIChNYWNpbnRvc2g7IEludGVsIE1hYyBPUyBYIDEwXzE1XzcpIEFwcGxlV2ViS2l0LzUzNy4zNiAoS0hUTUwsIGxpa2UgR2Vja28pIENocm9tZS85Mi4wLjQ1MTUuMTA3IFNhZmFyaS81MzcuMzbSARJTY2hvbGFybHkgSm91cm5hbHOaAgdQcmVQYWlkqgIsT1M6RU1TLVRleHRQbHVzR3JhcGhpY3NDb250YWluZXItcHJlcGFyZUJvZHnKAg9BcnRpY2xlfEZlYXR1cmXiAgDyAgD6AgFZggMDV2ViigMcQ0lEOjIwMjEwODE5MjM0NTA2MjYzOjg5NDM4Ng%3D%3D&_s=oosxWzL5AWX7t4Ii7OiQIFf4jVY%3D HTTP/1.1", upstream: "http://unix:/cf/socks/nginx-be.sock/@@argo/?ray=68174cb0eec5f176", host: "media.proquest.com", referrer: "https://www-proquest-com.reddog.rmu.edu/"'
|
26 |
+
s = '2021-08-20T00:48:24.913 35m780 2021/08/20 00:48:24 [error] 61992#61992: *4137811 [lua] log_main.lua:420: extra_logging(): {"resp_size":"6814","length":"4372","expire":"Fri, 24 Jun 2022 16:43:55 GMT","zone":"26539756","body_size":"4372","cstatus":"MISS","last_modified":"Thu, 24 Jun 2021 16:43:55 GMT","src":"cache","cc":"public, max-age=31536000, s-maxage=31536000, stale-while-revalidate=63072000, stale-if-error=63072000","jira":"CACHE-8300","method":"GET","key":"9b5c61632e311f75ec698a8950bd0eb0","type":"image\/jpeg","ts":1629420504.913} while logging request, client: 113.184.81.90, id: 68179daba80701e1, server: 35c780, request: "GET /bloganchoi.com/wp-content/uploads/2017/09/lee-jong-suk-anh-bia-218x150.jpg?fit=218%2C20000&quality=95&ssl=1 HTTP/1.1", host: "i.bloganchoi.com", referrer: "https://bloganchoi.com/"'
|
27 |
+
s = '2021-08-21T00:17:14.098 27m376 2021/08/21 00:17:14 [error] 803136#803136: *164921146 [lua] log_main.lua:420: extra_logging(): {"resp_size":"196878","length":"194247","zone":"203788237","body_size":"194247","cstatus":"HIT","plan":"FREE","src":"cache","cc":"public, max-age=31536000","jira":"CACHE-8300","method":"GET","key":"702d065cf85479789d3ffec28759280d","type":"\xecmage\/jpeg","ts":1629505034.098} while logging request, client: 34.73.126.234, id: 681fad5ef847f309, server: 27c376, request: "GET /assets/image/314fc099-6532-470f-8cd5-bb096d0ab5f3.jpeg HTTP/1.1", host: "www.merfolkslullaby.com"'
|
28 |
+
# Primary parser for nginx error-log lines produced by log_main.lua's
# extra_logging(): captures the leading ISO timestamp, the node name, the
# embedded JSON payload, the client IP, the request line, and the optional
# trailing upstream / host / referrer fields.
regex = re.compile(r'(?P<ts>.+?) (?P<node>.+?) .+? (?P<jmsg>{.+?}) while logging request, client: (?P<ip>[0-9a-z.:]+?), .+? request: "(?P<request>.+?)"(?P<upstream>, upstream: ".*?")?(?P<host>, host: ".*?")?(?P<referrer>, referrer: ".*?")?')
# when request is too long and log line ends with partial request
# (no closing quote after the request, so "request" and "host" are best-effort)
regex2 = re.compile(r'(?P<ts>.+?) (?P<node>.+?) .+? (?P<jmsg>{.+?}) while logging request, client: (?P<ip>[0-9a-z.:]+?), .+? request: "(?P<request>.+?)(?P<host>.*?)')

# Module-level counters mutated by the parsing functions below.
n = 0                      # total log lines seen (incremented in parse_one_file)
n_dynamic = 0              # records with cstatus == "DYNAMIC" (skipped)
n_expire_err = 0           # "expire" values that failed both date and int parsing
n_last_modified_err = 0    # "last_modified" values that failed date parsing
|
36 |
+
|
37 |
+
def parse_test():
    """Smoke-test the primary log regex against the module-level sample line ``s``.

    Prints the resulting match object (or ``None`` when the pattern does not
    match) so the regex can be eyeballed interactively.
    """
    match = regex.match(s)
    print(match)
|
50 |
+
|
51 |
+
def parse_logline(log_line):
    """Parse one nginx error-log line into a 21-field request record.

    Returns a tuple
        (node, client, src, ts, obj_id, body_sz, method, content_type,
         extension, n_level, expire_ttl, age, cstatus, zone, plan, chunked,
         has_query, n_param, cache_control, host, range)
    or None for DYNAMIC records and keyless internal requests.

    Side effects: increments the module-level counters n_dynamic,
    n_expire_err and n_last_modified_err.  Raises on lines neither regex
    matches (callers catch and report) and asserts that every JSON field
    has been consumed.
    """
    global n_expire_err, n_last_modified_err, n_dynamic

    m = regex.match(log_line)
    if not m:
        # Fall back to the truncated-request pattern; if that also fails,
        # m is None and the .group() below raises AttributeError, which the
        # calling loop catches and logs.
        m = regex2.match(log_line)

    # The structured payload is a JSON object embedded in the log line.
    # Fields are pop()ed one by one; the assert near the end verifies that
    # nothing unexpected remains.
    jmsg = json.loads(m.group("jmsg"))

    cstatus = jmsg.pop("cstatus", "noStatus")
    if cstatus == "DYNAMIC":
        # Uncacheable (dynamic) responses are counted but not emitted.
        n_dynamic += 1
        return None

    if "key" not in jmsg:
        # Only known internal/housekeeping requests may lack a cache key.
        assert "/cdn-cgi/build/nginx-cache" in log_line or "multipart\/mirage-bag-base64" in log_line, "key error "
        return None

    # Truncate the float epoch timestamp to whole seconds, kept as a string.
    ts = str(int(jmsg.pop("ts")))
    obj_id = jmsg.pop("key")
    body_sz = jmsg.pop("body_size")
    resp_sz = jmsg.pop("resp_size")
    # Origin sizes default to 0 when the response was served from cache.
    obody_sz = jmsg.pop("obody_size", 0)
    oresp_sz = jmsg.pop("osize", 0)
    method = jmsg.pop("method")
    content_type = jmsg.pop("type", "no_type").replace(" ", "")
    if ";" in content_type:
        # assert len(content_type.split(";")) == 2, "content type error 1 " + content_type
        # assert content_type.split(";")[1].lower() == "charset=utf-8", "content type error 2 " + content_type
        # Drop parameters such as ";charset=UTF-8".
        content_type = content_type.split(";")[0]
    if "/" in content_type:
        # Keep only the subtype, e.g. "image/jpeg" -> "jpeg".
        content_type = content_type.split("/")[1]

    content_length = jmsg.pop("length", "")
    zone = jmsg.pop("zone", "-1")
    plan = jmsg.pop("plan", "noPlan")
    src = jmsg.pop("src")
    # NOTE: "range" shadows the builtin inside this function; the callers
    # unpack this field as http_range.
    range = jmsg.pop("range", "")
    if range:
        if range.startswith("bytes"):
            # Strip the "bytes=" / "bytes " prefix (6 characters).
            range = range[6:]
        # Keep only the byte range, dropping the "/total-length" suffix.
        range = range.split("/")[0]
        # assert(int(range[0].split("-")[1]) - int(range[0].split("-")[0]) + 1 == int(range[1]))
    jmsg.pop("jira")

    cache_control = jmsg.pop("cc", "no-cache-control")
    if isinstance(cache_control, list):
        # Multiple Cache-Control headers: join into one ;-separated value.
        cache_control = ";".join(cache_control)
    else:
        # Normalize to the same ;-separated, space-free form.
        cache_control = cache_control.replace(",", ";").replace(" ", "")

    # Request time from the log prefix, truncated to whole seconds.
    req_time = m.group("ts").strip()
    req_time = datetime.datetime.strptime(req_time.split(".")[0], "%Y-%m-%dT%H:%M:%S")

    # TTL derived from the Expires header; -1 when absent/unparseable or a list.
    expire_ttl = -1
    expire = jmsg.pop("expire", "")
    if not isinstance(expire, list):
        expire = expire.strip()
        if expire:
            try:
                expire = datetime.datetime.strptime(expire, "%a, %d %b %Y %H:%M:%S GMT")
                expire_ttl = int((expire - req_time).total_seconds())
            except Exception as e:
                try:
                    # Some records carry a bare integer TTL instead of a date.
                    expire_ttl = int(expire)
                except Exception as e:
                    n_expire_err += 1

    # Content age derived from Last-Modified; -1 when absent/unparseable or a list.
    age = -1
    last_modified = jmsg.pop("last_modified", "")
    if not isinstance(last_modified, list):
        last_modified = last_modified.strip()
        if last_modified:
            try:
                last_modified = datetime.datetime.strptime(last_modified, "%a, %d %b %Y %H:%M:%S GMT")
                age = int((req_time - last_modified).total_seconds())
            except Exception as e:
                n_last_modified_err += 1

    # File extension of the request path (query string stripped first).
    extension = ""
    if len(m.group("request").split()) > 1:
        path = m.group("request").split()[1]
        if '?' in path:
            path = path[:path.find("?")]
        if '.' in path:
            extension = path.split(".")[-1]
            if "?" in extension:
                extension = extension[:extension.find("?")]

    # Anonymize the client: map the IP to a country code where possible.
    client = m.group("ip")
    if client:
        if client == '2a06:98c0:3600::103':
            # presumably an internal Cloudflare address — flagged as "cf"
            client = "cf"
        try:
            # client_country = ""
            client_country = ip_database.country(client).country.iso_code
            if client_country != None:
                client = client_country
        except Exception as e:
            # Lookup failures leave the raw IP in place.
            pass

    node = m.group("node")
    # Hash the host name to a 30-bit integer string for anonymization.
    host = m.group("host")
    if host:
        host = host.split(":")[1].strip(' "')
        host = str(int(hashlib.sha1(host.encode()).hexdigest(), 16) % (2**30))
    else:
        host = "-1"

    # Every expected field must have been consumed above.
    assert len(jmsg) == 0, "jmsg is not empty" + str(jmsg)

    # An absent Content-Length implies chunked transfer encoding.
    chunked = "0"
    if content_length == "":
        chunked = "chunked"

    if obody_sz == 0:
        # Cache hits carry no origin body size; fall back to the served size.
        if cstatus not in ("HIT", "UPDATING", "noStatus"):
            print("cstatus error ")
        obody_sz = body_sz
    elif body_sz != obody_sz:
        # print(f"body size error {body_sz} {obody_sz}")
        pass

    # Query-string features: presence flag and parameter count.
    has_query = "0"
    n_param = "0"
    if "?" in m.group("request"):
        has_query = "1"
        n_param = str(m.group("request").count("&") + 1)

    # Directory depth of the request path; "0" when the path is missing.
    n_level = "0"
    try:
        n_level = str(m.group("request").split(" ")[1].count("/") - 1)
    except Exception as e:
        pass

    return node, client, src, ts, obj_id, body_sz, method, content_type, extension, n_level, expire_ttl, age, cstatus, zone, plan, chunked, has_query, n_param, cache_control, host, range
    # print(f"{node}, {ts:16}, {body_sz:>8}, {method:>6}, {content_type:>6}, {cstatus:>6}, {plan:>4}, {src:>6}, {chunked}, {has_query}, {expire_ttl:>8}, {age:>8}, {extension:6}, {host}")
|
188 |
+
|
189 |
+
|
190 |
+
def parse_one_file(datapath, ofile_dict):
    """Parse one raw nginx log file and append one CSV row per cacheable
    request to the per-node output file in *ofile_dict*.

    Args:
        datapath: path of the raw log file to read.
        ofile_dict: mapping of node name -> already-open writable file object.

    Side effects: increments the module-level line counter ``n``.  Malformed
    lines are reported to stdout and skipped (best-effort parsing).
    """
    global n
    # Pre-bind t so the except block can always print it: the original code
    # hit an UnboundLocalError when parse_logline raised on the very first
    # line, before t had ever been assigned.
    t = None
    # errors="ignore" drops undecodable bytes instead of aborting the file;
    # the with-block guarantees the handle is closed even on a fatal error.
    with open(datapath, errors="ignore") as ifile:
        for line in ifile:
            n += 1
            try:
                t = parse_logline(line[:-1])  # strip the trailing newline
                if not t:
                    continue  # DYNAMIC / keyless records yield None

                node, client, src, ts, obj_id, body_sz, method, content_type, extension, n_level, expire_ttl, age, cstatus, zone, plan, chunked, has_query, n_param, cache_control, host, http_range = t
                ofile_dict[node].write(",".join([client, src, ts, obj_id, body_sz, method, content_type, extension, n_level, str(expire_ttl), str(age), cstatus, zone, plan, chunked, has_query, n_param, cache_control, host, http_range]) + "\n")

            except Exception as e:
                # Best-effort: report the offending record and keep going.
                print(t)
                print(e, line)
|
207 |
+
|
208 |
+
|
209 |
+
def parse_one_file_parallel(datapath):
    """Parse one raw log file, splitting output per node (worker entry point).

    Reads *datapath*, writes one CSV row per cacheable request into
    ``<datapath with raw->data>_<node>`` files (opened lazily per node), and
    prints the covered time span when done.

    Fixes over the original:
      * ``t`` is pre-bound so the except block cannot raise UnboundLocalError
        when parse_logline fails on the first line;
      * the summary print subtracted two *strings* (``ts`` is produced as a
        string by parse_logline), which raised TypeError — timestamps are now
        tracked as ints;
      * a file with no parseable line no longer crashes on an unbound ``ts``.
    """
    output_path_base = datapath.replace("raw", "data")
    ofile_dict = {}

    first_ts = -1
    last_ts = -1
    t = None
    with open(datapath, errors="ignore") as ifile:
        for line in ifile:
            try:
                t = parse_logline(line[:-1])
                if not t:
                    continue  # DYNAMIC / keyless records yield None

                node, client, src, ts, obj_id, body_sz, method, content_type, extension, n_level, expire_ttl, age, cstatus, zone, plan, chunked, has_query, n_param, cache_control, host, http_range = t
                if node not in ofile_dict:
                    # Lazily open one output file per node seen in this log.
                    ofile_dict[node] = open("{}_{}".format(output_path_base, node), "w")
                ofile_dict[node].write(",".join([client, src, ts, obj_id, body_sz, method, content_type, extension, n_level, str(expire_ttl), str(age), cstatus, zone, plan, chunked, has_query, n_param, cache_control, host, http_range]) + "\n")

                last_ts = int(ts)
                if first_ts == -1:
                    first_ts = last_ts

            except Exception as e:
                # Best-effort: report the offending record and keep going.
                print(t)
                print(e, line)
    for ofile in ofile_dict.values():
        ofile.close()

    print("{} time {} - {} ({}s)".format(datapath, first_ts, last_ts, last_ts - first_ts))
|
238 |
+
|
239 |
+
|
240 |
+
def run(colo, day):
    """Sequentially parse one colo's raw hourly dumps for one day.

    Args:
        colo: colo number as a *string* prefix (matched against node names).
        day: day-of-August number used to build the hourly file names.

    Opens one append-mode output file under data/ per node of this colo,
    feeds the seed file from the evening of Aug 19 plus every existing
    hourly file raw/colo{colo}_08{day}{HH} through parse_one_file, and
    closes the outputs.
    """
    ofile_dict = {}
    for node in ("27m144", "27m376", "28m142", "28m480", "156m1", "156m2", "35m360", "35m780", "137m8", "137m18"):
        if node.startswith(colo):
            ofile_dict[node] = open("data/" + node, "a")

    # Seed file (Aug 19, hour 23).  Unlike the hourly loop below, the
    # original code parsed it unconditionally and crashed with
    # FileNotFoundError when it was absent — guard it the same way.
    data = f"raw/colo{colo}_081923"
    if os.path.exists(data):
        parse_one_file(data, ofile_dict)

    for h in range(24):
        # {h:02d} replaces the original manual zero-padding branch.
        data = f"raw/colo{colo}_08{day}{h:02d}"
        if not os.path.exists(data):
            continue

        parse_one_file(data, ofile_dict)
        print("{:.0f} {}, {}, {} dynamic, {} expireEr, {} lastModifiedErr".format(time.time(), data, n, n_dynamic, n_expire_err, n_last_modified_err))

    for ofile in ofile_dict.values():
        ofile.close()
|
262 |
+
|
263 |
+
|
264 |
+
def run_parallel():
    """Fan the per-colo, per-day raw dumps out to a process pool.

    Submits one parse_one_file_parallel job per (colo, day) daily file and
    reports each input path as its worker finishes.
    """
    pending = {}
    with ProcessPoolExecutor(max_workers=80) as ppe:
        for colo in (27, 28, 137, 156, 35):
            for day in (20, 21, 22, 23, ):
                path = f"raw/colo{colo}_08{day}"
                future = ppe.submit(parse_one_file_parallel, path)
                pending[future] = path

        for done in as_completed(pending):
            print("{} has finished running".format(pending[done]))
|
282 |
+
|
283 |
+
|
284 |
+
if __name__ == "__main__":
    # parse_one_file("27/081923")

    # parse_test()
    # Default action: smoke-test the parser on the module-level sample line s.
    parse_logline(s)
    # parse_one_file("raw/colo137_081923")
    # run(sys.argv[1], 20)
    # run_parallel()
|
292 |
+
|
293 |
+
|
294 |
+
|
295 |
+
|
cf2/cloudflare-research-cache-request/clickhouse/colo_0819.coloSIN.clean.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:f30e76fd21db8adcedce9648ce7f3da1bb48cf94886b90a9ce66827bbe8962c1
|
3 |
+
size 48343478994
|
cf2/cloudflare-research-cache-request/clickhouse/colo_0820.coloSIN.clean.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:81c7fd4479e7d4cc07c1d381c065a045e20391759127e7927757c8fdbbf0deaf
|
3 |
+
size 43136631770
|
cf2/cloudflare-research-cache-request/clickhouse/colo_0821.coloSIN.clean.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:ab704dc4e826ac6b3b55a2342068f0a47df6206a9b5608408b49beddcd4a069c
|
3 |
+
size 35014286790
|
cf2/cloudflare-research-cache-request/clickhouse/colo_0827.coloATL.clean.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:2a031eb12a37bf0fa2c86807804ea61c2d28a81e5d5ba18e7d556e928219d920
|
3 |
+
size 25484803575
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0820_137m18.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:948e5c8af8c6e2653f92ad50bef5d9aad41f6c6cc4d842b767ecf4753eb2a086
|
3 |
+
size 1565452946
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0820_137m8.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:e7c8ce235ffc37523b526c0285cd2948a2d33a0333c01aa2f95efc6c4c417d74
|
3 |
+
size 1568539296
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0821_137m18.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:945b67243b9e7bd2818e948fb55bafbebf622a168dd1f30f6c28fd5dec1ad4f8
|
3 |
+
size 1483552908
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0821_137m8.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:169b8bcccf7df5d567d9f4872d0a997c97f28ffb00936d02a255eb59a9cbff58
|
3 |
+
size 1491585330
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0822_137m18.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:9e85641f0226a6868ff7e1d127e09bf0a23d6d174e572c66d38284e699bdd0e4
|
3 |
+
size 1663548910
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0822_137m8.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:2ac1262b57c03763a653cd82b227f561b59437b054d0b00b81108bb232efe688
|
3 |
+
size 1679425936
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0823_137m18.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:0ae2b06fa1fb68bed18511f0441d10bf202e48f3a439564a1cb4a36303dcd148
|
3 |
+
size 1577016160
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0823_137m8.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:51bf3d1c89ab3d94a229f00cfd54cbf58f891dec499f1078d786469120dcb70b
|
3 |
+
size 1581720941
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0824_137m18.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:67a585614661e5c1c75068af476b269222dba07223d078cae01cbdb399b399a6
|
3 |
+
size 1494385459
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0824_137m8.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:152012978d83e930a9fa163cd7db641d235078466368d9534f6e1dfdfb12a88f
|
3 |
+
size 1497219195
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0825_137m18.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:e57460dcffdbe381b0393c3d981230de1e0286a1b8308b7e37d54032aa9f790e
|
3 |
+
size 1507129111
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0825_137m8.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:fc3134bebff709853ab238808cb42c4c70b2a4afa950c7e340c2b26d7ddc4059
|
3 |
+
size 1518737081
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0826_137m18.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:bd947a49e1879cd3580f54829b0c1ed0ae40a308e333e9e85fc97c1d7de9b363
|
3 |
+
size 1386241353
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0826_137m8.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:143ccdbb149ae605d74f8b0380630fb94d2c85d73337d837329076c9b528f05b
|
3 |
+
size 1359979434
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0827_137m18.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:f141d8358cd4c3deb257e80db3bf60a3456a97825031d02a71f7db5a94dacd3c
|
3 |
+
size 895085987
|
cf2/cloudflare-research-cache-request/nginxlog/colo137_0827_137m8.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:75a3b68c1c80780311b70733ce86ff2645d9a2829f92311d1badf89578864983
|
3 |
+
size 908849830
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0820_156m1.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:624fc4a07a3b9af6adea63ed65912b3d07f293666c3f97faab83578f8b3f480e
|
3 |
+
size 708918048
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0820_156m2.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:d7c216d89d41c0c303051b3fae5f3814535ad958944ee6f90988642d8ff81f2d
|
3 |
+
size 709094693
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0821_156m1.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:2fed9e83db60529283cc958fed854aebe681123c9b3b21abbb11050ae8007d10
|
3 |
+
size 750065151
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0821_156m2.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:9d6a4ab4d83ea427a34e0cdb477609228cb6045f9d359200b7b12f1bb4db56d5
|
3 |
+
size 746525802
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0822_156m1.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:d71346e1a775b36fdc2b29dd8212813a685826587c064a4c29df7d2491be0701
|
3 |
+
size 780364989
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0822_156m2.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:33b4426ec7b6f6715946cdbb96ff529bb5ece385299cf5b420d65132574242d4
|
3 |
+
size 776598245
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0823_156m1.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:a15d898d707b24304b648162361a138eab78f1df9355ad66e0e5a7523c21ab4b
|
3 |
+
size 667443752
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0823_156m2.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:cb67c108b860c41799350646ad30080c991dd2a63f1b6935fa8c645743481336
|
3 |
+
size 663321037
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0824_156m1.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:8f42e09d698b8b86382840fda50c9a4f15a83b88de5d9b5bc339ece03073e5f3
|
3 |
+
size 653941130
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0824_156m2.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:5523486cf834fa4cdee1699572ed0b25e5613c538eb6590ce33c3a4c367af526
|
3 |
+
size 656010988
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0825_156m1.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:4e3fa2d6cbdd84388281a664f875d71117023182cedf28bb74aee488dee43b94
|
3 |
+
size 648255080
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0825_156m2.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:6d1c3885e7fe277a6ff18ac075dbeb2b1bba34e6201788ff8fe577cbd956b109
|
3 |
+
size 654726321
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0826_156m1.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:36fe37bf73ab2f7e4795f185835c41c91c605003370d6a0ba914b4f9674c161d
|
3 |
+
size 619963340
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0826_156m2.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:17dba9821b498a98f12a09386efd2d5e418b46bf0bcf322f957622677b709242
|
3 |
+
size 642686458
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0827_156m1.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:510c32fdf5569fabeb724169417213f39fe1a95724daeb5b6693472226e9828d
|
3 |
+
size 407393552
|
cf2/cloudflare-research-cache-request/nginxlog/colo156_0827_156m2.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:b7ee415a77c3b0260f45914f90b7a19e9d829a181a30434852af67c952a61b28
|
3 |
+
size 400543279
|
cf2/cloudflare-research-cache-request/nginxlog/colo27_0820_27m144.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:912edfdd1b884a3e7c4a8a7535333dbb66dfa050805786903802b87559811b92
|
3 |
+
size 1993022126
|
cf2/cloudflare-research-cache-request/nginxlog/colo27_0820_27m376.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:e5cbc67e8f9e17e5a4c5db21a4bad18c5a8d7367bf38cd21ac538f9c1177e57d
|
3 |
+
size 2156823668
|
cf2/cloudflare-research-cache-request/nginxlog/colo27_0821_27m144.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:d371a549e057e6b0494081e5bed9359178283d6bf0f099c037cb7f54937fe980
|
3 |
+
size 1920271345
|
cf2/cloudflare-research-cache-request/nginxlog/colo27_0821_27m376.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:0a9b31b78287c99996bdf8df52a15548fb8d77e18b409994aa36bd163ad3abcf
|
3 |
+
size 2093437392
|
cf2/cloudflare-research-cache-request/nginxlog/colo27_0822_27m144.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:da4b8091d7136a76e3d3c07133dee1f703d09a31162062febb99665d6d063ed4
|
3 |
+
size 1987320631
|
cf2/cloudflare-research-cache-request/nginxlog/colo27_0822_27m376.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:82a675b95835d2fcdf1c968014b330c69970c60c09b7c375afd3488a06a503a0
|
3 |
+
size 2149116728
|
cf2/cloudflare-research-cache-request/nginxlog/colo27_0823_27m144.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:c21da5368504867e1e1e0b812ab031485bf2e2f5ab93dd0055492f1ffa881947
|
3 |
+
size 2011333671
|
cf2/cloudflare-research-cache-request/nginxlog/colo27_0823_27m376.zst
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:ac23c7b5c4a2bd5b88ebd9aec10a56ffb4ec44a75a71caf2c6a8dfebb171a855
|
3 |
+
size 2132064435
|