code (string, lengths 10-805k) | def_use_chains (sequence, lengths 0-667)
---|---
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('stocks', '0003_auto_20151129_1623'),
]
operations = [
migrations.AlterField(
model_name='floor',
name='floorPlayer',
field=models.ForeignKey(to='stocks.Player', related_name='FloorPlayer'),
),
migrations.AlterField(
model_name='stock',
name='last_updated',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 29, 22, 5, 30, 24205, tzinfo=utc)),
),
]
| [
[
[
47,
63
]
],
[
[
87,
93
],
[
409,
415
],
[
601,
607
]
],
[
[
95,
105
],
[
178,
188
],
[
304,
314
],
[
495,
505
]
],
[
[
113,
121
],
[
630,
638
]
],
[
[
156,
159
],
[
687,
690
]
],
[
[
168,
177
]
]
] |
# Name: Breno Maurício de Freitas Viana
# NUSP: 11920060
# Course Code: SCC5830
# Year/Semester: 2021/1
# Assignment 5: Image Descriptors
import math
import numpy as np
import imageio
from scipy import ndimage
np.seterr(divide='ignore', invalid='ignore')
LEVELS = 256
# ----- (1) Read Parameters
# Get the location of the object image `f`
f = input().rstrip()
# Get the location of the large image `g`
g = input().rstrip()
# Get the quantisation parameter `b`
b = int(input())
# --- Load images
# Object image `f`
f = imageio.imread(f)
# Large image `g`
g = imageio.imread(g)
# ----- (2) Preprocessing and Quantisation
def luminance(img):
"""
Take an RGB image as input and return a grayscale (black & white) image.
"""
N, M, _ = img.shape
out = np.empty(img.shape)
out = 0.299 * img[:,:,0] + 0.587 * img[:,:,1] + 0.114 * img[:,:,2]
return out.astype(np.uint8)
# --- Convert the images to black&white
f = luminance(f)
g = luminance(g)
# --- Quantise the images to `b` bits
B = 8 - b
f = f >> B
g = g >> B
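# After the shift each pixel value lies in the range [0, 2**b - 1].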
# ----- (3) Image Descriptors
def nh_descriptor(f):
"""
Return the normalized histogram descriptor.
"""
hist, _ = np.histogram(f, bins=[i for i in range(2 ** b + 1)])
hist = hist / hist.sum()
dc = hist / np.linalg.norm(hist)
return dc
def ht_descriptor(f):
"""
Return the Haralick texture descriptors (intensity-level co-occurrence matrix).
"""
# Calculate the co-occurrence matrix
N, M = f.shape
C = np.zeros((LEVELS, LEVELS))
for x in range(N - 1):
for y in range(M - 1):
i = f[x, y]
j = f[x + 1, y + 1]
C[i][j] += 1
C = C / C.sum()
#
# Computing the descriptors
N, M = C.shape
#
energy = np.power(C, 2).sum()
#
epsilon = 0.001
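# The small epsilon avoids taking log(0) for empty co-occurrence cells.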
entropy = - (C * np.log(C + epsilon)).sum()
#
A = np.fromfunction(lambda i, j: (i - j) ** 2, (N, M), dtype=int)
contrast = (1 / math.pow(N, 2)) * (C * A).sum()
#
mu_i, si_i = 0, 0
mu_j, si_j = 0, 0
for k in range(N):
a1 = C[k,:].sum()
mu_i += k * a1
si_i += math.pow(k - mu_i, 2) * a1
#
a2 = C[:,k].sum()
mu_j += k * a2
si_j += math.pow(k - mu_j, 2) * a2
#
A = np.fromfunction(lambda i, j: (i - j) ** 2, (N, M), dtype=int)
correlation = (A * C).sum() - mu_i * mu_j
correlation /= (si_i * si_j)
#
# Homogeneity: sum of C(i, j) / (1 + |i - j|)
A = np.fromfunction(lambda i, j: (1 + abs(i - j)), (N, M), dtype=int)
homogeneity = (C / A).sum()
#
# Return the Haralick texture descriptors
dt = np.array([energy, entropy, contrast, correlation, homogeneity])
dt = dt / np.linalg.norm(dt)
return dt
def hg_descriptor(f):
"""
Return the histogram of oriented gradients descriptor.
"""
wsx = np.array([[-1, -2, -1], [0, 0, 0], [1, 2, 1]])
wsy = np.array([[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]])
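# wsx and wsy are the Sobel kernels used to estimate the gradient along the
# two image axes before computing the gradient magnitude and orientation.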
#
f = f.astype(np.float64)
fx = ndimage.convolve(f, wsx)
fy = ndimage.convolve(f, wsy)
#
N, M = f.shape
#
div = np.sqrt(np.power(fx, 2) + np.power(fy, 2)).sum()
Mg = np.sqrt(np.power(fx, 2) + np.power(fy, 2)) / div
#
sigma = np.zeros(f.shape)
sigma = np.arctan(fy / fx) + np.pi / 2
sigma = np.degrees(sigma)
sigma = np.digitize(sigma, np.arange(0, 180, 20))
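# The orientation, shifted to [0, 180) degrees, is binned into nine 20-degree bins.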
sigma = sigma.astype(np.uint8)
#
dg = np.zeros(9)
for x in range(N):
for y in range(M):
dg[sigma[x][y] - 1] += Mg[x][y]
#
dg = dg / np.linalg.norm(dg)
return dg
# --- Compute the image descriptors
# Calculate the object image descriptors
dc = nh_descriptor(f)
dt = ht_descriptor(f)
dg = hg_descriptor(f)
d = np.concatenate((dc, dt, dg))
# ----- (4) Finding Our Object
def distance(d, di):
"""
Calculate the distance of two descriptors.
"""
return math.sqrt(np.power(d - di, 2).sum())
# --- Search for the object image location in the original image
size = f.shape[0]
step = size // 2
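# The window slides over the large image with a step of half the object size,
# i.e. consecutive windows overlap by 50%.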
N, M = g.shape
N = N // step
M = M // step
dist = np.iinfo(np.uint8).max
pos_x = None
pos_y = None
for i in range(N - 1):
for j in range(M - 1):
# Calculate the window
window = g[i*step:i*step+size, j*step:j*step+size]
# Calculate the descriptors of the window
window_dc = nh_descriptor(window)
window_dt = ht_descriptor(window)
window_dg = hg_descriptor(window)
window_d = np.concatenate((window_dc, window_dt, window_dg))
# Calculate the distance between the window and the object image
ndist = distance(d, window_d)
if dist > ndist:
dist = ndist
pos_x, pos_y = i, j
# --- Print the found location
print(pos_x, pos_y)
| [
[
[
147,
151
],
[
1855,
1859
],
[
2005,
2009
],
[
2091,
2095
],
[
3630,
3634
]
],
[
[
159,
170
],
[
214,
216
],
[
3479,
3481
],
[
3821,
3823
],
[
3830,
3832
],
[
4177,
4179
],
[
754,
756
],
[
863,
865
],
[
1146,
1148
],
[
1240,
1242
],
[
1450,
1452
],
[
1676,
1678
],
[
1738,
1740
],
[
1775,
1777
],
[
2128,
2130
],
[
2297,
2299
],
[
2448,
2450
],
[
2524,
2526
],
[
2655,
2657
],
[
2710,
2712
],
[
2776,
2778
],
[
2885,
2887
],
[
2893,
2895
],
[
2911,
2913
],
[
2941,
2943
],
[
2949,
2951
],
[
2967,
2969
],
[
3004,
3006
],
[
3032,
3034
],
[
3053,
3055
],
[
3073,
3075
],
[
3101,
3103
],
[
3120,
3122
],
[
3166,
3168
],
[
3187,
3189
],
[
3297,
3299
],
[
3640,
3642
]
],
[
[
178,
185
],
[
528,
535
],
[
568,
575
]
],
[
[
204,
211
],
[
2795,
2802
],
[
2827,
2834
]
],
[
[
260,
266
],
[
1460,
1466
],
[
1468,
1474
]
],
[
[
346,
347
],
[
543,
544
]
],
[
[
409,
410
],
[
583,
584
]
],
[
[
467,
468
],
[
997,
998
],
[
1190,
1191
]
],
[
[
524,
525
],
[
929,
930
]
],
[
[
564,
565
],
[
946,
947
]
],
[
[
637,
646
],
[
919,
928
],
[
936,
945
]
],
[
[
915,
916
],
[
1003,
1004
]
],
[
[
932,
933
],
[
1014,
1015
]
],
[
[
989,
990
],
[
1008,
1009
],
[
1019,
1020
]
],
[
[
999,
1000
],
[
3427,
3428
],
[
3449,
3450
],
[
3471,
3472
],
[
3742,
3743
]
],
[
[
1010,
1011
],
[
3777,
3778
],
[
3960,
3961
]
],
[
[
1058,
1071
],
[
3413,
3426
],
[
4064,
4077
]
],
[
[
1278,
1291
],
[
3435,
3448
],
[
4102,
4115
]
],
[
[
2560,
2573
],
[
3457,
3470
],
[
4140,
4153
]
],
[
[
3408,
3410
],
[
3495,
3497
]
],
[
[
3430,
3432
],
[
3499,
3501
]
],
[
[
3452,
3454
],
[
3503,
3505
]
],
[
[
3475,
3476
],
[
4317,
4318
]
],
[
[
3547,
3555
],
[
4308,
4316
]
],
[
[
3735,
3739
],
[
3760,
3764
],
[
3976,
3980
],
[
3996,
4000
]
],
[
[
3753,
3757
],
[
3794,
3798
],
[
3808,
3812
],
[
3964,
3968
],
[
3971,
3975
],
[
3984,
3988
],
[
3991,
3995
]
],
[
[
3770,
3771
],
[
3789,
3790
]
],
[
[
3773,
3774
],
[
3803,
3804
]
],
[
[
3785,
3786
],
[
3887,
3888
]
],
[
[
3799,
3800
],
[
3912,
3913
]
],
[
[
3814,
3818
],
[
4337,
4341
]
],
[
[
3845,
3850
],
[
4436,
4441
]
],
[
[
3858,
3863
],
[
4443,
4448
]
],
[
[
3876,
3877
],
[
3962,
3963
],
[
3969,
3970
],
[
4391,
4392
]
],
[
[
3901,
3902
],
[
3982,
3983
],
[
3989,
3990
],
[
4394,
4395
]
],
[
[
3951,
3957
],
[
4078,
4084
],
[
4116,
4122
],
[
4154,
4160
]
],
[
[
4052,
4061
],
[
4193,
4202
]
],
[
[
4090,
4099
],
[
4204,
4213
]
],
[
[
4128,
4137
],
[
4215,
4224
]
],
[
[
4166,
4174
],
[
4320,
4328
]
],
[
[
4300,
4305
],
[
4344,
4349
],
[
4364,
4369
]
],
[
[
4357,
4361
],
[
4337,
4341
]
],
[
[
4376,
4381
],
[
4436,
4441
]
],
[
[
4383,
4388
],
[
4443,
4448
]
]
] |
"""Test trunk lock."""
import pytest
from tests.tesla_mock import TeslaMock
from teslajsonpy.controller import Controller
from teslajsonpy.trunk import TrunkLock
def test_has_battery(monkeypatch):
"""Test has_battery()."""
_mock = TeslaMock(monkeypatch)
_controller = Controller(None)
_data = _mock.data_request_vehicle()
_lock = TrunkLock(_data, _controller)
assert not _lock.has_battery()
def test_is_locked_on_init(monkeypatch):
"""Test is_locked() after initialization."""
_mock = TeslaMock(monkeypatch)
_controller = Controller(None)
_data = _mock.data_request_vehicle()
_lock = TrunkLock(_data, _controller)
assert _lock is not None
assert not _lock.is_locked()
@pytest.mark.asyncio
async def test_is_locked_after_update(monkeypatch):
"""Test is_locked() after an update."""
_mock = TeslaMock(monkeypatch)
_controller = Controller(None)
_data = _mock.data_request_vehicle()
_data["vehicle_state"]["rt"] = 0
_lock = TrunkLock(_data, _controller)
await _lock.async_update()
assert _lock is not None
assert _lock.is_locked()
@pytest.mark.asyncio
async def test_unlock(monkeypatch):
"""Test unlock()."""
_mock = TeslaMock(monkeypatch)
_controller = Controller(None)
_data = _mock.data_request_vehicle()
_data["vehicle_state"]["rt"] = 0
_lock = TrunkLock(_data, _controller)
await _lock.async_update()
await _lock.unlock()
assert _lock is not None
assert not _lock.is_locked()
@pytest.mark.asyncio
async def test_unlock_already_unlocked(monkeypatch):
"""Test unlock() when already unlocked."""
_mock = TeslaMock(monkeypatch)
_controller = Controller(None)
_data = _mock.data_request_vehicle()
_data["vehicle_state"]["rt"] = 123
_lock = TrunkLock(_data, _controller)
await _lock.async_update()
await _lock.unlock()
assert _lock is not None
assert not _lock.is_locked()
# Reset to default for next tests
_data["vehicle_state"]["rt"] = 0
@pytest.mark.asyncio
async def test_lock(monkeypatch):
"""Test lock()."""
_mock = TeslaMock(monkeypatch)
_controller = Controller(None)
_data = _mock.data_request_vehicle()
_data["vehicle_state"]["rt"] = 123
_lock = TrunkLock(_data, _controller)
await _lock.async_update()
await _lock.lock()
assert _lock is not None
assert _lock.is_locked()
# Reset to default for next tests
_data["vehicle_state"]["rt"] = 0
@pytest.mark.asyncio
async def test_lock_already_locked(monkeypatch):
"""Test lock() when already locked."""
_mock = TeslaMock(monkeypatch)
_controller = Controller(None)
_data = _mock.data_request_vehicle()
_data["vehicle_state"]["rt"] = 0
_lock = TrunkLock(_data, _controller)
await _lock.async_update()
await _lock.lock()
assert _lock is not None
assert _lock.is_locked()
| [
[
[
31,
37
],
[
736,
742
],
[
1138,
1144
],
[
1534,
1540
],
[
2047,
2053
],
[
2511,
2517
]
],
[
[
68,
77
],
[
245,
254
],
[
528,
537
],
[
865,
874
],
[
1232,
1241
],
[
1667,
1676
],
[
2137,
2146
],
[
2636,
2645
]
],
[
[
114,
124
],
[
286,
296
],
[
569,
579
],
[
906,
916
],
[
1273,
1283
],
[
1708,
1718
],
[
2178,
2188
],
[
2677,
2687
]
],
[
[
155,
164
],
[
357,
366
],
[
640,
649
],
[
1014,
1023
],
[
1381,
1390
],
[
1818,
1827
],
[
2288,
2297
],
[
2785,
2794
]
],
[
[
171,
187
]
],
[
[
429,
451
]
],
[
[
756,
1134
]
],
[
[
1158,
1530
]
],
[
[
1554,
2043
]
],
[
[
2067,
2507
]
],
[
[
2531,
2928
]
]
] |
import sys
from cx_Freeze import setup, Executable
setup(
name='YtMusic-Lib-Tracker',
url='https://github.com/czifumasa/ytmusic-lib-tracker',
author='Łukasz Lenart',
author_email='[email protected]',
version='0.1',
license='MIT',
description='Useful tools for youtube music. Exporting library to csv, tracking changes in library, summary of transfer from GPM',
long_description=open('README.md').read(),
options={"build_exe": {
'packages': ['ytmusicapi', 'unidecode'],
'excludes': ['tkinter', 'test', 'unittest', 'pydoc_data'],
'include_files': ['config.ini'],
'optimize': 2,
}},
executables=[Executable('ytmusiclibtracker.py', base='console', icon='ytmlt.ico', targetName='YTMusicLibTracker')]
) | [
[
[
7,
10
]
],
[
[
33,
38
],
[
52,
57
]
],
[
[
40,
50
],
[
679,
689
]
]
] |
#!/usr/bin/python
# -*- coding:utf-8 -*-
import RPi.GPIO as GPIO
import time
CS = 5
Clock = 25
Address = 24
DataOut = 23
Button = 7
class TRSensor(object):
def __init__(self,numSensors = 5):
self.numSensors = numSensors
self.calibratedMin = [0] * self.numSensors
self.calibratedMax = [1023] * self.numSensors
self.last_value = 0
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(Clock,GPIO.OUT)
GPIO.setup(Address,GPIO.OUT)
GPIO.setup(CS,GPIO.OUT)
GPIO.setup(DataOut,GPIO.IN,GPIO.PUD_UP)
GPIO.setup(Button,GPIO.IN,GPIO.PUD_UP)
"""
Reads the sensor values into an array. There *MUST* be space
for as many values as there were sensors specified in the constructor.
Example usage:
unsigned int sensor_values[8];
sensors.read(sensor_values);
The values returned are a measure of the reflectance in abstract units,
with higher values corresponding to lower reflectance (e.g. a black
surface or a void).
"""
def AnalogRead(self):
value = [0]*(self.numSensors+1)
#Read Channel0~channel6 AD value
for j in range(0,self.numSensors+1):
GPIO.output(CS, GPIO.LOW)
for i in range(0,4):
#sent 4-bit Address
if(((j) >> (3 - i)) & 0x01):
GPIO.output(Address,GPIO.HIGH)
else:
GPIO.output(Address,GPIO.LOW)
#read MSB 4-bit data
value[j] <<= 1
if(GPIO.input(DataOut)):
value[j] |= 0x01
GPIO.output(Clock,GPIO.HIGH)
GPIO.output(Clock,GPIO.LOW)
for i in range(0,6):
#read LSB 8-bit data
value[j] <<= 1
if(GPIO.input(DataOut)):
value[j] |= 0x01
GPIO.output(Clock,GPIO.HIGH)
GPIO.output(Clock,GPIO.LOW)
# no meaning, just a delay
# for i in range(0,6):
# GPIO.output(Clock,GPIO.HIGH)
# GPIO.output(Clock,GPIO.LOW)
time.sleep(0.0001)
GPIO.output(CS,GPIO.HIGH)
# print value[1:]
return value[1:]
"""
Reads the sensors 10 times and uses the results for
calibration. The sensor values are not returned; instead, the
maximum and minimum values found over time are stored internally
and used for the readCalibrated() method.
"""
def calibrate(self):
max_sensor_values = [0]*self.numSensors
min_sensor_values = [0]*self.numSensors
for j in range(0,10):
sensor_values = self.AnalogRead()
for i in range(0,self.numSensors):
# set the max we found THIS time
if((j == 0) or max_sensor_values[i] < sensor_values[i]):
max_sensor_values[i] = sensor_values[i]
# set the min we found THIS time
if((j == 0) or min_sensor_values[i] > sensor_values[i]):
min_sensor_values[i] = sensor_values[i]
# record the min and max calibration values
for i in range(0,self.numSensors):
if(min_sensor_values[i] > self.calibratedMin[i]):
self.calibratedMin[i] = min_sensor_values[i]
if(max_sensor_values[i] < self.calibratedMax[i]):
self.calibratedMax[i] = max_sensor_values[i]
"""
Returns values calibrated to a value between 0 and 1000, where
0 corresponds to the minimum value read by calibrate() and 1000
corresponds to the maximum value. Calibration values are
stored separately for each sensor, so that differences in the
sensors are accounted for automatically.
"""
def readCalibrated(self):
value = 0
#read the needed values
sensor_values = self.AnalogRead()
for i in range (0,self.numSensors):
denominator = self.calibratedMax[i] - self.calibratedMin[i]
if(denominator != 0):
value = (sensor_values[i] - self.calibratedMin[i])* 1000 / denominator
if(value < 0):
value = 0
elif(value > 1000):
value = 1000
sensor_values[i] = value
#print("readCalibrated",sensor_values)
return sensor_values
"""
Operates the same as readCalibrated(), but also returns an
estimated position of the robot with respect to a line. The
estimate is made using a weighted average of the sensor indices
multiplied by 1000, so that a return value of 0 indicates that
the line is directly below sensor 0, a return value of 1000
indicates that the line is directly below sensor 1, 2000
indicates that it's below sensor 2, etc. Intermediate
values indicate that the line is between two sensors. The
formula is:
0*value0 + 1000*value1 + 2000*value2 + ...
--------------------------------------------
value0 + value1 + value2 + ...
By default, this function assumes a dark line (high values)
surrounded by white (low values). If your line is light on
black, set the optional second argument white_line to true. In
this case, each sensor value will be replaced by (1000-value)
before the averaging.
"""
def readLine(self, white_line = 0):
sensor_values = self.readCalibrated()
avg = 0
sum = 0
on_line = 0
for i in range(0,self.numSensors):
value = sensor_values[i]
if(white_line):
value = 1000-value
# keep track of whether we see the line at all
if(value > 200):
on_line = 1
# only average in values that are above a noise threshold
if(value > 50):
avg += value * (i * 1000); # this is for the weighted total,
sum += value; #this is for the denominator
if(on_line != 1):
# If it last read to the left of center, return 0.
if(self.last_value < (self.numSensors - 1)*1000/2):
#print("left")
self.last_value = 0
# If it last read to the right of center, return the max.
else:
#print("right")
self.last_value = (self.numSensors - 1)*1000
else:
self.last_value = avg/sum
return self.last_value,sensor_values
# Simple example: prints the raw sensor readings every 0.2 seconds:
if __name__ == '__main__':
TR = TRSensor()
print("TRSensor Example")
while True:
print(TR.AnalogRead())
time.sleep(0.2)
| [
[
[
48,
64
],
[
342,
346
],
[
355,
359
],
[
367,
371
],
[
393,
397
],
[
410,
414
],
[
422,
426
],
[
441,
445
],
[
453,
457
],
[
467,
471
],
[
479,
483
],
[
498,
502
],
[
506,
510
],
[
521,
525
],
[
539,
543
],
[
547,
551
],
[
1082,
1086
],
[
1098,
1102
],
[
1194,
1198
],
[
1214,
1218
],
[
1240,
1244
],
[
1260,
1264
],
[
1321,
1325
],
[
1369,
1373
],
[
1387,
1391
],
[
1402,
1406
],
[
1420,
1424
],
[
1505,
1509
],
[
1553,
1557
],
[
1571,
1575
],
[
1586,
1590
],
[
1604,
1608
],
[
1755,
1759
],
[
1770,
1774
]
],
[
[
72,
76
],
[
5644,
5648
],
[
1733,
1737
]
],
[
[
78,
80
],
[
464,
466
],
[
1094,
1096
],
[
1767,
1769
]
],
[
[
85,
90
],
[
404,
409
],
[
1381,
1386
],
[
1414,
1419
],
[
1565,
1570
],
[
1598,
1603
]
],
[
[
96,
103
],
[
433,
440
],
[
1206,
1213
],
[
1252,
1259
]
],
[
[
109,
116
],
[
490,
497
],
[
1332,
1339
],
[
1516,
1523
]
],
[
[
122,
128
],
[
532,
538
]
],
[
[
140,
148
],
[
5566,
5574
]
],
[
[
5561,
5563
],
[
5625,
5627
]
]
] |
import connexion
from openapi_server.annotator.phi_types import PhiType
from openapi_server.get_annotations import get_annotations
from openapi_server.models.error import Error # noqa: E501
from openapi_server.models.text_date_annotation_request import \
TextDateAnnotationRequest # noqa: E501
from openapi_server.models.text_date_annotation_response import \
TextDateAnnotationResponse # noqa: E501
def create_text_date_annotations(): # noqa: E501
"""Annotate dates in a clinical note
Return the date annotations found in a clinical note # noqa: E501
:rtype: TextDateAnnotations
"""
res = None
status = None
if connexion.request.is_json:
try:
annotation_request = TextDateAnnotationRequest.from_dict(
connexion.request.get_json()) # noqa: E501
note = annotation_request.note
annotations = get_annotations(note, phi_type=PhiType.DATE)
res = TextDateAnnotationResponse(annotations)
status = 200
except Exception as error:
status = 500
res = Error("Internal error", status, str(error))
return res, status
| [
[
[
7,
16
],
[
656,
665
],
[
782,
791
]
],
[
[
64,
71
],
[
926,
933
]
],
[
[
115,
130
],
[
895,
910
]
],
[
[
171,
176
],
[
1102,
1107
]
],
[
[
260,
285
],
[
729,
754
]
],
[
[
370,
396
],
[
959,
985
]
],
[
[
417,
445
]
]
] |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
import logging
from .lr_scheduler import WarmupMultiStepLR
def make_optimizer(cfg, model):
logger = logging.getLogger("fcos_core.trainer")
params = []
for key, value in model.named_parameters():
if not value.requires_grad:
continue
lr = cfg.SOLVER.BASE_LR
weight_decay = cfg.SOLVER.WEIGHT_DECAY
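# Bias parameters get a separate learning-rate factor and weight decay (cfg.SOLVER.BIAS_LR_FACTOR, cfg.SOLVER.WEIGHT_DECAY_BIAS).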
if "bias" in key:
lr = cfg.SOLVER.BASE_LR * cfg.SOLVER.BIAS_LR_FACTOR
weight_decay = cfg.SOLVER.WEIGHT_DECAY_BIAS
if key.endswith(".offset.weight") or key.endswith(".offset.bias"):
logger.info("set lr factor of {} as {}".format(
key, cfg.SOLVER.DCONV_OFFSETS_LR_FACTOR
))
lr *= cfg.SOLVER.DCONV_OFFSETS_LR_FACTOR
params += [{"params": [value], "lr": lr, "weight_decay": weight_decay}]
optimizer = torch.optim.SGD(params, lr, momentum=cfg.SOLVER.MOMENTUM)
if cfg.SOLVER.ADAM:
optimizer = torch.optim.Adam(params)
return optimizer
def make_lr_scheduler(cfg, optimizer):
return WarmupMultiStepLR(
optimizer,
cfg.SOLVER.STEPS,
cfg.SOLVER.GAMMA,
warmup_factor=cfg.SOLVER.WARMUP_FACTOR,
warmup_iters=cfg.SOLVER.WARMUP_ITERS,
warmup_method=cfg.SOLVER.WARMUP_METHOD,
)
| [
[
[
79,
84
],
[
932,
937
],
[
1034,
1039
]
],
[
[
92,
99
],
[
191,
198
]
],
[
[
126,
143
],
[
1132,
1149
]
],
[
[
150,
164
]
],
[
[
1086,
1103
]
]
] |
# -*- coding: utf-8 -*-
"""Patched version of PyPi Kitchen's Python 3 getwriter function. Removes
extraneous newlines."""
import codecs
from kitchen.text.converters import to_bytes
def getwriter(encoding):
"""Return a :class:`codecs.StreamWriter` that resists tracing back.
:arg encoding: Encoding to use for transforming :class:`str` strings
into byte :class:`bytes`.
:rtype: :class:`codecs.StreamWriter`
:returns: :class:`~codecs.StreamWriter` that you can instantiate to wrap
output streams to automatically translate :class:`str` strings into
:attr:`encoding`.
This is a reimplementation of :func:`codecs.getwriter` that returns
a :class:`~codecs.StreamWriter` that resists issuing tracebacks. The
:class:`~codecs.StreamWriter` that is returned uses
:func:`kitchen.text.converters.to_bytes` to convert :class:`str`
strings into byte :class:`bytes`. The departures from
:func:`codecs.getwriter` are:
1) The :class:`~codecs.StreamWriter` that is returned will take byte
:class:`bytes` as well as :class:`str` strings. Any byte
:class:`bytes` will be passed through unmodified.
2) The default error handler for unknown bytes is to ``replace`` the bytes
with the unknown character (``?`` in most ascii-based encodings, ``�``
in the utf encodings) whereas :func:`codecs.getwriter` defaults to
``strict``. Like :class:`codecs.StreamWriter`, the returned
:class:`~codecs.StreamWriter` can have its error handler changed in
code by setting ``stream.errors = 'new_handler_name'``
Example usage::
$ LC_ALL=C python
>>> import sys
>>> from kitchen.text.converters import getwriter
>>> UTF8Writer = getwriter('utf-8')
>>> unwrapped_stdout = sys.stdout
>>> sys.stdout = UTF8Writer(unwrapped_stdout)
>>> print 'caf\\xc3\\xa9'
café
>>> print u'caf\\xe9'
café
>>> ASCIIWriter = getwriter('ascii')
>>> sys.stdout = ASCIIWriter(unwrapped_stdout)
>>> print 'caf\\xc3\\xa9'
café
>>> print u'caf\\xe9'
caf?
.. seealso::
API docs for :class:`codecs.StreamWriter` and :func:`codecs.getwriter`
and `Print Fails <http://wiki.python.org/moin/PrintFails>`_ on the
python wiki.
.. versionadded:: kitchen 0.2a2, API: kitchen.text 1.1.0
"""
class _StreamWriter(codecs.StreamWriter):
# :W0223: We don't need to implement all methods of StreamWriter.
# This is not the actual class that gets used but a replacement for
# the actual class.
# :C0111: We're implementing an API from the stdlib. Just point
# people at that documentation instead of writing docstrings here.
# pylint:disable-msg=W0223,C0111
def __init__(self, stream, errors='replace'):
codecs.StreamWriter.__init__(self, stream, errors)
def encode(self, msg, errors='replace'):
return (to_bytes(msg, encoding=self.encoding, errors=errors),
len(msg))
_StreamWriter.encoding = encoding
return _StreamWriter
| [
[
[
129,
135
],
[
2442,
2448
],
[
2903,
2909
]
],
[
[
173,
181
],
[
3024,
3032
]
],
[
[
188,
197
]
]
] |
input = """
colored(2,g) :- not diff_col(2,g).
colored(2,y) :- not diff_col(2,y).
colored(3,g) :- not diff_col(3,g).
colored(3,y) :- not diff_col(3,y).
diff_col(2,g) :- colored(2,y).
diff_col(3,g) :- colored(3,y).
diff_col(2,y) :- colored(2,g).
diff_col(3,y) :- colored(3,g).
no_stable :- colored(2,2), colored(3,2), not no_stable.
no_stable :- colored(2,3), colored(3,3), not no_stable.
no_stable :- colored(2,g), colored(3,g), not no_stable.
no_stable :- colored(2,y), colored(3,y), not no_stable.
"""
output = """
colored(2,g) :- not diff_col(2,g).
colored(2,y) :- not diff_col(2,y).
colored(3,g) :- not diff_col(3,g).
colored(3,y) :- not diff_col(3,y).
diff_col(2,g) :- colored(2,y).
diff_col(3,g) :- colored(3,y).
diff_col(2,y) :- colored(2,g).
diff_col(3,y) :- colored(3,g).
no_stable :- colored(2,2), colored(3,2), not no_stable.
no_stable :- colored(2,3), colored(3,3), not no_stable.
no_stable :- colored(2,g), colored(3,g), not no_stable.
no_stable :- colored(2,y), colored(3,y), not no_stable.
"""
| [
[
[
0,
5
]
],
[
[
520,
526
]
]
] |
import logging
from datetime import datetime
import botocore.loaders
import botocore.regions
from boto3 import Session as Boto3Session
from botocore.exceptions import ClientError
from .exceptions import CLIMisconfiguredError, DownstreamError
LOG = logging.getLogger(__name__)
BOTO_CRED_KEYS = ("aws_access_key_id", "aws_secret_access_key", "aws_session_token")
LOWER_CAMEL_CRED_KEYS = ("accessKeyId", "secretAccessKey", "sessionToken")
def create_sdk_session(region_name=None):
def _known_error(msg):
raise CLIMisconfiguredError(
msg + ". Please ensure your AWS CLI is configured correctly: "
"https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html"
)
session = Boto3Session(region_name=region_name)
if session.region_name is None:
_known_error("No region specified")
if session.get_credentials() is None:
_known_error("No credentials specified")
return session
def get_temporary_credentials(session, key_names=BOTO_CRED_KEYS, role_arn=None):
sts_client = session.client(
"sts",
endpoint_url=get_service_endpoint("sts", session.region_name),
region_name=session.region_name,
)
if role_arn:
session_name = "CloudFormationContractTest-{:%Y%m%d%H%M%S}".format(
datetime.now()
)
try:
response = sts_client.assume_role(
RoleArn=role_arn, RoleSessionName=session_name, DurationSeconds=900
)
except ClientError:
# pylint: disable=W1201
LOG.debug(
"Getting session token resulted in unknown ClientError. "
+ "Could not assume specified role '%s'.",
role_arn,
)
raise DownstreamError() from Exception(
"Could not assume specified role '{}'".format(role_arn)
)
temp = response["Credentials"]
creds = (temp["AccessKeyId"], temp["SecretAccessKey"], temp["SessionToken"])
else:
frozen = session.get_credentials().get_frozen_credentials()
if frozen.token:
creds = (frozen.access_key, frozen.secret_key, frozen.token)
else:
try:
response = sts_client.get_session_token(DurationSeconds=900)
except ClientError as e:
LOG.debug(
"Getting session token resulted in unknown ClientError", exc_info=e
)
raise DownstreamError("Could not retrieve session token") from e
temp = response["Credentials"]
creds = (temp["AccessKeyId"], temp["SecretAccessKey"], temp["SessionToken"])
return dict(zip(key_names, creds))
def get_service_endpoint(service, region):
loader = botocore.loaders.create_loader()
data = loader.load_data("endpoints")
resolver = botocore.regions.EndpointResolver(data)
endpoint_data = resolver.construct_endpoint(service, region)
return "https://" + endpoint_data["hostname"]
def get_account(session, temporary_credentials):
sts_client = session.client(
"sts",
endpoint_url=get_service_endpoint("sts", session.region_name),
region_name=session.region_name,
aws_access_key_id=temporary_credentials["accessKeyId"],
aws_secret_access_key=temporary_credentials["secretAccessKey"],
aws_session_token=temporary_credentials["sessionToken"],
)
response = sts_client.get_caller_identity()
return response.get("Account")
| [
[
[
7,
14
],
[
251,
258
]
],
[
[
36,
44
],
[
1320,
1328
]
],
[
[
53,
69
]
],
[
[
77,
93
],
[
2789,
2797
],
[
2878,
2886
]
],
[
[
112,
135
],
[
735,
747
]
],
[
[
168,
179
],
[
1518,
1529
],
[
2328,
2339
]
],
[
[
205,
226
],
[
525,
546
]
],
[
[
228,
243
],
[
1781,
1796
],
[
2501,
2516
]
],
[
[
245,
248
],
[
1579,
1582
],
[
2362,
2365
]
],
[
[
280,
294
],
[
1017,
1031
]
],
[
[
365,
386
]
],
[
[
446,
464
]
],
[
[
972,
997
]
],
[
[
2737,
2757
],
[
1118,
1138
],
[
3153,
3173
]
],
[
[
3039,
3050
]
]
] |
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch CUTOFFBERT model. """
import math
import os
import warnings
import numpy as np
from dataclasses import dataclass
from typing import Optional, Tuple
import torch
import torch.utils.checkpoint
import torch.nn.functional as F
from packaging import version
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss, KLDivLoss
from torch.distributions.beta import Beta
from ...activations import ACT2FN
from ...file_utils import (
ModelOutput,
add_code_sample_docstrings,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from ...modeling_outputs import (
BaseModelOutputWithPastAndCrossAttentions,
BaseModelOutputWithPoolingAndCrossAttentions,
CausalLMOutputWithCrossAttentions,
MaskedLMOutput,
MultipleChoiceModelOutput,
NextSentencePredictorOutput,
QuestionAnsweringModelOutput,
SequenceClassifierOutput,
TokenClassifierOutput,
DualPassageEncoderModelOutput,
)
from ...modeling_utils import (
PreTrainedModel,
apply_chunking_to_forward,
find_pruneable_heads_and_indices,
prune_linear_layer,
)
from ...utils import logging
from .configuration_cutoffbert import CutoffBertConfig
from ..bert.modeling_bert import BertEmbeddings as CutoffBertEmbeddings
from ..bert.modeling_bert import BertEncoder as CutoffBertEncoder
from ..bert.modeling_bert import BertPooler as CutoffBertPooler
logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "bert-base-uncased"
_CONFIG_FOR_DOC = "CutoffBertConfig"
_TOKENIZER_FOR_DOC = "CutoffBertTokenizer"
CUTOFFBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [
"bert-base-uncased",
"bert-large-uncased",
"bert-base-cased",
"bert-large-cased",
"bert-base-multilingual-uncased",
"bert-base-multilingual-cased",
# See all BERT models at https://huggingface.co/models?filter=bert
]
def load_tf_weights_in_cutoffbert(model, config, tf_checkpoint_path):
"""Load tf checkpoints in a pytorch model."""
try:
import re
import numpy as np
import tensorflow as tf
except ImportError:
logger.error(
"Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see "
"https://www.tensorflow.org/install/ for installation instructions."
)
raise
tf_path = os.path.abspath(tf_checkpoint_path)
logger.info(f"Converting TensorFlow checkpoint from {tf_path}")
# Load weights from TF model
init_vars = tf.train.list_variables(tf_path)
names = []
arrays = []
for name, shape in init_vars:
logger.info(f"Loading TF weight {name} with shape {shape}")
array = tf.train.load_variable(tf_path, name)
names.append(name)
arrays.append(array)
for name, array in zip(names, arrays):
name = name.split("/")
# adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculate m and v
# which are not required for using pretrained model
if any(
n in ["adam_v", "adam_m", "AdamWeightDecayOptimizer", "AdamWeightDecayOptimizer_1", "global_step"]
for n in name
):
logger.info(f"Skipping {'/'.join(name)}")
continue
pointer = model
for m_name in name:
if re.fullmatch(r"[A-Za-z]+_\d+", m_name):
scope_names = re.split(r"_(\d+)", m_name)
else:
scope_names = [m_name]
if scope_names[0] == "kernel" or scope_names[0] == "gamma":
pointer = getattr(pointer, "weight")
elif scope_names[0] == "output_bias" or scope_names[0] == "beta":
pointer = getattr(pointer, "bias")
elif scope_names[0] == "output_weights":
pointer = getattr(pointer, "weight")
elif scope_names[0] == "squad":
pointer = getattr(pointer, "classifier")
else:
try:
pointer = getattr(pointer, scope_names[0])
except AttributeError:
logger.info(f"Skipping {'/'.join(name)}")
continue
if len(scope_names) >= 2:
num = int(scope_names[1])
pointer = pointer[num]
if m_name[-11:] == "_embeddings":
pointer = getattr(pointer, "weight")
elif m_name == "kernel":
array = np.transpose(array)
try:
assert (
pointer.shape == array.shape
), f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched"
except AssertionError as e:
e.args += (pointer.shape, array.shape)
raise
logger.info(f"Initialize PyTorch weight {name}")
pointer.data = torch.from_numpy(array)
return model
class CutoffBertPreTrainedModel(PreTrainedModel):
"""
An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
models.
"""
config_class = CutoffBertConfig
load_tf_weights = load_tf_weights_in_cutoffbert
base_model_prefix = "bert"
_keys_to_ignore_on_load_missing = [r"position_ids"]
def _init_weights(self, module):
"""Initialize the weights"""
if isinstance(module, nn.Linear):
# Slightly different from the TF version which uses truncated_normal for initialization
# cf https://github.com/pytorch/pytorch/pull/5617
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if module.bias is not None:
module.bias.data.zero_()
elif isinstance(module, nn.Embedding):
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if module.padding_idx is not None:
module.weight.data[module.padding_idx].zero_()
elif isinstance(module, nn.LayerNorm):
module.bias.data.zero_()
module.weight.data.fill_(1.0)
CUTOFFBERT_START_DOCSTRING = r"""
This model inherits from :class:`~transformers.PreTrainedModel`. Check the superclass documentation for the generic
methods the library implements for all its models (such as downloading or saving, resizing the input embeddings,
pruning heads etc.)
This model is also a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`__
subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to
general usage and behavior.
Parameters:
config (:class:`~transformers.BertConfig`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model
weights.
"""
CUTOFFBERT_INPUTS_DOCSTRING = r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
Indices of input sequence tokens in the vocabulary.
Indices can be obtained using :class:`~transformers.BertTokenizer`. See
:meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
details.
`What are input IDs? <../glossary.html#input-ids>`__
attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
token_type_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
1]``:
- 0 corresponds to a `sentence A` token,
- 1 corresponds to a `sentence B` token.
`What are token type IDs? <../glossary.html#token-type-ids>`_
position_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
config.max_position_embeddings - 1]``.
`What are position IDs? <../glossary.html#position-ids>`_
head_mask (:obj:`torch.FloatTensor` of shape :obj:`(num_heads,)` or :obj:`(num_layers, num_heads)`, `optional`):
Mask to nullify selected heads of the self-attention modules. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert :obj:`input_ids` indices into associated
vectors than the model's internal embedding lookup matrix.
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
@add_start_docstrings(
"The bare CutoffBert Model transformer outputting raw hidden-states without any specific head on top.",
CUTOFFBERT_START_DOCSTRING,
)
class CutoffBertModel(CutoffBertPreTrainedModel):
"""
The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of
cross-attention is added between the self-attention layers, following the architecture described in `Attention is
all you need <https://arxiv.org/abs/1706.03762>`__ by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit,
Llion Jones, Aidan N. Gomez, Lukasz Kaiser and Illia Polosukhin.
To behave as a decoder the model needs to be initialized with the :obj:`is_decoder` argument of the configuration
set to :obj:`True`. To be used in a Seq2Seq model, the model needs to be initialized with both :obj:`is_decoder`
argument and :obj:`add_cross_attention` set to :obj:`True`; an :obj:`encoder_hidden_states` is then expected as an
input to the forward pass.
"""
def __init__(self, config, add_pooling_layer=True):
super().__init__(config)
self.config = config
self.embeddings = CutoffBertEmbeddings(config)
self.encoder = CutoffBertEncoder(config)
self.pooler = CutoffBertPooler(config) if add_pooling_layer else None
self.init_weights()
def get_input_embeddings(self):
return self.embeddings.word_embeddings
def set_input_embeddings(self, value):
self.embeddings.word_embeddings = value
def _prune_heads(self, heads_to_prune):
"""
Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
class PreTrainedModel
"""
for layer, heads in heads_to_prune.items():
self.encoder.layer[layer].attention.prune_heads(heads)
@add_start_docstrings_to_model_forward(CUTOFFBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=BaseModelOutputWithPoolingAndCrossAttentions,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
past_key_values=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
the model is configured as a decoder.
encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.
If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`
(those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.
use_cache (:obj:`bool`, `optional`):
If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
decoding (see :obj:`past_key_values`).
"""
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if self.config.is_decoder:
use_cache = use_cache if use_cache is not None else self.config.use_cache
else:
use_cache = False
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
elif input_ids is not None:
input_shape = input_ids.size()
batch_size, seq_length = input_shape
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
batch_size, seq_length = input_shape
else:
raise ValueError("You have to specify either input_ids or inputs_embeds")
device = input_ids.device if input_ids is not None else inputs_embeds.device
# past_key_values_length
past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0
if attention_mask is None:
attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device)
if token_type_ids is None:
if hasattr(self.embeddings, "token_type_ids"):
buffered_token_type_ids = self.embeddings.token_type_ids[:, :seq_length]
buffered_token_type_ids_expanded = buffered_token_type_ids.expand(batch_size, seq_length)
token_type_ids = buffered_token_type_ids_expanded
else:
token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)
# We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
# ourselves in which case we just need to make it broadcastable to all heads.
extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape, device)
# If a 2D or 3D attention mask is provided for the cross-attention
# we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length]
if self.config.is_decoder and encoder_hidden_states is not None:
encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
if encoder_attention_mask is None:
encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
else:
encoder_extended_attention_mask = None
# Prepare head mask if needed
# 1.0 in head_mask indicate we keep the head
# attention_probs has shape bsz x n_heads x N x N
# input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
# and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)
embedding_output = self.embeddings(
input_ids=input_ids,
position_ids=position_ids,
token_type_ids=token_type_ids,
inputs_embeds=inputs_embeds,
past_key_values_length=past_key_values_length,
)
encoder_outputs = self.encoder(
embedding_output,
attention_mask=extended_attention_mask,
head_mask=head_mask,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_extended_attention_mask,
past_key_values=past_key_values,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = encoder_outputs[0]
pooled_output = self.pooler(sequence_output) if self.pooler is not None else None
if not return_dict:
return (sequence_output, pooled_output) + encoder_outputs[1:]
return BaseModelOutputWithPoolingAndCrossAttentions(
last_hidden_state=sequence_output,
pooler_output=pooled_output,
past_key_values=encoder_outputs.past_key_values,
hidden_states=encoder_outputs.hidden_states,
attentions=encoder_outputs.attentions,
cross_attentions=encoder_outputs.cross_attentions,
)
@add_start_docstrings(
"""
CutoffBert Model transformer with a sequence classification head on top (a linear layer on top of the pooled
output) + Cut-off data augmentation support.
""",
CUTOFFBERT_START_DOCSTRING,
)
class CutoffBertForSequenceClassification(CutoffBertPreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.num_labels = config.num_labels
self.cls_token_id = config.cls_token_id
self.sep_token_id = config.sep_token_id
self.mask_token_id = config.mask_token_id
self.masking_prob = config.cutoff_masking_prob
self.temperature = config.cutoff_temperature
self.mask_loss_wgt = config.cutoff_mask_loss_wgt
self.js_loss_wgt = config.cutoff_js_loss_wgt
self.config = config
self.bert = CutoffBertModel(config)
classifier_dropout = (
config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
)
self.dropout = nn.Dropout(classifier_dropout)
self.classifier = nn.Linear(config.hidden_size, config.num_labels)
self.init_weights()
def _apply_cutoff(self, inputs):
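# Cut-off augmentation: randomly replace a fraction (masking_prob) of the
# non-special tokens (neither [CLS] nor [SEP]) with the mask token id.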
masked_inputs = inputs.clone()
valid_masking_indices = (inputs != self.cls_token_id) & (inputs != self.sep_token_id)
random_masking_indices = torch.bernoulli(torch.full(inputs.shape, self.masking_prob, device=inputs.device)).bool()
masking_indices = random_masking_indices & valid_masking_indices
masked_inputs[masking_indices] = self.mask_token_id
return masked_inputs
@add_start_docstrings_to_model_forward(CUTOFFBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=SequenceClassifierOutput,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
labels=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Labels for computing the sequence classification/regression loss. Indices should be in :obj:`[0, ...,
config.num_labels - 1]`. If :obj:`config.num_labels == 1` a regression loss is computed (Mean-Square loss),
If :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy).
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if labels is None:
outputs = self.bert(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
pooled_output = self.dropout(outputs[1])
logits = self.classifier(pooled_output)
if not return_dict:
return (logits,) + outputs[2:]
return SequenceClassifierOutput(
logits=logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
b, l = input_ids.size()
masked_input_ids = self._apply_cutoff(input_ids.clone())
flatten_input_ids = torch.stack((input_ids, masked_input_ids), dim=1).reshape(-1, l)
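# The original and cut-off inputs are interleaved, so each example contributes
# two consecutive rows to the flattened batch.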
flatten_attention_mask = attention_mask.unsqueeze(1).expand(-1, 2, -1).reshape(-1, l) if attention_mask is not None else None
flatten_token_type_ids = token_type_ids.unsqueeze(1).expand(-1, 2, -1).reshape(-1, l) if token_type_ids is not None else None
flatten_position_ids = position_ids.unsqueeze(1).expand(-1, 2, -1).reshape(-1, l) if position_ids is not None else None
flatten_inputs_embeds = inputs_embeds.unsqueeze(1).expand(-1, 2, -1, -1).reshape(-1, l, self.config.hidden_size) if inputs_embeds is not None else None
flatten_outputs = self.bert(
flatten_input_ids,
attention_mask=flatten_attention_mask,
token_type_ids=flatten_token_type_ids,
position_ids=flatten_position_ids,
head_mask=head_mask,
inputs_embeds=flatten_inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
flatten_pooled_output = self.dropout(flatten_outputs[1])
flatten_logits = self.classifier(flatten_pooled_output)
logits, masked_logits = flatten_logits.reshape(b, 2, self.config.num_labels).chunk(2, dim=1)
logits, masked_logits = logits.squeeze(dim=1).contiguous(), masked_logits.squeeze(dim=1).contiguous()
loss_fct = CrossEntropyLoss()
loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
if self.mask_loss_wgt is not None and self.mask_loss_wgt > 0.0:
mask_loss = loss_fct(masked_logits.view(-1, self.num_labels), labels.view(-1))
loss += mask_loss * self.mask_loss_wgt
if self.js_loss_wgt is not None and self.js_loss_wgt > 0.0:
kl_loss_fct = KLDivLoss(reduction="batchmean")
src_logits, trg_logits = logits, masked_logits
mean_logits = (src_logits + trg_logits) * 0.5
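# Jensen-Shannon consistency: average the two KL divergences from the
# temperature-scaled original and masked predictions to their mean distribution.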
src_loss = kl_loss_fct(
F.log_softmax(src_logits / self.temperature, dim=-1),
F.softmax(mean_logits / self.temperature, dim=-1)
) * (self.temperature ** 2)
trg_loss = kl_loss_fct(
F.log_softmax(trg_logits / self.temperature, dim=-1),
F.softmax(mean_logits / self.temperature, dim=-1)
) * (self.temperature ** 2)
js_loss = (src_loss + trg_loss) * 0.5
loss += js_loss * self.js_loss_wgt
if not return_dict:
return (loss, logits)
return SequenceClassifierOutput(
loss=loss,
logits=logits,
)
| [
[
[
751,
755
]
],
[
[
763,
765
],
[
3076,
3078
]
],
[
[
773,
781
]
],
[
[
789,
800
]
],
[
[
825,
834
]
],
[
[
854,
862
]
],
[
[
864,
869
]
],
[
[
878,
883
]
],
[
[
891,
913
],
[
5527,
5532
],
[
16193,
16198
],
[
16679,
16684
],
[
16710,
16715
],
[
16963,
16968
],
[
17547,
17552
],
[
20965,
20970
],
[
20981,
20986
],
[
23368,
23373
]
],
[
[
921,
945
],
[
25408,
25409
],
[
25478,
25479
],
[
25620,
25621
],
[
25690,
25691
]
],
[
[
968,
975
]
],
[
[
994,
996
],
[
6046,
6048
],
[
6417,
6419
],
[
6658,
6660
],
[
20622,
20624
],
[
20679,
20681
]
],
[
[
1018,
1035
]
],
[
[
1037,
1053
],
[
24802,
24818
]
],
[
[
1055,
1062
]
],
[
[
1064,
1073
],
[
25206,
25215
]
],
[
[
1111,
1115
]
],
[
[
1144,
1150
]
],
[
[
1183,
1194
]
],
[
[
1200,
1226
],
[
12559,
12585
],
[
21333,
21359
]
],
[
[
1232,
1252
],
[
10566,
10586
],
[
19595,
19615
]
],
[
[
1258,
1295
],
[
12449,
12486
],
[
21223,
21260
]
],
[
[
1301,
1326
]
],
[
[
1368,
1409
]
],
[
[
1415,
1459
],
[
12691,
12735
],
[
19216,
19260
]
],
[
[
1465,
1498
]
],
[
[
1504,
1518
]
],
[
[
1524,
1549
]
],
[
[
1555,
1582
]
],
[
[
1588,
1616
]
],
[
[
1622,
1646
],
[
21465,
21489
],
[
23071,
23095
],
[
25956,
25980
]
],
[
[
1652,
1673
]
],
[
[
1679,
1708
]
],
[
[
1748,
1763
],
[
5602,
5617
]
],
[
[
1769,
1794
]
],
[
[
1800,
1832
]
],
[
[
1838,
1856
]
],
[
[
1881,
1888
],
[
2157,
2164
]
],
[
[
1927,
1943
],
[
5785,
5801
]
],
[
[
1977,
2015
],
[
11745,
11765
]
],
[
[
2049,
2081
],
[
11797,
11814
]
],
[
[
2115,
2145
],
[
11846,
11862
]
],
[
[
2148,
2154
],
[
2841,
2847
],
[
3116,
3122
],
[
3335,
3341
],
[
3913,
3919
],
[
4820,
4826
],
[
5455,
5461
]
],
[
[
2187,
2206
],
[
12650,
12669
],
[
21424,
21443
]
],
[
[
2229,
2244
],
[
12758,
12773
],
[
21512,
21527
]
],
[
[
2266,
2284
],
[
12611,
12629
],
[
21385,
21403
]
],
[
[
2310,
2350
]
],
[
[
2606,
2635
],
[
5824,
5853
]
],
[
[
5576,
5601
],
[
10752,
10777
],
[
19872,
19897
]
],
[
[
6754,
6780
],
[
10700,
10726
],
[
19800,
19826
]
],
[
[
7692,
7719
],
[
12487,
12514
],
[
21261,
21288
]
],
[
[
10736,
10751
],
[
20423,
20438
]
],
[
[
19836,
19871
]
]
] |
#!/usr/bin/env python3
# Very basic bitstream to SVF converter
# This file is Copyright (c) 2018 David Shah <[email protected]>
import sys
import textwrap
max_row_size = 100000
def bitreverse(x):
y = 0
for i in range(8):
if (x >> (7 - i)) & 1 == 1:
y |= (1 << i)
return y
def bit_to_svf(bit, svf):
with open(bit, 'rb') as bitf:
bs = bitf.read()
# Autodetect IDCODE from bitstream
idcode_cmd = bytes([0xE2, 0x00, 0x00, 0x00])
idcode = None
for i in range(len(bs) - 4):
if bs[i:i+4] == idcode_cmd:
idcode = bs[i+4] << 24
idcode |= bs[i+5] << 16
idcode |= bs[i+6] << 8
idcode |= bs[i+7]
break
if idcode is None:
print("Failed to find IDCODE in bitstream, check bitstream is valid")
sys.exit(1)
print("IDCODE in bitstream is 0x%08x" % idcode)
bitf.seek(0)
with open(svf, 'w') as svf:
print("""
HDR 0;
HIR 0;
TDR 0;
TIR 0;
ENDDR DRPAUSE;
ENDIR IRPAUSE;
STATE IDLE;
""", file=svf)
print("""
SIR 8 TDI (E0);
SDR 32 TDI (00000000)
TDO ({:08X})
MASK (FFFFFFFF);
""".format(idcode), file=svf)
print("""
SIR 8 TDI (1C);
SDR 510 TDI (3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF);
SIR 8 TDI (C6);
SDR 8 TDI (00);
RUNTEST IDLE 2 TCK 1.00E-02 SEC;
SIR 8 TDI (3C);
SDR 32 TDI (00000000)
TDO (00000000)
MASK (0000B000);
SIR 8 TDI (46);
SDR 8 TDI (01);
RUNTEST IDLE 2 TCK 1.00E-02 SEC;
SIR 8 TDI (7A);
RUNTEST IDLE 2 TCK 1.00E-02 SEC;
""", file=svf)
while True:
chunk = bitf.read(max_row_size//8)
if not chunk:
break
# Convert chunk to bit-reversed hex
br_chunk = [bitreverse(x) for x in chunk]
hex_chunk = ["{:02X}".format(x) for x in reversed(br_chunk)]
print("\n".join(textwrap.wrap("SDR {} TDI ({});".format(8*len(chunk), "".join(hex_chunk)), 100)), file=svf)
print("""
SIR 8 TDI (FF);
RUNTEST IDLE 100 TCK 1.00E-02 SEC;
SIR 8 TDI (C0);
RUNTEST IDLE 2 TCK 1.00E-03 SEC;
SDR 32 TDI (00000000)
TDO (00000000)
MASK (FFFFFFFF);
! Shift in ISC DISABLE(0x26) instruction
SIR 8 TDI (26);
RUNTEST IDLE 2 TCK 2.00E-01 SEC;
! Shift in BYPASS(0xFF) instruction
SIR 8 TDI (FF);
RUNTEST IDLE 2 TCK 1.00E-03 SEC;
! Shift in LSC_READ_STATUS(0x3C) instruction
SIR 8 TDI (3C);
SDR 32 TDI (00000000)
TDO (00000100)
MASK (00002100);
""", file=svf)
if __name__ == "__main__":
bit_to_svf(sys.argv[1], sys.argv[2]) | [
[
[
131,
134
],
[
2976,
2979
],
[
2989,
2992
],
[
878,
881
]
],
[
[
142,
150
],
[
2230,
2238
]
],
[
[
152,
164
],
[
1938,
1950
]
],
[
[
179,
189
],
[
2091,
2101
]
],
[
[
307,
317
],
[
2965,
2975
]
]
] |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Manages the details on the images used in the build and the run stage."""
import json
import os.path
#: Global variable used to cache in memory the content of images.json
_data = None
def data():
"""Returns a dictionary with the static data on the images.
The dictionary is read from a JSON file lazily the first time
this function is called.
"""
global _data
if not _data:
json_dir = os.path.abspath(os.path.dirname(__file__))
json_file = os.path.join(json_dir, 'images.json')
with open(json_file) as f:
_data = json.load(f)
return _data
def build_info(image, spack_version):
"""Returns the name of the build image and its tag.
Args:
image (str): image to be used at run-time. Should be of the form
<image_name>:<image_tag> e.g. "ubuntu:18.04"
spack_version (str): version of Spack that we want to use to build
Returns:
A tuple with (image_name, image_tag) for the build image
"""
# Don't handle error here, as a wrong image should have been
# caught by the JSON schema
image_data = data()[image]
build_image = image_data['build']
# Try to check if we have a tag for this Spack version
try:
# Translate version from git to docker if necessary
build_tag = image_data['build_tags'].get(spack_version, spack_version)
except KeyError:
msg = ('the image "{0}" has no tag for Spack version "{1}" '
'[valid versions are {2}]')
msg = msg.format(build_image, spack_version,
', '.join(image_data['build_tags'].keys()))
raise ValueError(msg)
return build_image, build_tag
def package_info(image):
"""Returns the commands used to update system repositories, install
system packages and clean afterwards.
Args:
image (str): image to be used at run-time. Should be of the form
<image_name>:<image_tag> e.g. "ubuntu:18.04"
Returns:
A tuple of (update, install, clean) commands.
"""
image_data = data()[image]
update = image_data['update']
install = image_data['install']
clean = image_data['clean']
return update, install, clean
| [
[
[
281,
285
],
[
777,
781
]
],
[
[
293,
300
],
[
621,
623
],
[
637,
639
],
[
684,
686
]
],
[
[
372,
377
],
[
595,
600
]
],
[
[
391,
395
],
[
1320,
1324
],
[
2275,
2279
]
],
[
[
813,
823
]
],
[
[
1906,
1918
]
],
[
[
769,
774
],
[
801,
806
]
]
] |
# Name: VolumeExtractChannel
import inviwopy as ivw
import numpy as np
class VolumeExtractChannel(ivw.Processor):
def __init__(self, id, name):
ivw.Processor.__init__(self, id, name)
self.inport = ivw.data.VolumeInport("inport")
self.addInport(self.inport, owner=False)
self.outport = ivw.data.VolumeOutport("outport")
self.addOutport(self.outport, owner=False)
self.channel = ivw.properties.IntProperty("channel", "channel", 0, 0, 4, 1)
self.addProperty(self.channel, owner=False)
@staticmethod
def processorInfo():
return ivw.ProcessorInfo(
classIdentifier = "org.inviwo.VolumeExtractChannel",
displayName = "Volume Extract Channel",
category = "Volume Operation",
codeState = ivw.CodeState.Stable,
tags = ivw.Tags.PY
)
def getProcessorInfo(self):
return VolumeExtractChannel.processorInfo()
def process(self):
volume = self.inport.getData()
if len(volume.data.shape) <= 3:
self.outport.setData(volume)
return
channels = volume.data.shape[3]
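        # Extract the selected channel, clamping the property value to the valid range [0, channels - 1].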
volumeSlice = volume.data[:,:,:, np.clip(self.channel.value, 0, channels-1)]
newVolume = ivw.data.Volume(volumeSlice)
newVolume.dataMap = volume.dataMap
newVolume.modelMatrix = volume.modelMatrix
newVolume.worldMatrix = volume.worldMatrix
newVolume.copyMetaDataFrom(volume)
newVolume.swizzlemask = volume.swizzlemask
newVolume.interpolation = volume.interpolation
newVolume.wrapping = volume.wrapping
self.outport.setData(newVolume)
| [
[
[
38,
53
],
[
101,
104
],
[
159,
162
],
[
220,
223
],
[
324,
327
],
[
433,
436
],
[
605,
608
],
[
785,
788
],
[
820,
823
],
[
1239,
1242
]
],
[
[
61,
72
],
[
1175,
1177
]
],
[
[
80,
100
],
[
890,
910
]
]
] |
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as public by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
| [] |
# -*- test-case-name: twisted.web2.test.test_httpauth -*-
from twisted.cred import credentials, error
from twisted.web2.auth.interfaces import ICredentialFactory
from zope.interface import implements
class BasicCredentialFactory(object):
"""
Credential Factory for HTTP Basic Authentication
"""
implements(ICredentialFactory)
scheme = 'basic'
def __init__(self, realm):
self.realm = realm
def getChallenge(self, peer):
return {'realm': self.realm}
def decode(self, response, request):
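        # Basic credentials arrive base64-encoded as "user:password"; appending '===' tolerates missing padding before decoding.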
try:
creds = (response + '===').decode('base64')
except:
raise error.LoginFailed('Invalid credentials')
creds = creds.split(':', 1)
if len(creds) == 2:
return credentials.UsernamePassword(*creds)
else:
raise error.LoginFailed('Invalid credentials')
| [
[
[
84,
95
],
[
769,
780
]
],
[
[
97,
102
],
[
644,
649
],
[
838,
843
]
],
[
[
144,
162
],
[
326,
344
]
],
[
[
191,
201
],
[
315,
325
]
],
[
[
209,
231
]
]
] |
from couchbase.management.admin import Admin
from couchbase_core.mapper import BijectiveMapping, \
StringEnum, Identity, Timedelta, Bijection, StringEnumLoose
from ..options import OptionBlockTimeOut, forward_args
from couchbase.management.generic import GenericManager
from typing import *
from couchbase_core import abstractmethod, mk_formstr
from couchbase_core.durability import Durability
from couchbase.exceptions import HTTPException, ErrorMapper, BucketAlreadyExistsException, BucketDoesNotExistException
import enum
import datetime
class BucketManagerErrorHandler(ErrorMapper):
@staticmethod
def mapping():
# type (...)->Mapping[str, CBErrorType]
return {HTTPException: {'Bucket with given name (already|still) exists': BucketAlreadyExistsException,
'Requested resource not found': BucketDoesNotExistException}}
@BucketManagerErrorHandler.wrap
class BucketManager(GenericManager):
def __init__(self, # type: BucketManager
admin_bucket # type: Admin
):
"""Bucket Manager
:param admin_bucket: Admin bucket
"""
super(BucketManager, self).__init__(admin_bucket)
def create_bucket(self, # type: BucketManager
settings, # type: CreateBucketSettings
*options, # type: CreateBucketOptions
**kwargs # type: Any
):
"""
Creates a new bucket.
:param: CreateBucketSettings settings: settings for the bucket.
:param: CreateBucketOptions options: options for setting the bucket.
:param: Any kwargs: override corresponding values in the options.
:raises: BucketAlreadyExistsException
:raises: InvalidArgumentsException
"""
# prune the missing settings...
        params = settings.as_dict()  # (*options, **kwargs)
# insure flushEnabled is an int
params['flushEnabled'] = int(params.get('flushEnabled', 0))
# send it
return self._admin_bucket.http_request(
path='/pools/default/buckets',
method='POST',
content=mk_formstr(params),
content_type='application/x-www-form-urlencoded',
**forward_args(kwargs, *options))
def update_bucket(self, # type: BucketManager
settings, # type: BucketSettings
*options, # type: UpdateBucketOptions
**kwargs # type: Any
):
"""
Updates a bucket. Every setting must be set to what the user wants it to be after the update.
Any settings that are not set to their desired values may be reverted to default values by the server.
:param BucketSettings settings: settings for updating the bucket.
:param UpdateBucketOptions options: options for updating the bucket.
:param Any kwargs: override corresponding values in the options.
:raises: InvalidArgumentsException
:raises: BucketDoesNotExistException
"""
# prune the missing settings...
        params = settings.as_dict()  # (*options, **kwargs)
# insure flushEnabled is an int
params['flushEnabled'] = int(params.get('flushEnabled', 0))
# send it
return self._admin_bucket.http_request(
path='/pools/default/buckets/' + settings.name,
method='POST',
content_type='application/x-www-form-urlencoded',
content=mk_formstr(params),
**forward_args(kwargs, *options))
def drop_bucket(self, # type: BucketManager
bucket_name, # type: str
*options, # type: DropBucketOptions
**kwargs # type: Any
):
# type: (...) -> None
"""
Removes a bucket.
:param str bucket_name: the name of the bucket.
:param DropBucketOptions options: options for dropping the bucket.
:param Any kwargs: override corresponding value in the options.
:raises: BucketNotFoundException
:raises: InvalidArgumentsException
"""
return self._admin_bucket.http_request(
path='/pools/default/buckets/' + bucket_name,
method='DELETE',
**forward_args(kwargs, *options))
def get_bucket(self, # type: BucketManager
bucket_name, # type: str
*options, # type: GetBucketOptions
**kwargs # type: Any
):
# type: (...) -> BucketSettings
"""
Gets a bucket's settings.
:param str bucket_name: the name of the bucket.
:param GetBucketOptions options: options for getting the bucket.
:param Any kwargs: override corresponding values in options.
:returns: settings for the bucket. Note: the ram quota returned is in bytes
not mb so requires x / 1024 twice. Also Note: FlushEnabled is not a setting returned by the server, if flush is enabled then the doFlush endpoint will be listed and should be used to populate the field.
:rtype: BucketSettings
:raises: BucketNotFoundException
:raises: InvalidArgumentsException
"""
return BucketSettings.from_raw(
self._admin_bucket.http_request(
path='/pools/default/buckets/' + bucket_name,
method='GET',
**forward_args(kwargs, *options)
).value)
def get_all_buckets(self, # type: BucketManager
*options, # type: GetAllBucketOptions
**kwargs # type: Any
):
# type: (...) -> Iterable[BucketSettings]
"""
Gets all bucket settings. Note, # type: the ram quota returned is in bytes
not mb so requires x / 1024 twice.
:param GetAllBucketOptions options: options for getting all buckets.
:param Any kwargs: override corresponding value in options.
:returns: An iterable of settings for each bucket.
:rtype: Iterable[BucketSettings]
"""
return list(
map(lambda x: BucketSettings(**x),
self._admin_bucket.http_request(
path='/pools/default/buckets',
method='GET',
**forward_args(kwargs, *options)
).value))
def flush_bucket(self, # type: BucketManager
bucket_name, # type: str
*options, # type: FlushBucketOptions
**kwargs # type: Any
):
# using the ns_server REST interface
"""
Flushes a bucket (uses the ns_server REST interface).
:param str bucket_name: the name of the bucket.
:param FlushBucketOptions options: options for flushing the bucket.
:param Any kwargs: override corresponding value in options.
:raises: BucketNotFoundException
:raises: InvalidArgumentsException
:raises: FlushDisabledException
"""
self._admin_bucket.http_request(
path="/pools/default/buckets/{bucket_name}/controller/doFlush".format(bucket_name=bucket_name),
method='POST',
**forward_args(kwargs, *options))
class EvictionPolicyType(enum.Enum):
NOT_RECENTLY_USED = "nruEviction"
NO_EVICTION = "noEviction"
FULL = "fullEviction"
VALUE_ONLY = "valueOnly"
class EjectionMethod(enum.Enum):
FULL_EVICTION = "fullEviction"
VALUE_ONLY = "valueOnly"
class BucketType(enum.Enum):
COUCHBASE = "membase"
MEMCACHED = "memcached"
EPHEMERAL = "ephemeral"
class CompressionMode(enum.Enum):
OFF = "off"
PASSIVE = "passive"
ACTIVE = "active"
class ConflictResolutionType(enum.Enum):
TIMESTAMP = "lww"
SEQUENCE_NUMBER = "seqno"
class BucketSettings(dict):
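    # BijectiveMapping translates between the server's camelCase field names and the snake_case keys used by this class; per-field converter objects handle the value coercion in both directions.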
mapping = BijectiveMapping({'flushEnabled': {'flush_enabled': Bijection(int.__bool__, bool.__int__)},
'numReplicas': {'num_replicas': Identity(int)},
'ramQuotaMB': {'ram_quota_mb': Identity(int)},
'replicaNumber': {'num_replicas': Identity(int)},
'replicaIndex': {'replica_index': Identity(bool)},
'bucketType': {'bucket_type': -StringEnumLoose(BucketType)},
'maxTTL': {'max_ttl': -Timedelta(int)},
'compressionMode': {'compression_mode': -StringEnum(CompressionMode)},
'conflictResolutionType': {
'conflict_resolution_type': -StringEnumLoose(ConflictResolutionType)},
'evictionPolicy': {'eviction_policy': -StringEnumLoose(EvictionPolicyType)},
'ejectionMethod': {'ejection_method': -StringEnumLoose(EjectionMethod)},
'name': {'name': Identity(str)},
'durabilityMinLevel': {'minimum_durability_level': Identity(str)}})
@overload
def __init__(self,
name=None, # type: str
flush_enabled=False, # type: bool
ram_quota_mb=None, # type: int
num_replicas=None, # type: int
replica_index=None, # type: bool
bucket_type=None, # type: BucketType
eviction_policy=None, # type: EvictionPolicyType
max_ttl=None, # type: Union[datetime.timedelta,float,int]
compression_mode=None # type: CompressionMode
):
# type: (...) -> None
pass
def __init__(self, **kwargs):
"""BucketSettings provides a means of mapping bucket settings into an object.
"""
if kwargs.get('bucket_type',None) == "couchbase":
kwargs['bucket_type'] = BucketType.COUCHBASE
"""
PYCBC-956
Bucket min durability setting is represented as string on the wire.
See Durability enum for string representations
"""
durability = kwargs.pop('minimum_durability_level', None)
if durability:
if isinstance(durability, Durability):
kwargs['minimum_durability_level'] = durability.to_server_str()
else:
kwargs['minimum_durability_level'] = Durability.from_server_str(durability)
super(BucketSettings, self).__init__(**self.mapping.sanitize_src(kwargs))
def as_dict(self, *options, **kwargs):
final_opts = dict(**Admin.bc_defaults)
final_opts.update(**forward_args(kwargs,*options))
params=self.mapping.to_src(self)
params.update({
'authType': 'sasl',
'saslPassword': final_opts['bucket_password']
})
return params
@classmethod
def from_raw(cls,
raw_info # type: Mapping[str, Any]
):
# type: (...) -> BucketSettings
result = cls(**cls.mapping.to_dest(raw_info))
quota = raw_info.get('quota', {})
# convert rawRAM to MB
if 'rawRAM' in quota:
result['ram_quota_mb'] = quota.get('rawRAM') / 1024 / 1024
else:
result['ram_quota_mb'] = None
controllers = raw_info.get('controllers', {})
result['flush_enabled'] = ('flush' in controllers)
return result
@property
def name(self):
# type: (...) -> str
"""Name (string) - The name of the bucket."""
return self.get('name')
@property
def flush_enabled(self):
# type: (...) -> bool
"""Whether or not flush should be enabled on the bucket. Default to false."""
return self.get('flush_enabled', False)
@property
def ram_quota_mb(self):
# type: (...) -> int
"""Ram Quota in mb for the bucket. (rawRAM in the server payload)"""
return self.get('ram_quota_mb')
@property
def num_replicas(self):
# type: (...) -> int
"""NumReplicas (int) - The number of replicas for documents."""
return self.get('replica_number')
@property
def replica_index(self):
# type: (...) -> bool
""" Whether replica indexes should be enabled for the bucket."""
return self.get('replica_index')
@property
def bucket_type(self):
# type: (...) -> BucketType
"""BucketType {couchbase (sent on wire as membase), memcached, ephemeral}
The type of the bucket. Default to couchbase."""
return self.get('bucketType')
@property
def eviction_policy(self):
# type: (...) -> EvictionPolicyType
"""{fullEviction | valueOnly}. The eviction policy to use."""
return self.get('eviction_policy')
@property
def max_ttl(self):
# type: (...) -> datetime.timedelta
"""Value for the maxTTL of new documents created without a ttl."""
return self.get('max_ttl')
@property
def compression_mode(self):
# type: (...) -> CompressionMode
"""{off | passive | active} - The compression mode to use."""
return self.get('compression_mode')
class CreateBucketSettings(BucketSettings):
@overload
def __init__(self,
name=None, # type: str
flush_enabled=False, # type: bool
ram_quota_mb=None, # type: int
num_replicas=None, # type: int
replica_index=None, # type: bool
bucket_type=None, # type: BucketType
eviction_policy=None, # type: EvictionPolicyType
max_ttl=None, # type: Union[datetime.timedelta,float,int]
compression_mode=None, # type: CompressionMode
conflict_resolution_type=None, # type: ConflictResolutionType
bucket_password=None, # type: str
ejection_method=None # type: EjectionMethod
):
"""
Bucket creation settings.
:param name: name of the bucket
:param flush_enabled: whether flush is enabled
:param ram_quota_mb: raw quota in megabytes
:param num_replicas: number of replicas
:param replica_index: whether this is a replica index
:param bucket_type: type of bucket
:param eviction_policy: policy for eviction
:param max_ttl: max time to live for bucket
:param compression_mode: compression mode
:param ejection_method: ejection method (deprecated, please use eviction_policy instead)
"""
def __init__(self, **kwargs):
BucketSettings.__init__(self, **kwargs)
@property
def conflict_resolution_type(self):
# type: (...) -> ConflictResolutionType
return self.get('conflict_resolution_type')
class CreateBucketOptions(OptionBlockTimeOut):
pass
class UpdateBucketOptions(OptionBlockTimeOut):
pass
class DropBucketOptions(OptionBlockTimeOut):
pass
class GetAllBucketOptions(OptionBlockTimeOut):
pass
class GetBucketOptions(OptionBlockTimeOut):
pass
class FlushBucketOptions(OptionBlockTimeOut):
pass
| [
[
[
39,
44
],
[
10792,
10797
]
],
[
[
79,
95
],
[
8037,
8053
]
],
[
[
103,
113
],
[
8691,
8701
]
],
[
[
115,
123
],
[
8193,
8201
],
[
8272,
8280
],
[
8354,
8362
],
[
8436,
8444
],
[
9151,
9159
],
[
9250,
9258
]
],
[
[
125,
134
],
[
8601,
8610
]
],
[
[
136,
145
],
[
8089,
8098
]
],
[
[
147,
162
],
[
8516,
8531
],
[
8846,
8861
],
[
8959,
8974
],
[
9068,
9083
]
],
[
[
185,
203
],
[
15069,
15087
],
[
15127,
15145
],
[
15183,
15201
],
[
15241,
15259
],
[
15296,
15314
],
[
15353,
15371
]
],
[
[
205,
217
],
[
2285,
2297
],
[
3584,
3596
],
[
4366,
4378
],
[
5527,
5539
],
[
6445,
6457
],
[
7393,
7405
],
[
10839,
10851
]
],
[
[
259,
273
],
[
937,
951
]
],
[
[
293,
294
],
[
9273,
9281
],
[
13455,
13463
]
],
[
[
322,
336
]
],
[
[
338,
348
],
[
2189,
2199
],
[
3550,
3560
]
],
[
[
387,
397
],
[
10434,
10444
],
[
10598,
10608
]
],
[
[
431,
444
],
[
694,
707
]
],
[
[
446,
457
],
[
579,
590
]
],
[
[
459,
487
],
[
759,
787
]
],
[
[
489,
516
],
[
853,
880
]
],
[
[
524,
528
],
[
7452,
7456
],
[
7611,
7615
],
[
7706,
7710
],
[
7824,
7828
],
[
7929,
7933
]
],
[
[
536,
544
]
],
[
[
553,
578
],
[
886,
911
]
],
[
[
923,
936
],
[
1167,
1180
]
],
[
[
7433,
7451
],
[
8975,
8993
]
],
[
[
7596,
7610
],
[
9084,
9098
]
],
[
[
7695,
7705
],
[
8532,
8542
],
[
10100,
10110
]
],
[
[
7808,
7823
],
[
8702,
8717
]
],
[
[
7906,
7928
],
[
8862,
8884
]
],
[
[
8001,
8015
],
[
13433,
13447
],
[
5355,
5369
],
[
10652,
10666
],
[
14846,
14860
],
[
6268,
6282
]
],
[
[
13412,
13432
]
],
[
[
15049,
15068
]
],
[
[
15107,
15126
]
],
[
[
15165,
15182
]
],
[
[
15221,
15240
]
],
[
[
15279,
15295
]
],
[
[
15334,
15352
]
]
] |
"""
This module implements the core class hierarchy for implementing EO tasks. An EO task is any class the inherits
from the abstract EOTask class. Each EO task has to implement the execute method; invoking __call__ on a EO task
instance invokes the execute method. EO tasks are meant primarily to operate on EO patches (i.e. instances of EOPatch).
EO task classes are generally lightweight (i.e. not too complicated), short, and do one thing well. For example, an
EO task might take as input an EOPatch containing cloud mask and return as a result the cloud coverage for that mask.
Credits:
Copyright (c) 2017-2019 Matej Aleksandrov, Matej Batič, Andrej Burja, Eva Erzin (Sinergise)
Copyright (c) 2017-2019 Grega Milčinski, Matic Lubej, Devis Peresutti, Jernej Puc, Tomislav Slijepčević (Sinergise)
Copyright (c) 2017-2019 Blaž Sovdat, Jovan Višnjić, Anže Zupanc, Lojze Žust (Sinergise)
This source code is licensed under the MIT license found in the LICENSE
file in the root directory of this source tree.
"""
import sys
import logging
import datetime
import inspect
from collections import OrderedDict
from abc import ABC, abstractmethod
import attr
from .utilities import FeatureParser
LOGGER = logging.getLogger(__name__)
class EOTask(ABC):
"""Base class for EOTask."""
def __new__(cls, *args, **kwargs):
"""Stores initialization parameters and the order to the instance attribute `init_args`."""
self = super().__new__(cls)
init_args = OrderedDict()
for arg, value in zip(inspect.getfullargspec(self.__init__).args[1: len(args) + 1], args):
init_args[arg] = repr(value)
for arg in inspect.getfullargspec(self.__init__).args[len(args) + 1:]:
if arg in kwargs:
init_args[arg] = repr(kwargs[arg])
self.private_task_config = _PrivateTaskConfig(init_args=init_args)
return self
def __mul__(self, other):
"""Creates a composite task of this and passed task."""
return CompositeTask(other, self)
def __call__(self, *eopatches, monitor=False, **kwargs):
"""Executes the task."""
# if monitor:
# return self.execute_and_monitor(*eopatches, **kwargs)
return self._execute_handling(*eopatches, **kwargs)
def execute_and_monitor(self, *eopatches, **kwargs):
""" In the current version nothing additional happens in this method
"""
return self._execute_handling(*eopatches, **kwargs)
def _execute_handling(self, *eopatches, **kwargs):
""" Handles measuring execution time and error propagation
"""
self.private_task_config.start_time = datetime.datetime.now()
try:
return_value = self.execute(*eopatches, **kwargs)
self.private_task_config.end_time = datetime.datetime.now()
return return_value
except BaseException as exception:
traceback = sys.exc_info()[2]
# Some special exceptions don't accept an error message as a parameter and raise a TypeError in such case.
try:
errmsg = 'During execution of task {}: {}'.format(self.__class__.__name__, exception)
extended_exception = type(exception)(errmsg)
except TypeError:
extended_exception = exception
raise extended_exception.with_traceback(traceback)
@abstractmethod
def execute(self, *eopatches, **kwargs):
""" Implement execute function
"""
raise NotImplementedError
@staticmethod
def _parse_features(features, new_names=False, rename_function=None, default_feature_type=None,
allowed_feature_types=None):
""" See eolearn.core.utilities.FeatureParser class.
"""
return FeatureParser(features, new_names=new_names, rename_function=rename_function,
default_feature_type=default_feature_type, allowed_feature_types=allowed_feature_types)
@attr.s(cmp=False)
class _PrivateTaskConfig:
""" A container for general EOTask parameters required during EOWorkflow and EOExecution
:param init_args: A dictionary of parameters and values used for EOTask initialization
:type init_args: OrderedDict
:param uuid: An unique hexadecimal identifier string a task gets in EOWorkflow
:type uuid: str or None
:param start_time: Time when task execution started
:type start_time: datetime.datetime or None
:param end_time: Time when task execution ended
:type end_time: datetime.datetime or None
"""
init_args = attr.ib()
uuid = attr.ib(default=None)
start_time = attr.ib(default=None)
end_time = attr.ib(default=None)
def __add__(self, other):
return _PrivateTaskConfig(init_args=OrderedDict(list(self.init_args.items()) + list(other.init_args.items())))
class CompositeTask(EOTask):
"""Creates a task that is composite of two tasks.
Note: Instead of directly using this task it might be more convenient to use `'*'` operation between tasks.
Example: `composite_task = task1 * task2`
:param eotask1: Task which will be executed first
:type eotask1: EOTask
:param eotask2: Task which will be executed on results of first task
:type eotask2: EOTask
"""
def __init__(self, eotask1, eotask2):
self.eotask1 = eotask1
self.eotask2 = eotask2
self.private_task_config = eotask1.private_task_config + eotask2.private_task_config
def execute(self, *eopatches, **kwargs):
return self.eotask2.execute(self.eotask1.execute(*eopatches, **kwargs))
| [
[
[
1023,
1026
],
[
2938,
2941
]
],
[
[
1034,
1041
],
[
1206,
1213
]
],
[
[
1049,
1057
],
[
2667,
2675
],
[
2815,
2823
]
],
[
[
1065,
1072
],
[
1529,
1536
],
[
1658,
1665
]
],
[
[
1097,
1108
],
[
1485,
1496
],
[
4798,
4809
]
],
[
[
1125,
1128
],
[
1249,
1252
]
],
[
[
1130,
1144
],
[
3403,
3417
]
],
[
[
1153,
1157
],
[
4005,
4009
],
[
4604,
4608
],
[
4625,
4629
],
[
4664,
4668
],
[
4701,
4705
]
],
[
[
1182,
1195
],
[
3807,
3820
]
],
[
[
1197,
1203
]
],
[
[
1242,
1248
],
[
4895,
4901
]
],
[
[
4029,
4047
],
[
1835,
1853
],
[
4769,
4787
]
],
[
[
4881,
4894
],
[
2006,
2019
]
]
] |
from kol.request.GenericRequest import GenericRequest
from kol.manager import PatternManager
import kol.Error as Error
from kol.util import Report
class RespondToTradeRequest(GenericRequest):
def __init__(self, session, tradeid, items=None, meat=0, message=""):
        super(RespondToTradeRequest, self).__init__(session)
self.url = session.serverURL + "makeoffer.php"
self.requestData['action'] = 'counter'
self.requestData['pwd'] = session.pwd
self.requestData['whichoffer'] = tradeid
self.requestData['offermeat'] = meat
self.requestData['memo2'] = message
ctr = 1
for item in items:
self.requestData['whichitem' + str(ctr)] = item['itemID']
self.requestData['howmany' + str(ctr)] = item['quantity']
ctr += 1
def parseResponse(self):
noMeatPattern = PatternManager.getOrCompilePattern('traderHasNotEnoughMeat')
if noMeatPattern.search(self.responseText):
raise Error.Error("You don't have as much meat as you're promising.", Error.NOT_ENOUGH_MEAT)
noItemsPattern = PatternManager.getOrCompilePattern('traderHasNotEnoughItems')
if noItemsPattern.search(self.responseText):
raise Error.Error("You don't have as many items as you're promising.", Error.NOT_ENOUGH_ITEMS)
#Not testing for an offer being cancelled due to a bug in KoL - space reserved
successPattern = PatternManager.getOrCompilePattern('tradeResponseSentSuccessfully')
if successPattern.search(self.responseText):
Report.trace("request", "Response to trade " + str(self.requestData['whichoffer']) + ' sent successfully.')
else:
raise Error.Error("Unknown error sending response to trade " + str(self.requestData['whichoffer']), Error.REQUEST_GENERIC) | [
[
[
39,
53
],
[
176,
190
]
],
[
[
78,
92
],
[
882,
896
],
[
1134,
1148
],
[
1486,
1500
]
],
[
[
100,
118
],
[
1013,
1018
],
[
1077,
1082
],
[
1267,
1272
],
[
1332,
1337
],
[
1759,
1764
],
[
1853,
1858
]
],
[
[
140,
146
],
[
1619,
1625
]
],
[
[
154,
175
],
[
286,
307
]
]
] |
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Caps(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "volume"
_path_str = "volume.caps"
_valid_props = {"x", "y", "z"}
# x
# -
@property
def x(self):
"""
The 'x' property is an instance of X
that may be specified as:
- An instance of :class:`plotly.graph_objs.volume.caps.X`
- A dict of string/value properties that will be passed
to the X constructor
Supported dict properties:
fill
Sets the fill ratio of the `caps`. The default
fill value of the `caps` is 1 meaning that they
are entirely shaded. On the other hand Applying
a `fill` ratio less than one would allow the
creation of openings parallel to the edges.
show
Sets the fill ratio of the `slices`. The
default fill value of the x `slices` is 1
meaning that they are entirely shaded. On the
other hand Applying a `fill` ratio less than
one would allow the creation of openings
parallel to the edges.
Returns
-------
plotly.graph_objs.volume.caps.X
"""
return self["x"]
@x.setter
def x(self, val):
self["x"] = val
# y
# -
@property
def y(self):
"""
The 'y' property is an instance of Y
that may be specified as:
- An instance of :class:`plotly.graph_objs.volume.caps.Y`
- A dict of string/value properties that will be passed
to the Y constructor
Supported dict properties:
fill
Sets the fill ratio of the `caps`. The default
fill value of the `caps` is 1 meaning that they
are entirely shaded. On the other hand Applying
a `fill` ratio less than one would allow the
creation of openings parallel to the edges.
show
Sets the fill ratio of the `slices`. The
default fill value of the y `slices` is 1
meaning that they are entirely shaded. On the
other hand Applying a `fill` ratio less than
one would allow the creation of openings
parallel to the edges.
Returns
-------
plotly.graph_objs.volume.caps.Y
"""
return self["y"]
@y.setter
def y(self, val):
self["y"] = val
# z
# -
@property
def z(self):
"""
The 'z' property is an instance of Z
that may be specified as:
- An instance of :class:`plotly.graph_objs.volume.caps.Z`
- A dict of string/value properties that will be passed
to the Z constructor
Supported dict properties:
fill
Sets the fill ratio of the `caps`. The default
fill value of the `caps` is 1 meaning that they
are entirely shaded. On the other hand Applying
a `fill` ratio less than one would allow the
creation of openings parallel to the edges.
show
Sets the fill ratio of the `slices`. The
default fill value of the z `slices` is 1
meaning that they are entirely shaded. On the
other hand Applying a `fill` ratio less than
one would allow the creation of openings
parallel to the edges.
Returns
-------
plotly.graph_objs.volume.caps.Z
"""
return self["z"]
@z.setter
def z(self, val):
self["z"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
x
:class:`plotly.graph_objects.volume.caps.X` instance or
dict with compatible properties
y
:class:`plotly.graph_objects.volume.caps.Y` instance or
dict with compatible properties
z
:class:`plotly.graph_objects.volume.caps.Z` instance or
dict with compatible properties
"""
def __init__(self, arg=None, x=None, y=None, z=None, **kwargs):
"""
Construct a new Caps object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.volume.Caps`
x
:class:`plotly.graph_objects.volume.caps.X` instance or
dict with compatible properties
y
:class:`plotly.graph_objects.volume.caps.Y` instance or
dict with compatible properties
z
:class:`plotly.graph_objects.volume.caps.Z` instance or
dict with compatible properties
Returns
-------
Caps
"""
super(Caps, self).__init__("caps")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.volume.Caps
constructor must be a dict or
an instance of :class:`plotly.graph_objs.volume.Caps`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("x", None)
_v = x if x is not None else _v
if _v is not None:
self["x"] = _v
_v = arg.pop("y", None)
_v = y if y is not None else _v
if _v is not None:
self["y"] = _v
_v = arg.pop("z", None)
_v = z if z is not None else _v
if _v is not None:
self["z"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
| [
[
[
33,
82
],
[
117,
140
]
],
[
[
90,
103
],
[
5720,
5725
]
],
[
[
112,
116
],
[
5363,
5367
]
]
] |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcdn.endpoint import endpoint_data
class DescribeCdnDeletedDomainsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cdn', '2018-05-10', 'DescribeCdnDeletedDomains')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_PageNumber(self):
return self.get_query_params().get('PageNumber')
def set_PageNumber(self,PageNumber):
self.add_query_param('PageNumber',PageNumber)
def get_PageSize(self):
return self.get_query_params().get('PageSize')
def set_PageSize(self,PageSize):
self.add_query_param('PageSize',PageSize)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId) | [
[
[
826,
836
],
[
925,
935
],
[
962,
972
]
],
[
[
871,
884
],
[
1134,
1147
],
[
1245,
1258
]
],
[
[
892,
924
]
]
] |
class Buffer:
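    # Accumulates values and prints the sum of each completed group of five (oldest first); leftovers remain in the buffer.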
def __init__(self):
self.lst = list()
def add(self, *a):
for value in a:
self.lst.append(value)
while len(self.lst) >= 5:
s = 0
for i in range(5):
s += self.lst.pop(0)
print(s)
def get_current_part(self):
return self.lst
| [
[
[
6,
12
]
]
] |
class GeometryObject(APIObject, IDisposable):
""" The common base class for all geometric primitives. """
def Dispose(self):
""" Dispose(self: APIObject,A_0: bool) """
pass
def Equals(self, obj):
"""
Equals(self: GeometryObject,obj: object) -> bool
Determines whether the specified System.Object is equal to the current
System.Object.
obj: Another object.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: GeometryObject) -> int
Gets the integer value of the geometry object as hash code
"""
pass
def ReleaseManagedResources(self, *args):
""" ReleaseManagedResources(self: APIObject) """
pass
def ReleaseUnmanagedResources(self, *args):
""" ReleaseUnmanagedResources(self: GeometryObject) """
pass
def __enter__(self, *args):
""" __enter__(self: IDisposable) -> object """
pass
def __eq__(self, *args):
""" x.__eq__(y) <==> x==y """
pass
def __exit__(self, *args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self, *args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __ne__(self, *args):
pass
GraphicsStyleId = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""The ElementId of the GeometryObject's GraphicsStyle
Get: GraphicsStyleId(self: GeometryObject) -> ElementId
"""
IsElementGeometry = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Indicates whether this geometry is obtained directly from an Element.
Get: IsElementGeometry(self: GeometryObject) -> bool
"""
Visibility = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""The visibility.
Get: Visibility(self: GeometryObject) -> Visibility
"""
| [
[
[
6,
20
]
]
] |
import os
import math
import argparse
import gym
from agents.q_agent import Q, Agent, Trainer
RECORD_PATH = os.path.join(os.path.dirname(__file__), "./upload")
def main(episodes, render, monitor):
env = gym.make("CartPole-v0")
q = Q(
env.action_space.n,
env.observation_space,
bin_size=[7, 7, 7, 7],
low_bound=[-5, -0.5, -5, -0.5],
high_bound=[5, 0.5, 5, 0.5]
)
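    # The 4-D continuous observation is discretized into 7 bins per dimension within the given bounds.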
agent = Agent(q, epsilon=0.05)
learning_decay = lambda lr, t: 1 / (t + 1) ** 0.5
epsilon_decay = lambda eps, t: 1 / (t + 1) ** 0.5
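    # Both the learning rate and epsilon decay as 1 / sqrt(t + 1).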
trainer = Trainer(
agent,
gamma=0.95,
learning_rate=0.1, learning_rate_decay=learning_decay,
epsilon=1.0, epsilon_decay=epsilon_decay,
max_step=250)
if monitor:
env.monitor.start(RECORD_PATH)
trainer.train(env, episode_count=episodes, render=render)
if monitor:
env.monitor.close()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="train & run cartpole ")
parser.add_argument("--episode", type=int, default=1000, help="episode to train")
parser.add_argument("--render", action="store_true", help="render the screen")
parser.add_argument("--monitor", action="store_true", help="monitor")
parser.add_argument("--upload", type=str, default="", help="upload key to openai gym (training is not executed)")
args = parser.parse_args()
if args.upload:
if os.path.isdir(RECORD_PATH):
gym.upload(RECORD_PATH, api_key=args.upload)
else:
main(args.episode, args.render, args.monitor)
| [
[
[
7,
9
],
[
110,
112
],
[
123,
125
],
[
1458,
1460
]
],
[
[
17,
21
]
],
[
[
29,
37
],
[
972,
980
]
],
[
[
45,
48
],
[
1498,
1501
],
[
211,
214
]
],
[
[
76,
77
],
[
245,
246
]
],
[
[
79,
84
],
[
439,
444
]
],
[
[
86,
93
],
[
585,
592
]
],
[
[
96,
107
],
[
1472,
1483
],
[
1509,
1520
],
[
809,
820
]
],
[
[
168,
172
],
[
1561,
1565
]
],
[
[
963,
969
],
[
1037,
1043
],
[
1123,
1129
],
[
1206,
1212
],
[
1280,
1286
],
[
1406,
1412
]
],
[
[
1399,
1403
],
[
1434,
1438
],
[
1530,
1534
],
[
1566,
1570
],
[
1580,
1584
],
[
1593,
1597
]
]
] |
import os
import subprocess
import numpy as np
from tqdm import tqdm
from typing import Dict
MAX_FREQ = 7999
def to_str(v):
if isinstance(v, tuple):
s = " ".join(str(x) for x in v)
elif isinstance(v, float) or isinstance(v, int):
s = str(v)
else:
assert False
return s
def build_sox_distortions(audio_file, params):
param_str = " ".join([k + " " + to_str(v) for k, v in params.items()])
sox_params = "sox {} -p {} ".format(audio_file, param_str)
return sox_params
def build_sox_noise(
audio_file,
amod_lowpass_cutoff=0.1,
lowpass_cutoff=MAX_FREQ,
highpass_cutoff=1,
noise_gain=-4,
):
"""
play original.wav synth whitenoise lowpass 0.1 synth whitenoise amod gain -n 0 lowpass 100 highpass 1
"""
sox_params = "sox {audio_file} -p synth whitenoise lowpass {amod_lowpass_cutoff} synth whitenoise amod gain -n {noise_gain} lowpass {lowpass_cutoff} highpass {highpass_cutoff}".format(
audio_file=audio_file,
amod_lowpass_cutoff=amod_lowpass_cutoff,
lowpass_cutoff=lowpass_cutoff,
highpass_cutoff=highpass_cutoff,
noise_gain=noise_gain,
)
return sox_params
def build_varying_amplitude_factor(audio_file, lowpass_cutoff=1, ac_gain=-9):
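    # Low-pass-filtered white noise (slow fluctuation) mixed with a constant 0.5 DC offset yields a slowly varying amplitude envelope.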
ac = "sox {} -p synth whitenoise lowpass {} gain -n {}".format(
audio_file, lowpass_cutoff, ac_gain
)
dc = "sox {} -p gain -90 dcshift 0.5".format(audio_file)
return "sox -m <({}) <({}) -p".format(ac, dc)
def multiply_signals(signal_a, signal_b):
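    # sox's -T combiner multiplies its inputs sample-by-sample, hence "multiply_signals".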
return ("sox -T <({signal_a}) <({signal_b}) -p").format(
signal_a=signal_a, signal_b=signal_b,
)
def build_sox_interference(
interfere_file, interfere_signal, lowpass_cutoff=1, ac_gain=-6
):
factor = build_varying_amplitude_factor(interfere_file, lowpass_cutoff, ac_gain)
return multiply_signals(factor, interfere_signal)
def add_signals_trim_to_len(original, signals, augmented):
signals_to_add = " ".join(["<(%s)" % s for s in signals])
sox_cmd = "sox -m {signals} -b 16 {augmented} trim 0 $(soxi -D {original})".format(
signals=signals_to_add, original=original, augmented=augmented
)
return sox_cmd
def build_random_bandpass(min_low=50, min_band_width=100, max_high=1000) -> Dict:
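    # With probability 0.5 each, adds a random lowpass and/or highpass cutoff; any highpass is kept at least min_band_width below a chosen lowpass.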
d = {}
max_high_cutoff = MAX_FREQ
if np.random.choice([True, False], p=[0.5, 0.5]):
lowpass = int(round(np.random.uniform(low=min_low, high=MAX_FREQ)))
d["lowpass"] = lowpass
max_high_cutoff = lowpass - min_band_width
if np.random.choice([True, False], p=[0.5, 0.5]):
highpass = int(
round(np.random.uniform(low=1, high=min(max_high, max_high_cutoff)))
)
d["highpass"] = highpass
return d
def augment_with_sox(original_file, audio_files, augmented_file):
interfere_file = np.random.choice(audio_files)
min_SNR = 20 # normal:20, less:30, evenless:40
min_SIR = 5 # normal:10, less:20, evenless:30
signal_gain = round(np.random.uniform(low=-10, high=0), 2)
signal_params = {
"tempo": round(np.random.triangular(left=0.7, mode=1.0, right=1.3), 2),
"pitch": int(
round(np.random.triangular(left=-200, mode=0, right=200))
), # normal 100, less: 50, evenless: 30
"reverb": (int(round(np.random.uniform(low=0, high=50))), 50, 100, 100, 0, 0,),
"gain -n": signal_gain,
}
signal_params.update(build_random_bandpass(1000, 1000, 100))
interfere_params = {
"tempo": round(np.random.uniform(low=0.6, high=1.4), 2),
"pitch": int(round(np.random.uniform(low=-500, high=500))),
"reverb": (int(round(np.random.uniform(low=0, high=100))), 50, 100, 100, 0, 0),
"gain -n": round(np.random.uniform(low=-50, high=signal_gain - min_SIR), 2),
}
interfere_params.update(build_random_bandpass(50, 100, 1000))
# params = {'signal_params':signal_params,'interfere_params':interfere_params,'noise_power':noise_power}
# pprint(params)
signal = build_sox_distortions(original_file, signal_params)
interfere_signal = build_sox_distortions(interfere_file, interfere_params)
noise_power = round(np.random.uniform(-60, signal_gain - min_SNR), 2)
lowpass = int(round(np.random.uniform(low=100, high=MAX_FREQ)))
highpass = int(round(np.random.uniform(low=1, high=lowpass)))
noise = build_sox_noise(
original_file, np.random.uniform(0.1, 2), lowpass, highpass, noise_power
)
interf = build_sox_interference(
interfere_file,
interfere_signal,
lowpass_cutoff=np.random.uniform(0.5, 2),
ac_gain=int(round(np.random.uniform(-9, -3))),
)
sox_cmd = add_signals_trim_to_len(
original_file, [signal, noise, interf], augmented_file
)
FNULL = open(os.devnull, "w")
subprocess.call(["bash", "-c", sox_cmd], stdout=FNULL, stderr=subprocess.STDOUT)
# subprocess.call(["bash", "-c", sox_cmd])
# output = subprocess.check_output(["bash", "-c", sox_cmd])
# if len(output)>0 and 'FAIL' in output:
# print(output)
# return 1 if len(output)>0 else 0
def augment_with_specific_params():
signal_gain = 0
signal_params = dict(tempo=1.0, pitch=0, reverb=0)
signal_params["gain -n"] = 0
signal = build_sox_distortions(original, signal_params)
interfere_signal = build_sox_distortions(
interfering, dict(gain=signal_gain - 10, tempo=0.8, pitch=100, reverb=50)
)
noise = build_sox_noise(
original, noise_gain=signal_gain - 20, lowpass_cutoff=6000, highpass_cutoff=10
)
interf = build_sox_interference(interfering, interfere_signal)
sox_cmd = add_signals_trim_to_len(original, [signal, noise, interf], augmented)
subprocess.call(["bash", "-c", sox_cmd])
if __name__ == "__main__":
import librosa
original = "../../original.wav"
augmented = "/tmp/augmented.wav"
interfering = "../../interference.wav"
# augment_with_specific_params()
for k in range(9):
augment_with_sox(original, [interfering], "/tmp/augmented_%d.wav" % k)
# assert False
# path = os.environ['HOME']+"/data/asr_data/SPANISH"
# audio_files = librosa.util.find_files(path)
#
# with open('spanish_train_manifest.csv') as f:
# audio_text_files = f.readlines()
# audio_files = [x.strip().split(",")[0] for x in audio_text_files]
#
# for k in tqdm(range(100000)):
# original = np.random.choice(audio_files)
# random_augmentation(original, audio_files, augmented)
| [
[
[
7,
9
],
[
4813,
4815
]
],
[
[
17,
27
],
[
4834,
4844
],
[
4896,
4906
],
[
5751,
5761
]
],
[
[
36,
47
],
[
2341,
2343
],
[
2416,
2418
],
[
2554,
2556
],
[
2643,
2645
],
[
2852,
2854
],
[
3010,
3012
],
[
3094,
3096
],
[
3191,
3193
],
[
3321,
3323
],
[
3532,
3534
],
[
3601,
3603
],
[
3671,
3673
],
[
3755,
3757
],
[
4188,
4190
],
[
4262,
4264
],
[
4331,
4333
],
[
4424,
4426
],
[
4599,
4601
],
[
4652,
4654
]
],
[
[
65,
69
]
],
[
[
89,
93
],
[
2286,
2290
]
],
[
[
95,
103
],
[
610,
618
],
[
2325,
2333
],
[
2452,
2460
],
[
4294,
4302
]
],
[
[
117,
123
],
[
399,
405
]
],
[
[
320,
341
],
[
4032,
4053
],
[
4107,
4128
],
[
5293,
5314
],
[
5363,
5384
]
],
[
[
529,
544
],
[
4384,
4399
],
[
5486,
5501
]
],
[
[
1202,
1232
],
[
1775,
1805
]
],
[
[
1511,
1527
],
[
1858,
1874
]
],
[
[
1668,
1690
],
[
4502,
4524
],
[
5609,
5631
]
],
[
[
1907,
1930
],
[
4702,
4725
],
[
5677,
5700
]
],
[
[
2214,
2235
],
[
3443,
3464
],
[
3849,
3870
]
],
[
[
2769,
2785
],
[
6026,
6042
]
],
[
[
5140,
5168
]
],
[
[
5832,
5839
]
],
[
[
5844,
5852
],
[
6043,
6051
],
[
5315,
5323
],
[
5511,
5519
],
[
5701,
5709
]
],
[
[
5880,
5889
],
[
5736,
5745
]
],
[
[
5917,
5928
],
[
6054,
6065
],
[
5394,
5405
],
[
5632,
5643
]
],
[
[
6003,
6004
],
[
6094,
6095
]
]
] |
#!/usr/bin/env python
# Copyright 2014 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
import logging
import os
import sys
import tempfile
import shutil
import unittest
import re
# Import this first before manipulating sys.path to ensure it can load fine.
import logging_utils
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, ROOT_DIR)
import test_env
test_env.setup_test_env()
from depot_tools import auto_stub
_LOG_HEADER = r'^%s \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d\d\d: ' % os.getpid()
class TestLoggingUtils(auto_stub.TestCase):
def test_Capture(self):
root = logging.RootLogger(logging.DEBUG)
with logging_utils.CaptureLogs('foo', root) as log:
root.debug('foo')
result = log.read()
self.assertTrue(re.match(_LOG_HEADER + 'DEBUG foo\n$', result), result)
def test_prepare_logging(self):
root = logging.RootLogger(logging.DEBUG)
tmp_dir = tempfile.mkdtemp(prefix='logging_utils_test')
try:
filepath = os.path.join(tmp_dir, 'test.log')
logging_utils.prepare_logging(filepath, root)
root.debug('foo')
with open(filepath, 'rb') as f:
result = f.read()
finally:
shutil.rmtree(tmp_dir)
# It'd be nice to figure out a way to ensure it's properly in UTC but it's
# tricky to do reliably.
self.assertTrue(re.match(_LOG_HEADER + 'DEBUG foo\n$', result), result)
if __name__ == '__main__':
unittest.main()
| [
[
[
196,
203
],
[
723,
730
],
[
742,
749
],
[
985,
992
],
[
1004,
1011
]
],
[
[
211,
213
],
[
392,
394
],
[
408,
410
],
[
424,
426
],
[
628,
630
],
[
1105,
1107
]
],
[
[
221,
224
],
[
452,
455
]
],
[
[
232,
240
],
[
1033,
1041
]
],
[
[
248,
254
],
[
1298,
1304
]
],
[
[
262,
270
],
[
1536,
1544
]
],
[
[
278,
280
],
[
883,
885
],
[
1449,
1451
]
],
[
[
366,
379
],
[
766,
779
],
[
1145,
1158
]
],
[
[
381,
389
],
[
471,
479
]
],
[
[
489,
497
],
[
499,
507
]
],
[
[
550,
559
],
[
665,
674
]
],
[
[
561,
572
],
[
892,
903
],
[
1458,
1469
]
],
[
[
648,
664
]
]
] |
# -*- Python -*-
# This file is licensed under a pytorch-style license
# See frontends/pytorch/LICENSE for license information.
import torch
import npcomp.frontends.pytorch as torch_mlir
import npcomp.frontends.pytorch.test as test
# RUN: %PYTHON %s | FileCheck %s
dev = torch_mlir.mlir_device()
t0 = torch.randn((4,4), device=dev)
t1 = torch.randn((4,4), device=dev)
t2 = t0 + t1
#
# Check the result tensor against the CPU
#
t0_cpu = t0.to('cpu')
t1_cpu = t1.to('cpu')
t2_cpu = t2.to('cpu')
print (t0_cpu, " +\n", t1_cpu, " =\n", t2_cpu)
# CHECK: PASS! add2 check
test.compare(t2, t0_cpu + t1_cpu, "add2")
| [
[
[
136,
141
],
[
304,
309
],
[
340,
345
]
],
[
[
149,
187
],
[
274,
284
]
],
[
[
195,
232
],
[
573,
577
]
],
[
[
268,
271
],
[
330,
333
],
[
366,
369
]
],
[
[
299,
301
],
[
377,
379
],
[
441,
443
]
],
[
[
335,
337
],
[
382,
384
],
[
463,
465
]
],
[
[
372,
374
],
[
485,
487
],
[
586,
588
]
],
[
[
432,
438
],
[
506,
512
],
[
590,
596
]
],
[
[
454,
460
],
[
522,
528
],
[
599,
605
]
],
[
[
476,
482
],
[
538,
544
]
]
] |
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops.operations import _grad_ops as G
from mindspore.ops import composite as C
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
class NetTanhGrad(nn.Cell):
def __init__(self):
super(NetTanhGrad, self).__init__()
self.tanh_grad = G.TanhGrad()
def construct(self, y, grad):
return self.tanh_grad(y, grad)
class NetTanhGradGrad(nn.Cell):
def __init__(self, forward_net):
super(NetTanhGradGrad, self).__init__()
self.forward_net = forward_net
self.gradOps = C.GradOperation(get_all=True, sens_param=True)
def construct(self, y, grad, dout):
backward_net = self.gradOps(self.forward_net)
return backward_net(y, grad, dout)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def tanh_grad_grad_base(dtype, loss):
np.random.seed(1)
shape = (4, 2)
y_np = (np.random.rand(*shape) * 2 - 1).astype(dtype)
grad_np = (np.random.rand(*shape) * 20 - 10).astype(dtype)
dout_np = (np.random.rand(*shape) * 20 - 10).astype(dtype)
y_np_32 = y_np.astype(np.float32)
grad_np_32 = grad_np.astype(np.float32)
dout_np_32 = dout_np.astype(np.float32)
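    # Expected values: for TanhGrad(y, g) = g * (1 - y**2), d/dy = -2 * g * y and d/dg = 1 - y**2, each scaled by dout.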
dy_np = (dout_np_32 * grad_np_32 * (-2.0) * y_np_32).astype(dtype)
dgrad_np = (dout_np_32 * (1 - y_np_32 * y_np_32)).astype(dtype)
y_ms = Tensor(y_np)
grad_ms = Tensor(grad_np)
dout_ms = Tensor(dout_np)
forward_net = NetTanhGrad()
net = NetTanhGradGrad(forward_net)
dy_ms, dgrad_ms = net(y_ms, grad_ms, dout_ms)
assert np.allclose(dy_ms.asnumpy(), dy_np, loss, loss)
assert np.allclose(dgrad_ms.asnumpy(), dgrad_np, loss, loss)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_tanh_grad_grad_float16():
tanh_grad_grad_base(np.float16, 1e-3)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_tanh_grad_grad_float32():
tanh_grad_grad_base(np.float32, 1e-4)
| [
[
[
675,
686
],
[
1656,
1658
],
[
1705,
1707
],
[
1766,
1768
],
[
1829,
1831
],
[
1904,
1906
],
[
1948,
1950
],
[
1992,
1994
],
[
2361,
2363
],
[
2420,
2422
],
[
2619,
2621
],
[
2782,
2784
]
],
[
[
694,
700
],
[
1531,
1537
],
[
1551,
1557
],
[
1590,
1596
],
[
2477,
2483
],
[
2497,
2503
],
[
2536,
2542
],
[
2640,
2646
],
[
2660,
2666
],
[
2699,
2705
]
],
[
[
709,
737
],
[
887,
894
],
[
912,
919
]
],
[
[
745,
763
],
[
972,
974
],
[
1186,
1188
]
],
[
[
786,
792
],
[
2155,
2161
],
[
2182,
2188
],
[
2212,
2218
]
],
[
[
830,
844
],
[
1075,
1076
]
],
[
[
871,
885
],
[
1343,
1344
]
],
[
[
960,
971
],
[
1020,
1031
],
[
2246,
2257
]
],
[
[
1170,
1185
],
[
1247,
1262
],
[
2270,
2285
]
],
[
[
1618,
1637
],
[
2599,
2618
],
[
2762,
2781
]
],
[
[
2564,
2591
]
],
[
[
2727,
2754
]
]
] |
from rest_framework.permissions import SAFE_METHODS, BasePermission
class IsAdminOrReadOnly(BasePermission):
"""
The request is authenticated as an Admin user or is Read Only
"""
def has_permission(self, request, view):
return bool(
request.method in SAFE_METHODS or
request.user and
request.user.is_staff
)
| [
[
[
39,
51
],
[
290,
302
]
],
[
[
53,
67
],
[
94,
108
]
],
[
[
76,
93
]
]
] |
#!/usr/bin/env python3
import argparse
import io
import sys
from urllib.request import urlopen
import urllib.error
import time
import datetime
from retrying import retry
URL = "http://unreliable.labs.crossref.org/error"
ONE_SECOND=1000
ONE_HOUR=((ONE_SECOND*60)*60)
ONE_DAY=(ONE_HOUR*24)
@retry(wait_exponential_multiplier=1000,wait_exponential_max=(ONE_HOUR * 6))
def fetch(url):
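    # retrying: exponential backoff starting around one second, roughly doubling each attempt, capped at six hours per wait.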
global s
d = time.time() - s
print("time: " + str(d))
s = time.time()
try:
with urlopen(url) as response:
result = response.read().decode('utf8')
print("Done fetching...")
return result
except urllib.error.URLError as e:
print("Error: " + str(e))
raise e
def main():
print("Starting...")
print(fetch(ARGS.url))
print("Done")
s = time.time()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Stubbornly, but intelligently keep retrying to GET the same URL")
parser.add_argument("-u", "--url", help="the URL to be stubborn about",
type=str, default=URL)
ARGS = parser.parse_args()
main()
| [
[
[
30,
38
],
[
871,
879
]
],
[
[
46,
48
]
],
[
[
56,
59
]
],
[
[
87,
94
],
[
495,
502
]
],
[
[
102,
114
],
[
648,
654
]
],
[
[
122,
126
],
[
815,
819
],
[
407,
411
],
[
460,
464
]
],
[
[
134,
142
]
],
[
[
164,
169
],
[
294,
299
]
],
[
[
172,
175
],
[
1101,
1104
]
],
[
[
223,
233
],
[
250,
260
]
],
[
[
239,
247
],
[
278,
286
],
[
355,
363
]
],
[
[
269,
276
]
],
[
[
374,
379
],
[
774,
779
]
],
[
[
731,
735
],
[
1141,
1145
]
],
[
[
811,
812
],
[
421,
422
]
],
[
[
862,
868
],
[
987,
993
],
[
1117,
1123
]
],
[
[
1110,
1114
],
[
780,
784
]
],
[
[
456,
457
]
]
] |
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 8
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import isi_sdk_8_2_1
from isi_sdk_8_2_1.models.auth_id_ntoken_privilege_item import AuthIdNtokenPrivilegeItem # noqa: E501
from isi_sdk_8_2_1.rest import ApiException
class TestAuthIdNtokenPrivilegeItem(unittest.TestCase):
"""AuthIdNtokenPrivilegeItem unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testAuthIdNtokenPrivilegeItem(self):
"""Test AuthIdNtokenPrivilegeItem"""
# FIXME: construct object with mandatory attributes with example values
# model = isi_sdk_8_2_1.models.auth_id_ntoken_privilege_item.AuthIdNtokenPrivilegeItem() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
[
[
259,
274
]
],
[
[
283,
291
],
[
499,
507
],
[
972,
980
]
],
[
[
300,
313
]
],
[
[
377,
402
]
],
[
[
448,
460
]
],
[
[
469,
498
]
]
] |
#
# Copyright (C) 2019 Databricks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A wrapper class for Spark DataFrame to behave similar to pandas DataFrame.
"""
import re
import warnings
from functools import partial, reduce
from typing import Any, Optional, List, Tuple, Union
import numpy as np
import pandas as pd
from pandas.api.types import is_datetime64_dtype, is_datetime64tz_dtype, is_list_like, \
is_dict_like
from pyspark import sql as spark
from pyspark.sql import functions as F, Column
from pyspark.sql.types import (BooleanType, ByteType, DecimalType, DoubleType, FloatType,
IntegerType, LongType, ShortType, StructField, StructType,
to_arrow_type)
from pyspark.sql.utils import AnalysisException
from databricks import koalas as ks # For running doctests and reference resolution in PyCharm.
from databricks.koalas.utils import default_session, validate_arguments_and_invoke_function
from databricks.koalas.generic import _Frame, max_display_count
from databricks.koalas.metadata import Metadata
from databricks.koalas.missing.frame import _MissingPandasLikeDataFrame
from databricks.koalas.ml import corr
from databricks.koalas.typedef import infer_pd_series_spark_type
# These regular expression patterns are complied and defined here to avoid to compile the same
# pattern every time it is used in _repr_ and _repr_html_ in DataFrame.
# Two patterns basically seek the footer string from Pandas'
REPR_PATTERN = re.compile(r"\n\n\[(?P<rows>[0-9]+) rows x (?P<columns>[0-9]+) columns\]$")
REPR_HTML_PATTERN = re.compile(
r"\n\<p\>(?P<rows>[0-9]+) rows × (?P<columns>[0-9]+) columns\<\/p\>\n\<\/div\>$")
class DataFrame(_Frame):
"""
Koala DataFrame that corresponds to Pandas DataFrame logically. This holds Spark DataFrame
internally.
:ivar _sdf: Spark Column instance
:ivar _metadata: Metadata related to column names and index information.
Parameters
----------
data : numpy ndarray (structured or homogeneous), dict, Pandas DataFrame or Spark DataFrame
Dict can contain Series, arrays, constants, or list-like objects
If data is a dict, argument order is maintained for Python 3.6
and later.
Note that if `data` is a Pandas DataFrame, other arguments should not be used.
If `data` is a Spark DataFrame, all other arguments except `index` should not be used.
index : Index or array-like
Index to use for resulting frame. Will default to RangeIndex if
no indexing information part of input data and no index provided
If `data` is a Spark DataFrame, `index` is expected to be `Metadata`.
columns : Index or array-like
Column labels to use for resulting frame. Will default to
RangeIndex (0, 1, 2, ..., n) if no column labels are provided
dtype : dtype, default None
Data type to force. Only a single dtype is allowed. If None, infer
copy : boolean, default False
Copy data from inputs. Only affects DataFrame / 2d ndarray input
Examples
--------
Constructing DataFrame from a dictionary.
>>> d = {'col1': [1, 2], 'col2': [3, 4]}
>>> df = ks.DataFrame(data=d, columns=['col1', 'col2'])
>>> df
col1 col2
0 1 3
1 2 4
Constructing DataFrame from Pandas DataFrame
>>> df = ks.DataFrame(pd.DataFrame(data=d, columns=['col1', 'col2']))
>>> df
col1 col2
0 1 3
1 2 4
Notice that the inferred dtype is int64.
>>> df.dtypes
col1 int64
col2 int64
dtype: object
To enforce a single dtype:
>>> df = ks.DataFrame(data=d, dtype=np.int8)
>>> df.dtypes
col1 int8
col2 int8
dtype: object
Constructing DataFrame from numpy ndarray:
>>> df2 = ks.DataFrame(np.random.randint(low=0, high=10, size=(5, 5)),
... columns=['a', 'b', 'c', 'd', 'e'])
>>> df2 # doctest: +SKIP
a b c d e
0 3 1 4 9 8
1 4 8 4 8 4
2 7 6 5 6 7
3 8 7 9 1 0
4 2 5 4 3 9
"""
def __init__(self, data=None, index=None, columns=None, dtype=None, copy=False):
if isinstance(data, pd.DataFrame):
assert index is None
assert columns is None
assert dtype is None
assert not copy
self._init_from_pandas(data)
elif isinstance(data, spark.DataFrame):
assert columns is None
assert dtype is None
assert not copy
self._init_from_spark(data, index)
else:
pdf = pd.DataFrame(data=data, index=index, columns=columns, dtype=dtype, copy=copy)
self._init_from_pandas(pdf)
def _init_from_pandas(self, pdf):
metadata = Metadata.from_pandas(pdf)
reset_index = pdf.reset_index()
reset_index.columns = metadata.columns
schema = StructType([StructField(name, infer_pd_series_spark_type(col),
nullable=bool(col.isnull().any()))
for name, col in reset_index.iteritems()])
for name, col in reset_index.iteritems():
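            # Datetime columns are left untouched; for everything else NaN is replaced with None so Spark stores proper nulls.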
dt = col.dtype
if is_datetime64_dtype(dt) or is_datetime64tz_dtype(dt):
continue
reset_index[name] = col.replace({np.nan: None})
self._init_from_spark(default_session().createDataFrame(reset_index, schema=schema),
metadata)
def _init_from_spark(self, sdf, metadata=None):
self._sdf = sdf
if metadata is None:
self._metadata = Metadata(data_columns=self._sdf.schema.fieldNames())
else:
self._metadata = metadata
@property
def _index_columns(self):
return [self._sdf.__getitem__(field)
for field in self._metadata.index_columns]
def _reduce_for_stat_function(self, sfun):
"""
Applies sfun to each column and returns a pd.Series where the number of rows equal the
number of columns.
:param sfun: either an 1-arg function that takes a Column and returns a Column, or
a 2-arg function that takes a Column and its DataType and returns a Column.
"""
from inspect import signature
exprs = []
num_args = len(signature(sfun).parameters)
for col in self.columns:
col_sdf = self._sdf[col]
col_type = self._sdf.schema[col].dataType
if isinstance(col_type, BooleanType) and sfun.__name__ not in ('min', 'max'):
# Stat functions cannot be used with boolean values by default
# Thus, cast to integer (true to 1 and false to 0)
# Exclude the min and max methods though since those work with booleans
col_sdf = col_sdf.cast('integer')
if num_args == 1:
# Only pass in the column if sfun accepts only one arg
col_sdf = sfun(col_sdf)
else: # must be 2
assert num_args == 2
# Pass in both the column and its data type if sfun accepts two args
col_sdf = sfun(col_sdf, col_type)
exprs.append(col_sdf.alias(col))
sdf = self._sdf.select(*exprs)
pdf = sdf.toPandas()
assert len(pdf) == 1, (sdf, pdf)
row = pdf.iloc[0]
row.name = None
return row # Return first row as a Series
def corr(self, method='pearson'):
"""
Compute pairwise correlation of columns, excluding NA/null values.
Parameters
----------
method : {'pearson', 'spearman'}
* pearson : standard correlation coefficient
* spearman : Spearman rank correlation
Returns
-------
y : pandas.DataFrame
See Also
--------
Series.corr
Examples
--------
>>> df = ks.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)],
... columns=['dogs', 'cats'])
>>> df.corr('pearson')
dogs cats
dogs 1.000000 -0.851064
cats -0.851064 1.000000
>>> df.corr('spearman')
dogs cats
dogs 1.000000 -0.948683
cats -0.948683 1.000000
Notes
-----
There are behavior differences between Koalas and pandas.
* the `method` argument only accepts 'pearson', 'spearman'
* the data should not contain NaNs. Koalas will return an error.
* Koalas doesn't support the following argument(s).
* `min_periods` argument is not supported
"""
return corr(self, method)
def iteritems(self):
"""
Iterator over (column name, Series) pairs.
Iterates over the DataFrame columns, returning a tuple with
the column name and the content as a Series.
Returns
-------
label : object
The column names for the DataFrame being iterated over.
content : Series
The column entries belonging to each label, as a Series.
Examples
--------
>>> df = ks.DataFrame({'species': ['bear', 'bear', 'marsupial'],
... 'population': [1864, 22000, 80000]},
... index=['panda', 'polar', 'koala'],
... columns=['species', 'population'])
>>> df
species population
panda bear 1864
polar bear 22000
koala marsupial 80000
>>> for label, content in df.iteritems():
... print('label:', label)
... print('content:', content.to_string())
...
label: species
content: panda bear
polar bear
koala marsupial
label: population
content: panda 1864
polar 22000
koala 80000
"""
cols = list(self.columns)
return list((col_name, self[col_name]) for col_name in cols)
def to_clipboard(self, excel=True, sep=None, **kwargs):
"""
Copy object to the system clipboard.
Write a text representation of object to the system clipboard.
This can be pasted into Excel, for example.
.. note:: This method should only be used if the resulting DataFrame is expected
to be small, as all the data is loaded into the driver's memory.
Parameters
----------
excel : bool, default True
- True, use the provided separator, writing in a csv format for
allowing easy pasting into excel.
- False, write a string representation of the object to the
clipboard.
sep : str, default ``'\\t'``
Field delimiter.
**kwargs
These parameters will be passed to DataFrame.to_csv.
Notes
-----
Requirements for your platform.
- Linux : `xclip`, or `xsel` (with `gtk` or `PyQt4` modules)
- Windows : none
- OS X : none
Examples
--------
Copy the contents of a DataFrame to the clipboard.
>>> df = ks.DataFrame([[1, 2, 3], [4, 5, 6]], columns=['A', 'B', 'C']) # doctest: +SKIP
>>> df.to_clipboard(sep=',') # doctest: +SKIP
... # Wrote the following to the system clipboard:
... # ,A,B,C
... # 0,1,2,3
... # 1,4,5,6
We can omit the index by passing the keyword `index` and setting
it to false.
>>> df.to_clipboard(sep=',', index=False) # doctest: +SKIP
... # Wrote the following to the system clipboard:
... # A,B,C
... # 1,2,3
... # 4,5,6
This function also works for Series:
>>> df = ks.Series([1, 2, 3, 4, 5, 6, 7], name='x') # doctest: +SKIP
>>> df.to_clipboard(sep=',') # doctest: +SKIP
... # Wrote the following to the system clipboard:
... # 0, 1
... # 1, 2
... # 2, 3
... # 3, 4
... # 4, 5
... # 5, 6
... # 6, 7
"""
args = locals()
kdf = self
return validate_arguments_and_invoke_function(
kdf.to_pandas(), self.to_clipboard, pd.DataFrame.to_clipboard, args)
def to_html(self, buf=None, columns=None, col_space=None, header=True, index=True,
na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True,
justify=None, max_rows=None, max_cols=None, show_dimensions=False, decimal='.',
bold_rows=True, classes=None, escape=True, notebook=False, border=None,
table_id=None, render_links=False):
"""
Render a DataFrame as an HTML table.
.. note:: This method should only be used if the resulting Pandas object is expected
to be small, as all the data is loaded into the driver's memory. If the input
is large, set max_rows parameter.
Parameters
----------
buf : StringIO-like, optional
Buffer to write to.
columns : sequence, optional, default None
The subset of columns to write. Writes all columns by default.
col_space : int, optional
The minimum width of each column.
header : bool, optional
Write out the column names. If a list of strings is given, it
is assumed to be aliases for the column names
index : bool, optional, default True
Whether to print index (row) labels.
na_rep : str, optional, default 'NaN'
String representation of NAN to use.
formatters : list or dict of one-param. functions, optional
Formatter functions to apply to columns' elements by position or
name.
The result of each function must be a unicode string.
List must be of length equal to the number of columns.
float_format : one-parameter function, optional, default None
Formatter function to apply to columns' elements if they are
floats. The result of this function must be a unicode string.
sparsify : bool, optional, default True
Set to False for a DataFrame with a hierarchical index to print
every multiindex key at each row.
index_names : bool, optional, default True
Prints the names of the indexes.
justify : str, default None
How to justify the column labels. If None uses the option from
the print configuration (controlled by set_option), 'right' out
of the box. Valid values are
* left
* right
* center
* justify
* justify-all
* start
* end
* inherit
* match-parent
* initial
* unset.
max_rows : int, optional
Maximum number of rows to display in the console.
max_cols : int, optional
Maximum number of columns to display in the console.
show_dimensions : bool, default False
Display DataFrame dimensions (number of rows by number of columns).
decimal : str, default '.'
Character recognized as decimal separator, e.g. ',' in Europe.
bold_rows : bool, default True
Make the row labels bold in the output.
classes : str or list or tuple, default None
CSS class(es) to apply to the resulting html table.
escape : bool, default True
Convert the characters <, >, and & to HTML-safe sequences.
notebook : {True, False}, default False
Whether the generated HTML is for IPython Notebook.
border : int
A ``border=border`` attribute is included in the opening
`<table>` tag. Default ``pd.options.html.border``.
table_id : str, optional
A css id is included in the opening `<table>` tag if specified.
render_links : bool, default False
Convert URLs to HTML links (only works with Pandas 0.24+).
Returns
-------
str (or unicode, depending on data and options)
String representation of the dataframe.
See Also
--------
to_string : Convert DataFrame to a string.
"""
# Make sure locals() call is at the top of the function so we don't capture local variables.
args = locals()
if max_rows is not None:
kdf = self.head(max_rows)
else:
kdf = self
return validate_arguments_and_invoke_function(
kdf.to_pandas(), self.to_html, pd.DataFrame.to_html, args)
def to_string(self, buf=None, columns=None, col_space=None, header=True,
index=True, na_rep='NaN', formatters=None, float_format=None,
sparsify=None, index_names=True, justify=None,
max_rows=None, max_cols=None, show_dimensions=False,
decimal='.', line_width=None):
"""
Render a DataFrame to a console-friendly tabular output.
.. note:: This method should only be used if the resulting Pandas object is expected
to be small, as all the data is loaded into the driver's memory. If the input
is large, set max_rows parameter.
Parameters
----------
buf : StringIO-like, optional
Buffer to write to.
columns : sequence, optional, default None
The subset of columns to write. Writes all columns by default.
col_space : int, optional
The minimum width of each column.
header : bool, optional
Write out the column names. If a list of strings is given, it
is assumed to be aliases for the column names
index : bool, optional, default True
Whether to print index (row) labels.
na_rep : str, optional, default 'NaN'
String representation of NAN to use.
formatters : list or dict of one-param. functions, optional
Formatter functions to apply to columns' elements by position or
name.
The result of each function must be a unicode string.
List must be of length equal to the number of columns.
float_format : one-parameter function, optional, default None
Formatter function to apply to columns' elements if they are
floats. The result of this function must be a unicode string.
sparsify : bool, optional, default True
Set to False for a DataFrame with a hierarchical index to print
every multiindex key at each row.
index_names : bool, optional, default True
Prints the names of the indexes.
justify : str, default None
How to justify the column labels. If None uses the option from
the print configuration (controlled by set_option), 'right' out
of the box. Valid values are
* left
* right
* center
* justify
* justify-all
* start
* end
* inherit
* match-parent
* initial
* unset.
max_rows : int, optional
Maximum number of rows to display in the console.
max_cols : int, optional
Maximum number of columns to display in the console.
show_dimensions : bool, default False
Display DataFrame dimensions (number of rows by number of columns).
decimal : str, default '.'
Character recognized as decimal separator, e.g. ',' in Europe.
line_width : int, optional
Width to wrap a line in characters.
Returns
-------
str (or unicode, depending on data and options)
String representation of the dataframe.
See Also
--------
to_html : Convert DataFrame to HTML.
Examples
--------
>>> df = ks.DataFrame({'col1': [1, 2, 3], 'col2': [4, 5, 6]}, columns=['col1', 'col2'])
>>> print(df.to_string())
col1 col2
0 1 4
1 2 5
2 3 6
>>> print(df.to_string(max_rows=2))
col1 col2
0 1 4
1 2 5
"""
# Make sure locals() call is at the top of the function so we don't capture local variables.
args = locals()
if max_rows is not None:
kdf = self.head(max_rows)
else:
kdf = self
return validate_arguments_and_invoke_function(
kdf.to_pandas(), self.to_string, pd.DataFrame.to_string, args)
def to_dict(self, orient='dict', into=dict):
"""
Convert the DataFrame to a dictionary.
The type of the key-value pairs can be customized with the parameters
(see below).
.. note:: This method should only be used if the resulting Pandas DataFrame is expected
to be small, as all the data is loaded into the driver's memory.
Parameters
----------
orient : str {'dict', 'list', 'series', 'split', 'records', 'index'}
Determines the type of the values of the dictionary.
- 'dict' (default) : dict like {column -> {index -> value}}
- 'list' : dict like {column -> [values]}
- 'series' : dict like {column -> Series(values)}
- 'split' : dict like
{'index' -> [index], 'columns' -> [columns], 'data' -> [values]}
- 'records' : list like
[{column -> value}, ... , {column -> value}]
- 'index' : dict like {index -> {column -> value}}
Abbreviations are allowed. `s` indicates `series` and `sp`
indicates `split`.
into : class, default dict
The collections.abc.Mapping subclass used for all Mappings
in the return value. Can be the actual class or an empty
instance of the mapping type you want. If you want a
collections.defaultdict, you must pass it initialized.
Returns
-------
dict, list or collections.abc.Mapping
Return a collections.abc.Mapping object representing the DataFrame.
The resulting transformation depends on the `orient` parameter.
Examples
--------
>>> df = ks.DataFrame({'col1': [1, 2],
... 'col2': [0.5, 0.75]},
... index=['row1', 'row2'],
... columns=['col1', 'col2'])
>>> df
col1 col2
row1 1 0.50
row2 2 0.75
>>> df_dict = df.to_dict()
>>> sorted([(key, sorted(values.items())) for key, values in df_dict.items()])
[('col1', [('row1', 1), ('row2', 2)]), ('col2', [('row1', 0.5), ('row2', 0.75)])]
You can specify the return orientation.
>>> df_dict = df.to_dict('series')
>>> sorted(df_dict.items())
[('col1', row1 1
row2 2
Name: col1, dtype: int64), ('col2', row1 0.50
row2 0.75
Name: col2, dtype: float64)]
>>> df_dict = df.to_dict('split')
>>> sorted(df_dict.items()) # doctest: +ELLIPSIS
[('columns', ['col1', 'col2']), ('data', [[1..., 0.75]]), ('index', ['row1', 'row2'])]
>>> df_dict = df.to_dict('records')
>>> [sorted(values.items()) for values in df_dict] # doctest: +ELLIPSIS
[[('col1', 1...), ('col2', 0.5)], [('col1', 2...), ('col2', 0.75)]]
>>> df_dict = df.to_dict('index')
>>> sorted([(key, sorted(values.items())) for key, values in df_dict.items()])
[('row1', [('col1', 1), ('col2', 0.5)]), ('row2', [('col1', 2), ('col2', 0.75)])]
You can also specify the mapping type.
>>> from collections import OrderedDict, defaultdict
>>> df.to_dict(into=OrderedDict)
OrderedDict([('col1', OrderedDict([('row1', 1), ('row2', 2)])), \
('col2', OrderedDict([('row1', 0.5), ('row2', 0.75)]))])
If you want a `defaultdict`, you need to initialize it:
>>> dd = defaultdict(list)
>>> df.to_dict('records', into=dd) # doctest: +ELLIPSIS
[defaultdict(<class 'list'>, {'col..., 'col...}), \
defaultdict(<class 'list'>, {'col..., 'col...})]
"""
# Make sure locals() call is at the top of the function so we don't capture local variables.
args = locals()
kdf = self
return validate_arguments_and_invoke_function(
kdf.to_pandas(), self.to_dict, pd.DataFrame.to_dict, args)
def to_latex(self, buf=None, columns=None, col_space=None, header=True, index=True,
na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True,
bold_rows=False, column_format=None, longtable=None, escape=None, encoding=None,
decimal='.', multicolumn=None, multicolumn_format=None, multirow=None):
r"""
Render an object to a LaTeX tabular environment table.
Render an object to a tabular environment table. You can splice this into a LaTeX
document. Requires usepackage{booktabs}.
.. note:: This method should only be used if the resulting Pandas object is expected
to be small, as all the data is loaded into the driver's memory. If the input
is large, consider alternative formats.
Parameters
----------
buf : file descriptor or None
Buffer to write to. If None, the output is returned as a string.
columns : list of label, optional
The subset of columns to write. Writes all columns by default.
col_space : int, optional
The minimum width of each column.
header : bool or list of str, default True
Write out the column names. If a list of strings is given, it is assumed to be aliases
for the column names.
index : bool, default True
Write row names (index).
na_rep : str, default ‘NaN’
Missing data representation.
formatters : list of functions or dict of {str: function}, optional
Formatter functions to apply to columns’ elements by position or name. The result of
each function must be a unicode string. List must be of length equal to the number of
columns.
float_format : str, optional
Format string for floating point numbers.
sparsify : bool, optional
Set to False for a DataFrame with a hierarchical index to print every multiindex key at
each row. By default, the value will be read from the config module.
index_names : bool, default True
Prints the names of the indexes.
bold_rows : bool, default False
Make the row labels bold in the output.
column_format : str, optional
The columns format as specified in LaTeX table format e.g. ‘rcl’ for 3 columns. By
default, ‘l’ will be used for all columns except columns of numbers, which default
to ‘r’.
longtable : bool, optional
By default, the value will be read from the pandas config module. Use a longtable
environment instead of tabular. Requires adding a usepackage{longtable} to your LaTeX
preamble.
escape : bool, optional
By default, the value will be read from the pandas config module. When set to False
prevents from escaping latex special characters in column names.
encoding : str, optional
A string representing the encoding to use in the output file, defaults to ‘ascii’ on
Python 2 and ‘utf-8’ on Python 3.
decimal : str, default ‘.’
Character recognized as decimal separator, e.g. ‘,’ in Europe.
multicolumn : bool, default True
Use multicolumn to enhance MultiIndex columns. The default will be read from the config
module.
multicolumn_format : str, default ‘l’
The alignment for multicolumns, similar to column_format The default will be read from
the config module.
multirow : bool, default False
Use multirow to enhance MultiIndex rows. Requires adding a usepackage{multirow} to your
LaTeX preamble. Will print centered labels (instead of top-aligned) across the contained
rows, separating groups via clines. The default will be read from the pandas config
module.
Returns
-------
str or None
If buf is None, returns the resulting LateX format as a string. Otherwise returns None.
See Also
--------
DataFrame.to_string : Render a DataFrame to a console-friendly
tabular output.
DataFrame.to_html : Render a DataFrame as an HTML table.
Examples
--------
>>> df = pd.DataFrame({'name': ['Raphael', 'Donatello'],
... 'mask': ['red', 'purple'],
... 'weapon': ['sai', 'bo staff']},
... columns=['name', 'mask', 'weapon'])
>>> df.to_latex(index=False) # doctest: +NORMALIZE_WHITESPACE
'\\begin{tabular}{lll}\n\\toprule\n name & mask & weapon
\\\\\n\\midrule\n Raphael & red & sai \\\\\n Donatello &
purple & bo staff \\\\\n\\bottomrule\n\\end{tabular}\n'
"""
args = locals()
kdf = self
return validate_arguments_and_invoke_function(
kdf.to_pandas(), self.to_latex, pd.DataFrame.to_latex, args)
@property
def index(self):
"""The index (row labels) Column of the DataFrame.
Currently supported only when the DataFrame has a single index.
"""
from databricks.koalas.series import Series
if len(self._metadata.index_map) != 1:
raise KeyError('Currently supported only when the DataFrame has a single index.')
return Series(self._index_columns[0], anchor=self, index=[])
def set_index(self, keys, drop=True, append=False, inplace=False):
"""Set the DataFrame index (row labels) using one or more existing columns.
Set the DataFrame index (row labels) using one or more existing
columns or arrays (of the correct length). The index can replace the
existing index or expand on it.
Parameters
----------
keys : label or array-like or list of labels/arrays
This parameter can be either a single column key, a single array of
the same length as the calling DataFrame, or a list containing an
arbitrary combination of column keys and arrays. Here, "array"
encompasses :class:`Series`, :class:`Index` and ``np.ndarray``.
drop : bool, default True
Delete columns to be used as the new index.
append : bool, default False
Whether to append columns to existing index.
inplace : bool, default False
Modify the DataFrame in place (do not create a new object).
Returns
-------
DataFrame
Changed row labels.
See Also
--------
DataFrame.reset_index : Opposite of set_index.
Examples
--------
>>> df = ks.DataFrame({'month': [1, 4, 7, 10],
... 'year': [2012, 2014, 2013, 2014],
... 'sale': [55, 40, 84, 31]},
... columns=['month', 'year', 'sale'])
>>> df
month year sale
0 1 2012 55
1 4 2014 40
2 7 2013 84
3 10 2014 31
Set the index to become the 'month' column:
>>> df.set_index('month') # doctest: +NORMALIZE_WHITESPACE
year sale
month
1 2012 55
4 2014 40
7 2013 84
10 2014 31
Create a MultiIndex using columns 'year' and 'month':
>>> df.set_index(['year', 'month']) # doctest: +NORMALIZE_WHITESPACE
sale
year month
2012 1 55
2014 4 40
2013 7 84
2014 10 31
"""
if isinstance(keys, str):
keys = [keys]
else:
keys = list(keys)
for key in keys:
if key not in self.columns:
raise KeyError(key)
if drop:
data_columns = [column for column in self._metadata.data_columns if column not in keys]
else:
data_columns = self._metadata.data_columns
if append:
index_map = self._metadata.index_map + [(column, column) for column in keys]
else:
index_map = [(column, column) for column in keys]
metadata = self._metadata.copy(data_columns=data_columns, index_map=index_map)
# Sync Spark's columns as well.
sdf = self._sdf.select(['`{}`'.format(name) for name in metadata.columns])
if inplace:
self._metadata = metadata
self._sdf = sdf
else:
kdf = self.copy()
kdf._metadata = metadata
kdf._sdf = sdf
return kdf
def reset_index(self, level=None, drop=False, inplace=False):
"""Reset the index, or a level of it.
For DataFrame with multi-level index, return new DataFrame with labeling information in
the columns under the index names, defaulting to 'level_0', 'level_1', etc. if any are None.
For a standard index, the index name will be used (if set), otherwise a default 'index' or
'level_0' (if 'index' is already taken) will be used.
Parameters
----------
level : int, str, tuple, or list, default None
Only remove the given levels from the index. Removes all levels by
default.
drop : bool, default False
Do not try to insert index into dataframe columns. This resets
the index to the default integer index.
inplace : bool, default False
Modify the DataFrame in place (do not create a new object).
Returns
-------
DataFrame
DataFrame with the new index.
See Also
--------
DataFrame.set_index : Opposite of reset_index.
Examples
--------
>>> df = ks.DataFrame([('bird', 389.0),
... ('bird', 24.0),
... ('mammal', 80.5),
... ('mammal', np.nan)],
... index=['falcon', 'parrot', 'lion', 'monkey'],
... columns=('class', 'max_speed'))
>>> df
class max_speed
falcon bird 389.0
parrot bird 24.0
lion mammal 80.5
monkey mammal NaN
When we reset the index, the old index is added as a column. Unlike pandas, Koalas
does not automatically add a sequential index. The following 0, 1, 2, 3 are only
there when we display the DataFrame.
>>> df.reset_index()
index class max_speed
0 falcon bird 389.0
1 parrot bird 24.0
2 lion mammal 80.5
3 monkey mammal NaN
We can use the `drop` parameter to avoid the old index being added as
a column:
>>> df.reset_index(drop=True)
class max_speed
0 bird 389.0
1 bird 24.0
2 mammal 80.5
3 mammal NaN
"""
# TODO: add example of MultiIndex back. See https://github.com/databricks/koalas/issues/301
if len(self._metadata.index_map) == 0:
raise NotImplementedError('Can\'t reset index because there is no index.')
multi_index = len(self._metadata.index_map) > 1
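# Default names for the restored index columns: 'level_<n>' for a multi-index, otherwise 'index' (or 'level_<n>' if 'index' is already a data column).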
def rename(index):
if multi_index:
return 'level_{}'.format(index)
else:
if 'index' not in self._metadata.data_columns:
return 'index'
else:
return 'level_{}'.format(index)
if level is None:
new_index_map = [(column, name if name is not None else rename(i))
for i, (column, name) in enumerate(self._metadata.index_map)]
index_map = []
else:
if isinstance(level, (int, str)):
level = [level]
level = list(level)
if all(isinstance(l, int) for l in level):
for lev in level:
if lev >= len(self._metadata.index_map):
raise IndexError('Too many levels: Index has only {} level, not {}'
.format(len(self._metadata.index_map), lev + 1))
idx = level
elif all(isinstance(lev, str) for lev in level):
idx = []
for l in level:
try:
i = self._metadata.index_columns.index(l)
idx.append(i)
except ValueError:
if multi_index:
raise KeyError('Level unknown not found')
else:
raise KeyError('Level unknown must be same as name ({})'
.format(self._metadata.index_columns[0]))
else:
raise ValueError('Level should be all int or all string.')
idx.sort()
new_index_map = []
index_map = self._metadata.index_map.copy()
for i in idx:
info = self._metadata.index_map[i]
index_column, index_name = info
new_index_map.append(
(index_column,
index_name if index_name is not None else rename(index_name)))
index_map.remove(info)
if drop:
new_index_map = []
metadata = self._metadata.copy(
data_columns=[column for column, _ in new_index_map] + self._metadata.data_columns,
index_map=index_map)
columns = [name for _, name in new_index_map] + self._metadata.data_columns
if inplace:
self._metadata = metadata
self.columns = columns
else:
kdf = self.copy()
kdf._metadata = metadata
kdf.columns = columns
return kdf
def isnull(self):
"""
Detects missing values for items in the current Dataframe.
Return a boolean same-sized Dataframe indicating if the values are NA.
NA values, such as None or numpy.NaN, get mapped to True values.
Everything else gets mapped to False values.
See Also
--------
Dataframe.notnull
Examples
--------
>>> df = ks.DataFrame([(.2, .3), (.0, None), (.6, None), (.2, .1)])
>>> df.isnull()
0 1
0 False False
1 False True
2 False True
3 False False
>>> df = ks.DataFrame([[None, 'bee', None], ['dog', None, 'fly']])
>>> df.isnull()
0 1 2
0 True False True
1 False True False
"""
kdf = self.copy()
for name, ks in kdf.iteritems():
kdf[name] = ks.isnull()
return kdf
isna = isnull
def notnull(self):
"""
Detects non-missing values for items in the current Dataframe.
This function takes a dataframe and indicates whether its
values are valid (not missing, which is ``NaN`` in numeric
datatypes, ``None`` or ``NaN`` in objects and ``NaT`` in datetimelike).
See Also
--------
Dataframe.isnull
Examples
--------
>>> df = ks.DataFrame([(.2, .3), (.0, None), (.6, None), (.2, .1)])
>>> df.notnull()
0 1
0 True True
1 True False
2 True False
3 True True
>>> df = ks.DataFrame([['ant', 'bee', 'cat'], ['dog', None, 'fly']])
>>> df.notnull()
0 1 2
0 True True True
1 True False True
"""
kdf = self.copy()
for name, ks in kdf.iteritems():
kdf[name] = ks.notnull()
return kdf
notna = notnull
def to_koalas(self):
"""
Converts the existing DataFrame into a Koalas DataFrame.
This method is monkey-patched into Spark's DataFrame and can be used
to convert a Spark DataFrame into a Koalas DataFrame. If running on
an existing Koalas DataFrame, the method returns itself.
If a Koalas DataFrame is converted to a Spark DataFrame and then back
to Koalas, it will lose the index information and the original index
will be turned into a normal column.
See Also
--------
DataFrame.to_spark
Examples
--------
>>> df = ks.DataFrame({'col1': [1, 2], 'col2': [3, 4]}, columns=['col1', 'col2'])
>>> df
col1 col2
0 1 3
1 2 4
>>> spark_df = df.to_spark()
>>> spark_df
DataFrame[__index_level_0__: bigint, col1: bigint, col2: bigint]
>>> kdf = spark_df.to_koalas()
>>> kdf
__index_level_0__ col1 col2
0 0 1 3
1 1 2 4
Calling to_koalas on a Koalas DataFrame simply returns itself.
>>> df.to_koalas()
col1 col2
0 1 3
1 2 4
"""
if isinstance(self, DataFrame):
return self
else:
return DataFrame(self)
def to_spark(self):
"""
Return the current DataFrame as a Spark DataFrame.
See Also
--------
DataFrame.to_koalas
"""
return self._sdf
def to_pandas(self):
"""
Return a Pandas DataFrame.
.. note:: This method should only be used if the resulting Pandas DataFrame is expected
to be small, as all the data is loaded into the driver's memory.
Examples
--------
>>> df = ks.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)],
... columns=['dogs', 'cats'])
>>> df.to_pandas()
dogs cats
0 0.2 0.3
1 0.0 0.6
2 0.6 0.0
3 0.2 0.1
"""
sdf = self._sdf.select(['`{}`'.format(name) for name in self._metadata.columns])
pdf = sdf.toPandas()
if len(pdf) == 0 and len(sdf.schema) > 0:
# TODO: push to OSS
pdf = pdf.astype({field.name: to_arrow_type(field.dataType).to_pandas_dtype()
for field in sdf.schema})
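# Restore the index on the pandas side from the index columns, then keep only the data columns.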
index_columns = self._metadata.index_columns
if len(index_columns) > 0:
append = False
for index_field in index_columns:
drop = index_field not in self._metadata.data_columns
pdf = pdf.set_index(index_field, drop=drop, append=append)
append = True
pdf = pdf[self._metadata.data_columns]
index_names = self._metadata.index_names
if len(index_names) > 0:
if isinstance(pdf.index, pd.MultiIndex):
pdf.index.names = index_names
else:
pdf.index.name = index_names[0]
return pdf
# Alias to maintain backward compatibility with Spark
toPandas = to_pandas
def assign(self, **kwargs):
"""
Assign new columns to a DataFrame.
Returns a new object with all original columns in addition to new ones.
Existing columns that are re-assigned will be overwritten.
Parameters
----------
**kwargs : dict of {str: callable or Series}
The column names are keywords. If the values are
callable, they are computed on the DataFrame and
assigned to the new columns. The callable must not
change input DataFrame (though Koalas doesn't check it).
If the values are not callable, (e.g. a Series or a literal),
they are simply assigned.
Returns
-------
DataFrame
A new DataFrame with the new columns in addition to
all the existing columns.
Examples
--------
>>> df = ks.DataFrame({'temp_c': [17.0, 25.0]},
... index=['Portland', 'Berkeley'])
>>> df
temp_c
Portland 17.0
Berkeley 25.0
Where the value is a callable, evaluated on `df`:
>>> df.assign(temp_f=lambda x: x.temp_c * 9 / 5 + 32)
temp_c temp_f
Portland 17.0 62.6
Berkeley 25.0 77.0
Alternatively, the same behavior can be achieved by directly
referencing an existing Series or sequence and you can also
create multiple columns within the same assign.
>>> assigned = df.assign(temp_f=df['temp_c'] * 9 / 5 + 32,
... temp_k=df['temp_c'] + 273.15)
>>> assigned[['temp_c', 'temp_f', 'temp_k']]
temp_c temp_f temp_k
Portland 17.0 62.6 290.15
Berkeley 25.0 77.0 298.15
Notes
-----
Assigning multiple columns within the same ``assign`` is possible
but you cannot refer to newly created or modified columns. This
feature is supported in pandas for Python 3.6 and later but not in
Koalas. In Koalas, all items are computed first, and then assigned.
"""
from databricks.koalas.series import Series
for k, v in kwargs.items():
if not (isinstance(v, (Series, spark.Column)) or
callable(v) or pd.api.types.is_scalar(v)):
raise TypeError("Column assignment doesn't support type "
"{0}".format(type(v).__name__))
if callable(v):
kwargs[k] = v(self)
pairs = list(kwargs.items())
sdf = self._sdf
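# Attach each new column to the Spark DataFrame: Series via their underlying Spark column, Spark Columns directly, and plain values as literals.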
for (name, c) in pairs:
if isinstance(c, Series):
sdf = sdf.withColumn(name, c._scol)
elif isinstance(c, Column):
sdf = sdf.withColumn(name, c)
else:
sdf = sdf.withColumn(name, F.lit(c))
data_columns = self._metadata.data_columns
metadata = self._metadata.copy(
data_columns=(data_columns +
[name for name, _ in pairs if name not in data_columns]))
return DataFrame(sdf, metadata)
def to_records(self, index=True, convert_datetime64=None,
column_dtypes=None, index_dtypes=None):
"""
Convert DataFrame to a NumPy record array.
Index will be included as the first field of the record array if
requested.
.. note:: This method should only be used if the resulting NumPy ndarray is
expected to be small, as all the data is loaded into the driver's memory.
Parameters
----------
index : bool, default True
Include index in resulting record array, stored in 'index'
field or using the index label, if set.
convert_datetime64 : bool, default None
Whether to convert the index to datetime.datetime if it is a
DatetimeIndex.
column_dtypes : str, type, dict, default None
If a string or type, the data type to store all columns. If
a dictionary, a mapping of column names and indices (zero-indexed)
to specific data types.
index_dtypes : str, type, dict, default None
If a string or type, the data type to store all index levels. If
a dictionary, a mapping of index level names and indices
(zero-indexed) to specific data types.
This mapping is applied only if `index=True`.
Returns
-------
numpy.recarray
NumPy ndarray with the DataFrame labels as fields and each row
of the DataFrame as entries.
See Also
--------
DataFrame.from_records: Convert structured or record ndarray
to DataFrame.
numpy.recarray: An ndarray that allows field access using
attributes, analogous to typed columns in a
spreadsheet.
Examples
--------
>>> df = ks.DataFrame({'A': [1, 2], 'B': [0.5, 0.75]},
... index=['a', 'b'])
>>> df
A B
a 1 0.50
b 2 0.75
>>> df.to_records() # doctest: +SKIP
rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)],
dtype=[('index', 'O'), ('A', '<i8'), ('B', '<f8')])
The index can be excluded from the record array:
>>> df.to_records(index=False) # doctest: +SKIP
rec.array([(1, 0.5 ), (2, 0.75)],
dtype=[('A', '<i8'), ('B', '<f8')])
Specification of dtype for columns is new in Pandas 0.24.0.
Data types can be specified for the columns:
>>> df.to_records(column_dtypes={"A": "int32"}) # doctest: +SKIP
rec.array([('a', 1, 0.5 ), ('b', 2, 0.75)],
dtype=[('index', 'O'), ('A', '<i4'), ('B', '<f8')])
Specification of dtype for index is new in Pandas 0.24.0.
Data types can also be specified for the index:
>>> df.to_records(index_dtypes="<S2") # doctest: +SKIP
rec.array([(b'a', 1, 0.5 ), (b'b', 2, 0.75)],
dtype=[('index', 'S2'), ('A', '<i8'), ('B', '<f8')])
"""
args = locals()
kdf = self
return validate_arguments_and_invoke_function(
kdf.to_pandas(), self.to_records, pd.DataFrame.to_records, args)
def copy(self) -> 'DataFrame':
"""
Make a copy of this object's indices and data.
Returns
-------
copy : DataFrame
"""
return DataFrame(self._sdf, self._metadata.copy())
def dropna(self, axis=0, how='any', thresh=None, subset=None, inplace=False):
"""
Remove missing values.
Parameters
----------
axis : {0 or 'index'}, default 0
Determine if rows or columns which contain missing values are
removed.
* 0, or 'index' : Drop rows which contain missing values.
how : {'any', 'all'}, default 'any'
Determine if row or column is removed from DataFrame, when we have
at least one NA or all NA.
* 'any' : If any NA values are present, drop that row or column.
* 'all' : If all values are NA, drop that row or column.
thresh : int, optional
Require that many non-NA values.
subset : array-like, optional
Labels along other axis to consider, e.g. if you are dropping rows
these would be a list of columns to include.
inplace : bool, default False
If True, do operation inplace and return None.
Returns
-------
DataFrame
DataFrame with NA entries dropped from it.
See Also
--------
DataFrame.drop : Drop specified labels from columns.
DataFrame.isnull: Indicate missing values.
DataFrame.notnull : Indicate existing (non-missing) values.
Examples
--------
>>> df = ks.DataFrame({"name": ['Alfred', 'Batman', 'Catwoman'],
... "toy": [None, 'Batmobile', 'Bullwhip'],
... "born": [None, "1940-04-25", None]},
... columns=['name', 'toy', 'born'])
>>> df
name toy born
0 Alfred None None
1 Batman Batmobile 1940-04-25
2 Catwoman Bullwhip None
Drop the rows where at least one element is missing.
>>> df.dropna()
name toy born
1 Batman Batmobile 1940-04-25
Drop the rows where all elements are missing.
>>> df.dropna(how='all')
name toy born
0 Alfred None None
1 Batman Batmobile 1940-04-25
2 Catwoman Bullwhip None
Keep only the rows with at least 2 non-NA values.
>>> df.dropna(thresh=2)
name toy born
1 Batman Batmobile 1940-04-25
2 Catwoman Bullwhip None
Define in which columns to look for missing values.
>>> df.dropna(subset=['name', 'born'])
name toy born
1 Batman Batmobile 1940-04-25
Keep the DataFrame with valid entries in the same variable.
>>> df.dropna(inplace=True)
>>> df
name toy born
1 Batman Batmobile 1940-04-25
"""
if axis == 0 or axis == 'index':
if subset is not None:
if isinstance(subset, str):
columns = [subset]
else:
columns = list(subset)
invalids = [column for column in columns
if column not in self._metadata.data_columns]
if len(invalids) > 0:
raise KeyError(invalids)
else:
columns = list(self.columns)
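# Count the non-null values per row over the selected columns, then build the predicate that decides which rows to keep (thresh / how).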
cnt = reduce(lambda x, y: x + y,
[F.when(self[column].notna()._scol, 1).otherwise(0)
for column in columns],
F.lit(0))
if thresh is not None:
pred = cnt >= F.lit(int(thresh))
elif how == 'any':
pred = cnt == F.lit(len(columns))
elif how == 'all':
pred = cnt > F.lit(0)
else:
if how is not None:
raise ValueError('invalid how option: {h}'.format(h=how))
else:
raise TypeError('must specify how or thresh')
sdf = self._sdf.filter(pred)
if inplace:
self._sdf = sdf
else:
return DataFrame(sdf, self._metadata.copy())
else:
raise NotImplementedError("dropna currently only works for axis=0 or axis='index'")
def fillna(self, value=None, axis=None, inplace=False):
"""Fill NA/NaN values.
Parameters
----------
value : scalar, dict, Series
Value to use to fill holes. alternately a dict/Series of values
specifying which value to use for each column.
DataFrame is not supported.
axis : {0 or `index`}
1 and `columns` are not supported.
inplace : boolean, default False
Fill in place (do not create a new object)
Returns
-------
DataFrame
DataFrame with NA entries filled.
Examples
--------
>>> df = ks.DataFrame({
... 'A': [None, 3, None, None],
... 'B': [2, 4, None, 3],
... 'C': [None, None, None, 1],
... 'D': [0, 1, 5, 4]
... },
... columns=['A', 'B', 'C', 'D'])
>>> df
A B C D
0 NaN 2.0 NaN 0
1 3.0 4.0 NaN 1
2 NaN NaN NaN 5
3 NaN 3.0 1.0 4
Replace all NaN elements with 0s.
>>> df.fillna(0)
A B C D
0 0.0 2.0 0.0 0
1 3.0 4.0 0.0 1
2 0.0 0.0 0.0 5
3 0.0 3.0 1.0 4
Replace all NaN elements in column 'A', 'B', 'C', and 'D', with 0, 1,
2, and 3 respectively.
>>> values = {'A': 0, 'B': 1, 'C': 2, 'D': 3}
>>> df.fillna(value=values)
A B C D
0 0.0 2.0 2.0 0
1 3.0 4.0 2.0 1
2 0.0 1.0 2.0 5
3 0.0 3.0 1.0 4
"""
if axis is None:
axis = 0
if not (axis == 0 or axis == "index"):
raise NotImplementedError("fillna currently only works for axis=0 or axis='index'")
if value is None:
raise ValueError('Currently must specify value')
if not isinstance(value, (float, int, str, bool, dict, pd.Series)):
raise TypeError("Unsupported type %s" % type(value))
if isinstance(value, pd.Series):
value = value.to_dict()
if isinstance(value, dict):
for v in value.values():
if not isinstance(v, (float, int, str, bool)):
raise TypeError("Unsupported type %s" % type(v))
sdf = self._sdf.fillna(value)
if inplace:
self._sdf = sdf
else:
return DataFrame(sdf, self._metadata.copy())
def clip(self, lower: Union[float, int] = None, upper: Union[float, int] = None) \
-> 'DataFrame':
"""
Trim values at input threshold(s).
Assigns values outside boundary to boundary values.
Parameters
----------
lower : float or int, default None
Minimum threshold value. All values below this threshold will be set to it.
upper : float or int, default None
Maximum threshold value. All values above this threshold will be set to it.
Returns
-------
DataFrame
DataFrame with the values outside the clip boundaries replaced.
Examples
--------
>>> ks.DataFrame({'A': [0, 2, 4]}).clip(1, 3)
A
0 1
1 2
2 3
Notes
-----
One difference between this implementation and pandas is that running
pd.DataFrame({'A': ['a', 'b']}).clip(0, 1) will crash with "TypeError: '<=' not supported
between instances of 'str' and 'int'" while ks.DataFrame({'A': ['a', 'b']}).clip(0, 1)
will output the original DataFrame, simply ignoring the incompatible types.
"""
if is_list_like(lower) or is_list_like(upper):
raise ValueError("List-like value are not supported for 'lower' and 'upper' at the " +
"moment")
if lower is None and upper is None:
return self
sdf = self._sdf
numeric_types = (DecimalType, DoubleType, FloatType, ByteType, IntegerType, LongType,
ShortType)
numeric_columns = [c for c in self.columns
if isinstance(sdf.schema[c].dataType, numeric_types)]
nonnumeric_columns = [c for c in self.columns
if not isinstance(sdf.schema[c].dataType, numeric_types)]
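# Clip only the numeric columns; non-numeric columns pass through unchanged.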
if lower is not None:
sdf = sdf.select(*[F.when(F.col(c) < lower, lower).otherwise(F.col(c)).alias(c)
for c in numeric_columns] + nonnumeric_columns)
if upper is not None:
sdf = sdf.select(*[F.when(F.col(c) > upper, upper).otherwise(F.col(c)).alias(c)
for c in numeric_columns] + nonnumeric_columns)
# Restore initial column order
sdf = sdf.select(list(self.columns))
return ks.DataFrame(sdf)
def head(self, n=5):
"""
Return the first `n` rows.
This function returns the first `n` rows for the object based
on position. It is useful for quickly testing if your object
has the right type of data in it.
Parameters
----------
n : int, default 5
Number of rows to select.
Returns
-------
obj_head : same type as caller
The first `n` rows of the caller object.
Examples
--------
>>> df = ks.DataFrame({'animal':['alligator', 'bee', 'falcon', 'lion',
... 'monkey', 'parrot', 'shark', 'whale', 'zebra']})
>>> df
animal
0 alligator
1 bee
2 falcon
3 lion
4 monkey
5 parrot
6 shark
7 whale
8 zebra
Viewing the first 5 lines
>>> df.head()
animal
0 alligator
1 bee
2 falcon
3 lion
4 monkey
Viewing the first `n` lines (three in this case)
>>> df.head(3)
animal
0 alligator
1 bee
2 falcon
"""
return DataFrame(self._sdf.limit(n), self._metadata.copy())
@property
def columns(self):
"""The column labels of the DataFrame."""
return pd.Index(self._metadata.data_columns)
@columns.setter
def columns(self, names):
old_names = self._metadata.data_columns
if len(old_names) != len(names):
raise ValueError(
"Length mismatch: Expected axis has %d elements, new values have %d elements"
% (len(old_names), len(names)))
sdf = self._sdf.select(self._metadata.index_columns +
[self[old_name]._scol.alias(new_name)
for (old_name, new_name) in zip(old_names, names)])
self._sdf = sdf
self._metadata = self._metadata.copy(data_columns=names)
@property
def dtypes(self):
"""Return the dtypes in the DataFrame.
This returns a Series with the data type of each column. The result's index is the original
DataFrame's columns. Columns with mixed types are stored with the object dtype.
Returns
-------
pd.Series
The data type of each column.
Examples
--------
>>> df = ks.DataFrame({'a': list('abc'),
... 'b': list(range(1, 4)),
... 'c': np.arange(3, 6).astype('i1'),
... 'd': np.arange(4.0, 7.0, dtype='float64'),
... 'e': [True, False, True],
... 'f': pd.date_range('20130101', periods=3)},
... columns=['a', 'b', 'c', 'd', 'e', 'f'])
>>> df.dtypes
a object
b int64
c int8
d float64
e bool
f datetime64[ns]
dtype: object
"""
return pd.Series([self[col].dtype for col in self._metadata.data_columns],
index=self._metadata.data_columns)
def count(self):
"""
Count non-NA cells for each column.
The values `None`, `NaN` are considered NA.
Returns
-------
pandas.Series
See Also
--------
Series.count: Number of non-NA elements in a Series.
DataFrame.shape: Number of DataFrame rows and columns (including NA
elements).
DataFrame.isna: Boolean same-sized DataFrame showing places of NA
elements.
Examples
--------
Constructing DataFrame from a dictionary:
>>> df = ks.DataFrame({"Person":
... ["John", "Myla", "Lewis", "John", "Myla"],
... "Age": [24., np.nan, 21., 33, 26],
... "Single": [False, True, True, True, False]},
... columns=["Person", "Age", "Single"])
>>> df
Person Age Single
0 John 24.0 False
1 Myla NaN True
2 Lewis 21.0 True
3 John 33.0 True
4 Myla 26.0 False
Notice the uncounted NA values:
>>> df.count()
Person 5
Age 4
Single 5
dtype: int64
"""
return self._reduce_for_stat_function(_Frame._count_expr)
def drop(self, labels=None, axis=1, columns: Union[str, List[str]] = None):
"""
Drop specified labels from columns.
Remove columns by specifying label names and axis=1 or columns.
When specifying both labels and columns, only labels will be dropped.
Removing rows is yet to be implemented.
Parameters
----------
labels : single label or list-like
Column labels to drop.
axis : {1 or 'columns'}, default 1
.. drop currently only works for axis=1 'columns'
axis=0 is yet to be implemented.
columns : single label or list-like
Alternative to specifying axis (``labels, axis=1``
is equivalent to ``columns=labels``).
Returns
-------
dropped : DataFrame
See Also
--------
Series.dropna
Examples
--------
>>> df = ks.DataFrame({'x': [1, 2], 'y': [3, 4], 'z': [5, 6], 'w': [7, 8]},
... columns=['x', 'y', 'z', 'w'])
>>> df
x y z w
0 1 3 5 7
1 2 4 6 8
>>> df.drop('x', axis=1)
y z w
0 3 5 7
1 4 6 8
>>> df.drop(['y', 'z'], axis=1)
x w
0 1 7
1 2 8
>>> df.drop(columns=['y', 'z'])
x w
0 1 7
1 2 8
Notes
-----
Currently only axis = 1 is supported in this function,
axis = 0 is yet to be implemented.
"""
if labels is not None:
axis = self._validate_axis(axis)
if axis == 1:
return self.drop(columns=labels)
raise NotImplementedError("Drop currently only works for axis=1")
elif columns is not None:
if isinstance(columns, str):
columns = [columns]
sdf = self._sdf.drop(*columns)
metadata = self._metadata.copy(
data_columns=[column for column in self.columns if column not in columns]
)
return DataFrame(sdf, metadata)
else:
raise ValueError("Need to specify at least one of 'labels' or 'columns'")
def get(self, key, default=None):
"""
Get item from object for given key (DataFrame column, Panel slice,
etc.). Returns default value if not found.
Parameters
----------
key : object
Returns
-------
value : same type as items contained in object
Examples
--------
>>> df = ks.DataFrame({'x':range(3), 'y':['a','b','b'], 'z':['a','b','b']},
... columns=['x', 'y', 'z'])
>>> df
x y z
0 0 a a
1 1 b b
2 2 b b
>>> df.get('x')
0 0
1 1
2 2
Name: x, dtype: int64
>>> df.get(['x', 'y'])
x y
0 0 a
1 1 b
2 2 b
"""
try:
return self._pd_getitem(key)
except (KeyError, ValueError, IndexError):
return default
def sort_values(self, by, ascending=True, inplace=False, na_position='last'):
"""
Sort by the values along either axis.
Parameters
----------
by : str or list of str
ascending : bool or list of bool, default True
Sort ascending vs. descending. Specify list for multiple sort
orders. If this is a list of bools, must match the length of
the by.
inplace : bool, default False
if True, perform operation in-place
na_position : {'first', 'last'}, default 'last'
`first` puts NaNs at the beginning, `last` puts NaNs at the end
Returns
-------
sorted_obj : DataFrame
Examples
--------
>>> df = ks.DataFrame({
... 'col1': ['A', 'B', None, 'D', 'C'],
... 'col2': [2, 9, 8, 7, 4],
... 'col3': [0, 9, 4, 2, 3],
... },
... columns=['col1', 'col2', 'col3'])
>>> df
col1 col2 col3
0 A 2 0
1 B 9 9
2 None 8 4
3 D 7 2
4 C 4 3
Sort by col1
>>> df.sort_values(by=['col1'])
col1 col2 col3
0 A 2 0
1 B 9 9
4 C 4 3
3 D 7 2
2 None 8 4
Sort Descending
>>> df.sort_values(by='col1', ascending=False)
col1 col2 col3
3 D 7 2
4 C 4 3
1 B 9 9
0 A 2 0
2 None 8 4
Sort by multiple columns
>>> df = ks.DataFrame({
... 'col1': ['A', 'A', 'B', None, 'D', 'C'],
... 'col2': [2, 1, 9, 8, 7, 4],
... 'col3': [0, 1, 9, 4, 2, 3],
... },
... columns=['col1', 'col2', 'col3'])
>>> df.sort_values(by=['col1', 'col2'])
col1 col2 col3
1 A 1 1
0 A 2 0
2 B 9 9
5 C 4 3
4 D 7 2
3 None 8 4
"""
if isinstance(by, str):
by = [by]
if isinstance(ascending, bool):
ascending = [ascending] * len(by)
if len(ascending) != len(by):
raise ValueError('Length of ascending ({}) != length of by ({})'
.format(len(ascending), len(by)))
if na_position not in ('first', 'last'):
raise ValueError("invalid na_position: '{}'".format(na_position))
# Mapper: Get a spark column function for (ascending, na_position) combination
# Note that 'asc_nulls_first' and friends were added as of Spark 2.4, see SPARK-23847.
mapper = {
(True, 'first'): lambda x: Column(getattr(x._jc, "asc_nulls_first")()),
(True, 'last'): lambda x: Column(getattr(x._jc, "asc_nulls_last")()),
(False, 'first'): lambda x: Column(getattr(x._jc, "desc_nulls_first")()),
(False, 'last'): lambda x: Column(getattr(x._jc, "desc_nulls_last")()),
}
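# Apply the chosen ordering to each sort key column.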
by = [mapper[(asc, na_position)](self[colname]._scol)
for colname, asc in zip(by, ascending)]
kdf = DataFrame(self._sdf.sort(*by), self._metadata.copy())
if inplace:
self._sdf = kdf._sdf
self._metadata = kdf._metadata
else:
return kdf
# TODO: add keep = First
def nlargest(self, n: int, columns: 'Any') -> 'DataFrame':
"""
Return the first `n` rows ordered by `columns` in descending order.
Return the first `n` rows with the largest values in `columns`, in
descending order. The columns that are not specified are returned as
well, but not used for ordering.
This method is equivalent to
``df.sort_values(columns, ascending=False).head(n)``, but more
performant in Pandas.
In Koalas, thanks to Spark's lazy execution and query optimizer,
the two would have the same performance.
Parameters
----------
n : int
Number of rows to return.
columns : label or list of labels
Column label(s) to order by.
Returns
-------
DataFrame
The first `n` rows ordered by the given columns in descending
order.
See Also
--------
DataFrame.nsmallest : Return the first `n` rows ordered by `columns` in
ascending order.
DataFrame.sort_values : Sort DataFrame by the values.
DataFrame.head : Return the first `n` rows without re-ordering.
Notes
-----
This function cannot be used with all column types. For example, when
specifying columns with `object` or `category` dtypes, ``TypeError`` is
raised.
Examples
--------
>>> df = ks.DataFrame({'X': [1, 2, 3, 5, 6, 7, np.nan],
... 'Y': [6, 7, 8, 9, 10, 11, 12]})
>>> df
X Y
0 1.0 6
1 2.0 7
2 3.0 8
3 5.0 9
4 6.0 10
5 7.0 11
6 NaN 12
In the following example, we will use ``nlargest`` to select the three
rows having the largest values in column "X".
>>> df.nlargest(n=3, columns='X')
X Y
5 7.0 11
4 6.0 10
3 5.0 9
>>> df.nlargest(n=3, columns=['Y', 'X'])
X Y
6 NaN 12
5 7.0 11
4 6.0 10
"""
return self.sort_values(by=columns, ascending=False).head(n=n)
# TODO: add keep = First
def nsmallest(self, n: int, columns: 'Any') -> 'DataFrame':
"""
Return the first `n` rows ordered by `columns` in ascending order.
Return the first `n` rows with the smallest values in `columns`, in
ascending order. The columns that are not specified are returned as
well, but not used for ordering.
This method is equivalent to
``df.sort_values(columns, ascending=True).head(n)``, but more
performant.
In Koalas, thanks to Spark's lazy execution and query optimizer,
the two would have the same performance.
Parameters
----------
n : int
Number of items to retrieve.
columns : list or str
Column name or names to order by.
Returns
-------
DataFrame
See Also
--------
DataFrame.nlargest : Return the first `n` rows ordered by `columns` in
descending order.
DataFrame.sort_values : Sort DataFrame by the values.
DataFrame.head : Return the first `n` rows without re-ordering.
Examples
--------
>>> df = ks.DataFrame({'X': [1, 2, 3, 5, 6, 7, np.nan],
... 'Y': [6, 7, 8, 9, 10, 11, 12]})
>>> df
X Y
0 1.0 6
1 2.0 7
2 3.0 8
3 5.0 9
4 6.0 10
5 7.0 11
6 NaN 12
In the following example, we will use ``nsmallest`` to select the
three rows having the smallest values in column "X".
>>> df.nsmallest(n=3, columns='X') # doctest: +NORMALIZE_WHITESPACE
X Y
0 1.0 6
1 2.0 7
2 3.0 8
To order by the smallest values in column "Y" and then "X", we can
specify multiple columns as in the next example.
>>> df.nsmallest(n=3, columns=['Y', 'X']) # doctest: +NORMALIZE_WHITESPACE
X Y
0 1.0 6
1 2.0 7
2 3.0 8
"""
return self.sort_values(by=columns, ascending=True).head(n=n)
def isin(self, values):
"""
Whether each element in the DataFrame is contained in values.
Parameters
----------
values : iterable or dict
The sequence of values to test. If values is a dict,
the keys must be the column names, which must match.
Series and DataFrame are not supported.
Returns
-------
DataFrame
DataFrame of booleans showing whether each element in the DataFrame
is contained in values.
Examples
--------
>>> df = ks.DataFrame({'num_legs': [2, 4], 'num_wings': [2, 0]},
... index=['falcon', 'dog'],
... columns=['num_legs', 'num_wings'])
>>> df
num_legs num_wings
falcon 2 2
dog 4 0
When ``values`` is a list check whether every value in the DataFrame
is present in the list (which animals have 0 or 2 legs or wings)
>>> df.isin([0, 2])
num_legs num_wings
falcon True True
dog False True
When ``values`` is a dict, we can pass values to check for each
column separately:
>>> df.isin({'num_wings': [0, 3]})
num_legs num_wings
falcon False False
dog False True
"""
if isinstance(values, (pd.DataFrame, pd.Series)):
raise NotImplementedError("DataFrame and Series are not supported")
if isinstance(values, dict) and not set(values.keys()).issubset(self.columns):
raise AttributeError(
"'DataFrame' object has no attribute %s"
% (set(values.keys()).difference(self.columns)))
_select_columns = self._metadata.index_columns
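# Build a boolean isin expression per data column; with a dict, columns without listed values are filled with False.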
if isinstance(values, dict):
for col in self.columns:
if col in values:
_select_columns.append(self[col]._scol.isin(values[col]).alias(col))
else:
_select_columns.append(F.lit(False).alias(col))
elif is_list_like(values):
_select_columns += [
self[col]._scol.isin(list(values)).alias(col) for col in self.columns]
else:
raise TypeError('Values should be iterable, Series, DataFrame or dict.')
return DataFrame(self._sdf.select(_select_columns), self._metadata.copy())
def pipe(self, func, *args, **kwargs):
r"""
Apply func(self, \*args, \*\*kwargs).
Parameters
----------
func : function
function to apply to the DataFrame.
``args``, and ``kwargs`` are passed into ``func``.
Alternatively a ``(callable, data_keyword)`` tuple where
``data_keyword`` is a string indicating the keyword of
``callable`` that expects the DataFrames.
args : iterable, optional
positional arguments passed into ``func``.
kwargs : mapping, optional
a dictionary of keyword arguments passed into ``func``.
Returns
-------
object : the return type of ``func``.
Notes
-----
Use ``.pipe`` when chaining together functions that expect
Series, DataFrames or GroupBy objects. For example, given
>>> df = ks.DataFrame({'category': ['A', 'A', 'B'],
... 'col1': [1, 2, 3],
... 'col2': [4, 5, 6]},
... columns=['category', 'col1', 'col2'])
>>> def keep_category_a(df):
... return df[df['category'] == 'A']
>>> def add_one(df, column):
... return df.assign(col3=df[column] + 1)
>>> def multiply(df, column1, column2):
... return df.assign(col4=df[column1] * df[column2])
instead of writing
>>> multiply(add_one(keep_category_a(df), column="col1"), column1="col2", column2="col3")
category col1 col2 col3 col4
0 A 1 4 2 8
1 A 2 5 3 15
You can write
>>> (df.pipe(keep_category_a)
... .pipe(add_one, column="col1")
... .pipe(multiply, column1="col2", column2="col3")
... )
category col1 col2 col3 col4
0 A 1 4 2 8
1 A 2 5 3 15
If you have a function that takes the data as (say) the second
argument, pass a tuple indicating which keyword expects the
data. For example, suppose ``f`` takes its data as ``df``:
>>> def multiply_2(column1, df, column2):
... return df.assign(col4=df[column1] * df[column2])
Then you can write
>>> (df.pipe(keep_category_a)
... .pipe(add_one, column="col1")
... .pipe((multiply_2, 'df'), column1="col2", column2="col3")
... )
category col1 col2 col3 col4
0 A 1 4 2 8
1 A 2 5 3 15
"""
if isinstance(func, tuple):
func, target = func
if target in kwargs:
raise ValueError('%s is both the pipe target and a keyword '
'argument' % target)
kwargs[target] = self
return func(*args, **kwargs)
else:
return func(self, *args, **kwargs)
@property
def shape(self):
"""
Return a tuple representing the dimensionality of the DataFrame.
Examples
--------
>>> df = ks.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
>>> df.shape
(2, 2)
>>> df = ks.DataFrame({'col1': [1, 2], 'col2': [3, 4],
... 'col3': [5, 6]})
>>> df.shape
(2, 3)
"""
return len(self), len(self.columns)
def merge(self, right: 'DataFrame', how: str = 'inner', on: str = None,
left_index: bool = False, right_index: bool = False,
suffixes: Tuple[str, str] = ('_x', '_y')) -> 'DataFrame':
"""
Merge DataFrame objects with a database-style join.
Parameters
----------
right: Object to merge with.
how: Type of merge to be performed.
{‘left’, ‘right’, ‘outer’, ‘inner’}, default ‘inner’
left: use only keys from left frame, similar to a SQL left outer join; preserve key
order.
right: use only keys from right frame, similar to a SQL right outer join; preserve key
order.
outer: use union of keys from both frames, similar to a SQL full outer join; sort keys
lexicographically.
inner: use intersection of keys from both frames, similar to a SQL inner join;
preserve the order of the left keys.
on: Column or index level names to join on. These must be found in both DataFrames. If on
is None and not merging on indexes then this defaults to the intersection of the
columns in both DataFrames.
left_index: Use the index from the left DataFrame as the join key(s). If it is a
MultiIndex, the number of keys in the other DataFrame (either the index or a number of
columns) must match the number of levels.
right_index: Use the index from the right DataFrame as the join key. Same caveats as
left_index.
suffixes: Suffix to apply to overlapping column names in the left and right side,
respectively.
Returns
-------
DataFrame
A DataFrame of the two merged objects.
Examples
--------
>>> left_kdf = ks.DataFrame({'A': [1, 2]})
>>> right_kdf = ks.DataFrame({'B': ['x', 'y']}, index=[1, 2])
>>> left_kdf.merge(right_kdf, left_index=True, right_index=True)
A B
0 2 x
>>> left_kdf.merge(right_kdf, left_index=True, right_index=True, how='left')
A B
0 1 None
1 2 x
>>> left_kdf.merge(right_kdf, left_index=True, right_index=True, how='right')
A B
0 2.0 x
1 NaN y
>>> left_kdf.merge(right_kdf, left_index=True, right_index=True, how='outer')
A B
0 1.0 None
1 2.0 x
2 NaN y
Notes
-----
As described in #263, joining string columns currently returns None for missing values
instead of NaN.
"""
if on is None and not left_index and not right_index:
raise ValueError("At least 'on' or 'left_index' and 'right_index' have to be set")
if on is not None and (left_index or right_index):
raise ValueError("Only 'on' or 'left_index' and 'right_index' can be set")
if how == 'full':
warnings.warn("Warning: While Koalas will accept 'full', you should use 'outer' " +
"instead to be compatible with the pandas merge API", UserWarning)
if how == 'outer':
# 'outer' in pandas equals 'full' in Spark
how = 'full'
if how not in ('inner', 'left', 'right', 'full'):
raise ValueError("The 'how' parameter has to be amongst the following values: ",
"['inner', 'left', 'right', 'outer']")
if on is None:
# FIXME Move index string to constant?
on = '__index_level_0__'
left_table = self._sdf.alias('left_table')
right_table = right._sdf.alias('right_table')
# Unpack suffixes tuple for convenience
left_suffix = suffixes[0]
right_suffix = suffixes[1]
# Append suffixes to columns with the same name to avoid conflicts later
duplicate_columns = list(self.columns & right.columns)
if duplicate_columns:
for duplicate_column_name in duplicate_columns:
left_table = left_table.withColumnRenamed(duplicate_column_name,
duplicate_column_name + left_suffix)
right_table = right_table.withColumnRenamed(duplicate_column_name,
duplicate_column_name + right_suffix)
join_condition = (left_table[on] == right_table[on] if on not in duplicate_columns
else left_table[on + left_suffix] == right_table[on + right_suffix])
joined_table = left_table.join(right_table, join_condition, how=how)
if on in duplicate_columns:
# Merge duplicate key columns
joined_table = joined_table.withColumnRenamed(on + left_suffix, on)
joined_table = joined_table.drop(on + right_suffix)
# Remove auxiliary index
# FIXME Move index string to constant?
joined_table = joined_table.drop('__index_level_0__')
kdf = DataFrame(joined_table)
return kdf
def sample(self, n: Optional[int] = None, frac: Optional[float] = None, replace: bool = False,
random_state: Optional[int] = None) -> 'DataFrame':
"""
Return a random sample of items from an axis of object.
        Please call this function using a named argument by specifying the ``frac`` argument.
You can use `random_state` for reproducibility. However, note that different from pandas,
specifying a seed in Koalas/Spark does not guarantee the sampled rows will be fixed. The
result set depends on not only the seed, but also how the data is distributed across
machines and to some extent network randomness when shuffle operations are involved. Even
in the simplest case, the result set will depend on the system's CPU core count.
Parameters
----------
n : int, optional
Number of items to return. This is currently NOT supported. Use frac instead.
frac : float, optional
Fraction of axis items to return.
replace : bool, default False
Sample with or without replacement.
random_state : int, optional
Seed for the random number generator (if int).
Returns
-------
Series or DataFrame
A new object of same type as caller containing the sampled items.
Examples
--------
>>> df = ks.DataFrame({'num_legs': [2, 4, 8, 0],
... 'num_wings': [2, 0, 0, 0],
... 'num_specimen_seen': [10, 2, 1, 8]},
... index=['falcon', 'dog', 'spider', 'fish'],
... columns=['num_legs', 'num_wings', 'num_specimen_seen'])
>>> df # doctest: +SKIP
num_legs num_wings num_specimen_seen
falcon 2 2 10
dog 4 0 2
spider 8 0 1
fish 0 0 8
A random 25% sample of the ``DataFrame``.
Note that we use `random_state` to ensure the reproducibility of
the examples.
>>> df.sample(frac=0.25, random_state=1) # doctest: +SKIP
num_legs num_wings num_specimen_seen
falcon 2 2 10
fish 0 0 8
Extract 25% random elements from the ``Series`` ``df['num_legs']``, with replacement,
so the same items could appear more than once.
>>> df['num_legs'].sample(frac=0.4, replace=True, random_state=1) # doctest: +SKIP
falcon 2
spider 8
spider 8
Name: num_legs, dtype: int64
Specifying the exact number of items to return is not supported at the moment.
>>> df.sample(n=5) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
NotImplementedError: Function sample currently does not support specifying ...
"""
# Note: we don't run any of the doctests because the result can change depending on the
# system's core count.
if n is not None:
raise NotImplementedError("Function sample currently does not support specifying "
"exact number of items to return. Use frac instead.")
if frac is None:
raise ValueError("frac must be specified.")
sdf = self._sdf.sample(withReplacement=replace, fraction=frac, seed=random_state)
return DataFrame(sdf, self._metadata.copy())
def astype(self, dtype) -> 'DataFrame':
"""
Cast a pandas object to a specified dtype ``dtype``.
Parameters
----------
dtype : data type, or dict of column name -> data type
Use a numpy.dtype or Python type to cast entire pandas object to
the same type. Alternatively, use {col: dtype, ...}, where col is a
column label and dtype is a numpy.dtype or Python type to cast one
or more of the DataFrame's columns to column-specific types.
Returns
-------
casted : same type as caller
See Also
--------
to_datetime : Convert argument to datetime.
Examples
--------
>>> df = ks.DataFrame({'a': [1, 2, 3], 'b': [1, 2, 3]}, dtype='int64')
>>> df
a b
0 1 1
1 2 2
2 3 3
Convert to float type:
>>> df.astype('float')
a b
0 1.0 1.0
1 2.0 2.0
2 3.0 3.0
Convert to int64 type back:
>>> df.astype('int64')
a b
0 1 1
1 2 2
2 3 3
Convert column a to float type:
>>> df.astype({'a': float})
a b
0 1.0 1
1 2.0 2
2 3.0 3
"""
results = []
if is_dict_like(dtype):
for col_name in dtype.keys():
if col_name not in self.columns:
raise KeyError('Only a column name can be used for the '
'key in a dtype mappings argument.')
for col_name, col in self.iteritems():
if col_name in dtype:
results.append(col.astype(dtype=dtype[col_name]))
else:
results.append(col)
else:
for col_name, col in self.iteritems():
results.append(col.astype(dtype=dtype))
sdf = self._sdf.select(
self._metadata.index_columns + list(map(lambda ser: ser._scol, results)))
return DataFrame(sdf, self._metadata.copy())
def _pd_getitem(self, key):
from databricks.koalas.series import Series
if key is None:
raise KeyError("none key")
if isinstance(key, str):
try:
return Series(self._sdf.__getitem__(key), anchor=self,
index=self._metadata.index_map)
except AnalysisException:
raise KeyError(key)
if np.isscalar(key) or isinstance(key, (tuple, str)):
raise NotImplementedError(key)
elif isinstance(key, slice):
return self.loc[key]
if isinstance(key, (pd.Series, np.ndarray, pd.Index)):
raise NotImplementedError(key)
if isinstance(key, list):
return self.loc[:, key]
if isinstance(key, DataFrame):
# TODO Should not implement alignment, too dangerous?
return Series(self._sdf.__getitem__(key), anchor=self, index=self._metadata.index_map)
if isinstance(key, Series):
# TODO Should not implement alignment, too dangerous?
# It is assumed to be only a filter, otherwise .loc should be used.
bcol = key._scol.cast("boolean")
return DataFrame(self._sdf.filter(bcol), self._metadata.copy())
raise NotImplementedError(key)
def __repr__(self):
pdf = self.head(max_display_count + 1).to_pandas()
pdf_length = len(pdf)
repr_string = repr(pdf.iloc[:max_display_count])
if pdf_length > max_display_count:
match = REPR_PATTERN.search(repr_string)
if match is not None:
nrows = match.group("rows")
ncols = match.group("columns")
footer = ("\n\n[Showing only the first {nrows} rows x {ncols} columns]"
.format(nrows=nrows, ncols=ncols))
return REPR_PATTERN.sub(footer, repr_string)
return repr_string
def _repr_html_(self):
pdf = self.head(max_display_count + 1).to_pandas()
pdf_length = len(pdf)
repr_html = pdf[:max_display_count]._repr_html_()
if pdf_length > max_display_count:
match = REPR_HTML_PATTERN.search(repr_html)
if match is not None:
nrows = match.group("rows")
ncols = match.group("columns")
by = chr(215)
footer = ('\n<p>Showing only the first {rows} rows {by} {cols} columns</p>\n</div>'
.format(rows=nrows,
by=by,
cols=ncols))
return REPR_HTML_PATTERN.sub(footer, repr_html)
return repr_html
def __getitem__(self, key):
return self._pd_getitem(key)
def __setitem__(self, key, value):
from databricks.koalas.series import Series
# For now, we don't support realignment against different dataframes.
# This is too expensive in Spark.
# Are we assigning against a column?
if isinstance(value, Series):
assert value._kdf is self, \
"Cannot combine column argument because it comes from a different dataframe"
if isinstance(key, (tuple, list)):
assert isinstance(value.schema, StructType)
field_names = value.schema.fieldNames()
kdf = self.assign(**{k: value[c] for k, c in zip(key, field_names)})
else:
kdf = self.assign(**{key: value})
self._sdf = kdf._sdf
self._metadata = kdf._metadata
def __getattr__(self, key: str) -> Any:
from databricks.koalas.series import Series
if key.startswith("__") or key.startswith("_pandas_") or key.startswith("_spark_"):
raise AttributeError(key)
if hasattr(_MissingPandasLikeDataFrame, key):
property_or_func = getattr(_MissingPandasLikeDataFrame, key)
if isinstance(property_or_func, property):
return property_or_func.fget(self) # type: ignore
else:
return partial(property_or_func, self)
return Series(self._sdf.__getattr__(key), anchor=self, index=self._metadata.index_map)
def __len__(self):
return self._sdf.count()
def __dir__(self):
fields = [f for f in self._sdf.schema.fieldNames() if ' ' not in f]
return super(DataFrame, self).__dir__() + fields
@classmethod
def _validate_axis(cls, axis=0):
if axis not in (0, 1, 'index', 'columns', None):
raise ValueError('No axis named {0}'.format(axis))
# convert to numeric axis
return {None: 0, 'index': 0, 'columns': 1}.get(axis, axis)
def _reduce_spark_multi(sdf, aggs):
"""
Performs a reduction on a dataframe, the functions being known sql aggregate functions.
"""
assert isinstance(sdf, spark.DataFrame)
sdf0 = sdf.agg(*aggs)
l = sdf0.head(2)
assert len(l) == 1, (sdf, l)
row = l[0]
l2 = list(row)
assert len(l2) == len(aggs), (row, l2)
return l2
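# --- A minimal, hedged sketch (plain Python, no Spark needed) of the tuple-target
# dispatch that DataFrame.pipe implements above: when func is a (callable, keyword)
# pair, the data object is injected under that keyword instead of being passed as the
# first positional argument. The names demo_pipe and report are illustrative only.
def demo_pipe(data, func, *args, **kwargs):
    if isinstance(func, tuple):
        func, target = func
        if target in kwargs:
            raise ValueError('%s is both the pipe target and a keyword argument' % target)
        kwargs[target] = data
        return func(*args, **kwargs)
    return func(data, *args, **kwargs)
def report(prefix, data):
    # Takes its data as the *second* argument, so it needs the tuple form.
    return '{}: {}'.format(prefix, data)
print(demo_pipe([1, 2, 3], (report, 'data'), prefix='items'))  # items: [1, 2, 3]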
| [
[
[
679,
681
],
[
2011,
2013
],
[
2107,
2109
]
],
[
[
689,
697
],
[
85077,
85085
]
],
[
[
720,
727
],
[
97012,
97019
]
],
[
[
729,
735
],
[
54627,
54633
]
],
[
[
755,
758
],
[
96535,
96538
]
],
[
[
760,
768
],
[
87221,
87229
],
[
87249,
87257
],
[
87325,
87333
]
],
[
[
770,
774
],
[
65127,
65131
]
],
[
[
776,
781
],
[
82221,
82226
]
],
[
[
783,
788
],
[
58065,
58070
],
[
58098,
58103
],
[
65116,
65121
]
],
[
[
797,
808
],
[
5887,
5889
],
[
93367,
93369
],
[
93571,
93573
]
],
[
[
816,
828
],
[
4746,
4748
],
[
5154,
5156
],
[
12870,
12872
],
[
17342,
17344
],
[
21389,
21391
],
[
25356,
25358
],
[
30368,
30370
],
[
44371,
44373
],
[
46936,
46938
],
[
50940,
50942
],
[
57520,
57522
],
[
57627,
57629
],
[
61886,
61888
],
[
63626,
63628
],
[
77540,
77542
],
[
77554,
77556
],
[
93560,
93562
],
[
93583,
93585
]
],
[
[
858,
877
],
[
5763,
5782
]
],
[
[
879,
900
],
[
5790,
5811
]
],
[
[
902,
914
],
[
59246,
59258
],
[
59269,
59281
],
[
78246,
78258
]
],
[
[
922,
934
],
[
92177,
92189
]
],
[
[
955,
967
],
[
4961,
4966
],
[
46883,
46888
],
[
97802,
97807
]
],
[
[
992,
1006
],
[
47497,
47498
],
[
54680,
54681
],
[
54806,
54807
],
[
54881,
54882
],
[
54961,
54962
],
[
55041,
55042
],
[
59989,
59990
],
[
59996,
59997
],
[
60031,
60032
],
[
60190,
60191
],
[
60197,
60198
],
[
60232,
60233
],
[
78208,
78209
]
],
[
[
1008,
1014
],
[
47381,
47387
],
[
71100,
71106
],
[
71183,
71189
],
[
71267,
71273
],
[
71352,
71358
]
],
[
[
1046,
1057
],
[
7062,
7073
]
],
[
[
1059,
1067
],
[
59584,
59592
]
],
[
[
1069,
1080
],
[
59548,
59559
]
],
[
[
1082,
1092
],
[
59561,
59571
]
],
[
[
1094,
1103
],
[
59573,
59582
]
],
[
[
1136,
1147
],
[
59594,
59605
]
],
[
[
1149,
1157
],
[
59607,
59615
]
],
[
[
1159,
1168
],
[
59642,
59651
]
],
[
[
1170,
1181
],
[
5472,
5483
]
],
[
[
1183,
1193
],
[
5460,
5470
],
[
96221,
96231
]
],
[
[
1226,
1239
],
[
43759,
43772
]
],
[
[
1271,
1288
],
[
93301,
93318
]
],
[
[
1313,
1325
],
[
60431,
60433
]
],
[
[
1423,
1438
],
[
5932,
5947
]
],
[
[
1440,
1478
],
[
12782,
12820
],
[
17259,
17297
],
[
21304,
21342
],
[
25273,
25311
],
[
30284,
30322
],
[
50854,
50892
]
],
[
[
1517,
1523
],
[
2223,
2229
],
[
65046,
65052
]
],
[
[
1525,
1542
],
[
94303,
94320
],
[
94405,
94422
],
[
94449,
94466
],
[
94935,
94952
],
[
95025,
95042
],
[
95082,
95099
]
],
[
[
1582,
1590
],
[
5330,
5338
],
[
6170,
6178
]
],
[
[
1635,
1662
],
[
96741,
96768
],
[
96815,
96842
]
],
[
[
1696,
1700
],
[
9232,
9236
]
],
[
[
1739,
1765
],
[
5490,
5516
]
],
[
[
1996,
2008
],
[
94488,
94500
],
[
94818,
94830
]
],
[
[
2087,
2104
],
[
95121,
95138
],
[
95569,
95586
]
],
[
[
2213,
2222
],
[
42677,
42686
],
[
42746,
42755
],
[
47739,
47748
],
[
51159,
51168
],
[
55409,
55418
],
[
58000,
58009
],
[
61730,
61739
],
[
67175,
67184
],
[
71537,
71546
],
[
78503,
78512
],
[
87153,
87162
],
[
90788,
90797
],
[
92913,
92922
],
[
93735,
93744
],
[
94158,
94167
],
[
97317,
97326
]
],
[
[
97635,
97654
]
]
] |
import argparse, subprocess, os, re
from jinja2 import Environment, FileSystemLoader
def GetBaseName(full_path):
return os.path.basename(full_path)
class PlantUMLCodeGeneration():
class StateType():
def __init__(self):
self.entry = None
self.during = None
self.exit = None
self.transitions = []
self.submachine = []
def StringMe(self):
return 'Entry: {} During: {} Exit: {} Transitions : {} Submachines: {}'.format(
str(self.entry),
str(self.during),
str(self.exit),
[transition.StringMe() for transition in self.transitions],
[submachine.StringMe() for submachine in self.submachine]
)
class TransitionType():
def __init__(self):
self.destination = None
self.conditions = None
self.actions = None
def StringMe(self):
return 'Destination: {} Condition: {} Action: {}'.format(
str(self.destination),
str(self.conditions),
str(self.actions)
)
class StateMachineType():
def __init__(self):
self.title = None
self.states = {}
self.notes = []
def StringMe(self):
return 'Title: {}\nStates: \n\t{}\nNotes: {}\n'.format(
str(self.title),
'\n\t'.join([state + ' ' + self.states[state].StringMe() for state in self.states]),
str(self.notes)
)
def __init__(self, plantuml_file):
if os.path.isfile(plantuml_file):
self.plantuml_file = plantuml_file
else:
raise Exception('File {} does not exist.'.format(plantuml_file))
def CheckUml(self):
if subprocess.call(['plantuml', '-checkonly', self.plantuml_file]) == 0:
return True
else:
return False
def GenerateCode(self, output_files, templates, no_check = False):
if (no_check == False):
if self.CheckUml() == False:
raise Exception('File {} contains UML errors.'.format(self.plantuml_file))
uml, uml_params = self.ParseStateMachine()
if len(output_files) == len(templates):
for out_file, template in zip(output_files, templates):
self.GenerateFromTemplate(out_file, template, uml, uml_params)
else:
raise Exception('Number of template and output files don\'t match.')
def ParseStateMachine(self):
uml = self.GetUMLText()
uml_params = self.ParseStateMachineAsDict(uml_text = self.GetUMLText(grouped=True))[0]
return uml, uml_params
def GetUMLText(self, grouped = False):
with open(self.plantuml_file, 'r') as plantuml_file:
uml = plantuml_file.readlines()
if grouped == False:
return uml
else:
#Group all strings containing \ at the end
uml_grouped = []
accumulated_string = ''
for line in uml:
#First strip the line to forget about leading and trailing
#spaces
line = line.strip()
#Remove aliases
line = re.sub('state\s+\".*\"\s+as','state', line)
#Accumulate all lines that end with \
if line.endswith('\\'):
accumulated_string += line[:-1]
else:
if accumulated_string == '':
uml_grouped.append(line)
else:
uml_grouped.append(accumulated_string + line)
accumulated_string = ''
return uml_grouped
def ParseStateMachineAsDict(self, uml_text, init_line = 0, submachine = False):
uml_params = self.StateMachineType()
line_num = init_line
opening_braces = 0
closing_braces = 0
while line_num < len(uml_text):
line = uml_text[line_num]
if submachine:
                # TODO: refactor this
opening_braces += line.count('{')
closing_braces += line.count('}')
if closing_braces > opening_braces:
break
# Regex magic yay!
matchtransition = re.match('(\[\*\]|\w+)(?:|\s+)-->(?:|\s+)(\w+)(?:(?:|\s+)\:(.*))?',line)
matchstateaction = re.match('(?:state\s+)?(\w+)(?:|\s+)(?:(?:|\s+)\:(.*))?',line)
matchsubmachine = re.match('(?:state\s+)?(\w+)(?:|\s+)\{.*$',line)
if line.startswith('title'):
uml_params.title = line
elif line.startswith('note'):
note_match = re.match('.*\"(.*)\"', line)
if note_match:
uml_params.notes.append(self.__LineCleanup(note_match.group(1)))
elif matchtransition:
self.__AddTransition(uml_params, matchtransition)
elif matchsubmachine:
                #TODO: handle this in a more elegant way, not depending
                # on the order of the ifs
state_name = matchstateaction.group(1)
if uml_params.states.get(state_name) == None:
uml_params.states[state_name] = self.StateType()
sub_info = self.ParseStateMachineAsDict(uml_text, init_line = line_num + 1, submachine = True)
#Set state name as title
sub_info[0].title = state_name + '_submachine'
uml_params.states[state_name].submachine.append(sub_info[0])
line_num = sub_info[1]
elif matchstateaction:
self.__AddStateActions(uml_params, matchstateaction)
line_num += 1
return uml_params, line_num
def __LineCleanup(self, line_string):
cleaned_string = re.sub(r'(?<!\\)\\n','\n',line_string)
cleaned_string = cleaned_string.replace('\\\\','\\').strip()
return cleaned_string
def __AddTransition(self, uml_params, matchtransition):
transition = self.TransitionType()
state_origin = matchtransition.group(1)
transition.destination = matchtransition.group(2)
text = matchtransition.group(3)
if text is not None:
text = text.split('\\ndo:\\n')
conditions = text[0]
transition.conditions = self.__LineCleanup(conditions)
if len(text) > 1:
actions = text[1] if text else None
transition.actions = self.__LineCleanup(actions)
#transition.actions = matchtransition.group(4)
        #Check if state exists, if not, create it
if uml_params.states.get(state_origin) == None:
uml_params.states[state_origin] = self.StateType()
uml_params.states[state_origin].transitions.append(transition)
#Also, create destination state if it does not exist
if uml_params.states.get(transition.destination) == None:
uml_params.states[transition.destination] = self.StateType()
def __AddStateActions(self, uml_params, matchstateaction):
state_name = matchstateaction.group(1)
actions = matchstateaction.group(2)
if uml_params.states.get(state_name) == None:
uml_params.states[state_name] = self.StateType()
#Get entry, exit and during
if actions:
#Do a regex split
action_matches = re.split(r'(entry\:|during\:|exit\:)', actions)
#Replace \n by real \n and trim
action_matches = [self.__LineCleanup(line) for line in action_matches]
            #The list will start with an empty string (or spaces) if the text begins with
            #one of the keywords. If it starts with plain text instead, that text is a during
if action_matches[0].strip() != '':
uml_params.states[state_name].during = action_matches[0]
line_num = 1
while line_num < len(action_matches):
if action_matches[line_num] == 'entry:':
uml_params.states[state_name].entry = action_matches[line_num + 1]
line_num += 1
elif action_matches[line_num] == 'during:':
uml_params.states[state_name].during = action_matches[line_num + 1]
line_num += 1
elif action_matches[line_num] == 'exit:':
uml_params.states[state_name].exit = action_matches[line_num + 1]
line_num += 1
else:
raise Exception('Action {} not recognized.'.format(action_matches[line_num]))
line_num += 1
def GenerateFromTemplate(self, output_file, template_file, uml, uml_params):
env = Environment(
loader=FileSystemLoader(os.path.dirname(template_file))
)
template = env.get_template(os.path.basename(template_file))
with open(output_file, 'w') as out_file:
out_file.write(template.render(file_name=output_file, uml=uml,
uml_params=uml_params, get_submachines=self.GetSubmachineObjects,
get_basename=GetBaseName))
def GetSubmachineObjects(self, uml_object):
uml_submachines_list = []
for state in uml_object.states:
if len(uml_object.states[state].submachine) > 0:
for uml_submachine in uml_object.states[state].submachine:
#Set title of submachine as the name of state parent
uml_submachines_list.append(uml_submachine)
#Recursion to get more levels
uml_submachines_list += self.GetSubmachineObjects(uml_submachine)
return uml_submachines_list
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Process PlantUML file to generate code')
parser.add_argument('--input','-i', required = True, dest = 'plantuml_file',
help ='Plant UML file from which to generate code')
parser.add_argument('--output','-o', required = True, dest = 'output_files',
                        help ='Code files generated. Separate by spaces in case of '
                        'more than one template', nargs='+')
parser.add_argument('--templates', '-t', dest = 'templates', default = '[templates/C_code.c,templates/C_code.h]',
help = 'Templates to be used separated by spaces', nargs='+')
parser.add_argument('--no-check', action = 'store_true',
                        help = 'This option is strongly discouraged. With this option '
                        'you are choosing not to check that your PlantUML is valid.')
args = parser.parse_args()
plantuml_obj = PlantUMLCodeGeneration(args.plantuml_file)
#Transform templates to list
plantuml_obj.GenerateCode(args.output_files, args.templates)
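# --- A small, hedged demo of the transition regex used in ParseStateMachineAsDict above,
# applied to a single hand-written PlantUML line so no .puml file or plantuml binary is
# needed. It reuses the `re` import at the top of this file; the sample line is
# illustrative only.
def _demo_transition_regex():
    sample = 'Idle --> Running : start_button_pressed'
    match = re.match('(\[\*\]|\w+)(?:|\s+)-->(?:|\s+)(\w+)(?:(?:|\s+)\:(.*))?', sample)
    # group(1) is the origin state, group(2) the destination, and group(3) the raw
    # condition/action text that __AddTransition later splits on '\\ndo:\\n'.
    print(match.group(1), '->', match.group(2), '|', match.group(3).strip())
# _demo_transition_regex() would print: Idle -> Running | start_button_pressed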
| [
[
[
7,
15
],
[
9989,
9997
]
],
[
[
17,
27
],
[
1875,
1885
]
],
[
[
29,
31
],
[
125,
127
],
[
1670,
1672
],
[
9024,
9026
],
[
9103,
9105
]
],
[
[
33,
35
],
[
3382,
3384
],
[
4511,
4513
],
[
4615,
4617
],
[
4708,
4710
],
[
4910,
4912
],
[
6060,
6062
],
[
7647,
7649
]
],
[
[
55,
66
],
[
8975,
8986
]
],
[
[
68,
84
],
[
9007,
9023
]
],
[
[
90,
101
],
[
9366,
9377
]
],
[
[
160,
182
],
[
10939,
10961
]
],
[
[
9980,
9986
],
[
10071,
10077
],
[
10228,
10234
],
[
10453,
10459
],
[
10657,
10663
],
[
10899,
10905
]
],
[
[
10892,
10896
],
[
10962,
10966
],
[
11045,
11049
],
[
11064,
11068
]
],
[
[
10924,
10936
],
[
11019,
11031
]
]
] |
"""
====================
Build image pyramids
====================
The ``pyramid_gaussian`` function takes an image and yields successive images
shrunk by a constant scale factor. Image pyramids are often used, e.g., to
implement algorithms for denoising, texture discrimination, and scale-
invariant detection.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage import data
from skimage.transform import pyramid_gaussian
image = data.astronaut()
rows, cols, dim = image.shape
pyramid = tuple(pyramid_gaussian(image, downscale=2))
composite_image = np.zeros((rows, cols + cols // 2, 3), dtype=np.double)
composite_image[:rows, :cols, :] = pyramid[0]
i_row = 0
for p in pyramid[1:]:
n_rows, n_cols = p.shape[:2]
composite_image[i_row:i_row + n_rows, cols:cols + n_cols] = p
i_row += n_rows
fig, ax = plt.subplots()
ax.imshow(composite_image)
plt.show()
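# A short, optional check (not part of the original gallery example): each pyramid level
# should be roughly half the size of the previous one, ending at a single pixel.
for layer, level in enumerate(pyramid):
    print(layer, level.shape)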
| [
[
[
325,
336
],
[
572,
574
],
[
615,
617
]
],
[
[
344,
368
],
[
836,
839
],
[
878,
881
]
],
[
[
390,
394
],
[
452,
456
]
],
[
[
425,
441
],
[
515,
531
]
],
[
[
444,
449
],
[
487,
492
],
[
532,
537
]
],
[
[
469,
473
],
[
582,
586
],
[
644,
648
]
],
[
[
475,
479
],
[
588,
592
],
[
595,
599
],
[
651,
655
],
[
781,
785
],
[
786,
790
]
],
[
[
481,
484
]
],
[
[
499,
506
],
[
662,
669
],
[
693,
700
]
],
[
[
554,
569
],
[
627,
642
],
[
743,
758
],
[
861,
876
]
],
[
[
674,
679
],
[
759,
764
],
[
765,
770
],
[
809,
814
]
],
[
[
688,
689
],
[
727,
728
],
[
803,
804
]
],
[
[
710,
716
],
[
773,
779
],
[
818,
824
]
],
[
[
718,
724
],
[
793,
799
]
],
[
[
826,
829
]
],
[
[
831,
833
],
[
851,
853
]
]
] |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import unittest
from blank_interaction.services import BlankServiceInterface
from interaction.clients import Calculator
from interaction.types import Point
from thrift.py3.client import get_client, ClientType
from .run_interaction import run_interaction
class InteractionTest(unittest.TestCase):
def setUp(self) -> None:
self.interaction = run_interaction()
def init_client(self) -> Calculator:
return get_client(
Calculator,
port=self.interaction.getPort(),
host="::1",
client_type=ClientType.THRIFT_ROCKET_CLIENT_TYPE,
)
def tearDown(self) -> None:
self.interaction.reset()
def test_basic(self) -> None:
async def inner_test() -> None:
async with self.init_client() as calc:
self.assertEqual(await calc.addPrimitive(0, 0), 0)
async with calc.createAddition() as add:
self.assertEqual(await add.getPrimitive(), 0)
add.accumulatePrimitive(1)
self.assertEqual(await add.getPrimitive(), 1)
point = await add.getPoint()
self.assertEqual(point.x, 0)
self.assertEqual(point.y, 0)
newPoint = Point(x=2, y=3)
await add.accumulatePoint(newPoint)
point = await add.getPoint()
self.assertEqual(point.x, 2)
self.assertEqual(point.y, 3)
await add.noop()
asyncio.run(inner_test())
def test_multiple_interactions(self) -> None:
async def inner_test() -> None:
async with self.init_client() as calc:
self.assertEqual(await calc.addPrimitive(0, 0), 0)
async with calc.createAddition() as add:
self.assertEqual(await add.getPrimitive(), 0)
add.accumulatePrimitive(1)
self.assertEqual(await add.getPrimitive(), 1)
async with calc.createAddition() as add:
self.assertEqual(await add.getPrimitive(), 0)
add.accumulatePrimitive(2)
self.assertEqual(await add.getPrimitive(), 2)
asyncio.run(inner_test())
def test_multiple_clients(self) -> None:
async def inner_test() -> None:
async with self.init_client() as calc:
self.assertEqual(await calc.addPrimitive(0, 0), 0)
async with calc.createAddition() as add:
self.assertEqual(await add.getPrimitive(), 0)
add.accumulatePrimitive(1)
self.assertEqual(await add.getPrimitive(), 1)
async with self.init_client() as calc:
self.assertEqual(await calc.addPrimitive(0, 1), 1)
async with calc.createAddition() as add:
self.assertEqual(await add.getPrimitive(), 0)
add.accumulatePrimitive(2)
self.assertEqual(await add.getPrimitive(), 2)
asyncio.run(inner_test())
def test_terminate_unused(self) -> None:
async def inner_test() -> None:
async with self.init_client() as calc:
async with calc.createAddition() as _:
pass
asyncio.run(inner_test())
def test_terminate_client_error(self) -> None:
class SpecificError(Exception):
pass
async def inner_test() -> None:
try:
async with self.init_client() as calc:
self.assertEqual(await calc.addPrimitive(0, 0), 0)
async with calc.createAddition() as add:
add.accumulatePrimitive(1)
raise SpecificError("Generic error")
except SpecificError:
pass
else:
self.fail("Didn't throw SpecificError")
asyncio.run(inner_test())
| [
[
[
605,
612
],
[
2164,
2171
],
[
2883,
2890
],
[
3715,
3722
],
[
3967,
3974
],
[
4597,
4604
]
],
[
[
620,
628
],
[
893,
901
]
],
[
[
669,
690
]
],
[
[
723,
733
],
[
1017,
1027
],
[
1068,
1078
]
],
[
[
764,
769
],
[
1897,
1902
]
],
[
[
800,
810
],
[
1044,
1054
]
],
[
[
812,
822
],
[
1173,
1183
]
],
[
[
853,
868
],
[
969,
984
]
],
[
[
877,
892
]
]
] |
from flask_socketio import SocketIO
from flask import Flask, make_response, request, session
from flask import render_template, session, url_for, redirect
from threading import RLock
from threading import Thread
from utilslib import list_to_HTML_table
from time import sleep
from ClientStorage import Clients, User
from gameObjects import Game, GameContainter, Player, ChatMmsg
from random import shuffle
#Init server
app = Flask(__name__, template_folder='templates', static_folder='static')
app.config['SECRET_KEY'] = 'lskwod=91230?=)ASD?=)("")@'
socketio = SocketIO(app, async_mode='threading')
timerLock = RLock()
asyncLock = RLock()
clients = Clients()
games = GameContainter()
debugging = False
@app.route('/', methods = ['POST', 'GET'])
@app.route('/index', methods = ['POST', 'GET'])
def index():
verbose = (False or debugging)
error = request.args.get('error')
return make_response(render_template('makeGame.html', title = "Welcome", cool = 123, error = error))
@app.route('/gameRoom', methods = ['POST', 'GET'])
def gameRoom():
global games
verbose = (False or debugging)
argumentsMakeGame = ['name', 'gameName', 'nrOfRounds', 'time', 'newGame']
argumentsJoinGame = ['name', 'gameName', 'newGame']
uniqueID = request.cookies.get('uniqueID')
user = clients.find_User_By_uniqueID(uniqueID)
if (not user):
return redirect(url_for('index') + '?error=No user. Refreshing')
if (not user.gameObject):
data = request.form
#MAKE A NEW GAME
if data['newGame'] == 'yes':
if verbose: print('In server:gameRoom() nrOfRounds set!')
for key in data.keys():
argumentsMakeGame.remove(key)
if argumentsMakeGame:
return redirect(url_for('index') + '?error=Not enough arguments when creating the game')
if verbose: print('In server:gameRoom() arguments needed for making a game are present')
#Creating player and game
game = games.add_Game(gameName=data['gameName'], nrOfRounds=data['nrOfRounds'], timePerRound=data['time'])
player = game.add_Player(name=data['name'], userObject=user)
if (not player):
return redirect(url_for('index') + '?error=Player name already exists in this game...')
if verbose: print('In server:gameRoom() game created with the name {} and user/player added'.format(game.gameName))
#Join an existing game
else:
data = request.form
if verbose: print('In server:gameRoom() joining a game!')
for key in data.keys():
argumentsJoinGame.remove(key)
if argumentsJoinGame:
return redirect(url_for('index') + '?error=Not enough arguments when joining the game')
if verbose: print('In server:gameRoom() Searching for game: {}'.format(data['gameName']))
#Check if game exists
game = games.find_Game_By_Name(data['gameName'], verbose)
if (not game):
if verbose: print('The game was not found')
return redirect(url_for('index') + '?error=Game not found')
#Check if name already taken
for player in game.players:
if player.name == data['name']:
return redirect(url_for('index') + '?error=Name already taken')
player = game.add_Player(name=data['name'], userObject=user)
if verbose: print('In server:gameRoom() Player joined game')
if verbose: print('In server:gameRoom() game created and user/player added')
sendMessageToGame(game, '{} joined the game'.format(data['name']))
emitToGame(game = game, arg = ('refresh_Player_List',{}), lock = timerLock)
else:
        if verbose: print('User already in game')
error = None
return make_response(render_template('gameRoom.html', title = "Game Room", gameName = user.gameObject.gameName, error = error))
@app.route('/gameRoomContent')
def gameRoomContent():
uniqueID = request.cookies.get('uniqueID')
user = clients.find_User_By_uniqueID(uniqueID)
if userNotComplete(user, verbose = (False or debugging)):
return 'ERROR: Something strange happened. Please leave game and rejoin'
game = user.gameObject
nrOfRounds = game.nrOfRounds
timePerRound = game.timePerRound
gameName = game.gameName
roundNr = game.currentRound
if (user.gameObject.get_Stage() == 'lobby'):
return render_template('lobbyContent.html',
gameName = gameName,
nrOfRounds = nrOfRounds,
timePerRound = timePerRound)
elif (user.gameObject.get_Stage() == 'roundStart'):
return render_template('roundContentStart.html',
timePerRound = timePerRound,
roundNr = roundNr,
nrOfRounds = nrOfRounds)
elif (user.gameObject.get_Stage() == 'roundSupply'):
game.spawnedThread = None
game.reset_Players_Ready()
emitToGame(game = game, arg = ('refresh_Player_List',{}), lock = timerLock)
print('GameContent:')
print(game.get_Search_Strings(user.playerObject))
return render_template('roundContentSupply.html',
nrOfPlayers = game.get_Nr_Of_Players(),
searchStrings = game.get_Search_Strings(user.playerObject),
nrOfEntries = game.nrOfEntry)
elif (user.gameObject.get_Stage() == 'roundVote'):
game.reset_Players_Ready()
return makeVoteContent(user)
elif (user.gameObject.get_Stage() == 'roundEnd'):
game.reset_Players_Ready()
return makeRoundEnd(user)
elif (user.gameObject.get_Stage() == 'gameSummary'):
game.reset_Players_Ready()
return render_template('gameContentSummary.html')
def makeVoteContent(user):
game = user.gameObject
playerObject = user.playerObject
notReady = False
voteEntries = game.get_Vote_Entries(playerObject)
return render_template('roundContentVote.html',
notReady = notReady,
voteEntries = voteEntries)
def makeRoundEnd(user):
game = user.gameObject
playerObject = user.playerObject
playersPoints = {}
for player in game.players:
playersPoints[player.name] = player.points
searchStrings = {}
for entry in game.entries:
searchStrings[entry.searchString] = {}
return render_template('roundContentEnd.html', playersPoints = playersPoints)
@app.route('/playerList')
def playerList():
uniqueID = request.cookies.get('uniqueID')
user = clients.find_User_By_uniqueID(uniqueID)
verbose = (False or debugging)
if userNotComplete(user, verbose = (False or debugging)):
return redirect(url_for('index') + '?error=User not in game')
playerList = user.gameObject.get_Player_Names_And_Status()
if verbose: print('Got {} players'.format(len(playerList)))
return render_template('playerList.html', playerList = playerList)
@app.route('/chatContent')
def chatContent():
uniqueID = request.cookies.get('uniqueID')
user = clients.find_User_By_uniqueID(uniqueID)
if userNotComplete(user, verbose = (False or debugging)):
return redirect(url_for('index') + '?error=User not in game')
chat = user.gameObject.chatMessages
msgs = []
players = []
for msg in chat:
player, msg = msg.get_Player_And_Msg()
msgs.append(str(msg))
players.append(str(player))
if players:
players.reverse()
msgs.reverse()
return render_template('chat.html', players = players, chatMsg = msgs)
@app.route('/leave_Game')
def leaveGame():
verbose = (False or debugging)
uniqueID = request.cookies.get('uniqueID')
user = clients.find_User_By_uniqueID(uniqueID)
if (not user):
if verbose: print('No user')
return redirect(url_for('index'))
game = user.gameObject
game.remove_Player_By_User_Object(user)
name = user.playerObject.name
user.resetUser()
if len(game.players)<1:
games.removeGame(game=game, verbose = verbose)
else:
emitToGame(game = game, arg = ('refresh_Player_List',{}), lock = timerLock)
emitToGame(game = game, arg = ('client_warning',{'msg': name+' left the game'}), lock = timerLock)
print (len(games._games))
return redirect(url_for('index'))
@socketio.on('submit_entry')
def submitEntry(msg):
verbose = (False or debugging)
    if verbose: print ('Entry received by the server')
uniqueID = request.cookies.get('uniqueID')
user = clients.find_User_By_uniqueID(uniqueID)
if verbose: print ('User retrieved')
if (not user):
if verbose: print('No user found when collecting the data')
return
if user.playerObject.entry:
if verbose: print('User already submitted.')
return
if verbose: print ('Setting entry for user')
user.gameObject.add_Entry(msg['searchString'], msg['suggestion'], user.playerObject)
if verbose: print('Got entry')
if user.gameObject.nrOfEntry >= user.gameObject.get_Nr_Of_Players():
emitToGame(game = user.gameObject, arg = ('refresh_div_content',{'div': 'entryList', 'cont': '/gameRoomContent'}), lock = timerLock)
@socketio.on('submit_supply')
def submitSupply(data):
verbose = (False or debugging)
    if verbose: print ('\n---------------------\nSupply received by the server')
uniqueID = request.cookies.get('uniqueID')
user = clients.find_User_By_uniqueID(uniqueID)
if (not user):
if verbose: print('No user found when collecting the data')
return
game = user.gameObject
if verbose: print ('The data received is: {}'.format(data))
if verbose: print ('player {} found'.format(user.playerObject.name))
if (not data):
return
if verbose: print('')
if verbose: print('The actual data:')
for key, value in data.items():
if verbose: print('Key: {} \t Value: {}'.format(key, value))
if value == '':
continue
game.entries[int(key)].add_Autocomplete(value, user.playerObject)
if verbose: print('')
game.nrOfSupply += 1
    if verbose: print ('The game has received {} supplies\n---------------------\n'.format(game.nrOfSupply))
#All "supplies" are received
if user.gameObject.nrOfSupply >= user.gameObject.get_Nr_Of_Players():
if verbose: print ('We should now refresh the div content')
emitToGame(game = user.gameObject, arg = ('refresh_div_content', {'div': 'contentVote', 'cont': '/gameRoomContent'}), lock = timerLock)
#emitToGame(game = user.gameObject, arg = ('refresh_div_content',{'div': 'entryList', 'cont': '/gameRoomContent'}), lock = timerLock)
if verbose and False:
print('')
for entry in game.entries:
print('-------------------------------------------')
            print('The entry with the search string: \t {}\nHas the following autocompletes added:'.format(entry.searchString))
for supply in entry.otherAutocompletes:
print (supply.autoComplete)
print('-------------------------------------------')
print('')
@socketio.on('submit_favorite')
def submitFavorite(favorite):
print('The server received a favorite: {}'.format(favorite))
uniqueID = request.cookies.get('uniqueID')
user = clients.find_User_By_uniqueID(uniqueID)
game = user.gameObject
autoComplete = game.get_Autocomlete_by_ID(favorite)
if (not autoComplete):
user.playerObject.points -= 1
return
user.playerObject.autocompleteVotedFor = autoComplete
if (autoComplete.isGoogle):
user.playerObject.points += 1
return
autoComplete.playerObject.points += 1
return
@socketio.on('toggle_ready')
def toggleReady(msg):
verbose = (True or debugging)
uniqueID = request.cookies.get('uniqueID')
user = clients.find_User_By_uniqueID(uniqueID)
if (not user):
if verbose: print('No user found when toggling ready')
return
player = user.playerObject
    if (not player):
        if verbose: print('No player found for the user/client.')
        return
    player.ready = not player.ready
game = player.gameObject
#A game object will always exist if there is a playerObject
emitToGame(game = game, arg = ('refresh_Player_List',{}), lock = timerLock)
playersReady = game.all_Players_Ready()
if verbose: print ('STAGE:', game.get_Stage())
#Start round
if playersReady and game.gameStarted == False and not game.spawnedThread:
game.gameStarted = True
game.reset_Players_Ready()
emitToGame(game = game, arg = ('change_content', {'url':'/gameRoomContent'}), lock = timerLock)
emitToGame(game = game, arg = ('client_message', {'msg':'Game started. Have fun!'}), lock = timerLock)
#Start timer
game.spawnedThread = RoundTimer(int(game.timePerRound), user)
game.spawnedThread.start()
return
#End round
if playersReady and game.get_Stage() == 'roundStart':
if verbose: print ('Round ended by users')
user.gameObject.end_Stage()
game.reset_Players_Ready()
if verbose: print('Current stage of game is: {}'.format(user.gameObject.get_Stage()))
emitToGame(game = user.gameObject, arg = ('round_End', {}), lock = timerLock)
emitToGame(game = user.gameObject, arg = ('client_message', {'msg':'Round ended'}), lock = timerLock)
return
#End supply
if playersReady and game.get_Stage() == 'roundSupply':
user.gameObject.end_Stage()
game.reset_Players_Ready()
emitToGame(game = user.gameObject, arg = ('supply_End', {'nrOfEntries': user.gameObject.nrOfEntry}), lock = timerLock)
emitToGame(game = user.gameObject, arg = ('client_message', {'msg':'Round ended'}), lock = timerLock)
return
#End vote
if playersReady and game.get_Stage() == 'roundVote':
user.gameObject.end_Stage()
game.reset_Players_Ready()
emitToGame(game = user.gameObject, arg = ('vote_End', {}), lock = timerLock)
emitToGame(game = user.gameObject, arg = ('client_message', {'msg':'Vote ended'}), lock = timerLock)
return
class RoundTimer(Thread):
def __init__(self, timeToWait, user):
Thread.__init__(self)
self.timeToWait = timeToWait
self.user = user
def run(self):
sleep(self.timeToWait)
if (not self.user.gameObject) or (self.user.gameObject.roundEnded):
return
self.user.gameObject.end_Stage()
emitToGame(game = self.user.gameObject, arg = ('round_End', {'url':'/gameRoomContent'}), lock = timerLock)
emitToGame(game = self.user.gameObject, arg = ('client_message', {'msg':'Round ended'}), lock = timerLock)
return
@socketio.on('handle_chat')
def handleChat(msg):
#update_chat
verbose = (False or debugging)
uniqueID = request.cookies.get('uniqueID')
user = clients.find_User_By_uniqueID(uniqueID)
if (not user):
if verbose: print('No user')
return redirect(url_for('index'))
game = user.gameObject
if (not game):
if verbose: print('No game found when handling chat')
return
game.add_Chat_Msg(chatMsg=msg, playerName=user.playerObject.name)
emitToGame(game=game, arg=('update_chat',{}), lock=timerLock)
@socketio.on('connected')
def client_connect():
verbose = (False or debugging)
'''
I need to identify the user. If the user reloads, the session ID will change.
    A unique user-key is provided for each new user, and the session ID is updated
when the user reconnects. The unique ID is stored in a cookie.
'''
if verbose: print('Someone connected with the IP: {}'.format(request.remote_addr))
uniqueID = request.cookies.get('uniqueID')
if verbose: print('\nUnique ID before update: {}'.format(uniqueID))
if uniqueID:
if verbose: print('Unique ID cookie found')
user = clients.find_User_By_uniqueID(uniqueID)
if user:
if verbose: print('User found')
if request.sid != user.sid:
user.sid = request.sid
if verbose: print('Updated the SID')
else:
user = clients.add_User(sid=request.sid)
if verbose: print('User created')
user.uniqueID = uniqueID
if verbose: print('Unique ID updated')
else:
if verbose: print('Made a new user')
user = clients.add_User(sid=request.sid)
if verbose: print('Emitted to server: set_cookie')
emit(arg=('set_cookie', {'name': 'uniqueID' , 'data': user.uniqueID}), uniqueID = None, lock = timerLock, user= user)
def sendMessageToGame(game, msg):
for player in game.players:
emit(arg = ('client_message', {'msg': msg}), uniqueID = None, lock = timerLock, user= player.userObject)
def emitToGame(arg, game, lock):
for player in game.players:
emit(arg = arg, uniqueID = None, lock = lock, user = player.userObject)
def emit(arg, uniqueID, lock, user = None):
'''
An emit method that requires a lock. Dunno if I need this...
TODO: Find out if i need the lock.
'''
verbose = (False or debugging)
with lock:
if verbose: print ('Did an emit')
if (not user):
userSID = clients.find_User_By_uniqueID(uniqueID).sid
else:
userSID = user.sid
socketio.emit(*arg, room = userSID)
def userNotComplete(user, verbose = (False or debugging)):
if verbose:
print('\nUser name: {}'.format(user.name))
print('User gameObject pointer {}'.format(user.gameObject))
print('User playerObject pointer {}\n'.format(user.playerObject))
if ((not user) or (not user.gameObject) or (not user.playerObject)):
return True
else:
return False
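#A minimal, hedged sketch (separate from the game logic above) of the cookie round trip
#that client_connect() relies on: a plain Flask route can also hand out the 'uniqueID'
#cookie instead of emitting a 'set_cookie' event. The '/whoami' route and the uuid4-based
#id are illustrative assumptions, not part of the original server.
from uuid import uuid4
@app.route('/whoami')
def whoami():
    uniqueID = request.cookies.get('uniqueID') or str(uuid4())
    resp = make_response('Your unique ID is {}'.format(uniqueID))
    resp.set_cookie('uniqueID', uniqueID)
    return resp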
if __name__ == "__main__":
socketio.run(app, debug = False)
| [
[
[
27,
35
],
[
567,
575
]
],
[
[
54,
59
],
[
431,
436
]
],
[
[
61,
74
],
[
900,
913
],
[
3889,
3902
]
],
[
[
76,
83
],
[
863,
870
],
[
1264,
1271
],
[
1486,
1493
],
[
2509,
2516
],
[
4082,
4089
],
[
6806,
6813
],
[
7318,
7325
],
[
7976,
7983
],
[
8795,
8802
],
[
9700,
9707
],
[
11602,
11609
],
[
12153,
12160
],
[
15252,
15259
],
[
16094,
16101
],
[
16131,
16138
],
[
16436,
16443
],
[
16488,
16495
],
[
16607,
16614
],
[
16845,
16852
]
],
[
[
85,
92
]
],
[
[
111,
126
],
[
914,
929
],
[
3903,
3918
],
[
4534,
4549
],
[
4814,
4829
],
[
5340,
5355
],
[
5993,
6008
],
[
6215,
6230
],
[
6675,
6690
],
[
7195,
7210
],
[
7816,
7831
]
],
[
[
128,
135
]
],
[
[
137,
144
],
[
1391,
1398
],
[
1779,
1786
],
[
2244,
2251
],
[
2740,
2747
],
[
3138,
3145
],
[
3347,
3354
],
[
7010,
7017
],
[
7487,
7494
],
[
8139,
8146
],
[
8619,
8626
],
[
15415,
15422
]
],
[
[
146,
154
],
[
1382,
1390
],
[
1770,
1778
],
[
2235,
2243
],
[
2731,
2739
],
[
3129,
3137
],
[
3338,
3346
],
[
7001,
7009
],
[
7478,
7486
],
[
8130,
8138
],
[
8610,
8618
],
[
15406,
15414
]
],
[
[
178,
183
],
[
618,
623
],
[
638,
643
]
],
[
[
206,
212
],
[
14553,
14559
],
[
14614,
14620
]
],
[
[
235,
253
]
],
[
[
272,
277
],
[
14729,
14734
]
],
[
[
304,
311
],
[
656,
663
]
],
[
[
313,
317
]
],
[
[
342,
346
]
],
[
[
348,
362
],
[
675,
689
]
],
[
[
364,
370
]
],
[
[
372,
380
]
],
[
[
401,
408
]
],
[
[
425,
428
],
[
500,
503
],
[
576,
579
],
[
713,
716
],
[
756,
759
],
[
996,
999
],
[
4014,
4017
],
[
6748,
6751
],
[
7258,
7261
],
[
7883,
7886
],
[
18244,
18247
]
],
[
[
556,
564
],
[
8639,
8647
],
[
9516,
9524
],
[
11461,
11469
],
[
12054,
12062
],
[
15137,
15145
],
[
15696,
15704
],
[
18231,
18239
],
[
17769,
17777
]
],
[
[
606,
615
],
[
3787,
3796
],
[
5226,
5235
],
[
8450,
8459
],
[
8557,
8566
],
[
9503,
9512
],
[
10855,
10864
],
[
12654,
12663
],
[
13017,
13026
],
[
13128,
13137
],
[
13646,
13655
],
[
13756,
13765
],
[
14045,
14054
],
[
14155,
14164
],
[
14399,
14408
],
[
14508,
14517
],
[
14993,
15002
],
[
15108,
15117
],
[
15683,
15692
],
[
17020,
17029
],
[
17187,
17196
]
],
[
[
626,
635
]
],
[
[
646,
653
],
[
1307,
1314
],
[
4125,
4132
],
[
6849,
6856
],
[
7361,
7368
],
[
8019,
8026
],
[
8838,
8845
],
[
9743,
9750
],
[
11645,
11652
],
[
12196,
12203
],
[
15295,
15302
],
[
16320,
16327
],
[
16586,
16593
],
[
16824,
16831
],
[
17672,
17679
]
],
[
[
667,
672
],
[
2010,
2015
],
[
2968,
2973
],
[
8320,
8325
],
[
8583,
8588
]
],
[
[
693,
702
],
[
17852,
17861
],
[
840,
849
],
[
1103,
1112
],
[
4215,
4224
],
[
6913,
6922
],
[
6973,
6982
],
[
7450,
7459
],
[
7949,
7958
],
[
8713,
8722
],
[
9593,
9602
],
[
12127,
12136
],
[
15226,
15235
],
[
15767,
15776
],
[
17558,
17567
]
],
[
[
807,
812
]
],
[
[
1050,
1058
]
],
[
[
4048,
4063
]
],
[
[
6041,
6056
],
[
5723,
5738
]
],
[
[
6366,
6378
],
[
5858,
5870
]
],
[
[
6777,
6787
]
],
[
[
7288,
7299
]
],
[
[
7912,
7921
]
],
[
[
8671,
8682
]
],
[
[
9549,
9561
]
],
[
[
11496,
11510
]
],
[
[
12086,
12097
]
],
[
[
14542,
14552
],
[
13190,
13200
]
],
[
[
15168,
15178
]
],
[
[
15725,
15739
]
],
[
[
17048,
17065
],
[
3643,
3660
]
],
[
[
17228,
17238
],
[
3722,
3732
],
[
5161,
5171
],
[
8385,
8395
],
[
8469,
8479
],
[
9381,
9391
],
[
10730,
10740
],
[
12589,
12599
],
[
12932,
12942
],
[
13036,
13046
],
[
13579,
13589
],
[
13665,
13675
],
[
13937,
13947
],
[
14064,
14074
],
[
14333,
14343
],
[
14418,
14428
],
[
14897,
14907
],
[
15012,
15022
],
[
15632,
15642
]
],
[
[
17374,
17378
],
[
16925,
16929
],
[
17118,
17122
],
[
17297,
17301
]
],
[
[
17810,
17825
],
[
4173,
4188
],
[
6931,
6946
],
[
7408,
7423
]
]
] |
from torch.utils.data import Dataset, DataLoader
import glob
import os
import numpy as np
import cv2
import torch
from torchvision import transforms, utils
from skimage.transform import resize
class SegDataset(Dataset):
"""Segmentation Dataset"""
def __init__(self, root_dir, imageFolder, maskFolder, transform=None, seed=None, fraction=None, subset=None, imagecolormode='rgb', maskcolormode='grayscale'):
"""
Args:
root_dir (string): Directory with all the images and should have the following structure.
root
--Images
-----Img 1
-----Img N
--Mask
-----Mask 1
-----Mask N
imageFolder (string) = 'Images' : Name of the folder which contains the Images.
            maskFolder (string) = 'Masks' : Name of the folder which contains the Masks.
transform (callable, optional): Optional transform to be applied on a sample.
seed: Specify a seed for the train and test split
fraction: A float value from 0 to 1 which specifies the validation split fraction
subset: 'Train' or 'Test' to select the appropriate set.
imagecolormode: 'rgb' or 'grayscale'
maskcolormode: 'rgb' or 'grayscale'
"""
self.color_dict = {'rgb': 1, 'grayscale': 0}
assert(imagecolormode in ['rgb', 'grayscale'])
assert(maskcolormode in ['rgb', 'grayscale'])
self.imagecolorflag = self.color_dict[imagecolormode]
self.maskcolorflag = self.color_dict[maskcolormode]
self.root_dir = root_dir
self.transform = transform
if not fraction:
self.image_names = sorted(
glob.glob(os.path.join(self.root_dir, imageFolder, '*')))
self.mask_names = sorted(
glob.glob(os.path.join(self.root_dir, maskFolder, '*')))
else:
assert(subset in ['Train', 'Test'])
self.fraction = fraction
self.image_list = np.array(
sorted(glob.glob(os.path.join(self.root_dir, imageFolder, '*'))))
self.mask_list = np.array(
sorted(glob.glob(os.path.join(self.root_dir, maskFolder, '*'))))
if seed:
np.random.seed(seed)
indices = np.arange(len(self.image_list))
np.random.shuffle(indices)
self.image_list = self.image_list[indices]
self.mask_list = self.mask_list[indices]
if subset == 'Train':
self.image_names = self.image_list[:int(
np.ceil(len(self.image_list)*(1-self.fraction)))]
self.mask_names = self.mask_list[:int(
np.ceil(len(self.mask_list)*(1-self.fraction)))]
else:
self.image_names = self.image_list[int(
np.ceil(len(self.image_list)*(1-self.fraction))):]
self.mask_names = self.mask_list[int(
np.ceil(len(self.mask_list)*(1-self.fraction))):]
def __len__(self):
return len(self.image_names)
def __getitem__(self, idx):
img_name = self.image_names[idx]
if self.imagecolorflag:
image = cv2.imread(
img_name, self.imagecolorflag).transpose(2, 0, 1)
else:
image = cv2.imread(img_name, self.imagecolorflag)
msk_name = self.mask_names[idx]
if self.maskcolorflag:
mask = cv2.imread(msk_name, self.maskcolorflag).transpose(2, 0, 1)
else:
mask = cv2.imread(msk_name, self.maskcolorflag)
sample = {'image': image, 'mask': mask}
if self.transform:
sample = self.transform(sample)
return sample
# Define few transformations for the Segmentation Dataloader
class Resize(object):
"""Resize image and/or masks."""
def __init__(self, imageresize, maskresize):
self.imageresize = imageresize
self.maskresize = maskresize
def __call__(self, sample):
image, mask = sample['image'], sample['mask']
if len(image.shape) == 3:
image = image.transpose(1, 2, 0)
if len(mask.shape) == 3:
mask = mask.transpose(1, 2, 0)
        mask = cv2.resize(mask, self.maskresize, interpolation=cv2.INTER_AREA)
#mask = 256 * resize(mask, (256, 256), anti_aliasing = True)
        image = cv2.resize(image, self.imageresize, interpolation=cv2.INTER_AREA)
#image = 256 * resize(image, (256, 256), anti_aliasing = True)
if len(image.shape) == 3:
image = image.transpose(2, 0, 1)
if len(mask.shape) == 3:
mask = mask.transpose(2, 0, 1)
return {'image': image,
'mask': mask}
class ToTensor(object):
"""Convert ndarrays in sample to Tensors."""
def __call__(self, sample, maskresize=None, imageresize=None):
image, mask = sample['image'], sample['mask']
if len(mask.shape) == 2:
mask = mask.reshape((1,)+mask.shape)
if len(image.shape) == 2:
image = image.reshape((1,)+image.shape)
return {'image': torch.from_numpy(image),
'mask': torch.from_numpy(mask)}
class Normalize(object):
'''Normalize image'''
def __call__(self, sample):
image, mask = sample['image'], sample['mask']
return {'image': image.type(torch.FloatTensor)/255,
'mask': mask.type(torch.FloatTensor)/255}
def get_dataloader_single_folder(data_dir, imageFolder='Images', maskFolder='Masks', fraction=0.2, batch_size=4):
"""
Create training and testing dataloaders from a single folder.
"""
data_transforms = {
'Train': transforms.Compose([Resize((256, 256), (256, 256)), ToTensor(), Normalize()]),
'Test': transforms.Compose([Resize((256,256), (256, 256)), ToTensor(), Normalize()]),
}
image_datasets = {x: SegDataset(data_dir, imageFolder=imageFolder, maskFolder=maskFolder, seed=100, fraction=fraction, subset=x, transform=data_transforms[x])
for x in ['Train', 'Test']}
dataloaders = {x: DataLoader(image_datasets[x], batch_size=batch_size,
shuffle=True, num_workers=8)
for x in ['Train', 'Test']}
return dataloaders
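# A small, hedged smoke test (not part of the original module) for the transform classes
# above, run on synthetic arrays so no image files are needed; sizes are illustrative only.
if __name__ == '__main__':
    demo_image = np.random.randint(0, 255, (3, 64, 64), dtype=np.uint8)  # C x H x W
    demo_mask = np.random.randint(0, 255, (64, 64), dtype=np.uint8)      # H x W
    pipeline = transforms.Compose([Resize((32, 32), (32, 32)), ToTensor(), Normalize()])
    out = pipeline({'image': demo_image, 'mask': demo_mask})
    print(out['image'].shape, out['image'].dtype)  # torch.Size([3, 32, 32]) torch.float32
    print(out['mask'].shape, out['mask'].dtype)    # torch.Size([1, 32, 32]) torch.float32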
| [
[
[
29,
36
],
[
212,
219
]
],
[
[
38,
48
],
[
6147,
6157
]
],
[
[
56,
60
],
[
1734,
1738
],
[
1846,
1850
],
[
2065,
2069
],
[
2186,
2190
]
],
[
[
68,
70
],
[
1744,
1746
],
[
1856,
1858
],
[
2075,
2077
],
[
2196,
2198
]
],
[
[
78,
89
],
[
2032,
2034
],
[
2153,
2155
],
[
2281,
2283
],
[
2328,
2330
],
[
2376,
2378
],
[
2630,
2632
],
[
2755,
2757
],
[
2898,
2900
],
[
3023,
3025
]
],
[
[
97,
100
],
[
3260,
3263
],
[
3372,
3375
],
[
3504,
3507
],
[
3597,
3600
],
[
4287,
4290
],
[
4321,
4324
],
[
4423,
4426
],
[
4459,
4462
]
],
[
[
108,
113
],
[
5155,
5160
],
[
5204,
5209
],
[
5404,
5409
],
[
5462,
5467
]
],
[
[
138,
148
],
[
5732,
5742
],
[
5827,
5837
]
],
[
[
150,
155
]
],
[
[
186,
192
]
],
[
[
201,
211
],
[
5937,
5947
]
],
[
[
3851,
3857
],
[
5752,
5758
],
[
5847,
5853
]
],
[
[
4773,
4781
],
[
5784,
5792
],
[
5878,
5886
]
],
[
[
5236,
5245
],
[
5796,
5805
],
[
5890,
5899
]
],
[
[
5495,
5523
]
]
] |
# -*- coding: utf-8 -*-
import pytest
import vtk
import numpy as np
import sksurgeryvtk.utils.polydata_utils as pdu
import sksurgeryvtk.models.vtk_surface_model as vbs
def test_overlapping_bounds():
radius_0=10.0
radius_1=7.0
centre_1=5.0
radius_2=4.0
centre_2=15.0
radius_3=4.0
centre_3=0.0
sphere_0 = vtk.vtkSphereSource()
sphere_0.SetRadius(radius_0)
sphere_0.SetPhiResolution(12)
sphere_0.SetThetaResolution(12)
sphere_0.SetCenter(0.0, 0.0, 0.0)
sphere_0.Update()
vtk_model_0 = sphere_0.GetOutput()
sphere_1 = vtk.vtkSphereSource()
sphere_1.SetRadius(radius_1)
sphere_1.SetPhiResolution(12)
sphere_1.SetThetaResolution(21)
sphere_1.SetCenter(centre_1, 0.0, 0.0)
sphere_1.Update()
vtk_model_1 = sphere_1.GetOutput()
sphere_2 = vtk.vtkSphereSource()
sphere_2.SetRadius(radius_2)
sphere_2.SetPhiResolution(12)
sphere_2.SetThetaResolution(21)
sphere_2.SetCenter(centre_2, 0.0, 0.0)
sphere_2.Update()
vtk_model_2 = sphere_2.GetOutput()
sphere_3 = vtk.vtkSphereSource()
sphere_3.SetRadius(radius_3)
sphere_3.SetPhiResolution(12)
sphere_3.SetThetaResolution(21)
sphere_3.SetCenter(centre_3, 0.0, 0.0)
sphere_3.Update()
vtk_model_3 = sphere_3.GetOutput()
assert (pdu.check_overlapping_bounds( vtk_model_0, vtk_model_1))
assert (pdu.check_overlapping_bounds( vtk_model_1, vtk_model_0))
assert (not pdu.check_overlapping_bounds( vtk_model_0, vtk_model_2))
assert (not pdu.check_overlapping_bounds( vtk_model_2, vtk_model_0))
assert (pdu.check_overlapping_bounds( vtk_model_0, vtk_model_3))
assert (pdu.check_overlapping_bounds( vtk_model_3, vtk_model_0))
def test_dice_overlap():
radius_0=10.0
radius_1=7.0
centre_1=5.0
sphere_0 = vtk.vtkSphereSource()
sphere_0.SetRadius(radius_0)
sphere_0.SetPhiResolution(60)
sphere_0.SetThetaResolution(60)
sphere_0.SetCenter(0.0, 0.0, 0.0)
sphere_0.Update()
vtk_model_0 = sphere_0.GetOutput()
sphere_1 = vtk.vtkSphereSource()
sphere_1.SetRadius(radius_1)
sphere_1.SetPhiResolution(60)
sphere_1.SetThetaResolution(60)
sphere_1.SetCenter(centre_1, 0.0, 0.0)
sphere_1.Update()
vtk_model_1 = sphere_1.GetOutput()
dice, volume_0, volume_1, volume_01 = pdu.two_polydata_dice(vtk_model_0, vtk_model_1)
np.testing.assert_approx_equal(volume_0, 4.0 * np.pi * radius_0**3.0 / 3.0, significant=2)
np.testing.assert_approx_equal(volume_1, 4.0 * np.pi * radius_1**3.0 / 3.0, significant=2)
#from http://mathworld.wolfram.com/Sphere-SphereIntersection.html
cap_height_0 = ( radius_1 - radius_0 + centre_1) * ( radius_1 + radius_0 - centre_1) / (2 * centre_1)
cap_height_1 = ( radius_0 - radius_1 + centre_1) * ( radius_0 + radius_1 - centre_1) / (2 * centre_1)
cap_vol_0 = np.pi * cap_height_0**2 * ( 3 * radius_0 - cap_height_0) / 3
cap_vol_1 = np.pi * cap_height_1**2 * ( 3 * radius_1 - cap_height_1) / 3
analytic = cap_vol_0 + cap_vol_1
np.testing.assert_approx_equal(volume_01, analytic, significant=2)
np.testing.assert_approx_equal(dice, 2*volume_01 / ( volume_0 + volume_1) , significant=10)
def test_dice_no_overlap():
radius_0=5.5
radius_1=4.3
centre_1=12.0
sphere_0 = vtk.vtkSphereSource()
sphere_0.SetRadius(radius_0)
sphere_0.SetPhiResolution(60)
sphere_0.SetThetaResolution(60)
sphere_0.SetCenter(0.0, 0.0, 0.0)
sphere_0.Update()
vtk_model_0 = sphere_0.GetOutput()
sphere_1 = vtk.vtkSphereSource()
sphere_1.SetRadius(radius_1)
sphere_1.SetPhiResolution(60)
sphere_1.SetThetaResolution(60)
sphere_1.SetCenter(centre_1, 0.0, 0.0)
sphere_1.Update()
vtk_model_1 = sphere_1.GetOutput()
dice, volume_0, volume_1, volume_01 = pdu.two_polydata_dice(vtk_model_0, vtk_model_1)
np.testing.assert_approx_equal(volume_0, 4.0 * np.pi * radius_0**3.0 / 3.0, significant=2)
np.testing.assert_approx_equal(volume_1, 4.0 * np.pi * radius_1**3.0 / 3.0, significant=2)
analytic = 0.0
np.testing.assert_approx_equal(volume_01, analytic, significant=2)
np.testing.assert_approx_equal(dice, 2*volume_01 / ( volume_0 + volume_1) , significant=10)
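# A hedged restatement (illustrative helper names only) of the sphere-sphere intersection
# ("lens") volume used in test_dice_overlap above; see the MathWorld page referenced there.
# Each spherical cap of height h cut from a sphere of radius r has volume
# pi * h**2 * (3*r - h) / 3, and the lens volume is the sum of the two caps.
def lens_volume(radius_0, radius_1, centre_distance):
    d = centre_distance
    cap_height_0 = (radius_1 - radius_0 + d) * (radius_1 + radius_0 - d) / (2 * d)
    cap_height_1 = (radius_0 - radius_1 + d) * (radius_0 + radius_1 - d) / (2 * d)
    cap_vol_0 = np.pi * cap_height_0**2 * (3 * radius_0 - cap_height_0) / 3
    cap_vol_1 = np.pi * cap_height_1**2 * (3 * radius_1 - cap_height_1) / 3
    return cap_vol_0 + cap_vol_1
def test_lens_volume_is_symmetric():
    # Swapping the two spheres must not change the intersection volume.
    np.testing.assert_approx_equal(lens_volume(10.0, 7.0, 5.0),
                                   lens_volume(7.0, 10.0, 5.0),
                                   significant=10)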
| [
[
[
57,
63
]
],
[
[
71,
74
],
[
362,
365
],
[
602,
605
],
[
851,
854
],
[
1096,
1099
],
[
1843,
1846
],
[
2083,
2086
],
[
3337,
3340
],
[
3577,
3580
]
],
[
[
82,
93
],
[
2408,
2410
],
[
2455,
2457
],
[
2503,
2505
],
[
2550,
2552
],
[
2893,
2895
],
[
2971,
2973
],
[
3075,
3077
],
[
3148,
3150
],
[
3902,
3904
],
[
3949,
3951
],
[
3997,
3999
],
[
4044,
4046
],
[
4112,
4114
],
[
4185,
4187
]
],
[
[
101,
141
],
[
1339,
1342
],
[
1408,
1411
],
[
1481,
1484
],
[
1554,
1557
],
[
1623,
1626
],
[
1692,
1695
],
[
2355,
2358
],
[
3849,
3852
]
],
[
[
149,
193
]
],
[
[
199,
222
]
],
[
[
1754,
1771
]
],
[
[
3245,
3265
]
]
] |
import string
import random
# --- Defining Variables ---
LOWER_ALPHABET = list(string.ascii_lowercase)
DIGITS = list(string.digits)
UPPER_ALPHABET = list(string.ascii_uppercase)
SYMBOLS = list(string.punctuation)
SYMBOLS_DELETE = ['"', "'", "(", ")", ",", ".", ":", ";", "[", "]", "|", "`", "{", "}"]
for x in SYMBOLS_DELETE:
SYMBOLS.remove(x)
CHAR_TYPES = [LOWER_ALPHABET, DIGITS] # characters used as default
# --- PROGRAM INTRO ---
print("""
#############################################################
# --- Password Generator --- #
#############################################################
# Language: Python #
#############################################################
# #
# This is my very first project with Python #
#     Lowercase characters and digits are used as default      #
# #
#############################################################
""")
# --- LENGTH QUESTION ---
while True:
print("Password Length (Min: 8 / Max: 48):")
pass_len = input()
try:
pass_len = int(pass_len)
if pass_len >= 8 and pass_len <= 48:
break
else:
print("\nYou should insert a number between 8 and 16.\n")
except ValueError:
# In case of the user insert a value that cannot be turned into a 'int' type
print("\nYou should insert a NUMBER between 8 and 16.\n")
# --- UPPERCASE AND SYMBOLS QUESTION FUNCTION ---
def question_checker(phrase, char_type):
"""Check if the user inserts a valid value on the upper case and symbols question.
    Then append the specific char type list if the answer is "Yes".
"""
while True:
print("")
print(phrase)
answer = input().strip().capitalize()
if answer == "Yes" or answer == "No":
break
else:
print("\nInvalid Value.\n")
    def char_assignment(char_check, char_type):
        # Append the chosen character set only when the user answers "Yes".
        if char_check == "Yes":
            CHAR_TYPES.append(char_type)
    char_assignment(answer, char_type)
# --- ASSIGNING UPPERCASE AND/OR SYMBOLS CHARACTERS INTO THE CHAR_TYPES LIST. ---
question_checker("Do you want uppercase letters? [Yes/No]", UPPER_ALPHABET)
question_checker("Do you want symbols? [Yes/No]", SYMBOLS)
# --- CREATE THE PASSWORD ---
def create_password():
password_list = []
for x in range(len(CHAR_TYPES)):
        password_list.append(CHAR_TYPES[x][random.randrange(len(CHAR_TYPES[x]))]) # make at least one character from each char type appear in the password
for x in range(pass_len - len(CHAR_TYPES)):
random_chartype = random.randrange(len(CHAR_TYPES))
        password_list.append(CHAR_TYPES[random_chartype][random.randrange(len(CHAR_TYPES[random_chartype]))]) # the remaining slots are filled with random characters
random.shuffle(password_list)
password = "".join(password_list)
return password
# --- SHOW OUTPUT ---
def show_password():
print("\n")
print(f"Password: {create_password()} ")
print("\n")
show_password()
# --- REMAKE THE PASSWORD ---
while True:
print("Remake the password? [Yes/No]")
answer = input().strip().capitalize()
if answer == "Yes" or answer == "No":
if answer == "Yes":
show_password()
else:
print("\n")
break
else:
print("\nInvalid Value.\n") | [
[
[
7,
13
],
[
81,
87
],
[
119,
125
],
[
156,
162
],
[
196,
202
]
],
[
[
21,
27
],
[
2586,
2592
],
[
2767,
2773
],
[
2858,
2864
],
[
2983,
2989
]
],
[
[
59,
73
],
[
364,
378
]
],
[
[
105,
111
],
[
380,
386
]
],
[
[
134,
148
],
[
2352,
2366
]
],
[
[
181,
188
],
[
331,
338
],
[
2418,
2425
]
],
[
[
216,
230
],
[
313,
327
]
],
[
[
308,
309
],
[
346,
347
]
],
[
[
350,
360
],
[
2529,
2539
],
[
2572,
2582
],
[
2607,
2617
],
[
2727,
2737
],
[
2788,
2798
],
[
2830,
2840
],
[
2879,
2889
],
[
2107,
2117
]
],
[
[
1173,
1181
],
[
1218,
1226
],
[
2712,
2720
]
],
[
[
1203,
1211
],
[
1235,
1243
],
[
1253,
1261
],
[
2712,
2720
]
],
[
[
1584,
1600
],
[
2292,
2308
],
[
2368,
2384
]
],
[
[
2463,
2478
],
[
3156,
3171
]
],
[
[
3100,
3113
],
[
3196,
3209
],
[
3423,
3436
]
],
[
[
3303,
3309
],
[
3348,
3354
],
[
3367,
3373
],
[
3394,
3400
]
]
] |
# type: ignore
import json
import uuid
from json import JSONDecodeError
from typing import Tuple, Dict, List
import boto3
from melange.drivers.interfaces import Queue, Topic, MessagingDriver, Message
class AWSDriver(MessagingDriver):
def __init__(self, **kwargs):
super().__init__()
self.max_number_of_messages = kwargs.get("max_number_of_messages", 10)
self.visibility_timeout = kwargs.get("visibility_timeout", 100)
self.wait_time_seconds = kwargs.get("wait_time_seconds", 10)
def declare_topic(self, topic_name) -> Topic:
sns = boto3.resource("sns")
topic = sns.create_topic(Name=topic_name)
return topic
def get_queue(self, queue_name) -> Queue:
sqs_res = boto3.resource("sqs")
return sqs_res.get_queue_by_name(QueueName=queue_name)
def declare_queue(
self,
queue_name: str,
*topics_to_bind: Topic,
dead_letter_queue_name: str = None,
**kwargs
) -> Tuple[Queue, Queue]:
try:
queue = self.get_queue(queue_name)
except Exception:
queue = self._create_queue(queue_name, content_based_deduplication="true")
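        # Bind the queue to every topic: build one SQS access-policy statement per topic and subscribe the queue ARN as an SQS endpoint.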
if topics_to_bind:
statements = []
for topic in topics_to_bind:
statement = {
"Sid": "Sid{}".format(uuid.uuid4()),
"Effect": "Allow",
"Principal": "*",
"Resource": queue.attributes["QueueArn"],
"Action": "sqs:SendMessage",
"Condition": {"ArnEquals": {"aws:SourceArn": topic.arn}},
}
statements.append(statement)
subscription = topic.subscribe(
Protocol="sqs",
Endpoint=queue.attributes[
"QueueArn"
], # , Attributes={"RawMessageDelivery": "true"}
)
if kwargs.get("filter_events"):
filter_policy = {"event_type": kwargs["filter_events"]}
else:
filter_policy = {}
subscription.set_attributes(
AttributeName="FilterPolicy",
AttributeValue=json.dumps(filter_policy),
)
policy = {
"Version": "2012-10-17",
"Id": "sqspolicy",
"Statement": statements,
}
queue.set_attributes(Attributes={"Policy": json.dumps(policy)})
dead_letter_queue = None
if dead_letter_queue_name:
try:
dead_letter_queue = self.get_queue(dead_letter_queue_name)
except Exception:
dead_letter_queue = self._create_queue(
dead_letter_queue_name, content_based_deduplication="true"
)
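            # Messages that fail processing 4 times (maxReceiveCount) are redirected to the dead-letter queue.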
redrive_policy = {
"deadLetterTargetArn": dead_letter_queue.attributes["QueueArn"],
"maxReceiveCount": "4",
}
queue.set_attributes(
Attributes={"RedrivePolicy": json.dumps(redrive_policy)}
)
return queue, dead_letter_queue
def _create_queue(self, queue_name: str, **kwargs) -> Queue:
sqs_res = boto3.resource("sqs")
fifo = queue_name.endswith(".fifo")
attributes = {}
if fifo:
attributes["FifoQueue"] = "true"
attributes["ContentBasedDeduplication"] = (
"true" if kwargs.get("content_based_deduplication") else "false"
)
queue = sqs_res.create_queue(QueueName=queue_name, Attributes=attributes)
return queue
def retrieve_messages(self, queue: Queue, attempt_id=None) -> List[Message]:
kwargs = dict(
MaxNumberOfMessages=self.max_number_of_messages,
VisibilityTimeout=self.visibility_timeout,
WaitTimeSeconds=self.wait_time_seconds,
MessageAttributeNames=["All"],
AttributeNames=["All"],
)
if attempt_id:
kwargs["ReceiveRequestAttemptId"] = attempt_id
messages = queue.receive_messages(**kwargs)
# We need to differentiate here whether the message came from SNS or SQS
return [self._construct_message(message) for message in messages]
def queue_publish(
self,
content: str,
queue,
event_type_name: str = None,
message_group_id: str = None,
message_deduplication_id: str = None,
):
kwargs = dict(MessageBody=json.dumps({"Message": content}))
if event_type_name:
kwargs["MessageAttributes"] = {
"event_type": {"DataType": "String", "StringValue": event_type_name}
}
if message_group_id:
kwargs["MessageGroupId"] = message_group_id
if message_deduplication_id:
kwargs["MessageDeduplicationId"] = message_deduplication_id
queue.send_message(**kwargs)
def publish(
self,
content: str,
topic: Topic,
event_type_name: str,
extra_attributes: Dict = None,
):
args = dict(
Message=content,
MessageAttributes={
"event_type": {"DataType": "String", "StringValue": event_type_name}
},
)
if extra_attributes:
if "subject" in extra_attributes:
args["Subject"] = extra_attributes["subject"]
if "message_attributes" in extra_attributes:
args["MessageAttributes"].update(extra_attributes["message_attributes"])
if "message_structure" in extra_attributes:
args["MessageStructure"] = extra_attributes["message_structure"]
response = topic.publish(**args)
if "MessageId" not in response:
raise ConnectionError("Could not send the event to the SNS TOPIC")
def acknowledge(self, message: Message) -> None:
message.metadata.delete()
def close_connection(self) -> None:
pass
def delete_queue(self, queue: Queue) -> None:
queue.delete()
def delete_topic(self, topic: Topic) -> None:
topic.delete()
def _construct_message(self, message) -> Message:
body = message.body
manifest = ""
try:
message_content = json.loads(body)
if "Message" in message_content:
content = message_content["Message"]
# Does the content have more attributes? If so, it is very likely that the message came from a non-raw
# SNS redirection
if "MessageAttributes" in message_content:
manifest = (
message_content["MessageAttributes"]
.get("event_type", {})
.get("Value")
or ""
)
else:
content = message_content
except JSONDecodeError:
content = body
manifest = (
manifest
or message.message_attributes.get("event_type", {}).get("StringValue")
or ""
)
return Message(message.message_id, content, message, manifest)
| [
[
[
23,
27
],
[
2273,
2277
],
[
2529,
2533
],
[
3141,
3145
],
[
4606,
4610
],
[
6413,
6417
]
],
[
[
35,
39
],
[
1357,
1361
]
],
[
[
57,
72
],
[
7046,
7061
]
],
[
[
92,
97
],
[
994,
999
]
],
[
[
99,
103
],
[
5178,
5182
]
],
[
[
105,
109
],
[
3781,
3785
]
],
[
[
118,
123
],
[
585,
590
],
[
743,
748
],
[
3308,
3313
]
],
[
[
164,
169
],
[
718,
723
],
[
1000,
1005
],
[
1007,
1012
],
[
3283,
3288
],
[
3754,
3759
],
[
6152,
6157
]
],
[
[
171,
176
],
[
564,
569
],
[
917,
922
],
[
5115,
5120
],
[
6226,
6231
]
],
[
[
178,
193
],
[
221,
236
]
],
[
[
195,
202
],
[
3786,
3793
],
[
6011,
6018
],
[
6311,
6318
],
[
7260,
7267
]
],
[
[
211,
220
]
]
] |
'''
Created on 23.08.2017
@author: falensaa
'''
import logging
import sys
import imsnpars.nparser.features
import imsnpars.nparser.network
import imsnpars.nparser.graph.features as gfeatures
from imsnpars.nparser.graph import task, decoder
from imsnpars.nparser.graph.mst import cle
from imsnpars.nparser.labels import task as ltask
def buildMSTDecoder(opts, featBuilder):
if opts.mst == "CLE":
mstAlg = cle.ChuLiuEdmonds()
decod = decoder.FirstOrderDecoder(featBuilder)
else:
logging.error("Unknown algorithm: %s" % opts.mst)
sys.exit()
logging.info("Graph system used: %s" % type(mstAlg))
logging.info("Decoder used: %s" % type(decod))
return mstAlg, decod
def buildGraphFeatureExtractors(featuresD, reprDim):
featIds = { ("h", "0"): gfeatures.FeatId.HEAD,
("d", "0"): gfeatures.FeatId.DEP,
("h", "1"): gfeatures.FeatId.HEAD_P_1,
("h", "2"): gfeatures.FeatId.HEAD_P_2,
("d", "1"): gfeatures.FeatId.DEP_P_1,
("d", "2"): gfeatures.FeatId.DEP_P_2,
("h", "-1"): gfeatures.FeatId.HEAD_M_1,
("h", "-2"): gfeatures.FeatId.HEAD_M_2,
("d", "-1"): gfeatures.FeatId.DEP_M_1,
("d", "-2"): gfeatures.FeatId.DEP_M_2,
("dist", "0") : gfeatures.FeatId.DIST }
mainFeatIds = {"h": gfeatures.FeatId.HEAD,
"d": gfeatures.FeatId.DEP }
featureExtractors = { }
featureBuilders = { }
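    # Feature names such as "h+1" or "d-2" split into a base token id ("h"/"d") plus a positional shift; "dist" maps to the distance feature builder.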
for feat in featuresD:
if "+" in feat:
name, shift = feat.split("+")
elif "-" in feat:
name, shift = feat.split("-")
shift = "-" + shift
else:
name, shift = feat, "0"
featId = featIds.get((name, shift))
        if featId is None:
logging.error("Unknown token id: %s" % feat)
sys.exit()
# for now there is only one builder -- distance
if featId == gfeatures.FeatId.DIST:
featureBuilders[featId] = gfeatures.DistFeatureBuilder(reprDim)
else:
mainFeature = mainFeatIds[name]
if mainFeature not in featureExtractors:
featureExtractors[mainFeature] = gfeatures.TokenFeatExtractor()
featureExtractors[mainFeature].addShift(featId, int(shift))
return featureExtractors, featureBuilders
def buildGraphParser(opts, dummyBuilder, reprBuilder):
reprDim = reprBuilder.getDim()
tokExtractors, featBuilders = buildGraphFeatureExtractors(opts.features, reprDim)
extractor = gfeatures.GraphFeatureExtractor(tokExtractors)
featIds = extractor.getFeatIds() + [ feat.getFeatId() for feat in featBuilders.values() ]
network = imsnpars.nparser.network.ParserNetwork(opts.mlpHiddenDim, opts.nonLinFun, featIds)
featBuilder = imsnpars.nparser.features.FeatReprBuilder(extractor, featBuilders, dummyBuilder, network, opts.parseLayer)
mstAlg, decod = buildMSTDecoder(opts, featBuilder)
if opts.labeler == "graph":
lblDict = ltask.LblTagDict()
parsingTask = task.NNGraphParsingTaskWithLbl(mstAlg, featBuilder, decod, network, opts.augment, lblDict)
else:
parsingTask = task.NNGraphParsingTask(mstAlg, featBuilder, decod, network, opts.augment)
return parsingTask
| [
[
[
57,
64
],
[
513,
520
],
[
591,
598
],
[
648,
655
],
[
1873,
1880
]
],
[
[
72,
75
],
[
571,
574
],
[
1930,
1933
]
],
[
[
84,
109
]
],
[
[
117,
141
],
[
2826,
2834
],
[
2927,
2935
]
],
[
[
149,
193
],
[
802,
811
],
[
853,
862
],
[
903,
912
],
[
958,
967
],
[
1013,
1022
],
[
1067,
1076
],
[
1122,
1131
],
[
1178,
1187
],
[
1234,
1243
],
[
1289,
1298
],
[
1347,
1356
],
[
1400,
1409
],
[
1447,
1456
],
[
2027,
2036
],
[
2088,
2097
],
[
2286,
2295
],
[
2666,
2675
]
],
[
[
229,
233
],
[
3185,
3189
],
[
3308,
3312
]
],
[
[
235,
242
],
[
456,
463
]
],
[
[
282,
285
],
[
420,
423
]
],
[
[
322,
335
],
[
3144,
3149
]
],
[
[
341,
356
],
[
3054,
3069
]
],
[
[
725,
752
],
[
2598,
2625
]
],
[
[
2478,
2494
]
]
] |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.22
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1beta1EventList(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'items': 'list[V1beta1Event]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): # noqa: E501
"""V1beta1EventList - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
@property
def api_version(self):
"""Gets the api_version of this V1beta1EventList. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1beta1EventList. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1beta1EventList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1beta1EventList. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""Gets the items of this V1beta1EventList. # noqa: E501
items is a list of schema objects. # noqa: E501
:return: The items of this V1beta1EventList. # noqa: E501
:rtype: list[V1beta1Event]
"""
return self._items
@items.setter
def items(self, items):
"""Sets the items of this V1beta1EventList.
items is a list of schema objects. # noqa: E501
:param items: The items of this V1beta1EventList. # noqa: E501
:type: list[V1beta1Event]
"""
if self.local_vars_configuration.client_side_validation and items is None: # noqa: E501
raise ValueError("Invalid value for `items`, must not be `None`") # noqa: E501
self._items = items
@property
def kind(self):
"""Gets the kind of this V1beta1EventList. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1beta1EventList. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1beta1EventList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1beta1EventList. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1beta1EventList. # noqa: E501
:return: The metadata of this V1beta1EventList. # noqa: E501
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1beta1EventList.
:param metadata: The metadata of this V1beta1EventList. # noqa: E501
:type: V1ListMeta
"""
self._metadata = metadata
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta1EventList):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1beta1EventList):
return True
return self.to_dict() != other.to_dict()
| [
[
[
279,
285
],
[
6356,
6362
]
],
[
[
293,
295
]
],
[
[
318,
321
],
[
5552,
5555
]
],
[
[
367,
380
],
[
1419,
1432
]
],
[
[
389,
405
],
[
6596,
6612
],
[
6810,
6826
]
]
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Bout (read bank-out) extracts transactions from pdf bank statements.
_ _
(_) (_)
(_) _ _ _ _ _ _ _ _ _ (_) _ _
(_)(_)(_)(_)_ _ (_)(_)(_) _ (_) (_)(_)(_)(_)(_)
(_) (_)(_) (_)(_) (_) (_)
(_) (_)(_) (_)(_) (_) (_) _
(_) _ _ _(_)(_) _ _ _ (_)(_)_ _ _(_)_ (_)_ _(_)
(_)(_)(_)(_) (_)(_)(_) (_)(_)(_) (_) (_)(_)
"""
import io
import logging
import click
import csv
from collections import namedtuple
from datetime import datetime
logger = logging.getLogger("bout")
profiles = {}
Transaction = namedtuple("Transaction",
["id", "date", "payee", "memo", "amount"])
InvalidTransaction = namedtuple("InvalidTransaction", [])
def get_icici_csv(data_row):
"""Convert a transaction row to tuple.
Details of fields
0: 'D', # Transaction date
2: 'M', # Transaction details
3: 'T', # Deposit
4: 'T-', # Withdrawal
"""
logger.debug("get_icicicsv: Data row = {}".format(data_row))
date = data_row[0].replace('-', '/')
if _valid_date(date):
amt = "-{}".format(data_row[4])
if data_row[3] != "0":
amt = data_row[3]
return Transaction(id=0,
date=date,
payee="", # Empty for ICICI bank account
memo=data_row[2],
amount=amt)
return InvalidTransaction()
def get_icicicc_csv(data_row):
"""Convert a transaction row to tuple.
Details of fields
0: 'D', # Transaction date
2: 'M', # Transaction details
5: 'T', # Amount
"""
logger.debug("get_icicicsv: Data row = {}".format(data_row))
date = data_row[0]
if _valid_date(date, date_format="%d/%m/%Y"):
amt = "-{}".format(data_row[5])
if data_row[6] == "CR":
amt = data_row[5]
return Transaction(id=0,
date=date,
payee="", # Empty for ICICI bank account
memo=data_row[2],
amount=amt)
return InvalidTransaction()
def qif_header():
"""Print qif header."""
click.echo("!Account\nNMyAccount\nTMyBank\n^\n!Type:Bank")
def to_qif(transaction):
"""Transform a cleaned up row to qif format.
Returns:
string of a particular transaction in qif format
See wikipedia for more details of QIF format.
https://en.wikipedia.org/wiki/Quicken_Interchange_Format#Detail_items
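    For example, a transaction dated 01/02/2020 with memo "ATM" and amount
    -500 would be rendered as "D01/02/2020\nMATM\nT-500\n^\n\n".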
"""
logger.debug("to_qif: Input = {}".format(transaction))
return "D{0}\nM{1}\nT{2}\n^\n\n"\
.format(transaction.date, transaction.memo, transaction.amount)
def _valid_date(date_value, date_format="%d/%m/%Y"):
"""Validate a transaction date."""
try:
transaction_date = datetime.strptime(date_value, date_format)
return transaction_date is not None
except ValueError:
return False
def _filter_csv_header(doc, header):
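    """Return a csv.reader over the data rows found between the header line and the next blank line."""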
head_skip = False
mem = io.StringIO()
with open(doc, encoding='utf-8', mode='r') as f:
for line in f:
if line.startswith(header):
head_skip = True
continue
if head_skip and (not line or line.isspace()):
break
if head_skip and ',' in line:
mem.write(line)
mem.seek(0)
return csv.reader(mem)
@click.command()
@click.argument("doc", type=click.Path(exists=True))
@click.option("--profile", prompt="Choose a profile", default="icici",
show_default=True,
type=click.Choice(["icici", "icicicc"]),
help="Document type profile.")
@click.option("--debug", is_flag=True, show_default=True,
help="Show diagnostic messages.")
def start(doc, profile, debug):
"""Bout (read bank-out) extracts transactions from csv bank statements."""
if debug:
logging.basicConfig(level=logging.DEBUG)
logger.info("Verbose messages are enabled.")
profiles.update({"icici": get_icici_csv,
"icicicc": get_icicicc_csv})
rows = []
if profile == "icici":
header = "DATE,MODE,PARTICULARS,DEPOSITS,WITHDRAWALS,BALANCE"
rows = _filter_csv_header(doc, header)
elif profile == "icicicc":
header = "Date,Sr.No.,Transaction Details,Reward Point Header,Intl.Amount,Amount(in Rs),BillingAmountSign"
rows = _filter_csv_header(doc, header)
# row -> clean_row
# clean_row, profile -> transaction
# transaction -> qif
create_transaction = profiles[profile]
print_header = False
for r in rows:
transaction = create_transaction(r)
if type(transaction) is not InvalidTransaction:
if not print_header:
qif_header()
print_header = True
click.echo(to_qif(transaction))
if __name__ == '__main__':
start()
| [
[
[
552,
554
],
[
3228,
3230
]
],
[
[
562,
569
],
[
669,
676
],
[
4129,
4136
],
[
4155,
4162
]
],
[
[
577,
582
],
[
3617,
3622
],
[
3634,
3639
],
[
3661,
3666
],
[
3687,
3692
],
[
3809,
3814
],
[
3891,
3896
],
[
2387,
2392
],
[
5057,
5062
]
],
[
[
590,
593
],
[
3598,
3601
]
],
[
[
618,
628
],
[
724,
734
],
[
839,
849
]
],
[
[
650,
658
],
[
3026,
3034
]
],
[
[
660,
666
],
[
1129,
1135
],
[
1839,
1845
],
[
2731,
2737
],
[
4178,
4184
]
],
[
[
696,
704
],
[
4228,
4236
],
[
4785,
4793
]
],
[
[
710,
721
],
[
1373,
1384
],
[
2090,
2101
]
],
[
[
818,
836
],
[
1597,
1615
],
[
2314,
2332
],
[
4927,
4945
]
],
[
[
882,
895
],
[
4254,
4267
]
],
[
[
1624,
1639
],
[
4301,
4316
]
],
[
[
2341,
2351
],
[
4996,
5006
]
],
[
[
2452,
2458
],
[
5068,
5074
]
],
[
[
2902,
2913
],
[
1238,
1249
],
[
1930,
1941
]
],
[
[
3163,
3181
],
[
4446,
4464
],
[
4639,
4657
]
],
[
[
4000,
4005
],
[
5122,
5127
]
]
] |
from colored import *
import staticconf
"""
You might find the colored documentation very useful:
https://pypi.python.org/pypi/colored
"""
ENABLE_COLORIZER = staticconf.read_string('enable_colorizer', default='false').lower() == 'true'
def colorizer_enabled(function):
"""do not colorize if it's not enabled"""
def wrapper(*args):
if ENABLE_COLORIZER:
return function(*args)
elif args:
return args[0]
else:
return args
return wrapper
# attr and colors
ATTR_RESET = attr('reset')
COLOR_INDEX = fg(199)
COLOR_TITLE = fg(45)
COLOR_TAG_0 = fg(10) + attr('bold')
COLOR_TAG_1 = fg(10)
COLOR_TAG_2 = fg(87)
COLOR_TAG_3 = fg(208)
COLOR_TAG_4 = fg(252)
@colorizer_enabled
def color_index(index):
return COLOR_INDEX + index + ATTR_RESET
@colorizer_enabled
def color_title(title):
return COLOR_TITLE + title + ATTR_RESET
def _color_by_score(score):
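    # Pick a tag color by score bucket: COLOR_TAG_0 for scores >= 1 down to COLOR_TAG_4 for anything below 0.7.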
if score >= 1:
return COLOR_TAG_0
elif score >= 0.9:
return COLOR_TAG_1
elif score >= 0.8:
return COLOR_TAG_2
elif score >= 0.7:
return COLOR_TAG_3
return COLOR_TAG_4
@colorizer_enabled
def _color_tag(tag, score):
return _color_by_score(score) + tag + ATTR_RESET
def color_tags(scored_tags):
return ", ".join((_color_tag(tag, score) for tag, score in scored_tags))
| [
[
[
20,
21
],
[
543,
547
],
[
571,
573
],
[
593,
595
],
[
614,
616
],
[
623,
627
],
[
650,
652
],
[
671,
673
],
[
692,
694
],
[
714,
716
]
],
[
[
29,
39
],
[
160,
170
]
],
[
[
141,
157
],
[
354,
370
]
],
[
[
244,
261
],
[
725,
742
],
[
813,
830
],
[
1149,
1166
]
],
[
[
530,
540
],
[
800,
810
],
[
888,
898
],
[
1237,
1247
]
],
[
[
557,
568
],
[
778,
789
]
],
[
[
579,
590
],
[
866,
877
]
],
[
[
600,
611
],
[
962,
973
]
],
[
[
636,
647
],
[
1012,
1023
]
],
[
[
657,
668
],
[
1062,
1073
]
],
[
[
678,
689
],
[
1112,
1123
]
],
[
[
700,
711
],
[
1135,
1146
]
],
[
[
747,
758
]
],
[
[
835,
846
]
],
[
[
904,
919
],
[
1206,
1221
]
],
[
[
1171,
1181
],
[
1300,
1310
]
],
[
[
1253,
1263
]
]
] |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._azure_media_services import AzureMediaServices
__all__ = ['AzureMediaServices']
# `._patch.py` is used for handwritten extensions to the generated code
# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
from ._patch import patch_sdk
patch_sdk()
| [
[
[
503,
521
]
],
[
[
522,
529
]
],
[
[
763,
772
],
[
773,
782
]
]
] |
from datetime import timedelta
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth.models import User
from django.utils import timezone
from allauth.account.models import EmailAddress
from rest_framework import status
from rest_framework.test import APITestCase, APIClient
from challenges.models import Challenge
from hosts.models import ChallengeHost, ChallengeHostTeam
from participants.models import ParticipantTeam, Participant
class BaseAPITestClass(APITestCase):
def setUp(self):
self.client = APIClient(enforce_csrf_checks=True)
self.user = User.objects.create(
username="someuser",
email="[email protected]",
password="secret_password",
)
EmailAddress.objects.create(
user=self.user, email="[email protected]", primary=True, verified=True
)
self.invite_user = User.objects.create(
username="otheruser",
email="[email protected]",
password="other_secret_password",
)
self.participant_team = ParticipantTeam.objects.create(
team_name="Participant Team", created_by=self.user
)
self.participant = Participant.objects.create(
user=self.user, team=self.participant_team, status=Participant.SELF
)
self.client.force_authenticate(user=self.user)
class GetParticipantTeamTest(BaseAPITestClass):
url = reverse_lazy("participants:get_participant_team_list")
def setUp(self):
super(GetParticipantTeamTest, self).setUp()
self.user2 = User.objects.create(
username="user2",
email="[email protected]",
password="user2_password",
)
EmailAddress.objects.create(
user=self.user2,
email="[email protected]",
primary=True,
verified=True,
)
self.participant2 = Participant.objects.create(
user=self.user2,
status=Participant.ACCEPTED,
team=self.participant_team,
)
def test_get_challenge(self):
expected = [
{
"id": self.participant_team.pk,
"team_name": self.participant_team.team_name,
"created_by": self.user.username,
"team_url": self.participant_team.team_url,
"members": [
{
"member_name": self.participant.user.username,
"status": self.participant.status,
"member_id": self.participant.user.id,
},
{
"member_name": self.participant2.user.username,
"status": self.participant2.status,
"member_id": self.participant2.user.id,
},
],
}
]
response = self.client.get(self.url, {})
self.assertEqual(response.data["results"], expected)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class CreateParticipantTeamTest(BaseAPITestClass):
url = reverse_lazy("participants:get_participant_team_list")
def setUp(self):
super(CreateParticipantTeamTest, self).setUp()
self.data = {"team_name": "New Participant Team"}
def test_create_participant_team_with_all_data(self):
response = self.client.post(self.url, self.data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_create_participant_team_with_team_name_same_as_with_existing_team(
self
):
expected = {
"team_name": [
"participant team with this team name already exists."
]
}
response = self.client.post(self.url, self.data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# Creating team with same team name
response = self.client.post(self.url, self.data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.data, expected)
def test_create_participant_team_with_no_data(self):
del self.data["team_name"]
response = self.client.post(self.url, self.data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
class GetParticularParticipantTeam(BaseAPITestClass):
def setUp(self):
super(GetParticularParticipantTeam, self).setUp()
self.url = reverse_lazy(
"participants:get_participant_team_details",
kwargs={"pk": self.participant_team.pk},
)
self.user2 = User.objects.create(
username="user2",
email="[email protected]",
password="user2_password",
)
EmailAddress.objects.create(
user=self.user2,
email="[email protected]",
primary=True,
verified=True,
)
self.participant2 = Participant.objects.create(
user=self.user2,
status=Participant.ACCEPTED,
team=self.participant_team,
)
def test_get_particular_participant_team(self):
expected = {
"id": self.participant_team.pk,
"team_name": self.participant_team.team_name,
"created_by": self.user.username,
"team_url": self.participant_team.team_url,
"members": [
{
"member_name": self.participant.user.username,
"status": self.participant.status,
"member_id": self.participant.user.id,
},
{
"member_name": self.participant2.user.username,
"status": self.participant2.status,
"member_id": self.participant2.user.id,
},
],
}
response = self.client.get(self.url, {})
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_particular_participant_team_does_not_exist(self):
self.url = reverse_lazy(
"participants:get_participant_team_details",
kwargs={"pk": self.participant_team.pk + 1},
)
expected = {"error": "ParticipantTeam does not exist"}
response = self.client.get(self.url, {})
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE)
class UpdateParticularParticipantTeam(BaseAPITestClass):
def setUp(self):
super(UpdateParticularParticipantTeam, self).setUp()
self.url = reverse_lazy(
"participants:get_participant_team_details",
kwargs={"pk": self.participant_team.pk},
)
self.partial_update_participant_team_name = (
"Partial Update Participant Team"
)
self.update_participant_team_name = "Update Test Participant Team"
self.data = {"team_name": self.update_participant_team_name}
def test_particular_participant_team_partial_update(self):
self.partial_update_data = {
"team_name": self.partial_update_participant_team_name
}
expected = {
"id": self.participant_team.pk,
"team_name": self.partial_update_participant_team_name,
"created_by": self.user.username,
"team_url": self.participant_team.team_url,
}
response = self.client.patch(self.url, self.partial_update_data)
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_particular_participant_team_update(self):
expected = {
"id": self.participant_team.pk,
"team_name": self.update_participant_team_name,
"created_by": self.user.username,
"team_url": self.participant_team.team_url,
}
response = self.client.put(self.url, self.data)
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_particular_participant_team_update_with_no_data(self):
self.data = {"team_name": ""}
response = self.client.put(self.url, self.data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
class DeleteParticularParticipantTeam(BaseAPITestClass):
def setUp(self):
super(DeleteParticularParticipantTeam, self).setUp()
self.url = reverse_lazy(
"participants:get_participant_team_details",
kwargs={"pk": self.participant_team.pk},
)
def test_particular_participant_team_delete(self):
response = self.client.delete(self.url, {})
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
class InviteParticipantToTeamTest(BaseAPITestClass):
def setUp(self):
super(InviteParticipantToTeamTest, self).setUp()
self.data = {"email": self.invite_user.email}
self.url = reverse_lazy(
"participants:invite_participant_to_team",
kwargs={"pk": self.participant_team.pk},
)
def test_invite_participant_to_team_with_all_data(self):
expected = {"message": "User has been successfully added to the team!"}
response = self.client.post(self.url, self.data)
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
def test_invite_participant_to_team_with_no_data(self):
del self.data["email"]
response = self.client.post(self.url, self.data)
self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE)
def test_invite_self_to_team(self):
self.data = {"email": self.user.email}
expected = {"error": "User is already part of the team!"}
response = self.client.post(self.url, self.data)
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE)
def test_invite_to_other_team_which_doesnot_belong_to_user(self):
temp_user = User.objects.create(
username="temp_user", password="test_password"
)
temp_participant_team = ParticipantTeam.objects.create(
team_name="Test Team 1", created_by=temp_user
)
expected = {"error": "You are not a member of this team!"}
self.url = reverse_lazy(
"participants:invite_participant_to_team",
kwargs={"pk": temp_participant_team.pk},
)
response = self.client.post(self.url, self.data)
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_invite_user_which_does_not_exist_to_team(self):
self.data = {"email": "[email protected]"}
expected = {"error": "User does not exist with this email address!"}
response = self.client.post(self.url, self.data)
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE)
def test_particular_participant_team_for_invite_does_not_exist(self):
self.url = reverse_lazy(
"participants:invite_participant_to_team",
kwargs={"pk": self.participant_team.pk + 1},
)
expected = {"error": "Participant Team does not exist"}
response = self.client.post(self.url, {})
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_invite_participant_to_team_when_user_cannot_be_invited(self):
"""
NOTE
user: host user
        user2: participant 1
        user3: participant 2
"""
self.user2 = User.objects.create(
username="user2",
email="[email protected]",
password="user2_password",
)
EmailAddress.objects.create(
user=self.user2,
email="[email protected]",
primary=True,
verified=True,
)
self.user3 = User.objects.create(
username="user3",
email="[email protected]",
password="user3_password",
)
EmailAddress.objects.create(
user=self.user3,
email="[email protected]",
primary=True,
verified=True,
)
self.participant_team2 = ParticipantTeam.objects.create(
team_name="Participant Team created by user 2",
created_by=self.user2,
)
self.participant_team3 = ParticipantTeam.objects.create(
team_name="Participant Team created by user 3",
created_by=self.user3,
)
self.participant2 = Participant.objects.create(
user=self.user2,
status=Participant.ACCEPTED,
team=self.participant_team2,
)
self.participant3 = Participant.objects.create(
user=self.user3,
status=Participant.ACCEPTED,
team=self.participant_team3,
)
self.challenge_host_team = ChallengeHostTeam.objects.create(
team_name="Test Challenge Host Team", created_by=self.user
)
self.challenge = Challenge.objects.create(
title="Test Challenge",
short_description="Short description for test challenge",
description="Description for test challenge",
terms_and_conditions="Terms and conditions for test challenge",
submission_guidelines="Submission guidelines for test challenge",
creator=self.challenge_host_team,
published=False,
enable_forum=True,
leaderboard_description=None,
anonymous_leaderboard=False,
start_date=timezone.now() - timedelta(days=2),
end_date=timezone.now() + timedelta(days=1),
)
self.client.force_authenticate(user=self.user2)
self.challenge.participant_teams.add(self.participant_team2)
self.challenge.participant_teams.add(self.participant_team3)
self.data = {"email": self.user3.email}
self.url = reverse_lazy(
"participants:invite_participant_to_team",
kwargs={"pk": self.participant_team2.pk},
)
expected = {
"error": "Sorry, the invited user has already participated "
"in atleast one of the challenges which you are already"
" a part of. Please try creating a new team and then invite."
}
response = self.client.post(self.url, self.data)
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE)
class DeleteParticipantFromTeamTest(BaseAPITestClass):
def setUp(self):
super(DeleteParticipantFromTeamTest, self).setUp()
self.participant = Participant.objects.create(
user=self.user, status=Participant.SELF, team=self.participant_team
)
self.user2 = User.objects.create(
username="user2",
email="[email protected]",
password="user2_password",
)
self.participant2 = Participant.objects.create(
user=self.user2,
status=Participant.ACCEPTED,
team=self.participant_team,
)
self.url = reverse_lazy(
"participants:delete_participant_from_team",
kwargs={
"participant_team_pk": self.participant_team.pk,
"participant_pk": self.invite_user.pk,
},
)
def test_participant_does_not_exist_in_team(self):
self.url = reverse_lazy(
"participants:delete_participant_from_team",
kwargs={
"participant_team_pk": self.participant_team.pk,
"participant_pk": self.participant2.pk + 1,
},
)
expected = {"error": "Participant does not exist"}
response = self.client.delete(self.url, {})
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE)
def test_when_participant_team_does_not_exist(self):
self.url = reverse_lazy(
"participants:delete_participant_from_team",
kwargs={
"participant_team_pk": self.participant_team.pk + 1,
"participant_pk": self.participant2.pk,
},
)
expected = {"error": "ParticipantTeam does not exist"}
response = self.client.delete(self.url, {})
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE)
def test_when_participant_is_admin_and_wants_to_delete_himself(self):
self.url = reverse_lazy(
"participants:delete_participant_from_team",
kwargs={
"participant_team_pk": self.participant_team.pk,
"participant_pk": self.participant.pk,
},
)
expected = {
"error": "You are not allowed to remove yourself since you are admin. Please delete the team if you want to do so!" # noqa: ignore=E501
}
response = self.client.delete(self.url, {})
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE)
def test_when_participant_does_not_have_permissions_to_remove_another_participant(
self
):
self.url = reverse_lazy(
"participants:delete_participant_from_team",
kwargs={
"participant_team_pk": self.participant_team.pk,
"participant_pk": self.participant2.pk,
},
)
self.user3 = User.objects.create(
username="user3",
email="[email protected]",
password="user3_password",
)
EmailAddress.objects.create(
user=self.user3,
email="[email protected]",
primary=True,
verified=True,
)
self.participant3 = Participant.objects.create(
user=self.user3,
status=Participant.ACCEPTED,
team=self.participant_team,
)
self.client.force_authenticate(user=self.user3)
expected = {
"error": "Sorry, you do not have permissions to remove this participant"
}
response = self.client.delete(self.url, {})
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_when_a_participant_is_successfully_removed_from_team(self):
self.url = reverse_lazy(
"participants:delete_participant_from_team",
kwargs={
"participant_team_pk": self.participant_team.pk,
"participant_pk": self.participant2.pk,
},
)
response = self.client.delete(self.url, {})
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
class GetTeamsAndCorrespondingChallengesForAParticipant(BaseAPITestClass):
def setUp(self):
super(GetTeamsAndCorrespondingChallengesForAParticipant, self).setUp()
self.user2 = User.objects.create(
username="user2",
email="[email protected]",
password="user2_password",
)
EmailAddress.objects.create(
user=self.user2,
email="[email protected]",
primary=True,
verified=True,
)
self.participant_team2 = ParticipantTeam.objects.create(
team_name="Team B", created_by=self.user2
) # created by user2 and not user
self.participant2 = Participant.objects.create(
user=self.user2,
status=Participant.ACCEPTED,
team=self.participant_team2,
)
self.challenge_host_team = ChallengeHostTeam.objects.create(
team_name="Host Team 1", created_by=self.user2
)
self.challenge1 = Challenge.objects.create(
title="Test Challenge 1",
short_description="Short description for test challenge 1",
description="Description for test challenge 1",
terms_and_conditions="Terms and conditions for test challenge 1",
submission_guidelines="Submission guidelines for test challenge 1",
creator=self.challenge_host_team,
published=False,
is_registration_open=True,
enable_forum=True,
leaderboard_description="Lorem ipsum dolor sit amet, consectetur adipiscing elit",
anonymous_leaderboard=False,
start_date=timezone.now() - timedelta(days=2),
end_date=timezone.now() + timedelta(days=1),
)
self.challenge1.slug = "{}-{}".format(
self.challenge1.title.replace(" ", "-").lower(), self.challenge1.pk
)[:199]
self.challenge1.save()
self.challenge2 = Challenge.objects.create(
title="Test Challenge 2",
short_description="Short description for test challenge 2",
description="Description for test challenge 2",
terms_and_conditions="Terms and conditions for test challenge 2",
submission_guidelines="Submission guidelines for test challenge 2",
creator=self.challenge_host_team,
published=False,
is_registration_open=True,
enable_forum=True,
anonymous_leaderboard=False,
start_date=timezone.now() - timedelta(days=2),
end_date=timezone.now() + timedelta(days=1),
)
self.url = reverse_lazy(
"participants:get_teams_and_corresponding_challenges_for_a_participant",
kwargs={"challenge_pk": self.challenge1.pk},
)
self.time = timezone.now()
def test_get_teams_and_corresponding_challenges_for_a_participant(self):
self.challenge1.participant_teams.add(self.participant_team)
self.challenge1.save()
expected = {
"challenge_participant_team_list": [
{
"challenge": {
"id": self.challenge1.id,
"title": self.challenge1.title,
"description": self.challenge1.description,
"short_description": self.challenge1.short_description,
"terms_and_conditions": self.challenge1.terms_and_conditions,
"submission_guidelines": self.challenge1.submission_guidelines,
"evaluation_details": self.challenge1.evaluation_details,
"image": self.challenge1.image,
"start_date": "{0}{1}".format(
self.challenge1.start_date.isoformat(), "Z"
).replace("+00:00", ""),
"end_date": "{0}{1}".format(
self.challenge1.end_date.isoformat(), "Z"
).replace("+00:00", ""),
"creator": {
"id": self.challenge_host_team.id,
"team_name": self.challenge_host_team.team_name,
"created_by": self.challenge_host_team.created_by.username,
"team_url": self.challenge_host_team.team_url,
},
"published": self.challenge1.published,
"is_registration_open": self.challenge1.is_registration_open,
"enable_forum": self.challenge1.enable_forum,
"leaderboard_description": self.challenge1.leaderboard_description,
"anonymous_leaderboard": self.challenge1.anonymous_leaderboard,
"is_active": True,
"allowed_email_domains": [],
"blocked_email_domains": [],
"banned_email_ids": [],
"approved_by_admin": False,
"forum_url": self.challenge1.forum_url,
"is_docker_based": self.challenge1.is_docker_based,
"slug": self.challenge1.slug,
"max_docker_image_size": self.challenge1.max_docker_image_size,
"cli_version": self.challenge1.cli_version,
},
"participant_team": {
"id": self.participant_team.id,
"team_name": self.participant_team.team_name,
"created_by": self.participant_team.created_by.username,
"team_url": self.participant_team.team_url,
},
}
],
"is_challenge_host": False,
}
response = self.client.get(self.url, {})
# checking 'datetime_now' separately because of time difference in microseconds
self.assertTrue(
abs(response.data["datetime_now"] - self.time)
< timedelta(seconds=1)
)
# deleting field 'datetime_now' from response to check with expected response without time field
del response.data["datetime_now"]
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_get_participant_team_challenge_list(self):
self.url = reverse_lazy(
"participants:get_participant_team_challenge_list",
kwargs={"participant_team_pk": self.participant_team.pk},
)
expected = [
{
"id": self.challenge1.id,
"title": self.challenge1.title,
"description": self.challenge1.description,
"short_description": self.challenge1.short_description,
"terms_and_conditions": self.challenge1.terms_and_conditions,
"submission_guidelines": self.challenge1.submission_guidelines,
"evaluation_details": self.challenge1.evaluation_details,
"image": self.challenge1.image,
"start_date": "{0}{1}".format(
self.challenge1.start_date.isoformat(), "Z"
).replace("+00:00", ""),
"end_date": "{0}{1}".format(
self.challenge1.end_date.isoformat(), "Z"
).replace("+00:00", ""),
"creator": {
"id": self.challenge_host_team.id,
"team_name": self.challenge_host_team.team_name,
"created_by": self.challenge_host_team.created_by.username,
"team_url": self.challenge_host_team.team_url,
},
"published": self.challenge1.published,
"is_registration_open": self.challenge1.is_registration_open,
"enable_forum": self.challenge1.enable_forum,
"leaderboard_description": self.challenge1.leaderboard_description,
"anonymous_leaderboard": self.challenge1.anonymous_leaderboard,
"is_active": True,
"allowed_email_domains": [],
"blocked_email_domains": [],
"banned_email_ids": [],
"approved_by_admin": False,
"forum_url": self.challenge1.forum_url,
"is_docker_based": self.challenge1.is_docker_based,
"slug": self.challenge1.slug,
"max_docker_image_size": self.challenge1.max_docker_image_size,
"cli_version": self.challenge1.cli_version,
}
]
self.challenge1.participant_teams.add(self.participant_team)
self.challenge1.save()
response = self.client.get(self.url, {})
self.assertEqual(response.data["results"], expected)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_when_participant_team_hasnot_participated_in_any_challenge(self):
expected = {
"challenge_participant_team_list": [
{
"challenge": None,
"participant_team": {
"id": self.participant_team.id,
"team_name": self.participant_team.team_name,
"created_by": self.participant_team.created_by.username,
"team_url": self.participant_team.team_url,
},
}
],
"is_challenge_host": False,
}
response = self.client.get(self.url, {})
# checking 'datetime_now' separately because of time difference in microseconds
self.assertTrue(
abs(response.data["datetime_now"] - self.time)
< timedelta(seconds=1)
)
# deleting field 'datetime_now' from response to check with expected response without time field
del response.data["datetime_now"]
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_when_there_is_no_participant_team_of_user(self):
self.participant_team.delete()
expected = {
"challenge_participant_team_list": [],
"is_challenge_host": False,
}
response = self.client.get(self.url, {})
# checking 'datetime_now' separately because of time difference in microseconds
self.assertTrue(
abs(response.data["datetime_now"] - self.time)
< timedelta(seconds=1)
)
# deleting field 'datetime_now' from response to check with expected response without time field
del response.data["datetime_now"]
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class RemoveSelfFromParticipantTeamTest(BaseAPITestClass):
def setUp(self):
super(RemoveSelfFromParticipantTeamTest, self).setUp()
# user who create a challenge host team
self.user2 = User.objects.create(
username="someuser2", password="some_secret_password"
)
self.challenge_host_team = ChallengeHostTeam.objects.create(
team_name="Some Test Challenge Host Team", created_by=self.user2
)
self.challenge_host2 = ChallengeHost.objects.create(
user=self.user2,
team_name=self.challenge_host_team,
status=ChallengeHost.ACCEPTED,
permissions=ChallengeHost.ADMIN,
)
self.challenge = Challenge.objects.create(
title="Some Test Challenge",
short_description="Short description for some test challenge",
description="Description for some test challenge",
terms_and_conditions="Terms and conditions for some test challenge",
submission_guidelines="Submission guidelines for some test challenge",
creator=self.challenge_host_team,
published=False,
is_registration_open=True,
enable_forum=True,
leaderboard_description="Fusce quis sapien eget sem accumsan euismod",
anonymous_leaderboard=False,
start_date=timezone.now() - timedelta(days=2),
end_date=timezone.now() + timedelta(days=1),
)
self.url = reverse_lazy(
"participants:remove_self_from_participant_team",
kwargs={"participant_team_pk": self.participant_team.pk},
)
def test_when_participant_team_does_not_exist(self):
self.url = reverse_lazy(
"participants:remove_self_from_participant_team",
kwargs={"participant_team_pk": self.participant_team.pk + 1},
)
expected = {"error": "ParticipantTeam does not exist!"}
response = self.client.delete(self.url, {})
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE)
def test_when_a_participant_is_successfully_removed_from_team(self):
self.url = reverse_lazy(
"participants:remove_self_from_participant_team",
kwargs={"participant_team_pk": self.participant_team.pk},
)
response = self.client.delete(self.url, {})
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_when_participant_team_has_taken_part_in_challenges(self):
self.challenge.participant_teams.add(self.participant_team)
expected = {
"error": "Sorry, you cannot delete this team since it has taken part in challenge(s)!"
}
response = self.client.delete(self.url, {})
self.assertEqual(response.data, expected)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_participant_team_remove_when_no_participants_exists(self):
self.url = reverse_lazy(
"participants:remove_self_from_participant_team",
kwargs={"participant_team_pk": self.participant_team.pk},
)
self.client.delete(self.url, {})
participant_teams = ParticipantTeam.objects.all()
self.assertEqual(participant_teams.count(), 0)
| [
[
[
21,
30
],
[
14019,
14028
],
[
14076,
14085
],
[
21010,
21019
],
[
21067,
21076
],
[
21877,
21886
],
[
21934,
21943
],
[
25449,
25458
],
[
29182,
29191
],
[
29939,
29948
],
[
31644,
31653
],
[
31701,
31710
]
],
[
[
69,
81
],
[
1443,
1455
],
[
3164,
3176
],
[
4524,
4536
],
[
6184,
6196
],
[
6723,
6735
],
[
8590,
8602
],
[
9113,
9125
],
[
10536,
10548
],
[
11339,
11351
],
[
14369,
14381
],
[
15578,
15590
],
[
15890,
15902
],
[
16451,
16463
],
[
17033,
17045
],
[
17760,
17772
],
[
18954,
18966
],
[
21983,
21995
],
[
25820,
25832
],
[
31750,
31762
],
[
31983,
31995
],
[
32483,
32495
],
[
33306,
33318
]
],
[
[
121,
125
],
[
597,
601
],
[
891,
895
],
[
1594,
1598
],
[
4680,
4684
],
[
10226,
10230
],
[
11929,
11933
],
[
12261,
12265
],
[
15241,
15245
],
[
18020,
18024
],
[
19518,
19522
],
[
30449,
30453
]
],
[
[
151,
159
],
[
14002,
14010
],
[
14059,
14067
],
[
20993,
21001
],
[
21050,
21058
],
[
21860,
21868
],
[
21917,
21925
],
[
22170,
22178
],
[
31627,
31635
],
[
31684,
31692
]
],
[
[
196,
208
],
[
745,
757
],
[
1743,
1755
],
[
4829,
4841
],
[
12078,
12090
],
[
12410,
12422
],
[
18169,
18181
],
[
19667,
19679
]
],
[
[
236,
242
],
[
3080,
3086
],
[
3517,
3523
],
[
3892,
3898
],
[
4065,
4071
],
[
4341,
4347
],
[
6081,
6087
],
[
6531,
6537
],
[
7705,
7711
],
[
8171,
8177
],
[
8401,
8407
],
[
8879,
8885
],
[
9541,
9547
],
[
9763,
9769
],
[
10103,
10109
],
[
10823,
10829
],
[
11213,
11219
],
[
11686,
11692
],
[
14904,
14910
],
[
16342,
16348
],
[
16907,
16913
],
[
17601,
17607
],
[
18831,
18837
],
[
19291,
19297
],
[
25724,
25730
],
[
28296,
28302
],
[
29457,
29463
],
[
30214,
30220
],
[
32358,
32364
],
[
32738,
32744
],
[
33187,
33193
]
],
[
[
275,
286
],
[
483,
494
]
],
[
[
288,
297
],
[
540,
549
]
],
[
[
329,
338
],
[
13446,
13455
],
[
20335,
20344
],
[
21297,
21306
],
[
30966,
30975
]
],
[
[
364,
377
],
[
30735,
30748
],
[
30861,
30874
],
[
30909,
30922
]
],
[
[
379,
396
],
[
13305,
13322
],
[
20205,
20222
],
[
30582,
30599
]
],
[
[
429,
444
],
[
1075,
1090
],
[
10348,
10363
],
[
12605,
12620
],
[
12776,
12791
],
[
19862,
19877
],
[
33532,
33547
]
],
[
[
446,
457
],
[
1208,
1219
],
[
1299,
1310
],
[
1933,
1944
],
[
2009,
2020
],
[
5019,
5030
],
[
5095,
5106
],
[
12942,
12953
],
[
13018,
13029
],
[
13120,
13131
],
[
13196,
13207
],
[
15101,
15112
],
[
15164,
15175
],
[
15410,
15421
],
[
15486,
15497
],
[
18359,
18370
],
[
18435,
18446
],
[
20020,
20031
],
[
20096,
20107
]
],
[
[
466,
482
],
[
1413,
1429
],
[
3134,
3150
],
[
4407,
4423
],
[
6603,
6619
],
[
8470,
8486
],
[
8943,
8959
],
[
14974,
14990
],
[
19377,
19393
],
[
30276,
30292
]
],
[
[
1390,
1412
],
[
1534,
1556
]
],
[
[
3108,
3133
],
[
3255,
3280
]
],
[
[
4378,
4406
],
[
4461,
4489
]
],
[
[
6571,
6602
],
[
6657,
6688
]
],
[
[
8438,
8469
],
[
8524,
8555
]
],
[
[
8915,
8942
],
[
8997,
9024
]
],
[
[
14944,
14973
],
[
15028,
15057
]
],
[
[
19327,
19376
],
[
19431,
19480
]
],
[
[
30242,
30275
],
[
30330,
30363
]
]
] |
# copyright 2022 @Ansaku
# Telegram @AnkiSatya
# Instagram @satya_ask
import telebot
import requests
from telebot.types import InlineKeyboardButton
# Fill out the bot token obtained from BotFather here; for further queries contact @AnkiSatya on Telegram
bot = telebot.TeleBot('**********************')
while True:
try:
keyboard = telebot.types.ReplyKeyboardMarkup(resize_keyboard=True)
keyboard.add(InlineKeyboardButton(text='Buat email'))
keyboard.add(InlineKeyboardButton(text='Refresh pesan'))
keyboard.add(InlineKeyboardButton(text='Tentang'))
@bot.message_handler(commands=['start'])
def start_message(message):
bot.send_message(message.chat.id,
'Hai Pengguna., Selamat datang di TempEmail Bot \nPenggunaan:\nUntuk Menghasilkan email klik tombol "Buat email"\nUntuk menyegarkan kotak masuk Anda, klik tombol "Refresh inbox". Setelah surat baru tiba, Anda akan melihat tombol dengan baris subjek, klik tombol read the message. \n\n Dev : @AnkiSatya',
reply_markup=keyboard)
@bot.message_handler(content_types=['text'])
def send_text(message):
if message.text.lower() == 'buat email':
email = requests.get("https://www.1secmail.com/api/v1/?action=genRandomMailbox&count=1").json()[0]
ekeyboard = telebot.types.ReplyKeyboardMarkup(resize_keyboard=True)
ekeyboard.add(InlineKeyboardButton(text='Buat email'))
ekeyboard.add(InlineKeyboardButton(text='Refresh pesan\n[' + str(email) + "]"))
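                # The generated address is embedded in the button label between square brackets so a later "Refresh pesan" tap can recover it from message.text.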
ekeyboard.add(InlineKeyboardButton(text='Tentang'))
bot.send_message(message.chat.id, "E-Mail Sementara Anda:")
bot.send_message(message.chat.id, str(email), reply_markup=ekeyboard)
elif message.text.lower() == 'refresh pesan':
bot.send_message(message.chat.id, 'Pertama, buat email anda', reply_markup=keyboard)
elif message.text.lower() == 'tentang':
bot.send_message(message.chat.id,
'Apa itu Email Semantara?\n- Itu adalah layanan email gratis yang memungkinkan untuk menerima email di alamat sementara yang akan dihancurkan sendiri setelah waktu tertentu berlalu. Itu juga dikenal dengan nama-nama seperti tempmail, 10minutemail, 10minmail, throwaway email, fake-mail , fake email generator, burner mail atau trash-mail\n\nBagaimana Email Sementara Menjadi Lebih Aman bagi Anda?\n- Menggunakan Email sementara memungkinkan Anda untuk sepenuhnya melindungi kotak surat asli Anda dari hilangnya informasi pribadi. Alamat email sementara Anda sepenuhnya anonim. Detail Anda: informasi tentang orang Anda dan pengguna yang berkomunikasi dengan Anda, alamat IP, alamat email dilindungi dan sepenuhnya dirahasiakan.\n\n➪ Nama Bot : TempMail Bot\n➪ Pembuat : @AnkiSatya\n➪ Language : Python \n➪ Donasi : https://saweria.co/ansaku')
elif message.text.lower()[14] == "[":
email = message.text.lower()[15:message.text.lower().find("]")]
bkeyboard = telebot.types.ReplyKeyboardMarkup(resize_keyboard=True)
bkeyboard.add(InlineKeyboardButton(text='Refresh pesan\n[' + str(email) + "]"))
bkeyboard.add(InlineKeyboardButton(text='Buat email'))
try:
data = requests.get(
"https://www.1secmail.com/api/v1/?action=getMessages&login=" + email[:email.find(
"@")] + "&domain=" + email[email.find("@") + 1:]).json()
if 'id' in data[0]:
for i in range(len(data)):
id = data[i]['id']
subject = data[i]['subject']
fromm = data[i]['from']
date = data[i]['date']
if len(subject) > 15:
subject = str(subject[0:15]) + "..."
bkeyboard.add(InlineKeyboardButton(
text=str(subject) + "\n dari: " + fromm + " in " + "[id" + str(id) + "][" + str(
email) + "]"))
bot.send_message(message.chat.id,
"Subjek: " + subject + "\n Dari: " + fromm + "\n Tanggal:" + date,
reply_markup=bkeyboard)
count = i + 1
bot.send_message(message.chat.id, "Di Sini " + str(
count) + " Pesan ditemukan\nKlik tombol di bawah untuk membaca pesan\n\n Info lebih lanjut @AnkiSatya")
else:
bot.send_message(message.chat.id, 'Tidak ditemukan', reply_markup=bkeyboard)
except BaseException:
bot.send_message(message.chat.id, 'Tidak ada pesan yang diterima...', reply_markup=bkeyboard)
elif message.text.lower().find("[id"):
try:
data = message.text.lower()[message.text.lower().find("[id"):]
id = data[data.find("[") + 3:data.find(']')]
email = data[data.find("][") + 2:-1]
msg = requests.get("https://www.1secmail.com/api/v1/?action=readMessage&login=" + email[:email.find(
"@")] + "&domain=" + email[email.find("@") + 1:] + "&id=" + id).json()
bot.send_message(message.chat.id,
'Pesan ✉️\n\n Dari: ' + msg['from'] + "\n Subjek: " + msg[
'subject'] + "\n Tanggal: " + msg[
'date'] + "\n Teks: " + msg['textBody'])
except BaseException:
pass
bot.polling(none_stop=True, interval=1, timeout=5000)
except BaseException:
pass
# Stay tuned for more : Telegram @AnkiSatya
| [
[
[
80,
87
],
[
253,
260
],
[
342,
349
],
[
1402,
1409
],
[
3163,
3170
]
],
[
[
96,
104
],
[
1282,
1290
],
[
3438,
3446
],
[
5392,
5400
]
],
[
[
132,
152
],
[
420,
440
],
[
483,
503
],
[
549,
569
],
[
1489,
1509
],
[
1561,
1581
],
[
1658,
1678
],
[
3250,
3270
],
[
3347,
3367
],
[
4113,
4133
]
],
[
[
247,
250
],
[
601,
604
],
[
1126,
1129
],
[
5981,
5984
],
[
691,
694
],
[
1713,
1716
],
[
1790,
1793
],
[
1936,
1939
],
[
2091,
2094
],
[
4330,
4333
],
[
4615,
4618
],
[
4852,
4855
],
[
4989,
4992
],
[
5604,
5607
]
],
[
[
331,
339
],
[
407,
415
],
[
470,
478
],
[
536,
544
],
[
1102,
1110
],
[
2011,
2019
]
],
[
[
654,
667
]
],
[
[
1183,
1192
]
],
[
[
654,
667
]
],
[
[
1183,
1192
]
]
] |
#name_scan "d/yourdomain" 1
import sys, os
#sys.path.append('/home/khal/sources/nmcontrol/lib/')
import DNS
import rpcClient
import struct, listdns, base64, types, json, random
#from jsonrpc import ServiceProxy
from utils import *
from common import *
class Source(object):
#def __init__(self):
#self.servers = app['services']['dns'].conf['resolver'].split(',')
#self.reqobj = DNS.Request()
#jsonfile = open("config.json", "r")
#data = json.loads(jsonfile.read())
#jsonfile.close()
#username = str(data[u"username"])
#port = data[u"port"]
#password = str(data[u"password"])
#self.sp = ServiceProxy("http://%(user)s:%(passwd)[email protected]:%(port)d" % dict(user=username, passwd=password, port=port))
    #self.sp = rpcClient.rpcClientNamecoin('127.0.0.1', port, username, password)
#self.sp = app['plugins']['domain']
# def _parse_file(self):
# f = open(self._filename, "r")
# for line in f.readlines():
# line = line.strip()
# if line and line[0] != '#':
# question, type, value = line.split()
# question = question.lower()
# type = type.upper()
# if question == '@':
# question = ''
# if type == 'A':
# answer = struct.pack("!I", ipstr2int(value))
# qtype = 1
# if type == 'NS':
# answer = labels2str(value.split("."))
# qtype = 2
# elif type == 'CNAME':
# answer = labels2str(value.split("."))
# qtype = 5
# elif type == 'TXT':
# answer = label2str(value)
# qtype = 16
# elif type == 'MX':
# preference, domain = value.split(":")
# answer = struct.pack("!H", int(preference))
# answer += labels2str(domain.split("."))
# qtype = 15
# self._answers.setdefault(question, {}).setdefault(qtype, []).append(answer)
# f.close()
def isIP(self, host) :
parts = host.split(".")
if len(parts) != 4:
return False
try :
valid = False
for part in parts :
intpart = int(part)
if intpart <= 255 and intpart >= 0 :
valid = True
else : return False
if valid :
return True
return False
except : return False
def get_response(self, query, domain, qtype, qclass, src_addr):
#print query
#print domain
#print qtype
#print qclass
#print src_addr
if qtype == 1:
#answer = struct.pack("!I", ipstr2int(value))
reqtype = "A"
if qtype == 2:
#answer = labels2str(value.split("."))
reqtype = "NS"
elif qtype == 5:
#answer = labels2str(value.split("."))
reqtype = "CNAME"
elif qtype == 16:
#answer = label2str(value)
reqtype = "TXT"
elif qtype == 15:
#preference, domain = value.split(":")
            #answer = struct.pack("!H", int(preference))
#answer += labels2str(domain.split("."))
reqtype = "MX"
elif qtype == 28:
#answer = struct.pack("!I", ipstr2int(value))
reqtype = "AAAA"
elif qtype == 52:
reqtype = "TLSA"
else : reqtype = None
answers = app['services']['dns'].lookup({"query":query, "domain":domain, "qtype":qtype, "qclass":qclass, "src_addr":src_addr})
#print 'domain:', domain
#print 'answers:', answers
if domain.endswith(".bit") or domain.endswith(".tor") :
#response = listdns.lookup(self.sp, {"query":query, "domain":domain, "qtype":qtype, "qclass":qclass, "src_addr":src_addr})
#response = self.sp.lookup({"query":query, "domain":domain, "qtype":qtype, "qclass":qclass, "src_addr":src_addr})
response = answers
results = []
if type(response) == types.DictType :
tempresults = {"qtype":response["type"], "qclass":response["class"], "ttl":response["ttl"]}
if response["type"] == 1 :
#if answers == [] :
# return self.get_response(query, domain, 5, qclass, src_addr)
tempresults["rdata"] = struct.pack("!I", ipstr2int(response["data"]))
elif response["type"] == 2 or response["type"] == 5:
tempresults["rdata"] = labels2str(response["data"].split("."))
elif response["type"] == 16 :
tempresults["rdata"] = labels2str(response["data"])
elif response["type"] == 15 :
tempresult = struct.pack("!H", response["data"][0])
tempresult += labels2str(response["data"][1].split("."))
tempresults["rdata"] = tempresult
elif response["type"] == 28 :
tempresults["rdata"] = response["data"]
elif response["type"] == 52 :
tempresult = '\x03\x00'
tempresult += chr(int(response["data"][0][0]))
tempresult += bytearray.fromhex(response["data"][0][1])
tempresults["rdata"] = tempresult
#else : return 3, []
results.append(tempresults)
return 0, results
if type(response) == types.StringType :
if self.isIP(response) :
return 0, [{"qtype":1, "qclass":qclass, "ttl":300, "rdata":struct.pack("!I", ipstr2int(response))}]
return 3, []
#if query not in self._answers:
#return 3, []
#if qtype in self._answers[query]:
#if domain == "sonicrules.bit":
# results = [{'qtype': 1, 'qclass':qclass, 'ttl': 300, 'rdata': struct.pack("!I", ipstr2int(self.reqobj.req("sonicrules.org", qtype=1).answers[0]["data"]))}]
# return 0, results
#elif qtype == 1:
# if they asked for an A record and we didn't find one, check for a CNAME
#return self.get_response(query, domain, 5, qclass, src_addr)
else:
#server = self.servers[random.randrange(0, len(self.servers)-1)]
#answers = self.reqobj.req(name=domain, qtype=qtype, server=server).answers
results = []
for response in answers :
tempresults = {"qtype":response["type"], "qclass":response["class"], "ttl":response["ttl"]}
if response["type"] == 1 :
if answers == [] :
return self.get_response(query, domain, 5, qclass, src_addr)
tempresults["rdata"] = struct.pack("!I", ipstr2int(response["data"]))
elif response["type"] == 2 or response["type"] == 5:
tempresults["rdata"] = labels2str(response["data"].split("."))
elif response["type"] == 16 :
tempresults["rdata"] = labels2str(response["data"])
elif response["type"] == 15 :
tempresult = struct.pack("!H", response["data"][0])
tempresult += labels2str(response["data"][1].split("."))
tempresults["rdata"] = tempresult
elif response["type"] == 28 :
if answers == [] :
return self.get_response(query, domain, 5, qclass, src_addr)
#tempresults["rdata"] = struct.pack("!I", ipstr2int(response["data"]))
tempresults["rdata"] = response["data"]
elif response["type"] == 52 :
tempresults["rdata"] = response["data"]
#else : return 3, []
results.append(tempresults)
return 0, results
return 3, []
| [
[
[
35,
38
]
],
[
[
40,
42
]
],
[
[
104,
107
]
],
[
[
115,
124
]
],
[
[
132,
138
],
[
4552,
4558
],
[
4948,
4954
],
[
5798,
5804
],
[
6995,
7001
],
[
7391,
7397
]
],
[
[
140,
147
]
],
[
[
149,
155
]
],
[
[
157,
162
],
[
4215,
4220
],
[
5659,
5664
]
],
[
[
164,
168
]
],
[
[
170,
176
]
],
[
[
229,
230
]
],
[
[
250,
251
],
[
3616,
3619
],
[
4570,
4579
],
[
4711,
4721
],
[
4840,
4850
],
[
5021,
5031
],
[
5816,
5825
],
[
7013,
7022
],
[
7154,
7164
],
[
7283,
7293
],
[
7464,
7474
]
],
[
[
259,
265
]
]
] |
"""Tests for the :mod:`~polymatheia.data.writer` package."""
import json
import os
from shutil import rmtree
from polymatheia.data import NavigableDict
from polymatheia.data.writer import JSONWriter
DOCUMENTS = [NavigableDict(r) for r in [
{
'id': '1',
'name': {
'first': 'A',
'last': 'Person'
},
'age': 32,
'special tags': 'The first'
},
{
'id': '2',
'name': {
'first': ['Another', {'abbr': 'Nameless'}],
'last': 'Parrot'
},
'age': 23,
},
{
'id': '3',
'name': {
'first': 'The',
'last': 'Last'
},
'age': 65,
},
]]
def test_local_json_writing():
"""Test writing to the local filesystem."""
rmtree('tmp/json_writer_test', ignore_errors=True)
writer = JSONWriter('tmp/json_writer_test', 'id')
writer.write(DOCUMENTS)
count = 0
for basepath, _, filenames in os.walk('tmp/json_writer_test'):
for filename in filenames:
if filename.endswith('.json'):
count = count + len(filenames)
with open(os.path.join(basepath, filename)) as in_f:
doc = json.load(in_f)
assert 'id' in doc
assert 'name' in doc
if doc['id'] == '2':
assert 'first' in doc['name']
assert len(doc['name']['first']) == 2
else:
assert 'first' in doc['name']
assert 'last' in doc['name']
assert 'age' in doc
if doc['id'] == '1':
assert 'special tags' in doc
assert count == 3
def test_local_json_writing_pre_split_id_path():
"""Test writing to the local filesystem."""
rmtree('tmp/json_writer_test', ignore_errors=True)
writer = JSONWriter('tmp/json_writer_test', ['id'])
writer.write(DOCUMENTS)
count = 0
for basepath, _, filenames in os.walk('tmp/json_writer_test'):
for filename in filenames:
if filename.endswith('.json'):
count = count + len(filenames)
with open(os.path.join(basepath, filename)) as in_f:
doc = json.load(in_f)
assert 'id' in doc
assert 'name' in doc
if doc['id'] == '2':
assert 'first' in doc['name']
assert len(doc['name']['first']) == 2
else:
assert 'first' in doc['name']
assert 'last' in doc['name']
assert 'age' in doc
if doc['id'] == '1':
assert 'special tags' in doc
assert count == 3
| [
[
[
68,
72
],
[
1237,
1241
],
[
2314,
2318
]
],
[
[
80,
82
],
[
984,
986
],
[
1168,
1170
],
[
2061,
2063
],
[
2245,
2247
]
],
[
[
103,
109
],
[
803,
809
],
[
1878,
1884
]
],
[
[
140,
153
],
[
216,
229
]
],
[
[
190,
200
],
[
867,
877
],
[
1942,
1952
]
],
[
[
203,
212
],
[
925,
934
],
[
2002,
2011
]
],
[
[
724,
747
]
],
[
[
1781,
1822
]
]
] |
# -*- coding: utf-8; -*-
from __future__ import unicode_literals, absolute_import
import json
import requests
import six
from tests import unittest, mock
from freight_forwarder.registry import Registry, V1, V2
from freight_forwarder.registry.registry_base import RegistryBase, RegistryException
from ..factories.registry_factory import RegistryV1Factory, RegistryV2Factory
class RegistryTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@mock.patch.object(V1, '_validate_response', autospec=True, return_value=True)
@mock.patch('freight_forwarder.registry.registry_base.requests', autospec=True)
def test_registry_v1_init(self, mock_requests, mock_v1_validate):
test_registry = Registry()
self.assertIsInstance(test_registry, RegistryBase)
self.assertEquals(test_registry.ping(), True)
@mock.patch.object(V1, '_validate_response', name="v1_validate")
@mock.patch.object(V2, '_validate_response', name="v2_validate")
@mock.patch('freight_forwarder.registry.registry_base.requests', autospec=True)
def test_registry_v2_init(self, mock_requests, mock_v2, mock_v1):
mock_v1.side_effect = RegistryException("test")
mock_v2.return_value = True
test_v1_registry = RegistryV1Factory()
test_v2_registry = RegistryV2Factory()
# This is stated to ensure the test environment is setup correctly
# validated v1.ping() returns an exception
with self.assertRaises(RegistryException):
test_v1_registry.ping()
# validated v2.ping() returns an exception
self.assertEquals(test_v2_registry.ping(), True)
# Validate the logic of the registry class to return a V2 object
test_registry = Registry(address="https://v2.dockertest.io")
self.assertIsInstance(test_registry, RegistryBase)
class RegistryV1Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@mock.patch.object(V1, '_validate_response', return_value=True)
@mock.patch.object(V1, '_request_builder')
@mock.patch('freight_forwarder.registry.registry_base.requests', autospec=True)
def test_v1_search(self, mock_requests, mock_request_builder, mock_validate_response):
        # Defined Search Request
search_response_content = {
"num_results": 3,
"query": "test",
"results": [
{"description": "api test app", "name": "testproject/test-app"},
{"description": "database test app", "name": "testproject/test-db"},
{"description": "cache test app", "name": "testproject/test-cache"}
]
}
# Define Response Value for content once request has been validated
mock_request_builder.return_value = create_response_object(
url="https://search.registry.docker.com",
status_code=200,
content=json.dumps(search_response_content).encode('utf-8')
)
# Define Default value for utils _validate_reponse
mock_validate_response.return_value = True
# Build V1 Factory Registry
test_registry = RegistryV1Factory(address='https://search.registry.docker.com')
results = test_registry.search("test")
self.assertIsInstance(results, dict)
@mock.patch.object(V1, '_validate_response', return_value=True)
@mock.patch.object(V1, '_request_builder')
@mock.patch('freight_forwarder.registry.registry_base.requests', autospec=True)
def test_v1_tags(self, mock_requests, mock_request_builder, mock_validate_response):
tag_response_content = {
"0.1": "3fad19bfa2",
"latest": "xxxxxxxxxx",
"localtest": "xxxxxxxxxxxxxxae13",
"redis123123": "xxxxxxxxxxxxxxae132",
"jira1268": "xxxxxxxxxxxxxxae1324987"
}
formatted_output = [
'appexample/test-app:0.1',
'appexample/test-app:latest',
'appexample/test-app:us-east-01-dev',
'appexample/test-app:localtest',
'appexample/test-app:redis123123',
'appexample/test-app:jira1268'
]
mock_request_builder.return_value = create_response_object(
url="https://tag.registry.docker.com",
status_code=200,
content=json.dumps(tag_response_content).encode('utf-8')
)
mock_validate_response.return_value = True
test_registry = RegistryV1Factory(address='https://tag.registry.docker.com')
for tag in test_registry.tags("appexample/test-app"):
tag_output = "".join(tag)
self.assertIsInstance(tag_output, six.string_types)
self.assertIn(tag_output, formatted_output)
def test_delete_tag(self):
self.skipTest("Implemented but not used")
def test_delete(self):
self.skipTest("Implemented but not used")
def test_get_image_by_id(self):
self.skipTest("Implemented but not used")
def test_get_image_id_by_tag(self):
self.skipTest("Implemented but not used")
def set_image_tag(self):
self.skipTest("Implemented but not used")
class RegistryV2Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@mock.patch.object(V2, '_validate_response', name='mock_v2_validate_response', return_value=True)
@mock.patch.object(V2, '_request_builder', name='mock_v2_request_builder')
@mock.patch('freight_forwarder.registry.registry_base.requests', autospec=True)
def test_v2_search(self, mock_requests, mock_request_builder, mock_validate_response):
# Defined Search Request
search_response_content = json.dumps({"repositories": ["appexample/test-app",
"appexample/test-db",
"appexample/test-cache"]}).encode('utf-8')
response = create_response_object(url="https://v2search.registry.docker.com",
status_code=200,
content=search_response_content)
# Define Response Value for content once request has been validated
mock_request_builder.return_value = response
# Define Default value for utils _validate_response
mock_validate_response.return_value = True
# Build V1 Factory Registry
test_registry = RegistryV2Factory(address='https://v2search.registry.docker.com')
test_registry.search("test")
for search in test_registry.search("test"):
search_output = "".join(search)
self.assertIsInstance(search_output, six.string_types)
@mock.patch.object(V2, '_validate_response', name='mock_v2_validate_response', return_value=True)
@mock.patch.object(V2, '_request_builder', name='mock_v2_request_builder')
@mock.patch('freight_forwarder.registry.registry_base.requests', autospec=True)
def test_v2_tags(self, mock_requests, mock_request_builder, mock_validate_response):
tag_response_content = json.dumps({"name": "appexample/test-app",
"tags": [
"latest",
"0.0.15",
"asdfasb81"]
}
).encode('utf-8')
formatted_output = ['appexample/test-app:latest',
'appexample/test-app:0.0.15',
'appexample/test-app:asdfasb81']
response = create_response_object(url="https://v2tags.registry.docker.com",
status_code=200,
content=tag_response_content)
mock_request_builder.return_value = response
mock_validate_response.return_value = True
test_registry = RegistryV2Factory(address='https://v2tags.registry.docker.com')
for tags in test_registry.tags("appexample/test-app"):
tag_output = "".join(tags)
self.assertIsInstance(tag_output, six.string_types)
self.assertIn(tag_output, formatted_output)
def test_blobs(self):
self.skipTest("Not implemented")
def test_catalog(self, count=None, last=None):
self.skipTest("Not implemented")
def test_manifests(self):
self.skipTest("Not implemented")
class RegistryBaseTests(unittest.TestCase):
def setUp(self):
self.patch_requests = mock.patch('freight_forwarder.registry.registry_base.requests', autospec=True)
self.patch_requests.start()
self.test_registry = RegistryV1Factory(address="https://registrybasetest.docker.com")
def tearDown(self):
self.patch_requests.stop()
del self.test_registry
def test_ping(self):
self.skipTest("Defined as abc method. Override in class")
def test_tags(self):
self.skipTest("Defined as abc method. Override in class")
def test_init(self):
self.assertEquals(self.test_registry.scheme, 'https://')
self.assertEquals(self.test_registry.location, 'registrybasetest.docker.com')
self.assertEquals(self.test_registry.auth, None)
self.assertEquals(self.test_registry.__str__(), "https://registrybasetest.docker.com")
self.assertIsInstance(self.test_registry, RegistryBase)
def test_registry_base_auth_base_functionality(self):
self.assertEquals(self.test_registry.auth, None)
with self.assertRaises(TypeError):
self.test_registry.auth = ["user=test_user", "passwd=password"]
def test_registry_base_auth_with_auth(self):
pass
class RegistryExceptionTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_exception_with_status_code_and_url(self):
response = create_response_object(url="https://bad.docker.io",
status_code=503,
content={"test": "data"})
registry_exception = RegistryException(response)
self.assertIsInstance(registry_exception, RegistryException)
self.assertEquals(registry_exception.response.status_code, 503)
def test_exception_with_no_content(self):
response = create_response_object(url="https://nocontent.docker.io",
status_code=503)
registry_exception = RegistryException(response)
self.assertIsInstance(registry_exception, RegistryException)
self.assertEquals(registry_exception.message, 'There was an issue with the request to the docker registry.')
def test_exception_with_error_content(self):
# TODO - grab a properly formatted error for testing
response = create_response_object(url="https://errorcontent.docker.io",
status_code=500,
content=json.dumps({'error': 'Docker Registry Error Example'}))
registry_exception = RegistryException(response)
self.assertIsInstance(registry_exception, RegistryException)
self.assertEquals(registry_exception.message, 'Docker Registry Error Example')
# Test the class.__str__ MagicMethod
self.assertEquals("{0}".format(registry_exception), 'Docker Registry Error Example')
def create_response_object(url, status_code, content=None):
"""
The function generates a mock object that is properly formatted for the RegistryException and validates the input
:param url: url to pass through for the mock request object
:param status_code: status code to append to the response object
:param content: **required** if not provided, this attribute will be blocked
    :return: Parent Mock: requests.Response Child Mock: request - requests.PreparedRequest
"""
if not isinstance(url, six.string_types):
raise(TypeError("incorrect type provided for url"))
if not isinstance(status_code, six.integer_types):
raise(TypeError("incorrect type provided for http status code"))
mock_object_request = mock.MagicMock(spec=requests.PreparedRequest, url=url)
mock_object_response = mock.MagicMock(spec=requests.Response, request=mock_object_request)
mock_object_response.status_code = status_code
if content:
mock_object_response.content = content
else:
# this blocks the content attribute from being present
del mock_object_response.content
return mock_object_response
def format_image_results(registry_response_dict):
"""
Response attribute content is formatted correctly for the Images
:param response: response object with content attribute
:return: dict of various images
"""
if not isinstance(registry_response_dict, dict):
raise TypeError('registry_response_dict must be a dict.')
images = {}
results = registry_response_dict.get('results')
if results:
for image in results:
images[image.get('name')] = image
return images
| [
[
[
48,
64
]
],
[
[
66,
81
]
],
[
[
89,
93
],
[
2973,
2977
],
[
4387,
4391
],
[
5762,
5766
],
[
7181,
7185
],
[
11173,
11177
]
],
[
[
102,
110
],
[
12363,
12371
],
[
12445,
12453
]
],
[
[
118,
121
],
[
4731,
4734
],
[
6777,
6780
],
[
8293,
8296
],
[
12108,
12111
],
[
12223,
12226
]
],
[
[
140,
148
],
[
410,
418
],
[
1913,
1921
],
[
5246,
5254
],
[
8626,
8634
],
[
9903,
9911
]
],
[
[
150,
154
],
[
508,
512
],
[
591,
595
],
[
894,
898
],
[
963,
967
],
[
1032,
1036
],
[
2011,
2015
],
[
2079,
2083
],
[
2126,
2130
],
[
3370,
3374
],
[
3438,
3442
],
[
3485,
3489
],
[
5344,
5348
],
[
5446,
5450
],
[
5525,
5529
],
[
6801,
6805
],
[
6903,
6907
],
[
6982,
6986
],
[
8697,
8701
],
[
12343,
12347
],
[
12425,
12429
]
],
[
[
209,
217
],
[
764,
772
],
[
1786,
1794
]
],
[
[
219,
221
],
[
526,
528
],
[
912,
914
],
[
2029,
2031
],
[
2097,
2099
],
[
3388,
3390
],
[
3456,
3458
]
],
[
[
223,
225
],
[
981,
983
],
[
5362,
5364
],
[
5464,
5466
],
[
6819,
6821
],
[
6921,
6923
]
],
[
[
279,
291
],
[
820,
832
],
[
1876,
1888
],
[
9560,
9572
]
],
[
[
293,
310
],
[
1211,
1228
],
[
1524,
1541
],
[
10278,
10295
],
[
10356,
10373
],
[
10659,
10676
],
[
10737,
10754
],
[
11258,
11275
],
[
11336,
11353
]
],
[
[
352,
369
],
[
1300,
1317
],
[
3207,
3224
],
[
4523,
4540
],
[
8841,
8858
]
],
[
[
371,
388
],
[
1347,
1364
],
[
6528,
6545
],
[
8080,
8097
]
],
[
[
397,
409
]
],
[
[
1898,
1912
]
],
[
[
5231,
5245
]
],
[
[
8608,
8625
]
],
[
[
9881,
9902
]
],
[
[
11587,
11609
],
[
2846,
2868
],
[
4263,
4285
],
[
6025,
6047
],
[
7754,
7776
],
[
10070,
10092
],
[
10513,
10535
],
[
11003,
11025
]
],
[
[
12761,
12781
]
]
] |
__all__ = ["loss_fn"]
from icevision.imports import *
def loss_fn(preds, targets) -> torch.Tensor:
return preds["loss"]
| [
[
[
0,
7
]
],
[
[
53,
54
],
[
88,
93
]
],
[
[
61,
68
]
]
] |
# Generated by Django 3.2.4 on 2021-09-11 12:44
import ckeditor_uploader.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('blog', '0007_subscriber'),
]
operations = [
migrations.AlterField(
model_name='post',
name='content',
field=ckeditor_uploader.fields.RichTextUploadingField(),
),
]
| [
[
[
56,
80
],
[
348,
365
]
],
[
[
103,
113
],
[
132,
142
],
[
248,
258
]
],
[
[
122,
131
]
]
] |
import unittest
from numpy.testing import assert_allclose
from qspectra import polarization
from qspectra.simulate import decorators
class TestGetCallArgs(unittest.TestCase):
def test(self):
self.assertEqual(
decorators._get_call_args(lambda a: None, 1),
{'a': 1})
self.assertEqual(
decorators._get_call_args(lambda a, **b: None, 1),
{'a': 1})
self.assertEqual(
decorators._get_call_args(lambda a, **b: None, a=1, c=2),
{'a': 1, 'c': 2})
self.assertEqual(
decorators._get_call_args(lambda **b: None, a=1, c=2),
{'a': 1, 'c': 2})
with self.assertRaises(NotImplementedError):
decorators._get_call_args(lambda *a: None, 1, 2, 3)
class TestIsotropicAverage(unittest.TestCase):
def test_optional_2nd_order_isotropic_average(self):
binary = {'xx': 1, 'yy': 2, 'zz': 4}
f = decorators.optional_2nd_order_isotropic_average(
lambda polarization: (0, binary[polarization]))
assert_allclose(f('xx'), (0, 1))
assert_allclose(f('xx', exact_isotropic_average=False), (0, 1))
assert_allclose(f('xx', exact_isotropic_average=True), (0, 7 / 3.0))
assert_allclose(f('xy', exact_isotropic_average=True), (0, 0))
with self.assertRaises(ValueError):
# wrong number of polarizations
f('xyz', exact_isotropic_average=True)
def test_optional_4th_order_isotropic_average(self):
binary = {'xx': 1, 'yy': 2, 'zz': 4}
f = decorators.optional_4th_order_isotropic_average(
lambda polarization: (0, binary[polarization[:2]]
+ 10 * binary[polarization[2:]]))
assert_allclose(f('xxxx'), (0, 11))
ma = polarization.MAGIC_ANGLE
assert_allclose(f([0, 0, ma, ma], exact_isotropic_average=True),
(0, (11 + 12 + 14 + 21 + 22 + 24 + 41 + 42 + 44) / 9.0))
with self.assertRaises(ValueError):
# wrong number of polarizations
f('xyz', exact_isotropic_average=True)
| [
[
[
7,
15
],
[
158,
166
],
[
810,
818
]
],
[
[
42,
57
],
[
1061,
1076
],
[
1102,
1117
],
[
1174,
1189
],
[
1251,
1266
],
[
1759,
1774
],
[
1841,
1856
]
],
[
[
80,
92
],
[
1808,
1820
]
],
[
[
123,
133
],
[
236,
246
],
[
342,
352
],
[
453,
463
],
[
579,
589
],
[
729,
739
],
[
944,
954
],
[
1568,
1578
]
],
[
[
142,
157
]
],
[
[
789,
809
]
]
] |
# proxy module
from traitsui.editors.check_list_editor import *
| [
[
[
62,
63
]
]
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed May 29 16:16:57 2019
@author: rakshit
"""
import os
import cv2
import argparse
import matplotlib
import numpy as np
import deepdish as dd
import scipy.io as scio
print('Extracting Santini')
parser = argparse.ArgumentParser()
parser.add_argument('--noDisp', help='Specify flag to display labelled images', type=int)
parser.add_argument('--path2ds', help='Path to dataset', type=str)
args = parser.parse_args()
if args.noDisp:
noDisp = True
print('No graphics')
else:
noDisp = False
print('Showing figures')
gui_env = ['Qt5Agg','WXAgg','TKAgg','GTKAgg']
for gui in gui_env:
try:
print("testing: {}".format(gui))
matplotlib.use(gui,warn=False, force=True)
from matplotlib import pyplot as plt
break
except:
continue
print("Using: {}".format(matplotlib.get_backend()))
plt.ion()
args.path2ds = '/media/rakshit/tank/Dataset'
PATH_DIR = os.path.join(args.path2ds, 'Santini')
PATH_DS = os.path.join(args.path2ds, 'All')
PATH_MASTER = os.path.join(args.path2ds, 'MasterKey')
list_ds = ['1', '2', '3', '4', '5', '6']
sc = (640.0/384.0)
Image_counter = 0.0
ds_num = 24
def mypause(interval):
backend = plt.rcParams['backend']
if backend in matplotlib.rcsetup.interactive_bk:
figManager = matplotlib._pylab_helpers.Gcf.get_active()
if figManager is not None:
canvas = figManager.canvas
if canvas.figure.stale:
canvas.draw()
canvas.start_event_loop(interval)
return
def fix_pupil_loc(p, res):
# res: [H, W]
p[0] = 0.5*p[0]
p[1] = res[0] - 0.5*p[1]
return p
def readFormattedText(path2file, ignoreLines):
data = []
count = 0
f = open(path2file, 'r')
for line in f:
d = [int(d) for d in line.split() if d.isdigit()]
count = count + 1
if d and count > ignoreLines:
data.append(d)
f.close()
return data
for name in list_ds:
# Ignore the first row and column.
# Columns: [index, p_x, p_y]
opts = os.listdir(os.path.join(PATH_DIR, name))
for subdir in opts:
PATH_DATA = os.path.join(PATH_DIR, name, subdir)
# Read pupil data
Path2text = os.path.join(PATH_DATA, 'journal-{:04d}.txt'.format(int(subdir)-1))
Path2vid = os.path.join(PATH_DATA, 'eye-{:04d}-0000.avi'.format(int(subdir)-1))
PupilData = np.array(readFormattedText(Path2text, 2))
VidObj = cv2.VideoCapture(Path2vid)
keydict = {k:[] for k in ['pupil_loc', 'archive', 'data_type', 'resolution', 'dataset', 'subset']}
# Generate empty dictionaries
keydict['data_type'] = 0 # Only pupil center available
keydict['resolution'] = []
keydict['dataset'] = 'Santini'
keydict['subset'] = '{}-{}'.format(name, subdir)
# Create an empty dictionary as per agreed structure
Data = {k:[] for k in ['Images', 'Info', 'Masks', 'Masks_noSkin', 'Fits', 'pupil_loc']}
Data['Fits'] = {k:[] for k in ['pupil', 'pupil_norm', 'pupil_phi', 'iris', 'iris_norm', 'iris_phi']}
if not noDisp:
fig, plts = plt.subplots(1,1)
fr_num = 0
while(VidObj.isOpened()):
ret, I = VidObj.read()
if ret == True:
I = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)
I = cv2.resize(I, (640, 480), cv2.INTER_LANCZOS4)
Data['Images'].append(I)
keydict['resolution'].append(I.shape)
keydict['archive'].append(ds_num)
pupil_loc = fix_pupil_loc(PupilData[fr_num, 10:12]*sc, I.shape)
keydict['pupil_loc'].append(pupil_loc)
Data['pupil_loc'].append(pupil_loc)
Data['Info'].append(str(fr_num))
fr_num+=1
Image_counter+=1
if not noDisp:
if fr_num == 1:
cI = plts.imshow(I)
cX = plts.scatter(pupil_loc[0], pupil_loc[1])
plt.show()
plt.pause(.01)
else:
newLoc = np.array([pupil_loc[0], pupil_loc[1]])
cI.set_data(I)
cX.set_offsets(newLoc)
mypause(0.01)
else: # No more frames to load
break
Data['Images'] = np.stack(Data['Images'], axis=0)
Data['pupil_loc'] = np.stack(Data['pupil_loc'], axis=0)
keydict['pupil_loc'] = np.stack(keydict['pupil_loc'], axis=0)
keydict['resolution'] = np.stack(keydict['resolution'], axis=0)
keydict['archive'] = np.stack(keydict['archive'], axis=0)
# Save out data
dd.io.save(os.path.join(PATH_DS, str(ds_num)+'.h5'), Data)
scio.savemat(os.path.join(PATH_MASTER, str(ds_num)), keydict, appendmat=True)
ds_num=ds_num+1 | [
[
[
116,
118
],
[
967,
969
],
[
1015,
1017
],
[
1063,
1065
],
[
2096,
2098
],
[
2107,
2109
],
[
2181,
2183
],
[
2265,
2267
],
[
2352,
2354
],
[
4800,
4802
],
[
4869,
4871
]
],
[
[
126,
129
],
[
2500,
2503
],
[
3338,
3341
],
[
3354,
3357
],
[
3394,
3397
],
[
3420,
3423
]
],
[
[
137,
145
],
[
268,
276
]
],
[
[
153,
163
],
[
716,
726
],
[
873,
883
],
[
1276,
1286
],
[
1332,
1342
]
],
[
[
171,
182
],
[
2441,
2443
],
[
4197,
4199
],
[
4451,
4453
],
[
4512,
4514
],
[
4579,
4581
],
[
4650,
4652
],
[
4719,
4721
]
],
[
[
190,
204
],
[
4789,
4791
]
],
[
[
212,
228
],
[
4856,
4860
]
],
[
[
259,
265
],
[
294,
300
],
[
384,
390
],
[
458,
464
]
],
[
[
451,
455
],
[
481,
485
],
[
911,
915
],
[
980,
984
],
[
1028,
1032
],
[
1076,
1080
]
],
[
[
498,
504
],
[
3151,
3157
],
[
3906,
3912
]
],
[
[
547,
553
],
[
3151,
3157
],
[
3906,
3912
]
],
[
[
592,
599
],
[
649,
656
]
],
[
[
642,
645
],
[
702,
705
],
[
731,
734
]
],
[
[
790,
803
],
[
900,
903
],
[
3183,
3186
],
[
4088,
4091
],
[
4123,
4126
],
[
1234,
1237
]
],
[
[
790,
803
],
[
900,
903
],
[
3183,
3186
],
[
4088,
4091
],
[
4123,
4126
],
[
1234,
1237
]
],
[
[
956,
964
],
[
2120,
2128
],
[
2194,
2202
]
],
[
[
1005,
1012
],
[
4813,
4820
]
],
[
[
1049,
1060
],
[
4882,
4893
]
],
[
[
1103,
1110
],
[
2004,
2011
]
],
[
[
1145,
1147
],
[
3654,
3656
]
],
[
[
1164,
1177
],
[
3866,
3879
]
],
[
[
1184,
1190
],
[
3578,
3584
],
[
4826,
4832
],
[
4899,
4905
],
[
4949,
4955
]
],
[
[
1201,
1208
],
[
4346,
4353
]
],
[
[
1585,
1598
],
[
3615,
3628
]
],
[
[
1693,
1710
],
[
2450,
2467
]
],
[
[
1996,
2000
],
[
2130,
2134
],
[
2204,
2208
],
[
2854,
2858
]
],
[
[
2089,
2093
],
[
2155,
2159
]
],
[
[
2145,
2151
],
[
2210,
2216
],
[
2321,
2327
],
[
2409,
2415
],
[
2860,
2866
]
],
[
[
2169,
2178
],
[
2278,
2287
],
[
2365,
2374
]
],
[
[
2253,
2262
],
[
2468,
2477
]
],
[
[
2341,
2349
],
[
2517,
2525
]
],
[
[
2429,
2438
],
[
3629,
3638
]
],
[
[
2491,
2497
],
[
3234,
3240
],
[
3275,
3281
]
],
[
[
2536,
2543
],
[
2682,
2689
],
[
2745,
2752
],
[
2780,
2787
],
[
2819,
2826
],
[
3498,
3505
],
[
3552,
3559
],
[
3684,
3691
],
[
4588,
4595
],
[
4556,
4563
],
[
4659,
4666
],
[
4626,
4633
],
[
4728,
4735
],
[
4698,
4705
],
[
4909,
4916
]
],
[
[
2938,
2942
],
[
3034,
3038
],
[
3457,
3461
],
[
3739,
3743
],
[
3791,
3795
],
[
4460,
4464
],
[
4434,
4438
],
[
4521,
4525
],
[
4492,
4496
],
[
4842,
4846
]
],
[
[
3171,
3174
]
],
[
[
3176,
3180
],
[
3979,
3983
],
[
4023,
4027
]
],
[
[
3209,
3215
],
[
3639,
3645
],
[
3815,
3821
],
[
3840,
3846
]
],
[
[
3266,
3269
],
[
3304,
3307
]
],
[
[
3271,
3272
],
[
3351,
3352
]
],
[
[
3334,
3335
],
[
3405,
3406
]
],
[
[
3390,
3391
],
[
3479,
3480
],
[
3527,
3528
],
[
3658,
3659
],
[
3991,
3992
],
[
4272,
4273
]
],
[
[
3603,
3612
],
[
3712,
3721
],
[
3764,
3773
],
[
4036,
4045
],
[
4050,
4059
],
[
4207,
4216
],
[
4221,
4230
]
],
[
[
3974,
3976
],
[
4260,
4262
]
],
[
[
4018,
4020
],
[
4299,
4301
]
],
[
[
4188,
4194
],
[
4314,
4320
]
],
[
[
4942,
4948
],
[
3578,
3584
],
[
4826,
4832
],
[
4899,
4905
],
[
4949,
4955
]
]
] |
from django.http import HttpResponse
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.shortcuts import redirect
from django.core.validators import URLValidator # https://stackoverflow.com/questions/7160737/python-how-to-validate-a-url-in-python-malformed-or-not
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.db.models import Max, Count
from app.models import Reference, Tools, Reports, Tasks, TasksStats
import io
import re
import six
import uuid
import hashlib
import simplejson
#https://pybtex.org/
from pybtex.database import parse_string as parse_reference_string
import pybtex.database.input.bibtex
import pybtex.plugin
# Globals
pybtex_style = pybtex.plugin.find_plugin('pybtex.style.formatting', 'plain')()
pybtex_html_backend = pybtex.plugin.find_plugin('pybtex.backends', 'html')()
pybtex_parser = pybtex.database.input.bibtex.Parser()
sep = '||'
sep2 = '@@'
format_time_string = '%a, %d %b %Y %H:%M:%S' # RFC 2822 Internet email standard. https://docs.python.org/2/library/time.html#time.strftime # '%Y-%m-%d, %H:%M:%S'
url_validator = URLValidator() # https://stackoverflow.com/questions/7160737/python-how-to-validate-a-url-in-python-malformed-or-not
class ArkalosException(Exception):
pass
def get_guid():
'''
Create a new guid
'''
return str(uuid.uuid4())
def get_user_id(request):
'''
Get id of user
'''
is_authenticated = request.user.is_authenticated()
if is_authenticated:
return request.user.id
return None
def get_user(request):
'''
Get user object
'''
is_authenticated = request.user.is_authenticated()
if is_authenticated:
return request.user
return None
def fail(error_message=None):
'''
Failed AJAX request
'''
ret = {'success': False, 'error_message': error_message}
json = simplejson.dumps(ret)
return HttpResponse(json, content_type='application/json')
def success(data={}):
'''
success Ajax request
'''
data['success'] = True
json = simplejson.dumps(data)
return HttpResponse(json, content_type='application/json')
def has_data(f):
'''
Decorator that passes AJAX data to a function parameters
'''
def wrapper(*args, **kwargs):
request = args[0]
if request.method == 'POST':
if len(request.POST):
for k in request.POST:
kwargs[k] = request.POST[k]
else:
POST = simplejson.loads(request.body)
for k in POST:
kwargs[k] = POST[k]
elif request.method == 'GET':
for k in request.GET:
kwargs[k] = request.GET[k]
print ("GET: {} == {}".format(k, kwargs[k]))
return f(*args, **kwargs)
return wrapper
def has_field(field_names, errors):
'''
Check if field names are present
field_name: The field to check
'''
def decorator(f):
def wrapper(*args, **kwargs):
for field_index, field_name in enumerate(field_names):
if not field_name in kwargs:
if callable(errors):
kwargs['error'] = errors(field_name)
elif type(errors) is list:
kwargs['error'] = errors[field_index]
elif type(errors) is dict:
kwargs['error'] = errors[field_name]
elif type(errors) is str:
kwargs['error'] = errors
else:
# This should never happen
                        raise ArkalosException('Unknown error type: {}'.format(type(errors).__name__))
return f(*args, **kwargs)
return f(*args, **kwargs)
return wrapper
return decorator
def has_error(f):
'''
Check if error in kwargs
'''
def wrapper(*args, **kwargs):
if 'error' in kwargs:
return fail(kwargs['error'])
return f(*args, **kwargs)
return wrapper
def username_exists(username):
'''
Checks if a username exists
'''
return User.objects.filter(username=username).exists()
def URL_validate(url):
'''
https://stackoverflow.com/questions/7160737/python-how-to-validate-a-url-in-python-malformed-or-not
'''
try:
url_validator(url)
except ValidationError as e:
return False
return True
def format_time(t):
'''
Universal method to string format time vars
'''
return t.strftime(format_time_string)
###########################################################################
##################DATABASE FUNCTIONS#######################################
###########################################################################
def bootstrap_table_format_field(entry, value):
'''
Formats the field of a bootstrap table. Values are taken from bidings
'''
if type(value) is str:
if type(entry) is dict:
return entry[value]
else:
return getattr(entry, value)
elif callable(value):
return value(entry)
def serve_boostrap_table2(model, query_f, filters, bindings, **kwargs):
'''
count_f = Tools.objects.values('name', 'url').annotate(Count('name')).count()
query_f = Tools.objects.values('name', 'url').annotate(Count('name'))
IT DOES NOT USE count_f !
'''
#count = count_f()
order = kwargs['order']
offset = kwargs['offset']
limit = kwargs['limit']
from_offset = int(offset)
to_offset = from_offset + int(limit)
if 'filter' in kwargs:
# "read" the filter
filter_ = kwargs['filter']
filter_ = simplejson.loads(filter_)
print ("Filter:")
print (filter_)
applied_filters = {filters[f][0](): filters[f][1](f_value) for f, f_value in filter_.items() if f in filters}
print ("Applied filters:")
print (applied_filters)
else:
applied_filters = {}
querySet = query_f(applied_filters)
count = querySet.count()
querySet = querySet[from_offset:to_offset]
ret = {'total': count}
ret['rows'] = [ {k: bootstrap_table_format_field(entry, v) for k, v in bindings.items()} for entry in querySet]
json = simplejson.dumps(ret)
return HttpResponse(json, content_type='application/json')
def serve_boostrap_table(model, bindings, order_by, **kwargs):
'''
http://bootstrap-table.wenzhixin.net.cn/
'''
count = model.objects.count()
order = kwargs['order']
offset = kwargs['offset']
limit = kwargs['limit']
from_offset = int(offset)
to_offset = from_offset + int(limit)
if 'filter' in kwargs:
filter_ = kwargs['filter']
filter_ = simplejson.loads(filter_)
filter_ = { bindings[k] + '__icontains':v for k,v in filter_.items()}
querySet = model.objects.filter(**filter_)
count = querySet.count()
querySet = querySet[from_offset:to_offset]
else:
querySet = model.objects.order_by(order_by)[from_offset:to_offset]
ret = {'total': count}
ret['rows'] = [ {k: bootstrap_table_format_field(entry, v) for k, v in bindings.items()} for entry in querySet]
json = simplejson.dumps(ret)
return HttpResponse(json, content_type='application/json')
def db_exists(model, filters):
'''
Does this entry exist?
'''
return model.objects.filter(**filters).exists()
def get_maximum_current_version(model, name):
'''
Return the next available current_version
'''
max_entry = model.objects.filter(name=name).aggregate(Max('current_version'))
if max_entry['current_version__max'] is None:
return 1
assert type(max_entry) is dict
assert len(max_entry) == 1
return max_entry['current_version__max'] + 1
def build_jstree_tool_dependencies(tool, prefix='', include_original=False):
'''
Build the dependency jstree of this tool
include_original are we including the original tool in the jstree?
'''
def node(t):
ret = {
'id': prefix + sep + t.name + sep + str(t.current_version), #Through this id we get info from jstree jandlers
'text': t.name + ' ' + str(t.current_version),
'children': [build_jstree_tool_dependencies(x, prefix, include_original=True) for x in t.dependencies.all()] + \
[{'text': x[0], 'type': 'exposed', 'value': x[1], 'description': x[2], 'id': prefix+sep+x[0]+sep+t.name+sep2+str(t.current_version)} for x in simplejson.loads(t.exposed)],
'current_version': t.current_version,
'name': t.name,
'type': 'tool',
}
return ret
if include_original:
return node(tool)
else:
return [node(dependent_tool) for dependent_tool in tool.dependencies.all()]
def build_jstree(model, name, prefix=''):
'''
Take an entry that has a previous_version and current_version
Build a jstree compatible structure
'''
index = {}
if prefix:
prefix_to_add = prefix + sep
else:
prefix_to_add = ''
def node(o):
current_version = o.current_version
ret = {
'id': prefix_to_add + o.name + sep + str(o.current_version),
'text': o.name + ' ' + str(o.current_version),
'children': [],
'current_version': o.current_version,
'name': o.name
}
index[current_version] = ret
return ret
ret = []
all_objects = model.objects.filter(name=name).order_by("current_version")
#ret.append(node(all_objects[0]))
for o in all_objects:
previous_version = o.previous_version
if previous_version is None:
ret.append(node(o))
else:
this_node = node(o)
index[previous_version]['children'].append(this_node)
#print (simplejson.dumps(ret))
return ret
###########################################################################
##################END OF DATABASE#######################################
###########################################################################
###########################################################################
################## REGISTER ###############################################
###########################################################################
@has_data
@has_field(['username', 'password', 'password_confirm', 'email'], lambda x :'{} is required'.format(x))
@has_error
def register(request, **kwargs):
'''
Register
'''
#print (kwargs)
username = kwargs['username']
password = kwargs['password']
password_confirm = kwargs['password_confirm']
email = kwargs['email']
#Check if this user exists
if username_exists(username):
return fail('Username {} exists'.format(username))
#Check if password match
if kwargs['password'] != kwargs['password_confirm']:
return fail('Passwords do not match')
#Create user
user = User.objects.create_user(username, email, password)
return success({})
@has_data
@has_field(['username', 'password'], lambda x :'{} is required'.format(x))
@has_error
def loginlocal(request, **kwargs):
'''
Function called from login
'''
username = kwargs['username']
password = kwargs['password']
user = authenticate(username=username, password=password)
if user is None:
return fail('Invalid username or password')
#if user.is_active: ... # https://docs.djangoproject.com/en/1.9/topics/auth/default/
login(request, user)
ret = {'username': username}
return success(ret)
def logoutlocal(request):
'''
logout
'''
logout(request)
return redirect('/')
###########################################################################
################## END OF REGISTER ########################################
###########################################################################
###############################
####REFERENCES#################
###############################
def reference_get_fields(content):
'''
Get the code of the bibtex entry
'''
p = parse_reference_string(content, 'bibtex')
p_len = len(p.entries)
if p_len == 0:
return False, 'Could not find BIBTEX entry'
if p_len > 1:
return False, 'More than one BIBTEX entries found'
code = p.entries.keys()[0]
if not 'title' in p.entries[code].fields:
return False, 'Could not find title information'
title = p.entries[code].fields['title']
if not hasattr(p.entries[code], 'persons'):
return False, 'Could not find author information'
if not 'author' in p.entries[code].persons:
return False, 'Could not find author information'
if len(p.entries[code].persons['author']) == 0:
return False, 'Could not find author information'
authors = sep.join([str(x) for x in p.entries[code].persons['author']])
return True, {'code': code, 'title': title, 'authors': authors}
def bibtex_to_html(content):
'''
Convert bibtex to html
Adapted from: http://pybtex-docutils.readthedocs.io/en/latest/quickstart.html#overview
'''
data = pybtex_parser.parse_stream(six.StringIO(content))
data_formatted = pybtex_style.format_entries(six.itervalues(data.entries))
output = io.StringIO()
pybtex_html_backend.write_to_stream(data_formatted, output)
html = output.getvalue()
html_s = html.split('\n')
html_s = html_s[9:-2]
new_html = '\n'.join(html_s).replace('<dd>', '').replace('</dd>', '')
return new_html
@has_data
@has_field(['content'], 'BIBTEX content is required')
@has_error
def add_reference(request, **kwargs):
'''
Add reference
'''
content = kwargs['content']
s, fields = reference_get_fields(content)
if not s:
        return fail(fields)
if db_exists(Reference, {'code': fields['code']}):
        return fail('BIBTEX entry with code {} already exists'.format(fields['code']))
html = bibtex_to_html(content)
r = Reference(
user=get_user(request),
code=fields['code'],
title=fields['title'],
authors=fields['authors'],
content=content,
reference_type='BIBTEX',
html = html,
)
r.save()
return success()
@has_data
def get_references(request, **kwargs):
'''
Serve GET Request for References bootstrap table
'''
bindings = {
'id': 'code',
'content': 'html',
}
return serve_boostrap_table(Reference, bindings, 'id', **kwargs)
@has_data
@has_error
def get_reference(request, **kwargs):
'''
Get reference
'''
codes = kwargs['codes']
ret = {'data': {}, 'html': []}
c = 0
for code in codes:
try:
ref = Reference.objects.get(code=code)
c += 1
ret['data'][code] = {'counter': c}
ret['html'].append({'html': ref.html})
except ObjectDoesNotExist:
pass
ret['total'] = c
return success(ret)
@has_data
def reference_suggestions(request, **kwargs):
'''
    Get called from tags input
'''
query = kwargs['query']
querySet = Reference.objects.filter(content__icontains = query)[:10]
ret = [ {'value' : entry.code, 'html': entry.html} for entry in querySet] # We have a html representation for each Reference
json = simplejson.dumps(ret)
return HttpResponse(json, content_type='application/json')
def get_references_from_text(text):
'''
Get all reference objects from a text.
This is useful for the report
'''
ret = []
all_brackets = re.findall(r'\[[\w]+\]', text)
for bracket in all_brackets:
#Remove brackets
code = bracket[1:-1]
#Check if this a real reference
try:
ref = Reference.objects.get(code=code)
except ObjectDoesNotExist:
pass
else:
ret += [ref]
return ret
###############################
######END OF REFERENCES########
###############################
#################################
#### REPORTS ####################
#################################
@has_data
def get_reports(request, **kwargs):
'''
Serve bootstrap table for reports
'''
bindings = {
'name': 'name',
#'total_edits': lambda entry: entry['name__count'],
'content': lambda entry : ''
}
#return serve_boostrap_table(Reports, bindings, 'id', **kwargs)
return serve_boostrap_table2(
model = Reports,
#count_f = lambda : Reports.objects.values('name').annotate(Count('name')).count(),
query_f = lambda x : Reports.objects.filter(**x).values('name').distinct(),
bindings = bindings,
filters = {
'name': (lambda : 'name__icontains', lambda x : x) # name_contains = x
},
**kwargs
)
@has_data
@has_error
def get_reports_ui(request, **kwargs):
name = kwargs['name']
current_version = kwargs['current_version']
report = Reports.objects.get(name=name, current_version=current_version)
username = report.user.username
ret = {
'name': name,
'current_version': current_version,
'username': username,
'created_at': format_time(report.created_at),
'markdown': report.markdown,
'summary': report.summary,
}
return success(ret)
@has_data
@has_error
def add_report(request, **kwargs):
name = kwargs['name']
previous_version = kwargs['previous_version']
markdown = kwargs['markdown']
references = kwargs['references']
user = get_user(request)
#print (name)
#print (previous_version)
#print (markdown)
#print (references)
current_version = get_maximum_current_version(Reports, name)
previous_version = kwargs["previous_version"]
if previous_version == 'N/A':
previous_version = None
if current_version == 1:
previous_version = None
report = Reports(
name=name,
user=user,
current_version=current_version,
previous_version=previous_version,
markdown=markdown,
)
report.save()
fetched_references = [Reference.objects.get(name=x) for x in references]
report.references.add(*fetched_references)
report.save()
ret = {
'created_at' : format_time(report.created_at),
'current_version': current_version,
'jstree': build_jstree(Reports, report.name)
}
#print (ret)
return success(ret)
#################################
#### END OF REPORTS #############
#################################
#################################
####TOOLS / DATA#################
#################################
@has_data
def get_tools(request, **kwargs):
'''
Serve GET Request for Tools bootstrap table
def serve_boostrap_table2(model, count_f, query_f, bindings, **kwargs):
count_f = Tools.objects.values('name', 'url').annotate(Count('name')).count()
query_f = Tools.objects.values('name', 'url').annotate(Count('name')
'''
bindings = {
'name' : 'name',
'url': lambda entry : '<a href="{}" target="_blank">{}</a>'.format(entry['url'], entry['url']),
#'total_edits': lambda entry: entry['name__count'],
'description': lambda entry: ''
#'current_version': lambda entry: '{} -- {}'.format(entry.current_version, entry.previous_version),
#'current_version': 'current_version',
#'description': 'description',
#'description': lambda entry: '{} {} -- {}'.format(entry.description, entry.current_version, entry.previous_version),
}
#return serve_boostrap_table(Tools, bindings, 'name', **kwargs)
return serve_boostrap_table2(
model = Tools,
#count_f = lambda : Tools.objects.values('name', 'url').annotate(Count('name')).count(),
query_f = lambda x : Tools.objects.values('name', 'url').annotate(Count('name')),
filters = {
},
bindings = bindings,
**kwargs
)
@has_data
@has_error
def get_tools_ui(request, **kwargs):
'''
Called when we want an explicit tool from the UI
'''
name = kwargs['name']
current_version = kwargs['current_version']
tool = Tools.objects.get(name=name, current_version=current_version)
#print ('System: {}'.format(tool.system))
exposed = simplejson.loads(tool.exposed)
if not len(exposed):
exposed = [['', '', '']]
jstree = build_jstree(Tools, tool.name)
dependencies = build_jstree_tool_dependencies(tool, prefix='3', include_original=False)
#print ('DEPENDENCIES:')
#print (dependencies)
ret = {
'name': tool.name,
'current_version': current_version,
'version' : tool.version,
'system' : simplejson.loads(tool.system),
'username': tool.user.username,
'created_at': format_time(tool.created_at),
'url': tool.url,
'description': tool.description,
'installation': tool.installation,
'validate_installation': tool.validate_installation,
'exposed': exposed,
'jstree': jstree,
'references': [x.code for x in tool.references.all()],
'summary': tool.summary,
'dependencies': dependencies
}
return success(ret)
@has_data
@has_field(
['name', 'version', 'url', 'description', 'installation'],
['Name cannot be empty', 'Version cannot be empty', 'Link cannot be empty', 'Description cannot be empty', 'Installation cannot be empty'])
@has_error
def add_tool(request, **kwargs):
'''
Attempt to add a new Tool
'''
system = kwargs['system']
system_p = simplejson.loads(system)
if not len(system_p):
return fail('Please select one or more systems')
url = kwargs['url']
if not URL_validate(url):
return fail('URL: {} does not seem to be valid'.format(url))
references = kwargs['references']
references = simplejson.loads(references)
references = [Reference.objects.get(code=r) for r in references]
name = kwargs['name']
current_version = get_maximum_current_version(Tools, name)
previous_version = kwargs["previous_version"]
if previous_version == 'N/A':
previous_version = None
# else:
# print ('Previous version: {}'.format(previous_version))
# print ('Current version: {}'.format(current_version))
# a=1/0 # Throw exception deliberately
print ('Current version: {}'.format(current_version))
user = get_user(request)
version = kwargs['version']
description = kwargs['description']
installation=kwargs['installation']
validate_installation = kwargs['validate_installation']
exposed = kwargs['exposed']
#print ('Exposed: {} {}'.format(exposed, type(exposed).__name__)) # This is a list
exposed = [e for e in exposed if any(e)] # Remove empty
exposed = simplejson.dumps(exposed) # Serialize
summary = kwargs['summary']
new_tool = Tools(
user=user,
name=name,
version=version,
system=system,
current_version=current_version,
previous_version=previous_version,
url = url,
description = description,
installation = installation,
validate_installation = validate_installation,
exposed = exposed,
summary = summary,
);
new_tool.save()
#Add references
new_tool.references.add(*references)
new_tool.save()
jstree = build_jstree(Tools, new_tool.name)
#Add dependencies
dependencies = kwargs['dependencies']
dependencies_objects = [Tools.objects.get(name=dependency['name'], current_version=dependency['current_version']) for dependency in dependencies]
new_tool.dependencies.add(*dependencies_objects)
new_tool.save()
#Get created at
created_at = format_time(new_tool.created_at)
#print ('Created at: {}'.format(created_at))
ret = {
'created_at': created_at,
'current_version': current_version,
'jstree': jstree
}
return success(ret)
@has_data
@has_error
def jstree_tool(request, **kwargs):
'''
AJAX backend to get the version jstree for a tool
'''
name = kwargs['name']
prefix = kwargs['prefix']
ret = {
'jstree' : build_jstree(Tools, name, prefix=prefix),
}
return success(ret)
@has_data
@has_error
def jstree_report(request, **kwargs):
'''
AJAX backend to get the version jstree for a tool
'''
name = kwargs['name']
prefix = kwargs['prefix']
ret = {
'jstree' : build_jstree(Reports, name, prefix=prefix),
}
return success(ret)
@has_data
@has_error
def jstree_wf(request, **kwargs):
'''
AJAX backend to get the version jstree for a tool
'''
name = kwargs['name']
prefix = kwargs['prefix']
ret = {
'jstree' : build_jstree(Tasks, name, prefix=prefix),
}
return success(ret)
@has_data
@has_error
def jstree_tool_dependencies(request, **kwargs):
'''
AJAX backend to get the dependency jstree for a tool
'''
name = kwargs['name']
current_version = int(kwargs['current_version'])
if 'prefix' in kwargs:
prefix=kwargs['prefix']
else:
prefix = '3'
tool = Tools.objects.get(name=name, current_version=current_version)
ret = {
'jstree': build_jstree_tool_dependencies(tool, prefix=prefix, include_original=True)
}
#print(ret)
return success(ret)
@has_data
@has_error
def get_tool_dependencies(request, **kwargs):
'''
Return ONE LEVEL dependencies of this tool
'''
name = kwargs['name']
current_version = int(kwargs['current_version'])
tool = Tools.objects.get(name=name, current_version=current_version)
ret = {
'dependencies': [{'name': x.name, 'current_version': x.current_version} for x in tool.dependencies.all()]
}
return success(ret)
@has_data
@has_error
def get_tool_variables(request, **kwargs):
'''
Return the variables of this tool
'''
name = kwargs['name']
current_version = int(kwargs['current_version'])
tool = Tools.objects.get(name=name, current_version=current_version)
ret = {
'variables': simplejson.loads(tool.exposed)
}
return success(ret)
########################################
####END OF TOOLS / DATA#################
########################################
########################################
######### WORKFLOWS ####################
########################################
def jason_or_django(f):
'''
getattr and iterate methods for JSON or DJANGO objects
'''
def dec(*args, **kwargs):
if type(args[0]) is dict:
attr = lambda x,y : x[y]
iterate = lambda x,y : (k for k in x[y])
elif type(args[0]) is Tasks:
attr = lambda x,y : getattr(x,y)
iterate = lambda x,y : (k for k in getattr(x,y).all())
else:
raise ArkalosException('This should never happen: {}'.format(type(args[0])))
kwargs['attr'] = attr
kwargs['iterate'] = iterate
return f(*args, **kwargs)
return dec
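# Hedged usage sketch (added for illustration; the dict below is hypothetical):
# the decorator injects `attr`/`iterate` accessors so the decorated task_hash()
# below can treat a plain JSON dict and a Tasks model instance uniformly.
#
#     task_json = {'name': 'align', 'bash': 'bwa mem ...', 'documentation': '',
#                  'dependencies': [], 'calls': [], 'inputs': [], 'outputs': []}
#     task_hash(task_json)              # dict branch: attr = lambda x, y: x[y]
#     task_hash(Tasks.objects.first())  # Django branch: attr = getattr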
@jason_or_django
def task_hash(task, **kwargs):
'''
Creates a unique hash for this task
attr: Get attribute
iterate: Iterator
'''
attr = kwargs['attr']
iterate = kwargs['iterate']
# Dictionary version
# to_hash = [
# task['name'],
# task['bash'],
# task['documentation'],
# '@@'.join(['&&'.join((x['name'], str(x['current_version']))) for x in task['dependencies'] if x['type'] == 'tool']),
# '!!'.join(['**'.join((x['name'], str(x['current_version']) if x['is_workflow'] else 'None')) for x in task['calls']]),
# '##'.join(task['inputs']),
# '$$'.join(task['outputs'])
# ]
# This works with both dictionary and django database objects
to_hash = [
attr(task, 'name'),
attr(task, 'bash'),
attr(task, 'documentation'),
'@@'.join(['&&'.join((attr(x, 'name'), str(attr(x, 'current_version')))) for x in iterate(task, 'dependencies')]),
'!!'.join(['**'.join((attr(x, 'name'), str(attr(x, 'current_version')) if attr(x, 'current_version') else 'None')) for x in iterate(task, 'calls')]),
'##'.join(attr(task, 'inputs')),
'$$'.join(attr(task, 'outputs')),
]
to_hash = '^^'.join(to_hash)
to_hash_b = bytearray(to_hash, encoding="utf-8")
return hashlib.sha256(to_hash_b).hexdigest()
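# Hedged illustration (added; field values are hypothetical): two task
# dictionaries that differ only in their outputs hash to different digests,
# which is what lets save_task_or_workflow() below deduplicate identical tasks.
#
#     t1 = {'name': 'align', 'bash': 'bwa mem ...', 'documentation': '',
#           'dependencies': [], 'calls': [], 'inputs': ['r.fq'], 'outputs': ['a.bam']}
#     t2 = dict(t1, outputs=['b.bam'])
#     assert task_hash(t1) != task_hash(t2)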
def save_task_or_workflow(request, workflow_or_task):
'''
Saves a workflow or task
'''
if workflow_or_task['is_workflow']:
# This is a workflow
is_workflow = True
if workflow_or_task['current_version'] is None:
# This workflow is not saved
# Get the previous_version
previous_version = workflow_or_task['previous_version']
# Get the current number
current_version = get_maximum_current_version(Tasks, workflow_or_task['name'])
else:
# This workflow is saved. Find it and return it
workflow = Tasks.objects.get(name=workflow_or_task['name'], current_version=workflow_or_task['current_version'])
return workflow
else:
# This is a task
is_workflow = False
current_version = None
previous_version = None
#Check if it exists in the database
try:
task = Tasks.objects.get(hash_field=workflow_or_task['hash_value'])
except ObjectDoesNotExist:
pass
else:
return task
# It does not exist. Create it!
task = Tasks(
user=get_user(request),
name=workflow_or_task['name'],
current_version=current_version,
previous_version=previous_version,
bash=workflow_or_task['bash'],
documentation=workflow_or_task['documentation'],
hash_field=workflow_or_task['hash_value'],
is_workflow=is_workflow,
inputs=simplejson.dumps(workflow_or_task['inputs']),
outputs=simplejson.dumps(workflow_or_task['outputs']),
)
task.save()
# Add dependencies
tools = []
for dependency in workflow_or_task['dependencies']:
if dependency['type'] != 'tool':
continue
tools += [Tools.objects.get(name=dependency['name'], current_version=dependency['current_version'])]
task.dependencies.add(*tools)
task.save()
# Add references
refs = get_references_from_text(workflow_or_task['documentation'])
task.references.add(*refs)
task.save()
return task
def update_TasksStats(task):
'''
Update the stats of this task
'''
name = task.name
try:
taskStat = TasksStats.objects.get(name=name)
except ObjectDoesNotExist:
taskStat = TasksStats(
name=name,
edits=1,
users=1,
last_edit=task,
)
else:
taskStat.edits += 1
taskStat.users = Tasks.objects.filter(name=name).values('user').count()
taskStat.last_edit=task
finally:
taskStat.save()
@has_data
@has_error
def add_workflow(request, **kwargs):
'''
Add a new workflow
'''
graph = kwargs['graph']
main_guid = kwargs['main_guid']
#Fix is_workflow
for node in graph:
node['is_workflow'] = node['type'] == 'workflow'
#Take main node
main_node = None
for node in graph:
if node['guid'] == main_guid:
main_node = node
break
assert not (main_node is None)
assert main_node['is_workflow']
# Check if there is another workflow with the same name
if main_node['previous_version'] is None: # It is a new workflow!
if db_exists(Tasks, {'name': main_node['name']}):
return fail('Another workflow with this name exists. Please choose another name')
# Check if this workflow calls another workflow which is unsaved (this is not allowed)
for node in graph:
if not node['is_workflow']: # It is not a workflow
continue
if node['guid'] == main_guid: # It is not the main workflow
continue
if node['current_version'] is None: # It is not saved
return fail('Could not save. Workflow: {} calls an UNSAVED workflow: {}'.format(main_node['name'], node['name']))
#Fix the "calls"
guids_to_graph = {node['guid']:node for node in graph}
for node in graph:
node['calls'] = [{'name': guids_to_graph[callee_guid]['name'], 'current_version': guids_to_graph[callee_guid]['current_version']} for callee_guid in node['serial_calls']]
#Do the following three things:
#1. Add hash_value information
#2. Take the hash of the main workflow
#3. Create a mapping from GUIDs to hash_values
from_guid_to_hash = {}
main_hash = None
guids_to_hashes = {}
for node in graph:
#print ('======')
#print(node)
node['hash_value'] = task_hash(node)
if node['guid'] == main_guid:
main_hash = node['hash_value']
guids_to_hashes[node['guid']] = node['hash_value']
assert not (main_hash is None)
# Save the graph and create a new dictionary with the saved objects
hash_objects_dict = {
node['hash_value']: save_task_or_workflow(request, node)
for node in graph
}
#Add the who calls whom information
for node in graph:
this_node_called = [hash_objects_dict[guids_to_hashes[callee_guid]] for callee_guid in node['serial_calls']]
if this_node_called:
hash_objects_dict[node['hash_value']].calls.add(*this_node_called)
hash_objects_dict[node['hash_value']].save()
#Update TaskStats. Perhaps can be done better with signals
update_TasksStats(hash_objects_dict[main_hash])
ret = {
'current_version': hash_objects_dict[main_hash].current_version,
'created_at': format_time(hash_objects_dict[main_hash].created_at),
}
return success(ret)
def workflow_graph(workflow_or_task):
'''
Create a caller--callee graph identical to the one sent from angular for a workflow
'''
ret = []
all_hashes = []
def create_node(node):
ret = {
'bash': node.bash,
'current_version': node.current_version,
'previous_version': node.previous_version,
'documentation': node.documentation,
'tools_jstree_data': [build_jstree_tool_dependencies(x, prefix='5', include_original=True) for x in node.dependencies.all()],
'inputs': simplejson.loads(node.inputs),
'outputs': simplejson.loads(node.outputs),
'type': 'workflow' if node.is_workflow else 'task',
'hash_value': node.hash_field,
'children': []
}
if node.is_workflow:
ret['name'] = node.name + '_' + str(node.current_version)
ret['workflow_name'] = node.name
ret['created_at'] = format_time(node.created_at)
ret['username'] = node.user.username
else:
ret['name'] = node.name
return ret
def workflow_graph_rec(node):
if node.hash_field in all_hashes:
return
all_hashes.append(node.hash_field)
ret_json = create_node(node)
ret_json['serial_calls'] = []
for callee in node.calls.all():
ret_json['serial_calls'].append(callee.hash_field)
workflow_graph_rec(callee)
ret.append(ret_json)
workflow_graph_rec(workflow_or_task)
return ret
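# Hedged usage sketch (workflow name and version below are illustrative only):
#
#     wf = Tasks.objects.get(name='rnaseq', current_version=2)
#     graph = workflow_graph(wf)
#     # `graph` is a flat list of node dictionaries; each node lists the
#     # hash_field values of the nodes it calls in 'serial_calls', so the
#     # caller--callee edges can be rebuilt on the client.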
@has_data
def get_workflow(request, **kwargs):
'''
Creates a json object EXACTLY the same as the one saved
return {
"name": node.type == 'workflow' ? node.workflow_name : node.name,
"bash": node.bash,
"current_version": node.current_version, // This is always null
"previous_version": node.previous_version,
"documentation": node.documentation,
"dependencies": node.tools_jstree_data,
"serial_calls" : node.serial_calls,
"inputs": node.inputs,
"outputs": node.outputs,
"type": node.type,
"guid": node.guid
};
'''
name = kwargs['name']
current_version = kwargs['current_version']
wf = Tasks.objects.get(name=name, current_version=current_version)
graph = workflow_graph(wf)
# print ('ret:')
# print (ret)
ret = {
'graph': graph,
'main_hash': wf.hash_field
}
return success(ret)
@has_data
def get_workflows(request, **kwargs):
'''
Serve bootstrap table for workflows
'''
def description(entry):
ret = '<p>Edits: <strong>%i</strong> Users: <strong>%i</strong> Last Edit: <strong>%s</strong><br />Last documentation: %s</p>' % (entry.edits, entry.users, format_time(entry.last_edit.created_at), entry.last_edit.documentation)
return ret
bindings = {
'name' : 'name',
'description': description,
}
#return serve_boostrap_table(Tools, bindings, 'name', **kwargs)
return serve_boostrap_table2(
model = TasksStats,
#count_f = lambda : Tasks.objects.values('name').count(), # COUNT ALL
query_f = lambda x : TasksStats.objects.filter(**x), # Query function
filters = {
'name': (lambda : 'name__icontains', lambda x : x) # name_contains = x
},
bindings = bindings,
**kwargs
)
########################################
####### END OF WORKFLOWS ###############
########################################
| [
[
[
25,
37
],
[
1964,
1976
],
[
2152,
2164
],
[
6536,
6548
],
[
7503,
7515
],
[
15715,
15727
]
],
[
[
78,
82
],
[
4359,
4363
],
[
11280,
11284
]
],
[
[
115,
127
],
[
11616,
11628
]
],
[
[
129,
134
],
[
11837,
11842
]
],
[
[
136,
142
],
[
11974,
11980
]
],
[
[
173,
181
],
[
12001,
12009
]
],
[
[
218,
230
],
[
1165,
1177
]
],
[
[
369,
384
],
[
4599,
4614
]
],
[
[
386,
404
],
[
15249,
15267
],
[
16169,
16187
],
[
29892,
29910
],
[
31153,
31171
]
],
[
[
435,
438
],
[
7852,
7855
]
],
[
[
440,
445
],
[
20251,
20256
]
],
[
[
470,
479
],
[
14177,
14186
],
[
14337,
14346
],
[
14823,
14832
],
[
15083,
15092
],
[
15483,
15492
],
[
16121,
16130
],
[
18496,
18505
],
[
22327,
22336
]
],
[
[
481,
486
],
[
20073,
20078
],
[
20569,
20574
],
[
20809,
20814
],
[
22455,
22460
],
[
23310,
23315
],
[
23827,
23832
],
[
23942,
23947
],
[
24633,
24638
],
[
25602,
25607
],
[
26041,
26046
],
[
26471,
26476
],
[
30679,
30684
],
[
20206,
20211
]
],
[
[
488,
495
],
[
16827,
16834
],
[
17332,
17339
],
[
18080,
18087
],
[
18286,
18293
],
[
18755,
18762
],
[
24924,
24931
],
[
16957,
16964
]
],
[
[
497,
502
],
[
25214,
25219
],
[
29344,
29349
],
[
29474,
29479
],
[
29816,
29821
],
[
30015,
30020
],
[
31370,
31375
],
[
32136,
32141
],
[
36807,
36812
],
[
27162,
27167
]
],
[
[
504,
514
],
[
31108,
31118
],
[
31192,
31202
],
[
37635,
37645
],
[
37754,
37764
]
],
[
[
524,
526
],
[
13631,
13633
]
],
[
[
534,
536
],
[
15930,
15932
]
],
[
[
544,
547
],
[
13515,
13518
],
[
13587,
13590
]
],
[
[
555,
559
],
[
1398,
1402
]
],
[
[
567,
574
],
[
28809,
28816
]
],
[
[
582,
592
],
[
1931,
1941
],
[
2118,
2128
],
[
5924,
5934
],
[
6503,
6513
],
[
6989,
6999
],
[
7470,
7480
],
[
15682,
15692
],
[
20693,
20703
],
[
21113,
21123
],
[
21992,
22002
],
[
22280,
22290
],
[
23223,
23233
],
[
26566,
26576
],
[
30372,
30382
],
[
30434,
30444
],
[
2622,
2632
],
[
8771,
8781
],
[
34987,
34997
],
[
35041,
35051
]
],
[
[
643,
681
],
[
12439,
12461
]
],
[
[
690,
718
]
],
[
[
726,
739
],
[
766,
772
],
[
852,
858
],
[
923,
929
]
],
[
[
751,
763
],
[
13559,
13571
]
],
[
[
830,
849
],
[
13649,
13668
]
],
[
[
907,
920
],
[
13488,
13501
]
],
[
[
962,
965
],
[
9314,
9317
],
[
13180,
13183
],
[
8333,
8336
],
[
8348,
8351
],
[
8713,
8716
],
[
8722,
8725
],
[
9476,
9479
]
],
[
[
973,
977
],
[
8733,
8737
]
],
[
[
985,
1003
],
[
4767,
4785
]
],
[
[
1149,
1162
],
[
4569,
4582
]
],
[
[
1290,
1306
],
[
27313,
27329
],
[
3843,
3859
]
],
[
[
1333,
1341
]
],
[
[
1417,
1428
]
],
[
[
1607,
1615
],
[
14361,
14369
],
[
17916,
17924
],
[
22840,
22848
],
[
30035,
30043
]
],
[
[
1792,
1796
],
[
11074,
11078
],
[
11220,
11224
],
[
11704,
11708
],
[
14147,
14151
],
[
14230,
14234
],
[
22058,
22062
],
[
22170,
22174
],
[
32192,
32196
],
[
32636,
32640
],
[
4192,
4196
]
],
[
[
2021,
2028
],
[
11344,
11351
],
[
11903,
11910
],
[
14589,
14596
],
[
15321,
15328
],
[
17686,
17693
],
[
18813,
18820
],
[
21611,
21618
],
[
24391,
24398
],
[
24680,
24687
],
[
24973,
24980
],
[
25261,
25268
],
[
25805,
25812
],
[
26247,
26254
],
[
26615,
26622
],
[
34400,
34407
],
[
37027,
37034
]
],
[
[
2209,
2217
],
[
10639,
10647
],
[
11358,
11366
],
[
13891,
13899
],
[
14601,
14609
],
[
14862,
14870
],
[
15336,
15344
],
[
16463,
16471
],
[
17184,
17192
],
[
17701,
17709
],
[
19035,
19043
],
[
20357,
20365
],
[
21627,
21635
],
[
24406,
24414
],
[
24695,
24703
],
[
24989,
24997
],
[
25277,
25285
],
[
25820,
25828
],
[
26262,
26270
],
[
31498,
31506
],
[
35993,
36001
],
[
37043,
37051
]
],
[
[
3028,
3037
],
[
10649,
10658
],
[
11368,
11377
],
[
13901,
13910
],
[
21637,
21646
]
],
[
[
4050,
4059
],
[
10753,
10762
],
[
11443,
11452
],
[
13955,
13964
],
[
14872,
14881
],
[
17194,
17203
],
[
17711,
17720
],
[
20367,
20376
],
[
21857,
21866
],
[
24416,
24425
],
[
24705,
24714
],
[
24999,
25008
],
[
25287,
25296
],
[
25830,
25839
],
[
26272,
26281
],
[
31508,
31517
]
],
[
[
4273,
4288
],
[
11032,
11047
]
],
[
[
4412,
4424
],
[
22136,
22148
]
],
[
[
4665,
4676
],
[
17564,
17575
],
[
18648,
18659
],
[
21206,
21217
],
[
24175,
24186
],
[
34328,
34339
],
[
35394,
35405
],
[
37340,
37351
]
],
[
[
5021,
5049
],
[
6399,
6427
],
[
7366,
7394
]
],
[
[
5360,
5381
],
[
16788,
16809
],
[
20034,
20055
],
[
37596,
37617
]
],
[
[
6593,
6613
],
[
14802,
14822
]
],
[
[
7561,
7570
],
[
14167,
14176
],
[
32126,
32135
]
],
[
[
7688,
7715
],
[
18052,
18079
],
[
22427,
22454
],
[
29316,
29343
]
],
[
[
8066,
8096
],
[
20846,
20876
],
[
25695,
25725
],
[
8513,
8543
],
[
34858,
34888
]
],
[
[
9088,
9100
],
[
18742,
18754
],
[
20796,
20808
],
[
23814,
23826
],
[
24620,
24632
],
[
24911,
24923
],
[
25201,
25213
]
],
[
[
10767,
10775
]
],
[
[
11457,
11467
]
],
[
[
11921,
11932
]
],
[
[
12347,
12367
],
[
14088,
14108
]
],
[
[
13317,
13331
],
[
14304,
14318
]
],
[
[
13969,
13982
]
],
[
[
14614,
14628
]
],
[
[
14886,
14899
]
],
[
[
15349,
15370
]
],
[
[
15772,
15796
],
[
30853,
30877
]
],
[
[
16476,
16487
]
],
[
[
17208,
17222
]
],
[
[
17725,
17735
]
],
[
[
19048,
19057
]
],
[
[
20381,
20393
]
],
[
[
21871,
21879
]
],
[
[
24430,
24441
]
],
[
[
24719,
24732
]
],
[
[
25013,
25022
]
],
[
[
25301,
25325
]
],
[
[
25844,
25865
]
],
[
[
26286,
26304
]
],
[
[
26882,
26897
],
[
27501,
27516
]
],
[
[
27521,
27530
],
[
33366,
33375
]
],
[
[
28853,
28874
],
[
33688,
33709
]
],
[
[
30982,
30999
],
[
34172,
34189
]
],
[
[
31522,
31534
]
],
[
[
34418,
34432
],
[
36881,
36895
]
],
[
[
36006,
36018
]
],
[
[
37056,
37069
]
]
] |
"""
The arraypad module contains a group of functions to pad values onto the edges
of an n-dimensional array.
"""
from __future__ import division, absolute_import, print_function
import numpy as np
__all__ = ['pad']
###############################################################################
# Private utility functions.
def _arange_ndarray(arr, shape, axis, reverse=False):
"""
Create an ndarray of `shape` with increments along specified `axis`
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
shape : tuple of ints
Shape of desired array. Should be equivalent to `arr.shape` except
`shape[axis]` which may have any positive value.
axis : int
Axis to increment along.
reverse : bool
If False, increment in a positive fashion from 1 to `shape[axis]`,
inclusive. If True, the bounds are the same but the order reversed.
Returns
-------
padarr : ndarray
Output array sized to pad `arr` along `axis`, with linear range from
1 to `shape[axis]` along specified `axis`.
Notes
-----
The range is deliberately 1-indexed for this specific use case. Think of
this algorithm as broadcasting `np.arange` to a single `axis` of an
arbitrarily shaped ndarray.
"""
initshape = tuple(1 if i != axis else shape[axis]
for (i, x) in enumerate(arr.shape))
if not reverse:
padarr = np.arange(1, shape[axis] + 1)
else:
padarr = np.arange(shape[axis], 0, -1)
padarr = padarr.reshape(initshape)
for i, dim in enumerate(shape):
if padarr.shape[i] != dim:
padarr = padarr.repeat(dim, axis=i)
return padarr
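# Illustrative example (added; assumes standard NumPy semantics): for a (2, 3)
# input and axis=1 the helper builds a 1-indexed integer ramp along that axis
# and repeats it across the remaining axes.
#
#     >>> a = np.zeros((2, 3))
#     >>> _arange_ndarray(a, a.shape, axis=1)
#     array([[1, 2, 3],
#            [1, 2, 3]])
#     >>> _arange_ndarray(a, a.shape, axis=1, reverse=True)
#     array([[3, 2, 1],
#            [3, 2, 1]])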
def _round_ifneeded(arr, dtype):
"""
Rounds arr inplace if destination dtype is integer.
Parameters
----------
arr : ndarray
Input array.
dtype : dtype
The dtype of the destination array.
"""
if np.issubdtype(dtype, np.integer):
arr.round(out=arr)
def _prepend_const(arr, pad_amt, val, axis=-1):
"""
Prepend constant `val` along `axis` of `arr`.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to prepend.
val : scalar
Constant value to use. For best results it should be of type `arr.dtype`;
if not, it will be cast to `arr.dtype`.
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` constant `val` prepended along `axis`.
"""
if pad_amt == 0:
return arr
padshape = tuple(x if i != axis else pad_amt
for (i, x) in enumerate(arr.shape))
if val == 0:
return np.concatenate((np.zeros(padshape, dtype=arr.dtype), arr),
axis=axis)
else:
return np.concatenate(((np.zeros(padshape) + val).astype(arr.dtype),
arr), axis=axis)
def _append_const(arr, pad_amt, val, axis=-1):
"""
Append constant `val` along `axis` of `arr`.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to append.
val : scalar
Constant value to use. For best results it should be of type `arr.dtype`;
if not, it will be cast to `arr.dtype`.
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` constant `val` appended along `axis`.
"""
if pad_amt == 0:
return arr
padshape = tuple(x if i != axis else pad_amt
for (i, x) in enumerate(arr.shape))
if val == 0:
return np.concatenate((arr, np.zeros(padshape, dtype=arr.dtype)),
axis=axis)
else:
return np.concatenate(
(arr, (np.zeros(padshape) + val).astype(arr.dtype)), axis=axis)
def _prepend_edge(arr, pad_amt, axis=-1):
"""
Prepend `pad_amt` to `arr` along `axis` by extending edge values.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to prepend.
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, extended by `pad_amt` edge values prepended along `axis`.
"""
if pad_amt == 0:
return arr
edge_slice = tuple(slice(None) if i != axis else 0
for (i, x) in enumerate(arr.shape))
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
edge_arr = arr[edge_slice].reshape(pad_singleton)
return np.concatenate((edge_arr.repeat(pad_amt, axis=axis), arr),
axis=axis)
def _append_edge(arr, pad_amt, axis=-1):
"""
Append `pad_amt` to `arr` along `axis` by extending edge values.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to append.
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, extended by `pad_amt` edge values appended along
`axis`.
"""
if pad_amt == 0:
return arr
edge_slice = tuple(slice(None) if i != axis else arr.shape[axis] - 1
for (i, x) in enumerate(arr.shape))
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
edge_arr = arr[edge_slice].reshape(pad_singleton)
return np.concatenate((arr, edge_arr.repeat(pad_amt, axis=axis)),
axis=axis)
def _prepend_ramp(arr, pad_amt, end, axis=-1):
"""
Prepend linear ramp along `axis`.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to prepend.
end : scalar
Constant value to use. For best results it should be of type `arr.dtype`;
if not, it will be cast to `arr.dtype`.
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` values prepended along `axis`. The
prepended region ramps linearly from the edge value to `end`.
"""
if pad_amt == 0:
return arr
# Generate shape for final concatenated array
padshape = tuple(x if i != axis else pad_amt
for (i, x) in enumerate(arr.shape))
# Generate an n-dimensional array incrementing along `axis`
ramp_arr = _arange_ndarray(arr, padshape, axis,
reverse=True).astype(np.float64)
# Appropriate slicing to extract n-dimensional edge along `axis`
edge_slice = tuple(slice(None) if i != axis else 0
for (i, x) in enumerate(arr.shape))
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
# Extract edge, reshape to original rank, and extend along `axis`
edge_pad = arr[edge_slice].reshape(pad_singleton).repeat(pad_amt, axis)
# Linear ramp
slope = (end - edge_pad) / float(pad_amt)
ramp_arr = ramp_arr * slope
ramp_arr += edge_pad
_round_ifneeded(ramp_arr, arr.dtype)
# Ramp values will most likely be float, cast them to the same type as arr
return np.concatenate((ramp_arr.astype(arr.dtype), arr), axis=axis)
def _append_ramp(arr, pad_amt, end, axis=-1):
"""
Append linear ramp along `axis`.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to append.
end : scalar
Constant value to use. For best results it should be of type `arr.dtype`;
if not, it will be cast to `arr.dtype`.
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` values appended along `axis`. The
appended region ramps linearly from the edge value to `end`.
"""
if pad_amt == 0:
return arr
# Generate shape for final concatenated array
padshape = tuple(x if i != axis else pad_amt
for (i, x) in enumerate(arr.shape))
# Generate an n-dimensional array incrementing along `axis`
ramp_arr = _arange_ndarray(arr, padshape, axis,
reverse=False).astype(np.float64)
# Slice a chunk from the edge to calculate stats on
edge_slice = tuple(slice(None) if i != axis else -1
for (i, x) in enumerate(arr.shape))
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
# Extract edge, reshape to original rank, and extend along `axis`
edge_pad = arr[edge_slice].reshape(pad_singleton).repeat(pad_amt, axis)
# Linear ramp
slope = (end - edge_pad) / float(pad_amt)
ramp_arr = ramp_arr * slope
ramp_arr += edge_pad
_round_ifneeded(ramp_arr, arr.dtype)
# Ramp values will most likely be float, cast them to the same type as arr
return np.concatenate((arr, ramp_arr.astype(arr.dtype)), axis=axis)
def _prepend_max(arr, pad_amt, num, axis=-1):
"""
Prepend `pad_amt` maximum values along `axis`.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to prepend.
num : int
Depth into `arr` along `axis` to calculate maximum.
Range: [1, `arr.shape[axis]`] or None (entire axis)
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` values prepended along `axis`. The
prepended region is the maximum of the first `num` values along
`axis`.
"""
if pad_amt == 0:
return arr
# Equivalent to edge padding for single value, so do that instead
if num == 1:
return _prepend_edge(arr, pad_amt, axis)
# Use entire array if `num` is too large
if num is not None:
if num >= arr.shape[axis]:
num = None
# Slice a chunk from the edge to calculate stats on
max_slice = tuple(slice(None) if i != axis else slice(num)
for (i, x) in enumerate(arr.shape))
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
# Extract slice, calculate max, reshape to add singleton dimension back
max_chunk = arr[max_slice].max(axis=axis).reshape(pad_singleton)
# Concatenate `arr` with `max_chunk`, extended along `axis` by `pad_amt`
return np.concatenate((max_chunk.repeat(pad_amt, axis=axis), arr),
axis=axis)
def _append_max(arr, pad_amt, num, axis=-1):
"""
Pad one `axis` of `arr` with the maximum of the last `num` elements.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to append.
num : int
Depth into `arr` along `axis` to calculate maximum.
Range: [1, `arr.shape[axis]`] or None (entire axis)
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` values appended along `axis`. The
appended region is the maximum of the final `num` values along `axis`.
"""
if pad_amt == 0:
return arr
# Equivalent to edge padding for single value, so do that instead
if num == 1:
return _append_edge(arr, pad_amt, axis)
# Use entire array if `num` is too large
if num is not None:
if num >= arr.shape[axis]:
num = None
# Slice a chunk from the edge to calculate stats on
end = arr.shape[axis] - 1
if num is not None:
max_slice = tuple(
slice(None) if i != axis else slice(end, end - num, -1)
for (i, x) in enumerate(arr.shape))
else:
max_slice = tuple(slice(None) for x in arr.shape)
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
# Extract slice, calculate max, reshape to add singleton dimension back
max_chunk = arr[max_slice].max(axis=axis).reshape(pad_singleton)
# Concatenate `arr` with `max_chunk`, extended along `axis` by `pad_amt`
return np.concatenate((arr, max_chunk.repeat(pad_amt, axis=axis)),
axis=axis)
def _prepend_mean(arr, pad_amt, num, axis=-1):
"""
Prepend `pad_amt` mean values along `axis`.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to prepend.
num : int
Depth into `arr` along `axis` to calculate mean.
Range: [1, `arr.shape[axis]`] or None (entire axis)
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` values prepended along `axis`. The
prepended region is the mean of the first `num` values along `axis`.
"""
if pad_amt == 0:
return arr
# Equivalent to edge padding for single value, so do that instead
if num == 1:
return _prepend_edge(arr, pad_amt, axis)
# Use entire array if `num` is too large
if num is not None:
if num >= arr.shape[axis]:
num = None
# Slice a chunk from the edge to calculate stats on
mean_slice = tuple(slice(None) if i != axis else slice(num)
for (i, x) in enumerate(arr.shape))
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
# Extract slice, calculate mean, reshape to add singleton dimension back
mean_chunk = arr[mean_slice].mean(axis).reshape(pad_singleton)
_round_ifneeded(mean_chunk, arr.dtype)
# Concatenate `arr` with `mean_chunk`, extended along `axis` by `pad_amt`
return np.concatenate((mean_chunk.repeat(pad_amt, axis).astype(arr.dtype),
arr), axis=axis)
def _append_mean(arr, pad_amt, num, axis=-1):
"""
Append `pad_amt` mean values along `axis`.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to append.
num : int
Depth into `arr` along `axis` to calculate mean.
Range: [1, `arr.shape[axis]`] or None (entire axis)
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` values appended along `axis`. The
appended region is the mean of the final `num` values along `axis`.
"""
if pad_amt == 0:
return arr
# Equivalent to edge padding for single value, so do that instead
if num == 1:
return _append_edge(arr, pad_amt, axis)
# Use entire array if `num` is too large
if num is not None:
if num >= arr.shape[axis]:
num = None
# Slice a chunk from the edge to calculate stats on
end = arr.shape[axis] - 1
if num is not None:
mean_slice = tuple(
slice(None) if i != axis else slice(end, end - num, -1)
for (i, x) in enumerate(arr.shape))
else:
mean_slice = tuple(slice(None) for x in arr.shape)
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
# Extract slice, calculate mean, reshape to add singleton dimension back
mean_chunk = arr[mean_slice].mean(axis=axis).reshape(pad_singleton)
_round_ifneeded(mean_chunk, arr.dtype)
# Concatenate `arr` with `mean_chunk`, extended along `axis` by `pad_amt`
return np.concatenate(
(arr, mean_chunk.repeat(pad_amt, axis).astype(arr.dtype)), axis=axis)
def _prepend_med(arr, pad_amt, num, axis=-1):
"""
Prepend `pad_amt` median values along `axis`.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to prepend.
num : int
Depth into `arr` along `axis` to calculate median.
Range: [1, `arr.shape[axis]`] or None (entire axis)
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` values prepended along `axis`. The
prepended region is the median of the first `num` values along `axis`.
"""
if pad_amt == 0:
return arr
# Equivalent to edge padding for single value, so do that instead
if num == 1:
return _prepend_edge(arr, pad_amt, axis)
# Use entire array if `num` is too large
if num is not None:
if num >= arr.shape[axis]:
num = None
# Slice a chunk from the edge to calculate stats on
med_slice = tuple(slice(None) if i != axis else slice(num)
for (i, x) in enumerate(arr.shape))
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
# Extract slice, calculate median, reshape to add singleton dimension back
med_chunk = np.median(arr[med_slice], axis=axis).reshape(pad_singleton)
_round_ifneeded(med_chunk, arr.dtype)
# Concatenate `arr` with `med_chunk`, extended along `axis` by `pad_amt`
return np.concatenate(
(med_chunk.repeat(pad_amt, axis).astype(arr.dtype), arr), axis=axis)
def _append_med(arr, pad_amt, num, axis=-1):
"""
Append `pad_amt` median values along `axis`.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to append.
num : int
Depth into `arr` along `axis` to calculate median.
Range: [1, `arr.shape[axis]`] or None (entire axis)
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` values appended along `axis`. The
appended region is the median of the final `num` values along `axis`.
"""
if pad_amt == 0:
return arr
# Equivalent to edge padding for single value, so do that instead
if num == 1:
return _append_edge(arr, pad_amt, axis)
# Use entire array if `num` is too large
if num is not None:
if num >= arr.shape[axis]:
num = None
# Slice a chunk from the edge to calculate stats on
end = arr.shape[axis] - 1
if num is not None:
med_slice = tuple(
slice(None) if i != axis else slice(end, end - num, -1)
for (i, x) in enumerate(arr.shape))
else:
med_slice = tuple(slice(None) for x in arr.shape)
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
# Extract slice, calculate median, reshape to add singleton dimension back
med_chunk = np.median(arr[med_slice], axis=axis).reshape(pad_singleton)
_round_ifneeded(med_chunk, arr.dtype)
# Concatenate `arr` with `med_chunk`, extended along `axis` by `pad_amt`
return np.concatenate(
(arr, med_chunk.repeat(pad_amt, axis).astype(arr.dtype)), axis=axis)
def _prepend_min(arr, pad_amt, num, axis=-1):
"""
Prepend `pad_amt` minimum values along `axis`.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to prepend.
num : int
Depth into `arr` along `axis` to calculate minimum.
Range: [1, `arr.shape[axis]`] or None (entire axis)
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` values prepended along `axis`. The
prepended region is the minimum of the first `num` values along
`axis`.
"""
if pad_amt == 0:
return arr
# Equivalent to edge padding for single value, so do that instead
if num == 1:
return _prepend_edge(arr, pad_amt, axis)
# Use entire array if `num` is too large
if num is not None:
if num >= arr.shape[axis]:
num = None
# Slice a chunk from the edge to calculate stats on
min_slice = tuple(slice(None) if i != axis else slice(num)
for (i, x) in enumerate(arr.shape))
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
# Extract slice, calculate min, reshape to add singleton dimension back
min_chunk = arr[min_slice].min(axis=axis).reshape(pad_singleton)
# Concatenate `arr` with `min_chunk`, extended along `axis` by `pad_amt`
return np.concatenate((min_chunk.repeat(pad_amt, axis=axis), arr),
axis=axis)
def _append_min(arr, pad_amt, num, axis=-1):
"""
Append `pad_amt` minimum values along `axis`.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : int
Amount of padding to append.
num : int
Depth into `arr` along `axis` to calculate minimum.
Range: [1, `arr.shape[axis]`] or None (entire axis)
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt` values appended along `axis`. The
appended region is the minimum of the final `num` values along `axis`.
"""
if pad_amt == 0:
return arr
# Equivalent to edge padding for single value, so do that instead
if num == 1:
return _append_edge(arr, pad_amt, axis)
# Use entire array if `num` is too large
if num is not None:
if num >= arr.shape[axis]:
num = None
# Slice a chunk from the edge to calculate stats on
end = arr.shape[axis] - 1
if num is not None:
min_slice = tuple(
slice(None) if i != axis else slice(end, end - num, -1)
for (i, x) in enumerate(arr.shape))
else:
min_slice = tuple(slice(None) for x in arr.shape)
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
# Extract slice, calculate min, reshape to add singleton dimension back
min_chunk = arr[min_slice].min(axis=axis).reshape(pad_singleton)
# Concatenate `arr` with `min_chunk`, extended along `axis` by `pad_amt`
return np.concatenate((arr, min_chunk.repeat(pad_amt, axis=axis)),
axis=axis)
def _pad_ref(arr, pad_amt, method, axis=-1):
"""
Pad `axis` of `arr` by reflection.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : tuple of ints, length 2
Padding to (prepend, append) along `axis`.
method : str
Controls method of reflection; options are 'even' or 'odd'.
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt[0]` values prepended and `pad_amt[1]`
values appended along `axis`. Both regions are padded with reflected
values from the original array.
Notes
-----
This algorithm does not pad with repetition, i.e. the edges are not
repeated in the reflection. For that behavior, use `mode='symmetric'`.
The modes 'reflect', 'symmetric', and 'wrap' must be padded with a
single function, lest the indexing tricks in non-integer multiples of the
original shape would violate repetition in the final iteration.
"""
# Implicit booleanness to test for zero (or None) in any scalar type
if pad_amt[0] == 0 and pad_amt[1] == 0:
return arr
##########################################################################
# Prepended region
# Slice off a reverse indexed chunk from near edge to pad `arr` before
ref_slice = tuple(slice(None) if i != axis else slice(pad_amt[0], 0, -1)
for (i, x) in enumerate(arr.shape))
ref_chunk1 = arr[ref_slice]
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
if pad_amt[0] == 1:
ref_chunk1 = ref_chunk1.reshape(pad_singleton)
# Memory/computationally more expensive, only do this if `method='odd'`
if 'odd' in method and pad_amt[0] > 0:
edge_slice1 = tuple(slice(None) if i != axis else 0
for (i, x) in enumerate(arr.shape))
edge_chunk = arr[edge_slice1].reshape(pad_singleton)
ref_chunk1 = 2 * edge_chunk - ref_chunk1
del edge_chunk
##########################################################################
# Appended region
# Slice off a reverse indexed chunk from far edge to pad `arr` after
start = arr.shape[axis] - pad_amt[1] - 1
end = arr.shape[axis] - 1
ref_slice = tuple(slice(None) if i != axis else slice(start, end)
for (i, x) in enumerate(arr.shape))
rev_idx = tuple(slice(None) if i != axis else slice(None, None, -1)
for (i, x) in enumerate(arr.shape))
ref_chunk2 = arr[ref_slice][rev_idx]
if pad_amt[1] == 1:
ref_chunk2 = ref_chunk2.reshape(pad_singleton)
if 'odd' in method:
edge_slice2 = tuple(slice(None) if i != axis else -1
for (i, x) in enumerate(arr.shape))
edge_chunk = arr[edge_slice2].reshape(pad_singleton)
ref_chunk2 = 2 * edge_chunk - ref_chunk2
del edge_chunk
# Concatenate `arr` with both chunks, extending along `axis`
return np.concatenate((ref_chunk1, arr, ref_chunk2), axis=axis)
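# Illustrative comparison (added example, using the public pad() defined later
# in this module) of the two closely related modes: 'reflect' omits the edge
# value from the mirrored region, while 'symmetric' (see _pad_sym) repeats it.
#
#     >>> pad([1, 2, 3], (2, 2), 'reflect')
#     array([3, 2, 1, 2, 3, 2, 1])
#     >>> pad([1, 2, 3], (2, 2), 'symmetric')
#     array([2, 1, 1, 2, 3, 3, 2])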
def _pad_sym(arr, pad_amt, method, axis=-1):
"""
Pad `axis` of `arr` by symmetry.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : tuple of ints, length 2
Padding to (prepend, append) along `axis`.
method : str
Controls method of symmetry; options are 'even' or 'odd'.
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt[0]` values prepended and `pad_amt[1]`
values appended along `axis`. Both regions are padded with symmetric
values from the original array.
Notes
-----
This algorithm DOES pad with repetition, i.e. the edges are repeated.
For padding without repeated edges, use `mode='reflect'`.
The modes 'reflect', 'symmetric', and 'wrap' must be padded with a
single function, lest the indexing tricks in non-integer multiples of the
original shape would violate repetition in the final iteration.
"""
# Implicit booleanness to test for zero (or None) in any scalar type
if pad_amt[0] == 0 and pad_amt[1] == 0:
return arr
##########################################################################
# Prepended region
# Slice off a reverse indexed chunk from near edge to pad `arr` before
sym_slice = tuple(slice(None) if i != axis else slice(0, pad_amt[0])
for (i, x) in enumerate(arr.shape))
rev_idx = tuple(slice(None) if i != axis else slice(None, None, -1)
for (i, x) in enumerate(arr.shape))
sym_chunk1 = arr[sym_slice][rev_idx]
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
if pad_amt[0] == 1:
sym_chunk1 = sym_chunk1.reshape(pad_singleton)
# Memory/computationally more expensive, only do this if `method='odd'`
if 'odd' in method and pad_amt[0] > 0:
edge_slice1 = tuple(slice(None) if i != axis else 0
for (i, x) in enumerate(arr.shape))
edge_chunk = arr[edge_slice1].reshape(pad_singleton)
sym_chunk1 = 2 * edge_chunk - sym_chunk1
del edge_chunk
##########################################################################
# Appended region
# Slice off a reverse indexed chunk from far edge to pad `arr` after
start = arr.shape[axis] - pad_amt[1]
end = arr.shape[axis]
sym_slice = tuple(slice(None) if i != axis else slice(start, end)
for (i, x) in enumerate(arr.shape))
sym_chunk2 = arr[sym_slice][rev_idx]
if pad_amt[1] == 1:
sym_chunk2 = sym_chunk2.reshape(pad_singleton)
if 'odd' in method:
edge_slice2 = tuple(slice(None) if i != axis else -1
for (i, x) in enumerate(arr.shape))
edge_chunk = arr[edge_slice2].reshape(pad_singleton)
sym_chunk2 = 2 * edge_chunk - sym_chunk2
del edge_chunk
# Concatenate `arr` with both chunks, extending along `axis`
return np.concatenate((sym_chunk1, arr, sym_chunk2), axis=axis)
def _pad_wrap(arr, pad_amt, axis=-1):
"""
Pad `axis` of `arr` via wrapping.
Parameters
----------
arr : ndarray
Input array of arbitrary shape.
pad_amt : tuple of ints, length 2
Padding to (prepend, append) along `axis`.
axis : int
Axis along which to pad `arr`.
Returns
-------
padarr : ndarray
Output array, with `pad_amt[0]` values prepended and `pad_amt[1]`
values appended along `axis`. Both regions are padded with wrapped values
from the opposite end of `axis`.
Notes
-----
This method of padding is also known as 'tile' or 'tiling'.
The modes 'reflect', 'symmetric', and 'wrap' must be padded with a
single function, lest the indexing tricks in non-integer multiples of the
original shape would violate repetition in the final iteration.
"""
# Implicit booleanness to test for zero (or None) in any scalar type
if pad_amt[0] == 0 and pad_amt[1] == 0:
return arr
##########################################################################
# Prepended region
# Slice off a reverse indexed chunk from near edge to pad `arr` before
start = arr.shape[axis] - pad_amt[0]
end = arr.shape[axis]
wrap_slice = tuple(slice(None) if i != axis else slice(start, end)
for (i, x) in enumerate(arr.shape))
wrap_chunk1 = arr[wrap_slice]
# Shape to restore singleton dimension after slicing
pad_singleton = tuple(x if i != axis else 1
for (i, x) in enumerate(arr.shape))
if pad_amt[0] == 1:
wrap_chunk1 = wrap_chunk1.reshape(pad_singleton)
##########################################################################
# Appended region
# Slice off a reverse indexed chunk from far edge to pad `arr` after
wrap_slice = tuple(slice(None) if i != axis else slice(0, pad_amt[1])
for (i, x) in enumerate(arr.shape))
wrap_chunk2 = arr[wrap_slice]
if pad_amt[1] == 1:
wrap_chunk2 = wrap_chunk2.reshape(pad_singleton)
# Concatenate `arr` with both chunks, extending along `axis`
return np.concatenate((wrap_chunk1, arr, wrap_chunk2), axis=axis)
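# Illustrative behaviour of the 'wrap' (tiling) mode this helper implements
# (added example, using the public pad() defined later in this module):
#
#     >>> pad([1, 2, 3], (2, 2), 'wrap')
#     array([2, 3, 1, 2, 3, 1, 2])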
def _normalize_shape(ndarray, shape, cast_to_int=True):
"""
Private function which does some checks and normalizes the possibly
much simpler representations of 'pad_width', 'stat_length',
'constant_values', 'end_values'.
Parameters
----------
narray : ndarray
Input ndarray
shape : {sequence, array_like, float, int}, optional
The width of padding (pad_width), the number of elements on the
edge of the narray used for statistics (stat_length), the constant
value(s) to use when filling padded regions (constant_values), or the
endpoint target(s) for linear ramps (end_values).
((before_1, after_1), ... (before_N, after_N)) unique number of
elements for each axis where `N` is rank of `narray`.
((before, after),) yields same before and after constants for each
axis.
(constant,) or val is a shortcut for before = after = constant for
all axes.
cast_to_int : bool, optional
Controls if values in ``shape`` will be rounded and cast to int
before being returned.
Returns
-------
normalized_shape : tuple of tuples
val => ((val, val), (val, val), ...)
[[val1, val2], [val3, val4], ...] => ((val1, val2), (val3, val4), ...)
((val1, val2), (val3, val4), ...) => no change
[[val1, val2], ] => ((val1, val2), (val1, val2), ...)
((val1, val2), ) => ((val1, val2), (val1, val2), ...)
[[val , ], ] => ((val, val), (val, val), ...)
((val , ), ) => ((val, val), (val, val), ...)
"""
ndims = ndarray.ndim
# Shortcut shape=None
if shape is None:
return ((None, None), ) * ndims
# Convert any input `info` to a NumPy array
arr = np.asarray(shape)
# Switch based on what input looks like
if arr.ndim <= 1:
if arr.shape == () or arr.shape == (1,):
# Single scalar input
# Create new array of ones, multiply by the scalar
arr = np.ones((ndims, 2), dtype=ndarray.dtype) * arr
elif arr.shape == (2,):
# Apply padding (before, after) each axis
# Create new axis 0, repeat along it for every axis
arr = arr[np.newaxis, :].repeat(ndims, axis=0)
else:
fmt = "Unable to create correctly shaped tuple from %s"
raise ValueError(fmt % (shape,))
elif arr.ndim == 2:
if arr.shape[1] == 1 and arr.shape[0] == ndims:
# Padded before and after by the same amount
arr = arr.repeat(2, axis=1)
elif arr.shape[0] == ndims:
# Input correctly formatted, pass it on as `arr`
arr = shape
else:
fmt = "Unable to create correctly shaped tuple from %s"
raise ValueError(fmt % (shape,))
else:
fmt = "Unable to create correctly shaped tuple from %s"
raise ValueError(fmt % (shape,))
# Cast if necessary
if cast_to_int is True:
arr = np.round(arr).astype(int)
# Convert list of lists to tuple of tuples
return tuple(tuple(axis) for axis in arr.tolist())
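# Illustrative expansions (added; assuming a rank-2 input array), matching the
# mapping table in the docstring above:
#
#     >>> a = np.zeros((3, 4))
#     >>> _normalize_shape(a, 3)
#     ((3, 3), (3, 3))
#     >>> _normalize_shape(a, (1, 2))
#     ((1, 2), (1, 2))
#     >>> _normalize_shape(a, ((1, 2), (3, 4)))
#     ((1, 2), (3, 4))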
def _validate_lengths(narray, number_elements):
"""
Private function which does some checks and reformats pad_width and
stat_length using _normalize_shape.
Parameters
----------
narray : ndarray
Input ndarray
number_elements : {sequence, int}, optional
The width of padding (pad_width) or the number of elements on the edge
of the narray used for statistics (stat_length).
((before_1, after_1), ... (before_N, after_N)) unique number of
elements for each axis.
((before, after),) yields same before and after constants for each
axis.
(constant,) or int is a shortcut for before = after = constant for all
axes.
Returns
-------
_validate_lengths : tuple of tuples
int => ((int, int), (int, int), ...)
[[int1, int2], [int3, int4], ...] => ((int1, int2), (int3, int4), ...)
((int1, int2), (int3, int4), ...) => no change
[[int1, int2], ] => ((int1, int2), (int1, int2), ...)
((int1, int2), ) => ((int1, int2), (int1, int2), ...)
[[int , ], ] => ((int, int), (int, int), ...)
((int , ), ) => ((int, int), (int, int), ...)
"""
normshp = _normalize_shape(narray, number_elements)
for i in normshp:
chk = [1 if x is None else x for x in i]
chk = [1 if x >= 0 else -1 for x in chk]
if (chk[0] < 0) or (chk[1] < 0):
fmt = "%s cannot contain negative values."
raise ValueError(fmt % (number_elements,))
return normshp
###############################################################################
# Public functions
def pad(array, pad_width, mode=None, **kwargs):
"""
Pads an array.
Parameters
----------
array : array_like of rank N
Input array
pad_width : {sequence, array_like, int}
Number of values padded to the edges of each axis.
((before_1, after_1), ... (before_N, after_N)) unique pad widths
for each axis.
((before, after),) yields same before and after pad for each axis.
(pad,) or int is a shortcut for before = after = pad width for all
axes.
mode : str or function
One of the following string values or a user supplied function.
'constant'
Pads with a constant value.
'edge'
Pads with the edge values of array.
'linear_ramp'
Pads with the linear ramp between end_value and the
array edge value.
'maximum'
Pads with the maximum value of all or part of the
vector along each axis.
'mean'
Pads with the mean value of all or part of the
vector along each axis.
'median'
Pads with the median value of all or part of the
vector along each axis.
'minimum'
Pads with the minimum value of all or part of the
vector along each axis.
'reflect'
Pads with the reflection of the vector mirrored on
the first and last values of the vector along each
axis.
'symmetric'
Pads with the reflection of the vector mirrored
along the edge of the array.
'wrap'
Pads with the wrap of the vector along the axis.
The first values are used to pad the end and the
end values are used to pad the beginning.
<function>
Padding function, see Notes.
stat_length : sequence or int, optional
Used in 'maximum', 'mean', 'median', and 'minimum'. Number of
values at edge of each axis used to calculate the statistic value.
((before_1, after_1), ... (before_N, after_N)) unique statistic
lengths for each axis.
((before, after),) yields same before and after statistic lengths
for each axis.
(stat_length,) or int is a shortcut for before = after = statistic
length for all axes.
Default is ``None``, to use the entire axis.
constant_values : sequence or int, optional
Used in 'constant'. The values to set the padded values for each
axis.
((before_1, after_1), ... (before_N, after_N)) unique pad constants
for each axis.
((before, after),) yields same before and after constants for each
axis.
(constant,) or int is a shortcut for before = after = constant for
all axes.
Default is 0.
end_values : sequence or int, optional
Used in 'linear_ramp'. The values used for the ending value of the
linear_ramp and that will form the edge of the padded array.
((before_1, after_1), ... (before_N, after_N)) unique end values
for each axis.
((before, after),) yields same before and after end values for each
axis.
(constant,) or int is a shortcut for before = after = end value for
all axes.
Default is 0.
reflect_type : {'even', 'odd'}, optional
Used in 'reflect', and 'symmetric'. The 'even' style is the
default with an unaltered reflection around the edge value. For
the 'odd' style, the extended part of the array is created by
subtracting the reflected values from two times the edge value.
Returns
-------
pad : ndarray
Padded array of rank equal to `array` with shape increased
according to `pad_width`.
Notes
-----
.. versionadded:: 1.7.0
For an array with rank greater than 1, some of the padding of later
axes is calculated from padding of previous axes. This is easiest to
think about with a rank 2 array where the corners of the padded array
are calculated by using padded values from the first axis.
The padding function, if used, should return a rank 1 array equal in
length to the vector argument with padded values replaced. It has the
following signature::
padding_func(vector, iaxis_pad_width, iaxis, **kwargs)
where
vector : ndarray
A rank 1 array already padded with zeros. Padded values are
vector[:pad_tuple[0]] and vector[-pad_tuple[1]:].
iaxis_pad_width : tuple
A 2-tuple of ints, iaxis_pad_width[0] represents the number of
values padded at the beginning of vector where
iaxis_pad_width[1] represents the number of values padded at
the end of vector.
iaxis : int
The axis currently being calculated.
kwargs : misc
Any keyword arguments the function requires.
Examples
--------
>>> a = [1, 2, 3, 4, 5]
>>> np.lib.pad(a, (2,3), 'constant', constant_values=(4, 6))
array([4, 4, 1, 2, 3, 4, 5, 6, 6, 6])
>>> np.lib.pad(a, (2, 3), 'edge')
array([1, 1, 1, 2, 3, 4, 5, 5, 5, 5])
>>> np.lib.pad(a, (2, 3), 'linear_ramp', end_values=(5, -4))
array([ 5, 3, 1, 2, 3, 4, 5, 2, -1, -4])
>>> np.lib.pad(a, (2,), 'maximum')
array([5, 5, 1, 2, 3, 4, 5, 5, 5])
>>> np.lib.pad(a, (2,), 'mean')
array([3, 3, 1, 2, 3, 4, 5, 3, 3])
>>> np.lib.pad(a, (2,), 'median')
array([3, 3, 1, 2, 3, 4, 5, 3, 3])
>>> a = [[1, 2], [3, 4]]
>>> np.lib.pad(a, ((3, 2), (2, 3)), 'minimum')
array([[1, 1, 1, 2, 1, 1, 1],
[1, 1, 1, 2, 1, 1, 1],
[1, 1, 1, 2, 1, 1, 1],
[1, 1, 1, 2, 1, 1, 1],
[3, 3, 3, 4, 3, 3, 3],
[1, 1, 1, 2, 1, 1, 1],
[1, 1, 1, 2, 1, 1, 1]])
>>> a = [1, 2, 3, 4, 5]
>>> np.lib.pad(a, (2, 3), 'reflect')
array([3, 2, 1, 2, 3, 4, 5, 4, 3, 2])
>>> np.lib.pad(a, (2, 3), 'reflect', reflect_type='odd')
array([-1, 0, 1, 2, 3, 4, 5, 6, 7, 8])
>>> np.lib.pad(a, (2, 3), 'symmetric')
array([2, 1, 1, 2, 3, 4, 5, 5, 4, 3])
>>> np.lib.pad(a, (2, 3), 'symmetric', reflect_type='odd')
array([0, 1, 1, 2, 3, 4, 5, 5, 6, 7])
>>> np.lib.pad(a, (2, 3), 'wrap')
array([4, 5, 1, 2, 3, 4, 5, 1, 2, 3])
>>> def padwithtens(vector, pad_width, iaxis, kwargs):
... vector[:pad_width[0]] = 10
... vector[-pad_width[1]:] = 10
... return vector
>>> a = np.arange(6)
>>> a = a.reshape((2, 3))
>>> np.lib.pad(a, 2, padwithtens)
array([[10, 10, 10, 10, 10, 10, 10],
[10, 10, 10, 10, 10, 10, 10],
[10, 10, 0, 1, 2, 10, 10],
[10, 10, 3, 4, 5, 10, 10],
[10, 10, 10, 10, 10, 10, 10],
[10, 10, 10, 10, 10, 10, 10]])
"""
if not np.asarray(pad_width).dtype.kind == 'i':
raise TypeError('`pad_width` must be of integral type.')
narray = np.array(array)
pad_width = _validate_lengths(narray, pad_width)
allowedkwargs = {
'constant': ['constant_values'],
'edge': [],
'linear_ramp': ['end_values'],
'maximum': ['stat_length'],
'mean': ['stat_length'],
'median': ['stat_length'],
'minimum': ['stat_length'],
'reflect': ['reflect_type'],
'symmetric': ['reflect_type'],
'wrap': [],
}
kwdefaults = {
'stat_length': None,
'constant_values': 0,
'end_values': 0,
'reflect_type': 'even',
}
if isinstance(mode, str):
# Make sure have allowed kwargs appropriate for mode
for key in kwargs:
if key not in allowedkwargs[mode]:
raise ValueError('%s keyword not in allowed keywords %s' %
(key, allowedkwargs[mode]))
# Set kwarg defaults
for kw in allowedkwargs[mode]:
kwargs.setdefault(kw, kwdefaults[kw])
# Need to only normalize particular keywords.
for i in kwargs:
if i == 'stat_length':
kwargs[i] = _validate_lengths(narray, kwargs[i])
if i in ['end_values', 'constant_values']:
kwargs[i] = _normalize_shape(narray, kwargs[i],
cast_to_int=False)
elif mode is None:
raise ValueError('Keyword "mode" must be a function or one of %s.' %
(list(allowedkwargs.keys()),))
else:
# Drop back to old, slower np.apply_along_axis mode for user-supplied
# vector function
function = mode
# Create a new padded array
rank = list(range(len(narray.shape)))
total_dim_increase = [np.sum(pad_width[i]) for i in rank]
offset_slices = [slice(pad_width[i][0],
pad_width[i][0] + narray.shape[i])
for i in rank]
new_shape = np.array(narray.shape) + total_dim_increase
newmat = np.zeros(new_shape, narray.dtype)
# Insert the original array into the padded array
newmat[offset_slices] = narray
# This is the core of pad ...
for iaxis in rank:
np.apply_along_axis(function,
iaxis,
newmat,
pad_width[iaxis],
iaxis,
kwargs)
return newmat
# If we get here, use new padding method
newmat = narray.copy()
# API preserved, but completely new algorithm which pads by building the
# entire block to pad before/after `arr` with in one step, for each axis.
if mode == 'constant':
for axis, ((pad_before, pad_after), (before_val, after_val)) \
in enumerate(zip(pad_width, kwargs['constant_values'])):
newmat = _prepend_const(newmat, pad_before, before_val, axis)
newmat = _append_const(newmat, pad_after, after_val, axis)
elif mode == 'edge':
for axis, (pad_before, pad_after) in enumerate(pad_width):
newmat = _prepend_edge(newmat, pad_before, axis)
newmat = _append_edge(newmat, pad_after, axis)
elif mode == 'linear_ramp':
for axis, ((pad_before, pad_after), (before_val, after_val)) \
in enumerate(zip(pad_width, kwargs['end_values'])):
newmat = _prepend_ramp(newmat, pad_before, before_val, axis)
newmat = _append_ramp(newmat, pad_after, after_val, axis)
elif mode == 'maximum':
for axis, ((pad_before, pad_after), (chunk_before, chunk_after)) \
in enumerate(zip(pad_width, kwargs['stat_length'])):
newmat = _prepend_max(newmat, pad_before, chunk_before, axis)
newmat = _append_max(newmat, pad_after, chunk_after, axis)
elif mode == 'mean':
for axis, ((pad_before, pad_after), (chunk_before, chunk_after)) \
in enumerate(zip(pad_width, kwargs['stat_length'])):
newmat = _prepend_mean(newmat, pad_before, chunk_before, axis)
newmat = _append_mean(newmat, pad_after, chunk_after, axis)
elif mode == 'median':
for axis, ((pad_before, pad_after), (chunk_before, chunk_after)) \
in enumerate(zip(pad_width, kwargs['stat_length'])):
newmat = _prepend_med(newmat, pad_before, chunk_before, axis)
newmat = _append_med(newmat, pad_after, chunk_after, axis)
elif mode == 'minimum':
for axis, ((pad_before, pad_after), (chunk_before, chunk_after)) \
in enumerate(zip(pad_width, kwargs['stat_length'])):
newmat = _prepend_min(newmat, pad_before, chunk_before, axis)
newmat = _append_min(newmat, pad_after, chunk_after, axis)
elif mode == 'reflect':
for axis, (pad_before, pad_after) in enumerate(pad_width):
# Recursive padding along any axis where `pad_amt` is too large
# for indexing tricks. We can only safely pad the original axis
# length, to keep the period of the reflections consistent.
if ((pad_before > 0) or
(pad_after > 0)) and newmat.shape[axis] == 1:
# Extending singleton dimension for 'reflect' is legacy
# behavior; it really should raise an error.
newmat = _prepend_edge(newmat, pad_before, axis)
newmat = _append_edge(newmat, pad_after, axis)
continue
method = kwargs['reflect_type']
safe_pad = newmat.shape[axis] - 1
while ((pad_before > safe_pad) or (pad_after > safe_pad)):
pad_iter_b = min(safe_pad,
safe_pad * (pad_before // safe_pad))
pad_iter_a = min(safe_pad, safe_pad * (pad_after // safe_pad))
newmat = _pad_ref(newmat, (pad_iter_b,
pad_iter_a), method, axis)
pad_before -= pad_iter_b
pad_after -= pad_iter_a
safe_pad += pad_iter_b + pad_iter_a
newmat = _pad_ref(newmat, (pad_before, pad_after), method, axis)
elif mode == 'symmetric':
for axis, (pad_before, pad_after) in enumerate(pad_width):
# Recursive padding along any axis where `pad_amt` is too large
# for indexing tricks. We can only safely pad the original axis
# length, to keep the period of the reflections consistent.
method = kwargs['reflect_type']
safe_pad = newmat.shape[axis]
while ((pad_before > safe_pad) or
(pad_after > safe_pad)):
pad_iter_b = min(safe_pad,
safe_pad * (pad_before // safe_pad))
pad_iter_a = min(safe_pad, safe_pad * (pad_after // safe_pad))
newmat = _pad_sym(newmat, (pad_iter_b,
pad_iter_a), method, axis)
pad_before -= pad_iter_b
pad_after -= pad_iter_a
safe_pad += pad_iter_b + pad_iter_a
newmat = _pad_sym(newmat, (pad_before, pad_after), method, axis)
elif mode == 'wrap':
for axis, (pad_before, pad_after) in enumerate(pad_width):
# Recursive padding along any axis where `pad_amt` is too large
# for indexing tricks. We can only safely pad the original axis
# length, to keep the period of the reflections consistent.
safe_pad = newmat.shape[axis]
while ((pad_before > safe_pad) or
(pad_after > safe_pad)):
pad_iter_b = min(safe_pad,
safe_pad * (pad_before // safe_pad))
pad_iter_a = min(safe_pad, safe_pad * (pad_after // safe_pad))
newmat = _pad_wrap(newmat, (pad_iter_b, pad_iter_a), axis)
pad_before -= pad_iter_b
pad_after -= pad_iter_a
safe_pad += pad_iter_b + pad_iter_a
newmat = _pad_wrap(newmat, (pad_before, pad_after), axis)
return newmat
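# --- Hedged illustration, not part of the module above: the mode dispatch above is the
# --- machinery behind numpy's public np.pad; the lines below only show the documented
# --- behaviour of each mode on a small 1-D array (internal helpers such as _pad_ref,
# --- _pad_sym and _pad_wrap stay private).
import numpy as np

a = np.array([1, 2, 3])
np.pad(a, 2, mode='edge')                         # [1 1 1 2 3 3 3]
np.pad(a, 2, mode='constant', constant_values=9)  # [9 9 1 2 3 9 9]
np.pad(a, 2, mode='reflect')                      # [3 2 1 2 3 2 1]  edge value not repeated
np.pad(a, 2, mode='symmetric')                    # [2 1 1 2 3 3 2]  edge value repeated
np.pad(a, 2, mode='wrap')                         # [2 3 1 2 3 1 2]  periodic tiling
# Pad widths larger than the axis length are handled by the while-loops above, which pad
# at most one safe period per pass so the reflections stay consistent.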
| [
[
[
138,
146
]
],
[
[
148,
163
]
],
[
[
165,
179
]
],
[
[
188,
199
],
[
1466,
1468
],
[
1523,
1525
],
[
1976,
1978
],
[
1997,
1999
],
[
2805,
2807
],
[
2821,
2823
],
[
2930,
2932
],
[
2947,
2949
],
[
3804,
3806
],
[
3825,
3827
],
[
3929,
3931
],
[
3964,
3966
],
[
4864,
4866
],
[
5827,
5829
],
[
6931,
6933
],
[
7696,
7698
],
[
8761,
8763
],
[
9514,
9516
],
[
11115,
11117
],
[
12906,
12908
],
[
14573,
14575
],
[
16401,
16403
],
[
17886,
17888
],
[
18077,
18079
],
[
19699,
19701
],
[
19890,
19892
],
[
21524,
21526
],
[
23291,
23293
],
[
26526,
26528
],
[
29702,
29704
],
[
31924,
31926
],
[
33851,
33853
],
[
34102,
34104
],
[
34323,
34325
],
[
35096,
35098
],
[
43890,
43892
],
[
44010,
44012
],
[
45785,
45787
],
[
45995,
45997
],
[
46056,
46058
],
[
46266,
46268
]
],
[
[
202,
209
]
],
[
[
337,
352
],
[
6842,
6857
],
[
8671,
8686
]
],
[
[
1735,
1750
],
[
7568,
7583
],
[
9386,
9401
],
[
14444,
14459
],
[
16272,
16287
],
[
17950,
17965
],
[
19763,
19778
]
],
[
[
2043,
2057
],
[
46947,
46961
]
],
[
[
3046,
3059
],
[
47021,
47034
]
],
[
[
4027,
4040
],
[
10372,
10385
],
[
13785,
13798
],
[
17282,
17295
],
[
20781,
20794
],
[
47185,
47198
],
[
49449,
49462
]
],
[
[
4966,
4978
],
[
12020,
12032
],
[
15465,
15477
],
[
18952,
18964
],
[
22405,
22417
],
[
47246,
47258
],
[
49514,
49526
]
],
[
[
5929,
5942
],
[
47477,
47490
]
],
[
[
7763,
7775
],
[
47550,
47562
]
],
[
[
9581,
9593
],
[
47793,
47805
]
],
[
[
11218,
11229
],
[
47867,
47878
]
],
[
[
13009,
13022
],
[
48108,
48121
]
],
[
[
14691,
14703
],
[
48183,
48195
]
],
[
[
16501,
16513
],
[
48427,
48439
]
],
[
[
18176,
18187
],
[
48501,
48512
]
],
[
[
19989,
20001
],
[
48745,
48757
]
],
[
[
21627,
21638
],
[
48819,
48830
]
],
[
[
23394,
23402
],
[
49956,
49964
],
[
50210,
50218
]
],
[
[
26589,
26597
],
[
50981,
50989
],
[
51235,
51243
]
],
[
[
29765,
29774
],
[
51957,
51966
],
[
52162,
52171
]
],
[
[
31989,
32005
],
[
36544,
36560
],
[
45278,
45294
]
],
[
[
35231,
35248
],
[
44042,
44059
],
[
45158,
45175
]
],
[
[
36983,
36986
]
]
] |
import copy
import json
import logging
import pytest
import burn_lock_functions
import test_utilities
from integration_env_credentials import sifchain_cli_credentials_for_test
from pytest_utilities import generate_minimal_test_account
from test_utilities import EthereumToSifchainTransferRequest, SifchaincliCredentials
def create_new_sifaddr():
new_account_key = test_utilities.get_shell_output("uuidgen")
credentials = sifchain_cli_credentials_for_test(new_account_key)
new_addr = burn_lock_functions.create_new_sifaddr(credentials=credentials, keyname=new_account_key)
return new_addr["address"]
def create_new_sifaddr_and_key():
new_account_key = test_utilities.get_shell_output("uuidgen")
credentials = sifchain_cli_credentials_for_test(new_account_key)
new_addr = burn_lock_functions.create_new_sifaddr(credentials=credentials, keyname=new_account_key)
return new_addr["address"], new_addr["name"]
@pytest.mark.skip(reason="run manually")
def test_bulk_transfers(
basic_transfer_request: EthereumToSifchainTransferRequest,
smart_contracts_dir,
source_ethereum_address,
bridgebank_address,
bridgetoken_address,
ethereum_network,
):
n_transfers = int(test_utilities.get_optional_env_var("NTRANSFERS", 2))
ganache_delay = test_utilities.get_optional_env_var("GANACHE_DELAY", 1)
# test_utilities.get_shell_output(f"{integration_dir}/ganache_start.sh {ganache_delay}")
amount = "{:d}".format(5 * test_utilities.highest_gas_cost)
new_addresses_and_keys = list(map(lambda x: create_new_sifaddr_and_key(), range(n_transfers)))
logging.info(f"aandk: {new_addresses_and_keys}")
new_addresses = list(map(lambda a: a[0], new_addresses_and_keys))
logging.debug(f"new_addresses: {new_addresses}")
new_eth_addrs = test_utilities.create_ethereum_addresses(smart_contracts_dir, basic_transfer_request.ethereum_network, len(new_addresses))
logging.info(f"new eth addrs: {new_eth_addrs}")
request: EthereumToSifchainTransferRequest = copy.deepcopy(basic_transfer_request)
requests = list(map(lambda addr: {
"amount": amount,
"symbol": test_utilities.NULL_ADDRESS,
"sifchain_address": addr
}, new_addresses))
json_requests = json.dumps(requests)
test_utilities.run_yarn_command(
" ".join([
f"yarn --cwd {smart_contracts_dir}",
"integrationtest:sendBulkLockTx",
f"--amount {amount}",
f"--symbol eth",
f"--json_path {request.solidity_json_path}",
f"--sifchain_address {new_addresses[0]}",
f"--transactions \'{json_requests}\'",
f"--ethereum_address {source_ethereum_address}",
f"--bridgebank_address {bridgebank_address}",
f"--ethereum_network {ethereum_network}",
])
)
requests = list(map(lambda addr: {
"amount": amount,
"symbol": bridgetoken_address,
"sifchain_address": addr
}, new_addresses))
json_requests = json.dumps(requests)
yarn_result = test_utilities.run_yarn_command(
" ".join([
f"yarn --cwd {smart_contracts_dir}",
"integrationtest:sendBulkLockTx",
f"--amount {amount}",
"--lock_or_burn burn",
f"--symbol {bridgetoken_address}",
f"--json_path {request.solidity_json_path}",
f"--sifchain_address {new_addresses[0]}",
f"--transactions \'{json_requests}\'",
f"--ethereum_address {source_ethereum_address}",
f"--bridgebank_address {bridgebank_address}",
f"--ethereum_network {ethereum_network}",
])
)
logging.info(f"bulk result: {yarn_result}")
manual_advance = False
if manual_advance:
test_utilities.advance_n_ethereum_blocks(test_utilities.n_wait_blocks, smart_contracts_dir)
    test_utilities.wait_for_ethereum_block_number(yarn_result["blockNumber"] + test_utilities.n_wait_blocks, basic_transfer_request)
for a in new_addresses:
test_utilities.wait_for_sif_account(a, basic_transfer_request.sifnoded_node, 90)
test_utilities.wait_for_sifchain_addr_balance(a, "ceth", amount, basic_transfer_request.sifnoded_node, 180)
test_utilities.wait_for_sifchain_addr_balance(a, "rowan", amount, basic_transfer_request.sifnoded_node, 180)
text_file = open("pfile.cmds", "w")
simple_credentials = SifchaincliCredentials(
keyring_passphrase=None,
keyring_backend="test",
from_key=None,
sifnoded_homedir=None
)
logging.info(f"all accounts are on sifchain and have the correct balance")
for sifaddr, ethaddr in zip(new_addresses_and_keys, new_eth_addrs):
r = copy.deepcopy(basic_transfer_request)
r.sifchain_address = sifaddr[0]
r.ethereum_address = ethaddr["address"]
r.amount = 100
simple_credentials.from_key = sifaddr[1]
c = test_utilities.send_from_sifchain_to_ethereum_cmd(r, simple_credentials)
text_file.write(f"{c}\n")
text_file.close()
# test_utilities.get_shell_output("cat pfile.cmds | parallel --trim lr -v {}")
test_utilities.get_shell_output("bash -x pfile.cmds")
for sifaddr, ethaddr in zip(new_addresses_and_keys, new_eth_addrs):
r = copy.deepcopy(basic_transfer_request)
r.ethereum_address = ethaddr["address"]
r.amount = 100
test_utilities.wait_for_eth_balance(r, 100, 300)
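# --- Hedged sketch, not part of the test above: the value passed to --transactions is
# --- simply json.dumps() of a list of amount/symbol/sifchain_address dicts; the address
# --- and symbol below are made-up placeholders, not real chain values.
import json

sample_requests = [
    {"amount": "100", "symbol": "<token-address-or-eth>", "sifchain_address": "<sif-address>"},
]
sample_json = json.dumps(sample_requests)  # the string the test interpolates into --transactions '...'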
| [
[
[
7,
11
],
[
2053,
2057
],
[
4758,
4762
],
[
5322,
5326
]
],
[
[
19,
23
],
[
2279,
2283
],
[
3046,
3050
]
],
[
[
31,
38
],
[
1637,
1644
],
[
1760,
1767
],
[
1956,
1963
],
[
3704,
3711
],
[
4599,
4606
]
],
[
[
47,
53
],
[
945,
951
]
],
[
[
62,
81
],
[
499,
518
],
[
804,
823
]
],
[
[
89,
103
],
[
372,
386
],
[
677,
691
],
[
1247,
1261
],
[
1321,
1335
],
[
1501,
1515
],
[
1829,
1843
],
[
2304,
2318
],
[
3085,
3099
],
[
3806,
3820
],
[
3847,
3861
],
[
3902,
3916
],
[
3977,
3991
],
[
4068,
4082
],
[
4157,
4171
],
[
4273,
4287
],
[
4968,
4982
],
[
5184,
5198
],
[
5439,
5453
],
[
2174,
2188
]
],
[
[
144,
177
],
[
433,
466
],
[
738,
771
]
],
[
[
207,
236
]
],
[
[
264,
297
],
[
1042,
1075
],
[
2017,
2050
]
],
[
[
299,
321
],
[
4447,
4469
]
],
[
[
328,
346
]
],
[
[
625,
651
],
[
1582,
1608
]
],
[
[
989,
1008
]
]
] |
"""Build Environment used for isolation during sdist building
"""
import logging
import os
import sys
import textwrap
from distutils.sysconfig import get_python_lib
from sysconfig import get_paths
from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet
from pip import __file__ as pip_location
from pip._internal.utils.misc import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.ui import open_spinner
logger = logging.getLogger(__name__)
class BuildEnvironment(object):
"""Creates and manages an isolated environment to install build deps
"""
def __init__(self):
self._temp_dir = TempDirectory(kind="build-env")
self._temp_dir.create()
@property
def path(self):
return self._temp_dir.path
def __enter__(self):
self.save_path = os.environ.get('PATH', None)
self.save_pythonpath = os.environ.get('PYTHONPATH', None)
self.save_nousersite = os.environ.get('PYTHONNOUSERSITE', None)
install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
install_dirs = get_paths(install_scheme, vars={
'base': self.path,
'platbase': self.path,
})
scripts = install_dirs['scripts']
if self.save_path:
os.environ['PATH'] = scripts + os.pathsep + self.save_path
else:
os.environ['PATH'] = scripts + os.pathsep + os.defpath
# Note: prefer distutils' sysconfig to get the
# library paths so PyPy is correctly supported.
purelib = get_python_lib(plat_specific=0, prefix=self.path)
platlib = get_python_lib(plat_specific=1, prefix=self.path)
if purelib == platlib:
lib_dirs = purelib
else:
lib_dirs = purelib + os.pathsep + platlib
if self.save_pythonpath:
os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
self.save_pythonpath
else:
os.environ['PYTHONPATH'] = lib_dirs
os.environ['PYTHONNOUSERSITE'] = '1'
# Ensure .pth files are honored.
with open(os.path.join(purelib, 'sitecustomize.py'), 'w') as fp:
fp.write(textwrap.dedent(
'''
import site
site.addsitedir({!r})
'''
).format(purelib))
return self.path
def __exit__(self, exc_type, exc_val, exc_tb):
def restore_var(varname, old_value):
if old_value is None:
os.environ.pop(varname, None)
else:
os.environ[varname] = old_value
restore_var('PATH', self.save_path)
restore_var('PYTHONPATH', self.save_pythonpath)
restore_var('PYTHONNOUSERSITE', self.save_nousersite)
def cleanup(self):
self._temp_dir.cleanup()
def missing_requirements(self, reqs):
"""Return a list of the requirements from reqs that are not present
"""
missing = []
with self:
ws = WorkingSet(os.environ["PYTHONPATH"].split(os.pathsep))
for req in reqs:
try:
if ws.find(Requirement.parse(req)) is None:
missing.append(req)
except VersionConflict:
missing.append(req)
return missing
def install_requirements(self, finder, requirements, message):
args = [
sys.executable, os.path.dirname(pip_location), 'install',
'--ignore-installed', '--no-user', '--prefix', self.path,
'--no-warn-script-location',
]
if logger.getEffectiveLevel() <= logging.DEBUG:
args.append('-v')
for format_control in ('no_binary', 'only_binary'):
formats = getattr(finder.format_control, format_control)
args.extend(('--' + format_control.replace('_', '-'),
','.join(sorted(formats or {':none:'}))))
if finder.index_urls:
args.extend(['-i', finder.index_urls[0]])
for extra_index in finder.index_urls[1:]:
args.extend(['--extra-index-url', extra_index])
else:
args.append('--no-index')
for link in finder.find_links:
args.extend(['--find-links', link])
for _, host, _ in finder.secure_origins:
args.extend(['--trusted-host', host])
if finder.allow_all_prereleases:
args.append('--pre')
if finder.process_dependency_links:
args.append('--process-dependency-links')
args.append('--')
args.extend(requirements)
with open_spinner(message) as spinner:
call_subprocess(args, show_stdout=False, spinner=spinner)
class NoOpBuildEnvironment(BuildEnvironment):
"""A no-op drop-in replacement for BuildEnvironment
"""
def __init__(self):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
pass
def cleanup(self):
pass
def install_requirements(self, finder, requirements, message):
raise NotImplementedError()
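# --- Hedged usage sketch, not part of pip's module above: how BuildEnvironment is meant
# --- to be driven. Installing the missing requirements needs a pip PackageFinder, which
# --- is out of scope here, so that call is only described in a comment.
env = BuildEnvironment()
try:
    missing = env.missing_requirements(["setuptools", "wheel"])
    # If anything is missing, env.install_requirements(finder, missing, "...") would be
    # called with a real PackageFinder before entering the environment.
    with env:
        # PATH and PYTHONPATH now point into the temporary prefix; run the isolated
        # sdist build here.
        pass
finally:
    env.cleanup()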
| [
[
[
74,
81
],
[
486,
493
],
[
3683,
3690
]
],
[
[
89,
91
],
[
864,
866
],
[
924,
926
],
[
990,
992
],
[
1066,
1068
],
[
1349,
1351
],
[
1318,
1320
],
[
1434,
1436
],
[
1447,
1449
],
[
1403,
1405
],
[
1815,
1817
],
[
1919,
1921
],
[
1881,
1883
],
[
1997,
1999
],
[
2042,
2044
],
[
2139,
2141
],
[
3057,
3059
],
[
3088,
3090
],
[
3479,
3481
],
[
2542,
2544
],
[
2606,
2608
]
],
[
[
99,
102
],
[
3463,
3466
]
],
[
[
110,
118
],
[
2215,
2223
]
],
[
[
151,
165
],
[
1588,
1602
],
[
1656,
1670
]
],
[
[
188,
197
],
[
1126,
1135
]
],
[
[
237,
248
],
[
3182,
3193
]
],
[
[
250,
265
],
[
3282,
3297
]
],
[
[
267,
277
],
[
3046,
3056
]
],
[
[
295,
319
],
[
3495,
3507
]
],
[
[
357,
372
],
[
4721,
4736
]
],
[
[
414,
427
],
[
679,
692
]
],
[
[
463,
475
],
[
4675,
4687
]
],
[
[
477,
483
],
[
3653,
3659
]
],
[
[
522,
538
],
[
4808,
4824
]
],
[
[
4787,
4807
]
]
] |
import logging
from random import randint
import traceback
from typing import cast, Dict, List, Set, Collection
from geniusweb.actions.Accept import Accept
from geniusweb.actions.Action import Action
from geniusweb.actions.LearningDone import LearningDone
from geniusweb.actions.Offer import Offer
from geniusweb.actions.PartyId import PartyId
from geniusweb.actions.Vote import Vote
from geniusweb.actions.Votes import Votes
from geniusweb.bidspace.AllBidsList import AllBidsList
from geniusweb.inform.ActionDone import ActionDone
from geniusweb.inform.Finished import Finished
from geniusweb.inform.Inform import Inform
from geniusweb.inform.OptIn import OptIn
from geniusweb.inform.Settings import Settings
from geniusweb.inform.Voting import Voting
from geniusweb.inform.YourTurn import YourTurn
from geniusweb.issuevalue.Bid import Bid
from geniusweb.issuevalue.Domain import Domain
from geniusweb.issuevalue.Value import Value
from geniusweb.issuevalue.ValueSet import ValueSet
from geniusweb.party.Capabilities import Capabilities
from geniusweb.party.DefaultParty import DefaultParty
from geniusweb.profile.utilityspace.UtilitySpace import UtilitySpace
from geniusweb.profileconnection.ProfileConnectionFactory import (
ProfileConnectionFactory,
)
from geniusweb.progress.ProgressRounds import ProgressRounds
from geniusweb.utils import val
class RandomAgent(DefaultParty):
"""
Offers random bids until a bid with sufficient utility is offered.
"""
def __init__(self):
super().__init__()
self.getReporter().log(logging.INFO, "party is initialized")
self._profile = None
self._lastReceivedBid: Bid = None
# Override
def notifyChange(self, info: Inform):
# self.getReporter().log(logging.INFO,"received info:"+str(info))
if isinstance(info, Settings):
self._settings: Settings = cast(Settings, info)
self._me = self._settings.getID()
self._protocol: str = str(self._settings.getProtocol().getURI())
self._progress = self._settings.getProgress()
if "Learn" == self._protocol:
self.getConnection().send(LearningDone(self._me)) # type:ignore
else:
self._profile = ProfileConnectionFactory.create(
info.getProfile().getURI(), self.getReporter()
)
elif isinstance(info, ActionDone):
action: Action = cast(ActionDone, info).getAction()
if isinstance(action, Offer):
self._lastReceivedBid = cast(Offer, action).getBid()
elif isinstance(info, YourTurn):
self._myTurn()
if isinstance(self._progress, ProgressRounds):
self._progress = self._progress.advance()
elif isinstance(info, Finished):
self.terminate()
elif isinstance(info, Voting):
# MOPAC protocol
self._lastvotes = self._vote(cast(Voting, info))
val(self.getConnection()).send(self._lastvotes)
elif isinstance(info, OptIn):
val(self.getConnection()).send(self._lastvotes)
else:
self.getReporter().log(
logging.WARNING, "Ignoring unknown info " + str(info)
)
# Override
def getCapabilities(self) -> Capabilities:
return Capabilities(
set(["SAOP", "Learn", "MOPAC"]),
set(["geniusweb.profile.utilityspace.LinearAdditive"]),
)
# Override
def getDescription(self) -> str:
return "Offers random bids until a bid with sufficient utility is offered. Parameters minPower and maxPower can be used to control voting behaviour."
# Override
def terminate(self):
self.getReporter().log(logging.INFO, "party is terminating:")
super().terminate()
        if self._profile is not None:
self._profile.close()
self._profile = None
def _myTurn(self):
if self._isGood(self._lastReceivedBid):
action = Accept(self._me, self._lastReceivedBid)
else:
for _attempt in range(20):
bid = self._getRandomBid(self._profile.getProfile().getDomain())
if self._isGood(bid):
break
action = Offer(self._me, bid)
self.getConnection().send(action)
def _isGood(self, bid: Bid) -> bool:
        if bid is None:
return False
profile = self._profile.getProfile()
if isinstance(profile, UtilitySpace):
return profile.getUtility(bid) > 0.6
        raise Exception("Cannot handle this type of profile")
def _getRandomBid(self, domain: Domain) -> Bid:
allBids = AllBidsList(domain)
return allBids.get(randint(0, allBids.size() - 1))
def _vote(self, voting: Voting) -> Votes:
"""
@param voting the {@link Voting} object containing the options
@return our next Votes.
"""
val = self._settings.getParameters().get("minPower")
minpower: int = val if isinstance(val, int) else 2
val = self._settings.getParameters().get("maxPower")
maxpower: int = val if isinstance(val, int) else 9999999
votes: Set[Vote] = set(
[
Vote(self._me, offer.getBid(), minpower, maxpower)
for offer in voting.getOffers()
if self._isGood(offer.getBid())
]
)
return Votes(self._me, votes)
| [
[
[
7,
14
],
[
1559,
1566
],
[
3197,
3204
],
[
3763,
3770
]
],
[
[
34,
41
],
[
4758,
4765
]
],
[
[
49,
58
]
],
[
[
78,
82
],
[
1878,
1882
],
[
2443,
2447
],
[
2560,
2564
],
[
2953,
2957
]
],
[
[
84,
88
]
],
[
[
90,
94
]
],
[
[
96,
99
],
[
5227,
5230
]
],
[
[
101,
111
]
],
[
[
150,
156
],
[
4024,
4030
]
],
[
[
194,
200
],
[
2434,
2440
]
],
[
[
244,
256
],
[
2164,
2176
]
],
[
[
293,
298
],
[
2512,
2517
],
[
2565,
2570
],
[
4283,
4288
]
],
[
[
337,
344
]
],
[
[
380,
384
],
[
5274,
5278
],
[
5231,
5235
]
],
[
[
421,
426
],
[
4830,
4835
],
[
5460,
5465
]
],
[
[
470,
481
],
[
4711,
4722
]
],
[
[
522,
532
],
[
2401,
2411
],
[
2448,
2458
]
],
[
[
571,
579
],
[
2804,
2812
]
],
[
[
616,
622
],
[
1717,
1723
]
],
[
[
658,
663
],
[
3063,
3068
]
],
[
[
702,
710
],
[
1828,
1836
],
[
1883,
1891
],
[
1867,
1875
]
],
[
[
747,
753
],
[
2874,
2880
],
[
2958,
2964
],
[
4819,
4825
]
],
[
[
792,
800
],
[
2619,
2627
]
],
[
[
838,
841
],
[
1657,
1660
],
[
4374,
4377
],
[
4688,
4691
]
],
[
[
882,
888
],
[
4677,
4683
]
],
[
[
928,
933
]
],
[
[
976,
984
]
],
[
[
1026,
1038
],
[
3314,
3326
],
[
3343,
3355
]
],
[
[
1080,
1092
],
[
1374,
1386
]
],
[
[
1149,
1161
],
[
4513,
4525
]
],
[
[
1233,
1257
],
[
2253,
2277
]
],
[
[
1307,
1321
],
[
2699,
2713
]
],
[
[
1350,
1353
],
[
2985,
2988
],
[
3083,
3086
]
],
[
[
1362,
1373
]
]
] |
METER_TO_KM = 1e-3
ONE_TO_KILO = 1e3
KM_TO_METER = 1e3
KILO_TO_ONE = 1e3
# Average earth radius, see https://en.wikipedia.org/wiki/Earth_radius
EARTH_RADIUS_KM = 6371.0088
# in reality air density varies between 1.14 and 1.42 in kg/m^3
AIR_DENSITY_RHO = 1.225
# of course this introduces a small error due to leap years, but on average it's quite ok
# Warning: in most cases it might be better to use mean() instead of sum()/HOURS_PER_YEAR
HOURS_PER_YEAR = 8765.812536
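# --- Hedged sketch, not part of the constants above: HOURS_PER_YEAR converts an average
# --- power into annual energy; the value averages leap and non-leap years, which is the
# --- small error the comment above accepts. The power figure is an assumed example.
mean_power_mw = 2.5
annual_energy_mwh = mean_power_mw * HOURS_PER_YEAR   # ~21,915 MWh in an average year
# For an hourly series that does not cover exactly one year, series.mean() * HOURS_PER_YEAR
# is usually safer than series.sum(), as the warning above suggests.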
| [
[
[
0,
11
]
],
[
[
19,
30
]
],
[
[
38,
49
]
],
[
[
56,
67
]
],
[
[
146,
161
]
],
[
[
239,
254
]
],
[
[
446,
460
]
]
] |
# Time: O(n)
# Space: O(1)
class Solution(object):
# @param a, a string
# @param b, a string
# @return a string
def addBinary(self, a, b):
result, carry, val = "", 0, 0
for i in range(max(len(a), len(b))):
val = carry
if i < len(a):
val += int(a[-(i + 1)])
if i < len(b):
val += int(b[-(i + 1)])
carry, val = divmod(val, 2)
result += str(val)
if carry:
result += str(carry)
return result[::-1]
# Time: O(n)
# Space: O(1)
from itertools import zip_longest
class Solution2(object):
def addBinary(self, a, b):
"""
:type a: str
:type b: str
:rtype: str
"""
result = ""
carry = 0
for x, y in zip_longest(reversed(a), reversed(b), fillvalue="0"):
carry, remainder = divmod(int(x)+int(y)+carry, 2)
result += str(remainder)
if carry:
result += str(carry)
return result[::-1]
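# --- Hedged usage sketch, not part of the solutions above: both classes add binary
# --- strings digit by digit while propagating a carry.
assert Solution().addBinary("11", "1") == "100"          # 3 + 1 = 4
assert Solution2().addBinary("1010", "1011") == "10101"  # 10 + 11 = 21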
| [
[
[
35,
43
]
],
[
[
600,
611
],
[
814,
825
]
],
[
[
620,
629
]
]
] |
"""Test whether all elements of cls.args are instances of Basic. """
# NOTE: keep tests sorted by (module, class name) key. If a class can't
# be instantiated, add it here anyway with @SKIP("abstract class") (see
# e.g. Function).
import os
import re
import warnings
import io
from sympy import Basic, S, symbols, sqrt, sin, oo, Interval, exp
from sympy.core.compatibility import range
from sympy.utilities.pytest import XFAIL, SKIP
from sympy.utilities.exceptions import SymPyDeprecationWarning
x, y, z = symbols('x,y,z')
def test_all_classes_are_tested():
this = os.path.split(__file__)[0]
path = os.path.join(this, os.pardir, os.pardir)
sympy_path = os.path.abspath(path)
prefix = os.path.split(sympy_path)[0] + os.sep
    re_cls = re.compile(r"^class ([A-Za-z][A-Za-z0-9_]*)\s*\(", re.MULTILINE)
modules = {}
for root, dirs, files in os.walk(sympy_path):
module = root.replace(prefix, "").replace(os.sep, ".")
for file in files:
if file.startswith(("_", "test_", "bench_")):
continue
if not file.endswith(".py"):
continue
with io.open(os.path.join(root, file), "r", encoding='utf-8') as f:
text = f.read()
submodule = module + '.' + file[:-3]
names = re_cls.findall(text)
if not names:
continue
try:
mod = __import__(submodule, fromlist=names)
except ImportError:
continue
def is_Basic(name):
cls = getattr(mod, name)
return issubclass(cls, Basic)
names = list(filter(is_Basic, names))
if names:
modules[submodule] = names
ns = globals()
failed = []
for module, names in modules.items():
mod = module.replace('.', '__')
for name in names:
test = 'test_' + mod + '__' + name
if test not in ns:
failed.append(module + '.' + name)
    # turn all SymPyDeprecationWarnings into errors
warnings.simplefilter("error", category=SymPyDeprecationWarning)
assert not failed, "Missing classes: %s. Please add tests for these to sympy/core/tests/test_args.py." % ", ".join(failed)
def _test_args(obj):
return all(isinstance(arg, Basic) for arg in obj.args)
def test_sympy__assumptions__assume__AppliedPredicate():
from sympy.assumptions.assume import AppliedPredicate, Predicate
assert _test_args(AppliedPredicate(Predicate("test"), 2))
def test_sympy__assumptions__assume__Predicate():
from sympy.assumptions.assume import Predicate
assert _test_args(Predicate("test"))
@XFAIL
def test_sympy__combinatorics__graycode__GrayCode():
from sympy.combinatorics.graycode import GrayCode
# an integer is given and returned from GrayCode as the arg
assert _test_args(GrayCode(3, start='100'))
assert _test_args(GrayCode(3, rank=1))
def test_sympy__combinatorics__subsets__Subset():
from sympy.combinatorics.subsets import Subset
assert _test_args(Subset([0, 1], [0, 1, 2, 3]))
assert _test_args(Subset(['c', 'd'], ['a', 'b', 'c', 'd']))
@XFAIL
def test_sympy__combinatorics__permutations__Permutation():
from sympy.combinatorics.permutations import Permutation
assert _test_args(Permutation([0, 1, 2, 3]))
def test_sympy__combinatorics__perm_groups__PermutationGroup():
from sympy.combinatorics.permutations import Permutation
from sympy.combinatorics.perm_groups import PermutationGroup
assert _test_args(PermutationGroup([Permutation([0, 1])]))
def test_sympy__combinatorics__polyhedron__Polyhedron():
from sympy.combinatorics.permutations import Permutation
from sympy.combinatorics.polyhedron import Polyhedron
from sympy.abc import w, x, y, z
pgroup = [Permutation([[0, 1, 2], [3]]),
Permutation([[0, 1, 3], [2]]),
Permutation([[0, 2, 3], [1]]),
Permutation([[1, 2, 3], [0]]),
Permutation([[0, 1], [2, 3]]),
Permutation([[0, 2], [1, 3]]),
Permutation([[0, 3], [1, 2]]),
Permutation([[0, 1, 2, 3]])]
corners = [w, x, y, z]
faces = [(w, x, y), (w, y, z), (w, z, x), (x, y, z)]
assert _test_args(Polyhedron(corners, faces, pgroup))
@XFAIL
def test_sympy__combinatorics__prufer__Prufer():
from sympy.combinatorics.prufer import Prufer
assert _test_args(Prufer([[0, 1], [0, 2], [0, 3]], 4))
def test_sympy__combinatorics__partitions__Partition():
from sympy.combinatorics.partitions import Partition
assert _test_args(Partition([1]))
@XFAIL
def test_sympy__combinatorics__partitions__IntegerPartition():
from sympy.combinatorics.partitions import IntegerPartition
assert _test_args(IntegerPartition([1]))
def test_sympy__concrete__products__Product():
from sympy.concrete.products import Product
assert _test_args(Product(x, (x, 0, 10)))
assert _test_args(Product(x, (x, 0, y), (y, 0, 10)))
@SKIP("abstract Class")
def test_sympy__concrete__expr_with_limits__ExprWithLimits():
from sympy.concrete.expr_with_limits import ExprWithLimits
assert _test_args(ExprWithLimits(x, (x, 0, 10)))
assert _test_args(ExprWithLimits(x*y, (x, 0, 10.),(y,1.,3)))
@SKIP("abstract Class")
def test_sympy__concrete__expr_with_limits__AddWithLimits():
from sympy.concrete.expr_with_limits import AddWithLimits
assert _test_args(AddWithLimits(x, (x, 0, 10)))
assert _test_args(AddWithLimits(x*y, (x, 0, 10),(y,1,3)))
@SKIP("abstract Class")
def test_sympy__concrete__expr_with_intlimits__ExprWithIntLimits():
from sympy.concrete.expr_with_intlimits import ExprWithIntLimits
assert _test_args(ExprWithIntLimits(x, (x, 0, 10)))
assert _test_args(ExprWithIntLimits(x*y, (x, 0, 10),(y,1,3)))
def test_sympy__concrete__summations__Sum():
from sympy.concrete.summations import Sum
assert _test_args(Sum(x, (x, 0, 10)))
assert _test_args(Sum(x, (x, 0, y), (y, 0, 10)))
def test_sympy__core__add__Add():
from sympy.core.add import Add
assert _test_args(Add(x, y, z, 2))
def test_sympy__core__basic__Atom():
from sympy.core.basic import Atom
assert _test_args(Atom())
def test_sympy__core__basic__Basic():
from sympy.core.basic import Basic
assert _test_args(Basic())
def test_sympy__core__containers__Dict():
from sympy.core.containers import Dict
assert _test_args(Dict({x: y, y: z}))
def test_sympy__core__containers__Tuple():
from sympy.core.containers import Tuple
assert _test_args(Tuple(x, y, z, 2))
def test_sympy__core__expr__AtomicExpr():
from sympy.core.expr import AtomicExpr
assert _test_args(AtomicExpr())
def test_sympy__core__expr__Expr():
from sympy.core.expr import Expr
assert _test_args(Expr())
def test_sympy__core__function__Application():
from sympy.core.function import Application
assert _test_args(Application(1, 2, 3))
def test_sympy__core__function__AppliedUndef():
from sympy.core.function import AppliedUndef
assert _test_args(AppliedUndef(1, 2, 3))
def test_sympy__core__function__Derivative():
from sympy.core.function import Derivative
assert _test_args(Derivative(2, x, y, 3))
@SKIP("abstract class")
def test_sympy__core__function__Function():
pass
def test_sympy__core__function__Lambda():
from sympy.core.function import Lambda
assert _test_args(Lambda((x, y), x + y + z))
def test_sympy__core__function__Subs():
from sympy.core.function import Subs
assert _test_args(Subs(x + y, x, 2))
def test_sympy__core__function__WildFunction():
from sympy.core.function import WildFunction
assert _test_args(WildFunction('f'))
def test_sympy__core__mod__Mod():
from sympy.core.mod import Mod
assert _test_args(Mod(x, 2))
def test_sympy__core__mul__Mul():
from sympy.core.mul import Mul
assert _test_args(Mul(2, x, y, z))
def test_sympy__core__numbers__Catalan():
from sympy.core.numbers import Catalan
assert _test_args(Catalan())
def test_sympy__core__numbers__ComplexInfinity():
from sympy.core.numbers import ComplexInfinity
assert _test_args(ComplexInfinity())
def test_sympy__core__numbers__EulerGamma():
from sympy.core.numbers import EulerGamma
assert _test_args(EulerGamma())
def test_sympy__core__numbers__Exp1():
from sympy.core.numbers import Exp1
assert _test_args(Exp1())
def test_sympy__core__numbers__Float():
from sympy.core.numbers import Float
assert _test_args(Float(1.23))
def test_sympy__core__numbers__GoldenRatio():
from sympy.core.numbers import GoldenRatio
assert _test_args(GoldenRatio())
def test_sympy__core__numbers__Half():
from sympy.core.numbers import Half
assert _test_args(Half())
def test_sympy__core__numbers__ImaginaryUnit():
from sympy.core.numbers import ImaginaryUnit
assert _test_args(ImaginaryUnit())
def test_sympy__core__numbers__Infinity():
from sympy.core.numbers import Infinity
assert _test_args(Infinity())
def test_sympy__core__numbers__Integer():
from sympy.core.numbers import Integer
assert _test_args(Integer(7))
@SKIP("abstract class")
def test_sympy__core__numbers__IntegerConstant():
pass
def test_sympy__core__numbers__NaN():
from sympy.core.numbers import NaN
assert _test_args(NaN())
def test_sympy__core__numbers__NegativeInfinity():
from sympy.core.numbers import NegativeInfinity
assert _test_args(NegativeInfinity())
def test_sympy__core__numbers__NegativeOne():
from sympy.core.numbers import NegativeOne
assert _test_args(NegativeOne())
def test_sympy__core__numbers__Number():
from sympy.core.numbers import Number
assert _test_args(Number(1, 7))
def test_sympy__core__numbers__NumberSymbol():
from sympy.core.numbers import NumberSymbol
assert _test_args(NumberSymbol())
def test_sympy__core__numbers__One():
from sympy.core.numbers import One
assert _test_args(One())
def test_sympy__core__numbers__Pi():
from sympy.core.numbers import Pi
assert _test_args(Pi())
def test_sympy__core__numbers__Rational():
from sympy.core.numbers import Rational
assert _test_args(Rational(1, 7))
@SKIP("abstract class")
def test_sympy__core__numbers__RationalConstant():
pass
def test_sympy__core__numbers__Zero():
from sympy.core.numbers import Zero
assert _test_args(Zero())
@SKIP("abstract class")
def test_sympy__core__operations__AssocOp():
pass
@SKIP("abstract class")
def test_sympy__core__operations__LatticeOp():
pass
def test_sympy__core__power__Pow():
from sympy.core.power import Pow
assert _test_args(Pow(x, 2))
def test_sympy__core__relational__Equality():
from sympy.core.relational import Equality
assert _test_args(Equality(x, 2))
def test_sympy__core__relational__GreaterThan():
from sympy.core.relational import GreaterThan
assert _test_args(GreaterThan(x, 2))
def test_sympy__core__relational__LessThan():
from sympy.core.relational import LessThan
assert _test_args(LessThan(x, 2))
@SKIP("abstract class")
def test_sympy__core__relational__Relational():
pass
def test_sympy__core__relational__StrictGreaterThan():
from sympy.core.relational import StrictGreaterThan
assert _test_args(StrictGreaterThan(x, 2))
def test_sympy__core__relational__StrictLessThan():
from sympy.core.relational import StrictLessThan
assert _test_args(StrictLessThan(x, 2))
def test_sympy__core__relational__Unequality():
from sympy.core.relational import Unequality
assert _test_args(Unequality(x, 2))
def test_sympy__sets__sets__EmptySet():
from sympy.sets.sets import EmptySet
assert _test_args(EmptySet())
def test_sympy__sets__sets__UniversalSet():
from sympy.sets.sets import UniversalSet
assert _test_args(UniversalSet())
def test_sympy__sets__sets__FiniteSet():
from sympy.sets.sets import FiniteSet
assert _test_args(FiniteSet(x, y, z))
def test_sympy__sets__sets__Interval():
from sympy.sets.sets import Interval
assert _test_args(Interval(0, 1))
def test_sympy__sets__sets__ProductSet():
from sympy.sets.sets import ProductSet, Interval
assert _test_args(ProductSet(Interval(0, 1), Interval(0, 1)))
@SKIP("does it make sense to test this?")
def test_sympy__sets__sets__Set():
from sympy.sets.sets import Set
assert _test_args(Set())
def test_sympy__sets__sets__Intersection():
from sympy.sets.sets import Intersection, Interval
assert _test_args(Intersection(Interval(0, 3), Interval(2, 4),
evaluate=False))
def test_sympy__sets__sets__Union():
from sympy.sets.sets import Union, Interval
assert _test_args(Union(Interval(0, 1), Interval(2, 3)))
def test_sympy__sets__sets__Complement():
from sympy.sets.sets import Complement
assert _test_args(Complement(Interval(0, 2), Interval(0, 1)))
def test_sympy__sets__sets__SymmetricDifference():
from sympy.sets.sets import FiniteSet, SymmetricDifference
assert _test_args(SymmetricDifference(FiniteSet(1, 2, 3), \
FiniteSet(2, 3, 4)))
def test_sympy__core__trace__Tr():
from sympy.core.trace import Tr
a, b = symbols('a b')
assert _test_args(Tr(a + b))
def test_sympy__sets__fancysets__Naturals():
from sympy.sets.fancysets import Naturals
assert _test_args(Naturals())
def test_sympy__sets__fancysets__Naturals0():
from sympy.sets.fancysets import Naturals0
assert _test_args(Naturals0())
def test_sympy__sets__fancysets__Integers():
from sympy.sets.fancysets import Integers
assert _test_args(Integers())
def test_sympy__sets__fancysets__Reals():
from sympy.sets.fancysets import Reals
assert _test_args(Reals())
def test_sympy__sets__fancysets__ImageSet():
from sympy.sets.fancysets import ImageSet
from sympy import S, Lambda, Symbol
x = Symbol('x')
assert _test_args(ImageSet(Lambda(x, x**2), S.Naturals))
def test_sympy__sets__fancysets__Range():
from sympy.sets.fancysets import Range
assert _test_args(Range(1, 5, 1))
def test_sympy__sets__contains__Contains():
from sympy.sets.fancysets import Range
from sympy.sets.contains import Contains
assert _test_args(Contains(x, Range(0, 10, 2)))
# STATS
from sympy.stats.crv_types import NormalDistribution
nd = NormalDistribution(0, 1)
from sympy.stats.frv_types import DieDistribution
die = DieDistribution(6)
def test_sympy__stats__crv__ContinuousDomain():
from sympy.stats.crv import ContinuousDomain
assert _test_args(ContinuousDomain(set([x]), Interval(-oo, oo)))
def test_sympy__stats__crv__SingleContinuousDomain():
from sympy.stats.crv import SingleContinuousDomain
assert _test_args(SingleContinuousDomain(x, Interval(-oo, oo)))
def test_sympy__stats__crv__ProductContinuousDomain():
from sympy.stats.crv import SingleContinuousDomain, ProductContinuousDomain
D = SingleContinuousDomain(x, Interval(-oo, oo))
E = SingleContinuousDomain(y, Interval(0, oo))
assert _test_args(ProductContinuousDomain(D, E))
def test_sympy__stats__crv__ConditionalContinuousDomain():
from sympy.stats.crv import (SingleContinuousDomain,
ConditionalContinuousDomain)
D = SingleContinuousDomain(x, Interval(-oo, oo))
assert _test_args(ConditionalContinuousDomain(D, x > 0))
def test_sympy__stats__crv__ContinuousPSpace():
from sympy.stats.crv import ContinuousPSpace, SingleContinuousDomain
D = SingleContinuousDomain(x, Interval(-oo, oo))
assert _test_args(ContinuousPSpace(D, nd))
def test_sympy__stats__crv__SingleContinuousPSpace():
from sympy.stats.crv import SingleContinuousPSpace
assert _test_args(SingleContinuousPSpace(x, nd))
def test_sympy__stats__crv__ProductContinuousPSpace():
from sympy.stats.crv import ProductContinuousPSpace, SingleContinuousPSpace
A = SingleContinuousPSpace(x, nd)
B = SingleContinuousPSpace(y, nd)
assert _test_args(ProductContinuousPSpace(A, B))
@SKIP("abstract class")
def test_sympy__stats__crv__SingleContinuousDistribution():
pass
def test_sympy__stats__drv__SingleDiscreteDomain():
from sympy.stats.drv import SingleDiscreteDomain
assert _test_args(SingleDiscreteDomain(x, S.Naturals))
def test_sympy__stats__drv__SingleDiscretePSpace():
from sympy.stats.drv import SingleDiscretePSpace
from sympy.stats.drv_types import PoissonDistribution
assert _test_args(SingleDiscretePSpace(x, PoissonDistribution(1)))
@SKIP("abstract class")
def test_sympy__stats__drv__SingleDiscreteDistribution():
pass
def test_sympy__stats__rv__RandomDomain():
from sympy.stats.rv import RandomDomain
from sympy.sets.sets import FiniteSet
assert _test_args(RandomDomain(FiniteSet(x), FiniteSet(1, 2, 3)))
def test_sympy__stats__rv__SingleDomain():
from sympy.stats.rv import SingleDomain
from sympy.sets.sets import FiniteSet
assert _test_args(SingleDomain(x, FiniteSet(1, 2, 3)))
def test_sympy__stats__rv__ConditionalDomain():
from sympy.stats.rv import ConditionalDomain, RandomDomain
from sympy.sets.sets import FiniteSet
D = RandomDomain(FiniteSet(x), FiniteSet(1, 2))
assert _test_args(ConditionalDomain(D, x > 1))
def test_sympy__stats__rv__PSpace():
from sympy.stats.rv import PSpace, RandomDomain
from sympy import FiniteSet
D = RandomDomain(FiniteSet(x), FiniteSet(1, 2, 3, 4, 5, 6))
assert _test_args(PSpace(D, die))
@SKIP("abstract Class")
def test_sympy__stats__rv__SinglePSpace():
pass
def test_sympy__stats__rv__RandomSymbol():
from sympy.stats.rv import RandomSymbol
from sympy.stats.crv import SingleContinuousPSpace
A = SingleContinuousPSpace(x, nd)
assert _test_args(RandomSymbol(A, x))
def test_sympy__stats__rv__ProductPSpace():
from sympy.stats.rv import ProductPSpace
from sympy.stats.crv import SingleContinuousPSpace
A = SingleContinuousPSpace(x, nd)
B = SingleContinuousPSpace(y, nd)
assert _test_args(ProductPSpace(A, B))
def test_sympy__stats__rv__ProductDomain():
from sympy.stats.rv import ProductDomain, SingleDomain
D = SingleDomain(x, Interval(-oo, oo))
E = SingleDomain(y, Interval(0, oo))
assert _test_args(ProductDomain(D, E))
def test_sympy__stats__frv_types__DiscreteUniformDistribution():
from sympy.stats.frv_types import DiscreteUniformDistribution
from sympy.core.containers import Tuple
assert _test_args(DiscreteUniformDistribution(Tuple(*list(range(6)))))
def test_sympy__stats__frv_types__DieDistribution():
from sympy.stats.frv_types import DieDistribution
assert _test_args(DieDistribution(6))
def test_sympy__stats__frv_types__BernoulliDistribution():
from sympy.stats.frv_types import BernoulliDistribution
assert _test_args(BernoulliDistribution(S.Half, 0, 1))
def test_sympy__stats__frv_types__BinomialDistribution():
from sympy.stats.frv_types import BinomialDistribution
assert _test_args(BinomialDistribution(5, S.Half, 1, 0))
def test_sympy__stats__frv_types__HypergeometricDistribution():
from sympy.stats.frv_types import HypergeometricDistribution
assert _test_args(HypergeometricDistribution(10, 5, 3))
def test_sympy__stats__frv_types__RademacherDistribution():
from sympy.stats.frv_types import RademacherDistribution
assert _test_args(RademacherDistribution())
def test_sympy__stats__frv__FiniteDomain():
from sympy.stats.frv import FiniteDomain
assert _test_args(FiniteDomain(set([(x, 1), (x, 2)]))) # x can be 1 or 2
def test_sympy__stats__frv__SingleFiniteDomain():
from sympy.stats.frv import SingleFiniteDomain
assert _test_args(SingleFiniteDomain(x, set([1, 2]))) # x can be 1 or 2
def test_sympy__stats__frv__ProductFiniteDomain():
from sympy.stats.frv import SingleFiniteDomain, ProductFiniteDomain
xd = SingleFiniteDomain(x, set([1, 2]))
yd = SingleFiniteDomain(y, set([1, 2]))
assert _test_args(ProductFiniteDomain(xd, yd))
def test_sympy__stats__frv__ConditionalFiniteDomain():
from sympy.stats.frv import SingleFiniteDomain, ConditionalFiniteDomain
xd = SingleFiniteDomain(x, set([1, 2]))
assert _test_args(ConditionalFiniteDomain(xd, x > 1))
def test_sympy__stats__frv__FinitePSpace():
from sympy.stats.frv import FinitePSpace, SingleFiniteDomain
xd = SingleFiniteDomain(x, set([1, 2, 3, 4, 5, 6]))
p = 1.0/6
xd = SingleFiniteDomain(x, set([1, 2]))
assert _test_args(FinitePSpace(xd, {(x, 1): S.Half, (x, 2): S.Half}))
def test_sympy__stats__frv__SingleFinitePSpace():
from sympy.stats.frv import SingleFinitePSpace
from sympy import Symbol
assert _test_args(SingleFinitePSpace(Symbol('x'), die))
def test_sympy__stats__frv__ProductFinitePSpace():
from sympy.stats.frv import SingleFinitePSpace, ProductFinitePSpace
from sympy import Symbol
xp = SingleFinitePSpace(Symbol('x'), die)
yp = SingleFinitePSpace(Symbol('y'), die)
assert _test_args(ProductFinitePSpace(xp, yp))
@SKIP("abstract class")
def test_sympy__stats__frv__SingleFiniteDistribution():
pass
@SKIP("abstract class")
def test_sympy__stats__crv__ContinuousDistribution():
pass
def test_sympy__stats__frv_types__FiniteDistributionHandmade():
from sympy.stats.frv_types import FiniteDistributionHandmade
assert _test_args(FiniteDistributionHandmade({1: 1}))
def test_sympy__stats__crv__ContinuousDistributionHandmade():
from sympy.stats.crv import ContinuousDistributionHandmade
from sympy import Symbol, Interval
assert _test_args(ContinuousDistributionHandmade(Symbol('x'),
Interval(0, 2)))
def test_sympy__stats__rv__Density():
from sympy.stats.rv import Density
from sympy.stats.crv_types import Normal
assert _test_args(Density(Normal('x', 0, 1)))
def test_sympy__stats__crv_types__ArcsinDistribution():
from sympy.stats.crv_types import ArcsinDistribution
assert _test_args(ArcsinDistribution(0, 1))
def test_sympy__stats__crv_types__BeniniDistribution():
from sympy.stats.crv_types import BeniniDistribution
assert _test_args(BeniniDistribution(1, 1, 1))
def test_sympy__stats__crv_types__BetaDistribution():
from sympy.stats.crv_types import BetaDistribution
assert _test_args(BetaDistribution(1, 1))
def test_sympy__stats__crv_types__BetaPrimeDistribution():
from sympy.stats.crv_types import BetaPrimeDistribution
assert _test_args(BetaPrimeDistribution(1, 1))
def test_sympy__stats__crv_types__CauchyDistribution():
from sympy.stats.crv_types import CauchyDistribution
assert _test_args(CauchyDistribution(0, 1))
def test_sympy__stats__crv_types__ChiDistribution():
from sympy.stats.crv_types import ChiDistribution
assert _test_args(ChiDistribution(1))
def test_sympy__stats__crv_types__ChiNoncentralDistribution():
from sympy.stats.crv_types import ChiNoncentralDistribution
assert _test_args(ChiNoncentralDistribution(1,1))
def test_sympy__stats__crv_types__ChiSquaredDistribution():
from sympy.stats.crv_types import ChiSquaredDistribution
assert _test_args(ChiSquaredDistribution(1))
def test_sympy__stats__crv_types__DagumDistribution():
from sympy.stats.crv_types import DagumDistribution
assert _test_args(DagumDistribution(1, 1, 1))
def test_sympy__stats__crv_types__ExponentialDistribution():
from sympy.stats.crv_types import ExponentialDistribution
assert _test_args(ExponentialDistribution(1))
def test_sympy__stats__crv_types__FDistributionDistribution():
from sympy.stats.crv_types import FDistributionDistribution
assert _test_args(FDistributionDistribution(1, 1))
def test_sympy__stats__crv_types__FisherZDistribution():
from sympy.stats.crv_types import FisherZDistribution
assert _test_args(FisherZDistribution(1, 1))
def test_sympy__stats__crv_types__FrechetDistribution():
from sympy.stats.crv_types import FrechetDistribution
assert _test_args(FrechetDistribution(1, 1, 1))
def test_sympy__stats__crv_types__GammaInverseDistribution():
from sympy.stats.crv_types import GammaInverseDistribution
assert _test_args(GammaInverseDistribution(1, 1))
def test_sympy__stats__crv_types__GammaDistribution():
from sympy.stats.crv_types import GammaDistribution
assert _test_args(GammaDistribution(1, 1))
def test_sympy__stats__crv_types__KumaraswamyDistribution():
from sympy.stats.crv_types import KumaraswamyDistribution
assert _test_args(KumaraswamyDistribution(1, 1))
def test_sympy__stats__crv_types__LaplaceDistribution():
from sympy.stats.crv_types import LaplaceDistribution
assert _test_args(LaplaceDistribution(0, 1))
def test_sympy__stats__crv_types__LogisticDistribution():
from sympy.stats.crv_types import LogisticDistribution
assert _test_args(LogisticDistribution(0, 1))
def test_sympy__stats__crv_types__LogNormalDistribution():
from sympy.stats.crv_types import LogNormalDistribution
assert _test_args(LogNormalDistribution(0, 1))
def test_sympy__stats__crv_types__MaxwellDistribution():
from sympy.stats.crv_types import MaxwellDistribution
assert _test_args(MaxwellDistribution(1))
def test_sympy__stats__crv_types__NakagamiDistribution():
from sympy.stats.crv_types import NakagamiDistribution
assert _test_args(NakagamiDistribution(1, 1))
def test_sympy__stats__crv_types__NormalDistribution():
from sympy.stats.crv_types import NormalDistribution
assert _test_args(NormalDistribution(0, 1))
def test_sympy__stats__crv_types__ParetoDistribution():
from sympy.stats.crv_types import ParetoDistribution
assert _test_args(ParetoDistribution(1, 1))
def test_sympy__stats__crv_types__QuadraticUDistribution():
from sympy.stats.crv_types import QuadraticUDistribution
assert _test_args(QuadraticUDistribution(1, 2))
def test_sympy__stats__crv_types__RaisedCosineDistribution():
from sympy.stats.crv_types import RaisedCosineDistribution
assert _test_args(RaisedCosineDistribution(1, 1))
def test_sympy__stats__crv_types__RayleighDistribution():
from sympy.stats.crv_types import RayleighDistribution
assert _test_args(RayleighDistribution(1))
def test_sympy__stats__crv_types__StudentTDistribution():
from sympy.stats.crv_types import StudentTDistribution
assert _test_args(StudentTDistribution(1))
def test_sympy__stats__crv_types__TriangularDistribution():
from sympy.stats.crv_types import TriangularDistribution
assert _test_args(TriangularDistribution(-1, 0, 1))
def test_sympy__stats__crv_types__UniformDistribution():
from sympy.stats.crv_types import UniformDistribution
assert _test_args(UniformDistribution(0, 1))
def test_sympy__stats__crv_types__UniformSumDistribution():
from sympy.stats.crv_types import UniformSumDistribution
assert _test_args(UniformSumDistribution(1))
def test_sympy__stats__crv_types__VonMisesDistribution():
from sympy.stats.crv_types import VonMisesDistribution
assert _test_args(VonMisesDistribution(1, 1))
def test_sympy__stats__crv_types__WeibullDistribution():
from sympy.stats.crv_types import WeibullDistribution
assert _test_args(WeibullDistribution(1, 1))
def test_sympy__stats__crv_types__WignerSemicircleDistribution():
from sympy.stats.crv_types import WignerSemicircleDistribution
assert _test_args(WignerSemicircleDistribution(1))
def test_sympy__stats__drv_types__PoissonDistribution():
from sympy.stats.drv_types import PoissonDistribution
assert _test_args(PoissonDistribution(1))
def test_sympy__stats__drv_types__GeometricDistribution():
from sympy.stats.drv_types import GeometricDistribution
assert _test_args(GeometricDistribution(.5))
def test_sympy__core__symbol__Dummy():
from sympy.core.symbol import Dummy
assert _test_args(Dummy('t'))
def test_sympy__core__symbol__Symbol():
from sympy.core.symbol import Symbol
assert _test_args(Symbol('t'))
def test_sympy__core__symbol__Wild():
from sympy.core.symbol import Wild
assert _test_args(Wild('x', exclude=[x]))
@SKIP("abstract class")
def test_sympy__functions__combinatorial__factorials__CombinatorialFunction():
pass
def test_sympy__functions__combinatorial__factorials__FallingFactorial():
from sympy.functions.combinatorial.factorials import FallingFactorial
assert _test_args(FallingFactorial(2, x))
def test_sympy__functions__combinatorial__factorials__MultiFactorial():
from sympy.functions.combinatorial.factorials import MultiFactorial
assert _test_args(MultiFactorial(x))
def test_sympy__functions__combinatorial__factorials__RisingFactorial():
from sympy.functions.combinatorial.factorials import RisingFactorial
assert _test_args(RisingFactorial(2, x))
def test_sympy__functions__combinatorial__factorials__binomial():
from sympy.functions.combinatorial.factorials import binomial
assert _test_args(binomial(2, x))
def test_sympy__functions__combinatorial__factorials__subfactorial():
from sympy.functions.combinatorial.factorials import subfactorial
assert _test_args(subfactorial(1))
def test_sympy__functions__combinatorial__factorials__factorial():
from sympy.functions.combinatorial.factorials import factorial
assert _test_args(factorial(x))
def test_sympy__functions__combinatorial__factorials__factorial2():
from sympy.functions.combinatorial.factorials import factorial2
assert _test_args(factorial2(x))
def test_sympy__functions__combinatorial__numbers__bell():
from sympy.functions.combinatorial.numbers import bell
assert _test_args(bell(x, y))
def test_sympy__functions__combinatorial__numbers__bernoulli():
from sympy.functions.combinatorial.numbers import bernoulli
assert _test_args(bernoulli(x))
def test_sympy__functions__combinatorial__numbers__catalan():
from sympy.functions.combinatorial.numbers import catalan
assert _test_args(catalan(x))
def test_sympy__functions__combinatorial__numbers__genocchi():
from sympy.functions.combinatorial.numbers import genocchi
assert _test_args(genocchi(x))
def test_sympy__functions__combinatorial__numbers__euler():
from sympy.functions.combinatorial.numbers import euler
assert _test_args(euler(x))
def test_sympy__functions__combinatorial__numbers__fibonacci():
from sympy.functions.combinatorial.numbers import fibonacci
assert _test_args(fibonacci(x))
def test_sympy__functions__combinatorial__numbers__harmonic():
from sympy.functions.combinatorial.numbers import harmonic
assert _test_args(harmonic(x, 2))
def test_sympy__functions__combinatorial__numbers__lucas():
from sympy.functions.combinatorial.numbers import lucas
assert _test_args(lucas(x))
def test_sympy__functions__elementary__complexes__Abs():
from sympy.functions.elementary.complexes import Abs
assert _test_args(Abs(x))
def test_sympy__functions__elementary__complexes__adjoint():
from sympy.functions.elementary.complexes import adjoint
assert _test_args(adjoint(x))
def test_sympy__functions__elementary__complexes__arg():
from sympy.functions.elementary.complexes import arg
assert _test_args(arg(x))
def test_sympy__functions__elementary__complexes__conjugate():
from sympy.functions.elementary.complexes import conjugate
assert _test_args(conjugate(x))
def test_sympy__functions__elementary__complexes__im():
from sympy.functions.elementary.complexes import im
assert _test_args(im(x))
def test_sympy__functions__elementary__complexes__re():
from sympy.functions.elementary.complexes import re
assert _test_args(re(x))
def test_sympy__functions__elementary__complexes__sign():
from sympy.functions.elementary.complexes import sign
assert _test_args(sign(x))
def test_sympy__functions__elementary__complexes__polar_lift():
from sympy.functions.elementary.complexes import polar_lift
assert _test_args(polar_lift(x))
def test_sympy__functions__elementary__complexes__periodic_argument():
from sympy.functions.elementary.complexes import periodic_argument
assert _test_args(periodic_argument(x, y))
def test_sympy__functions__elementary__complexes__principal_branch():
from sympy.functions.elementary.complexes import principal_branch
assert _test_args(principal_branch(x, y))
def test_sympy__functions__elementary__complexes__transpose():
from sympy.functions.elementary.complexes import transpose
assert _test_args(transpose(x))
def test_sympy__functions__elementary__exponential__LambertW():
from sympy.functions.elementary.exponential import LambertW
assert _test_args(LambertW(2))
@SKIP("abstract class")
def test_sympy__functions__elementary__exponential__ExpBase():
pass
def test_sympy__functions__elementary__exponential__exp():
from sympy.functions.elementary.exponential import exp
assert _test_args(exp(2))
def test_sympy__functions__elementary__exponential__exp_polar():
from sympy.functions.elementary.exponential import exp_polar
assert _test_args(exp_polar(2))
def test_sympy__functions__elementary__exponential__log():
from sympy.functions.elementary.exponential import log
assert _test_args(log(2))
@SKIP("abstract class")
def test_sympy__functions__elementary__hyperbolic__HyperbolicFunction():
pass
@SKIP("abstract class")
def test_sympy__functions__elementary__hyperbolic__ReciprocalHyperbolicFunction():
pass
def test_sympy__functions__elementary__hyperbolic__acosh():
from sympy.functions.elementary.hyperbolic import acosh
assert _test_args(acosh(2))
def test_sympy__functions__elementary__hyperbolic__acoth():
from sympy.functions.elementary.hyperbolic import acoth
assert _test_args(acoth(2))
def test_sympy__functions__elementary__hyperbolic__asinh():
from sympy.functions.elementary.hyperbolic import asinh
assert _test_args(asinh(2))
def test_sympy__functions__elementary__hyperbolic__atanh():
from sympy.functions.elementary.hyperbolic import atanh
assert _test_args(atanh(2))
def test_sympy__functions__elementary__hyperbolic__cosh():
from sympy.functions.elementary.hyperbolic import cosh
assert _test_args(cosh(2))
def test_sympy__functions__elementary__hyperbolic__coth():
from sympy.functions.elementary.hyperbolic import coth
assert _test_args(coth(2))
def test_sympy__functions__elementary__hyperbolic__csch():
from sympy.functions.elementary.hyperbolic import csch
assert _test_args(csch(2))
def test_sympy__functions__elementary__hyperbolic__sech():
from sympy.functions.elementary.hyperbolic import sech
assert _test_args(sech(2))
def test_sympy__functions__elementary__hyperbolic__sinh():
from sympy.functions.elementary.hyperbolic import sinh
assert _test_args(sinh(2))
def test_sympy__functions__elementary__hyperbolic__tanh():
from sympy.functions.elementary.hyperbolic import tanh
assert _test_args(tanh(2))
@SKIP("does this work at all?")
def test_sympy__functions__elementary__integers__RoundFunction():
from sympy.functions.elementary.integers import RoundFunction
assert _test_args(RoundFunction())
def test_sympy__functions__elementary__integers__ceiling():
from sympy.functions.elementary.integers import ceiling
assert _test_args(ceiling(x))
def test_sympy__functions__elementary__integers__floor():
from sympy.functions.elementary.integers import floor
assert _test_args(floor(x))
def test_sympy__functions__elementary__miscellaneous__IdentityFunction():
from sympy.functions.elementary.miscellaneous import IdentityFunction
assert _test_args(IdentityFunction())
def test_sympy__functions__elementary__miscellaneous__Max():
from sympy.functions.elementary.miscellaneous import Max
assert _test_args(Max(x, 2))
def test_sympy__functions__elementary__miscellaneous__Min():
from sympy.functions.elementary.miscellaneous import Min
assert _test_args(Min(x, 2))
@SKIP("abstract class")
def test_sympy__functions__elementary__miscellaneous__MinMaxBase():
pass
def test_sympy__functions__elementary__piecewise__ExprCondPair():
from sympy.functions.elementary.piecewise import ExprCondPair
assert _test_args(ExprCondPair(1, True))
def test_sympy__functions__elementary__piecewise__Piecewise():
from sympy.functions.elementary.piecewise import Piecewise
assert _test_args(Piecewise((1, x >= 0), (0, True)))
@SKIP("abstract class")
def test_sympy__functions__elementary__trigonometric__TrigonometricFunction():
pass
@SKIP("abstract class")
def test_sympy__functions__elementary__trigonometric__ReciprocalTrigonometricFunction():
pass
@SKIP("abstract class")
def test_sympy__functions__elementary__trigonometric__InverseTrigonometricFunction():
pass
def test_sympy__functions__elementary__trigonometric__acos():
from sympy.functions.elementary.trigonometric import acos
assert _test_args(acos(2))
def test_sympy__functions__elementary__trigonometric__acot():
from sympy.functions.elementary.trigonometric import acot
assert _test_args(acot(2))
def test_sympy__functions__elementary__trigonometric__asin():
from sympy.functions.elementary.trigonometric import asin
assert _test_args(asin(2))
def test_sympy__functions__elementary__trigonometric__asec():
from sympy.functions.elementary.trigonometric import asec
assert _test_args(asec(2))
def test_sympy__functions__elementary__trigonometric__acsc():
from sympy.functions.elementary.trigonometric import acsc
assert _test_args(acsc(2))
def test_sympy__functions__elementary__trigonometric__atan():
from sympy.functions.elementary.trigonometric import atan
assert _test_args(atan(2))
def test_sympy__functions__elementary__trigonometric__atan2():
from sympy.functions.elementary.trigonometric import atan2
assert _test_args(atan2(2, 3))
def test_sympy__functions__elementary__trigonometric__cos():
from sympy.functions.elementary.trigonometric import cos
assert _test_args(cos(2))
def test_sympy__functions__elementary__trigonometric__csc():
from sympy.functions.elementary.trigonometric import csc
assert _test_args(csc(2))
def test_sympy__functions__elementary__trigonometric__cot():
from sympy.functions.elementary.trigonometric import cot
assert _test_args(cot(2))
def test_sympy__functions__elementary__trigonometric__sin():
assert _test_args(sin(2))
def test_sympy__functions__elementary__trigonometric__sec():
from sympy.functions.elementary.trigonometric import sec
assert _test_args(sec(2))
def test_sympy__functions__elementary__trigonometric__tan():
from sympy.functions.elementary.trigonometric import tan
assert _test_args(tan(2))
@SKIP("abstract class")
def test_sympy__functions__special__bessel__BesselBase():
pass
@SKIP("abstract class")
def test_sympy__functions__special__bessel__SphericalBesselBase():
pass
def test_sympy__functions__special__bessel__besseli():
from sympy.functions.special.bessel import besseli
assert _test_args(besseli(x, 1))
def test_sympy__functions__special__bessel__besselj():
from sympy.functions.special.bessel import besselj
assert _test_args(besselj(x, 1))
def test_sympy__functions__special__bessel__besselk():
from sympy.functions.special.bessel import besselk
assert _test_args(besselk(x, 1))
def test_sympy__functions__special__bessel__bessely():
from sympy.functions.special.bessel import bessely
assert _test_args(bessely(x, 1))
def test_sympy__functions__special__bessel__hankel1():
from sympy.functions.special.bessel import hankel1
assert _test_args(hankel1(x, 1))
def test_sympy__functions__special__bessel__hankel2():
from sympy.functions.special.bessel import hankel2
assert _test_args(hankel2(x, 1))
def test_sympy__functions__special__bessel__jn():
from sympy.functions.special.bessel import jn
assert _test_args(jn(0, x))
def test_sympy__functions__special__bessel__yn():
from sympy.functions.special.bessel import yn
assert _test_args(yn(0, x))
@SKIP("abstract class")
def test_sympy__functions__special__bessel__AiryBase():
pass
def test_sympy__functions__special__bessel__airyai():
from sympy.functions.special.bessel import airyai
assert _test_args(airyai(2))
def test_sympy__functions__special__bessel__airybi():
from sympy.functions.special.bessel import airybi
assert _test_args(airybi(2))
def test_sympy__functions__special__bessel__airyaiprime():
from sympy.functions.special.bessel import airyaiprime
assert _test_args(airyaiprime(2))
def test_sympy__functions__special__bessel__airybiprime():
from sympy.functions.special.bessel import airybiprime
assert _test_args(airybiprime(2))
def test_sympy__functions__special__elliptic_integrals__elliptic_k():
from sympy.functions.special.elliptic_integrals import elliptic_k as K
assert _test_args(K(x))
def test_sympy__functions__special__elliptic_integrals__elliptic_f():
from sympy.functions.special.elliptic_integrals import elliptic_f as F
assert _test_args(F(x, y))
def test_sympy__functions__special__elliptic_integrals__elliptic_e():
from sympy.functions.special.elliptic_integrals import elliptic_e as E
assert _test_args(E(x))
assert _test_args(E(x, y))
def test_sympy__functions__special__elliptic_integrals__elliptic_pi():
from sympy.functions.special.elliptic_integrals import elliptic_pi as P
assert _test_args(P(x, y))
assert _test_args(P(x, y, z))
def test_sympy__functions__special__delta_functions__DiracDelta():
from sympy.functions.special.delta_functions import DiracDelta
assert _test_args(DiracDelta(x, 1))
def test_sympy__functions__special__delta_functions__Heaviside():
from sympy.functions.special.delta_functions import Heaviside
assert _test_args(Heaviside(x))
def test_sympy__functions__special__error_functions__erf():
from sympy.functions.special.error_functions import erf
assert _test_args(erf(2))
def test_sympy__functions__special__error_functions__erfc():
from sympy.functions.special.error_functions import erfc
assert _test_args(erfc(2))
def test_sympy__functions__special__error_functions__erfi():
from sympy.functions.special.error_functions import erfi
assert _test_args(erfi(2))
def test_sympy__functions__special__error_functions__erf2():
from sympy.functions.special.error_functions import erf2
assert _test_args(erf2(2, 3))
def test_sympy__functions__special__error_functions__erfinv():
from sympy.functions.special.error_functions import erfinv
assert _test_args(erfinv(2))
def test_sympy__functions__special__error_functions__erfcinv():
from sympy.functions.special.error_functions import erfcinv
assert _test_args(erfcinv(2))
def test_sympy__functions__special__error_functions__erf2inv():
from sympy.functions.special.error_functions import erf2inv
assert _test_args(erf2inv(2, 3))
@SKIP("abstract class")
def test_sympy__functions__special__error_functions__FresnelIntegral():
pass
def test_sympy__functions__special__error_functions__fresnels():
from sympy.functions.special.error_functions import fresnels
assert _test_args(fresnels(2))
def test_sympy__functions__special__error_functions__fresnelc():
from sympy.functions.special.error_functions import fresnelc
assert _test_args(fresnelc(2))
def test_sympy__functions__special__error_functions__erfs():
from sympy.functions.special.error_functions import _erfs
assert _test_args(_erfs(2))
def test_sympy__functions__special__error_functions__Ei():
from sympy.functions.special.error_functions import Ei
assert _test_args(Ei(2))
def test_sympy__functions__special__error_functions__li():
from sympy.functions.special.error_functions import li
assert _test_args(li(2))
def test_sympy__functions__special__error_functions__Li():
from sympy.functions.special.error_functions import Li
assert _test_args(Li(2))
@SKIP("abstract class")
def test_sympy__functions__special__error_functions__TrigonometricIntegral():
pass
def test_sympy__functions__special__error_functions__Si():
from sympy.functions.special.error_functions import Si
assert _test_args(Si(2))
def test_sympy__functions__special__error_functions__Ci():
from sympy.functions.special.error_functions import Ci
assert _test_args(Ci(2))
def test_sympy__functions__special__error_functions__Shi():
from sympy.functions.special.error_functions import Shi
assert _test_args(Shi(2))
def test_sympy__functions__special__error_functions__Chi():
from sympy.functions.special.error_functions import Chi
assert _test_args(Chi(2))
def test_sympy__functions__special__error_functions__expint():
from sympy.functions.special.error_functions import expint
assert _test_args(expint(y, x))
def test_sympy__functions__special__gamma_functions__gamma():
from sympy.functions.special.gamma_functions import gamma
assert _test_args(gamma(x))
def test_sympy__functions__special__gamma_functions__loggamma():
from sympy.functions.special.gamma_functions import loggamma
assert _test_args(loggamma(2))
def test_sympy__functions__special__gamma_functions__lowergamma():
from sympy.functions.special.gamma_functions import lowergamma
assert _test_args(lowergamma(x, 2))
def test_sympy__functions__special__gamma_functions__polygamma():
from sympy.functions.special.gamma_functions import polygamma
assert _test_args(polygamma(x, 2))
def test_sympy__functions__special__gamma_functions__uppergamma():
from sympy.functions.special.gamma_functions import uppergamma
assert _test_args(uppergamma(x, 2))
def test_sympy__functions__special__beta_functions__beta():
from sympy.functions.special.beta_functions import beta
assert _test_args(beta(x, x))
@SKIP("abstract class")
def test_sympy__functions__special__hyper__TupleParametersBase():
pass
@SKIP("abstract class")
def test_sympy__functions__special__hyper__TupleArg():
pass
def test_sympy__functions__special__hyper__hyper():
from sympy.functions.special.hyper import hyper
assert _test_args(hyper([1, 2, 3], [4, 5], x))
def test_sympy__functions__special__hyper__meijerg():
from sympy.functions.special.hyper import meijerg
assert _test_args(meijerg([1, 2, 3], [4, 5], [6], [], x))
@SKIP("abstract class")
def test_sympy__functions__special__hyper__HyperRep():
pass
def test_sympy__functions__special__hyper__HyperRep_power1():
from sympy.functions.special.hyper import HyperRep_power1
assert _test_args(HyperRep_power1(x, y))
def test_sympy__functions__special__hyper__HyperRep_power2():
from sympy.functions.special.hyper import HyperRep_power2
assert _test_args(HyperRep_power2(x, y))
def test_sympy__functions__special__hyper__HyperRep_log1():
from sympy.functions.special.hyper import HyperRep_log1
assert _test_args(HyperRep_log1(x))
def test_sympy__functions__special__hyper__HyperRep_atanh():
from sympy.functions.special.hyper import HyperRep_atanh
assert _test_args(HyperRep_atanh(x))
def test_sympy__functions__special__hyper__HyperRep_asin1():
from sympy.functions.special.hyper import HyperRep_asin1
assert _test_args(HyperRep_asin1(x))
def test_sympy__functions__special__hyper__HyperRep_asin2():
from sympy.functions.special.hyper import HyperRep_asin2
assert _test_args(HyperRep_asin2(x))
def test_sympy__functions__special__hyper__HyperRep_sqrts1():
from sympy.functions.special.hyper import HyperRep_sqrts1
assert _test_args(HyperRep_sqrts1(x, y))
def test_sympy__functions__special__hyper__HyperRep_sqrts2():
from sympy.functions.special.hyper import HyperRep_sqrts2
assert _test_args(HyperRep_sqrts2(x, y))
def test_sympy__functions__special__hyper__HyperRep_log2():
from sympy.functions.special.hyper import HyperRep_log2
assert _test_args(HyperRep_log2(x))
def test_sympy__functions__special__hyper__HyperRep_cosasin():
from sympy.functions.special.hyper import HyperRep_cosasin
assert _test_args(HyperRep_cosasin(x, y))
def test_sympy__functions__special__hyper__HyperRep_sinasin():
from sympy.functions.special.hyper import HyperRep_sinasin
assert _test_args(HyperRep_sinasin(x, y))
@SKIP("abstract class")
def test_sympy__functions__special__polynomials__OrthogonalPolynomial():
pass
def test_sympy__functions__special__polynomials__jacobi():
from sympy.functions.special.polynomials import jacobi
assert _test_args(jacobi(x, 2, 2, 2))
def test_sympy__functions__special__polynomials__gegenbauer():
from sympy.functions.special.polynomials import gegenbauer
assert _test_args(gegenbauer(x, 2, 2))
def test_sympy__functions__special__polynomials__chebyshevt():
from sympy.functions.special.polynomials import chebyshevt
assert _test_args(chebyshevt(x, 2))
def test_sympy__functions__special__polynomials__chebyshevt_root():
from sympy.functions.special.polynomials import chebyshevt_root
assert _test_args(chebyshevt_root(3, 2))
def test_sympy__functions__special__polynomials__chebyshevu():
from sympy.functions.special.polynomials import chebyshevu
assert _test_args(chebyshevu(x, 2))
def test_sympy__functions__special__polynomials__chebyshevu_root():
from sympy.functions.special.polynomials import chebyshevu_root
assert _test_args(chebyshevu_root(3, 2))
def test_sympy__functions__special__polynomials__hermite():
from sympy.functions.special.polynomials import hermite
assert _test_args(hermite(x, 2))
def test_sympy__functions__special__polynomials__legendre():
from sympy.functions.special.polynomials import legendre
assert _test_args(legendre(x, 2))
def test_sympy__functions__special__polynomials__assoc_legendre():
from sympy.functions.special.polynomials import assoc_legendre
assert _test_args(assoc_legendre(x, 0, y))
def test_sympy__functions__special__polynomials__laguerre():
from sympy.functions.special.polynomials import laguerre
assert _test_args(laguerre(x, 2))
def test_sympy__functions__special__polynomials__assoc_laguerre():
from sympy.functions.special.polynomials import assoc_laguerre
assert _test_args(assoc_laguerre(x, 0, y))
def test_sympy__functions__special__spherical_harmonics__Ynm():
from sympy.functions.special.spherical_harmonics import Ynm
assert _test_args(Ynm(1, 1, x, y))
def test_sympy__functions__special__spherical_harmonics__Znm():
from sympy.functions.special.spherical_harmonics import Znm
assert _test_args(Znm(1, 1, x, y))
def test_sympy__functions__special__tensor_functions__LeviCivita():
from sympy.functions.special.tensor_functions import LeviCivita
assert _test_args(LeviCivita(x, y, 2))
def test_sympy__functions__special__tensor_functions__KroneckerDelta():
from sympy.functions.special.tensor_functions import KroneckerDelta
assert _test_args(KroneckerDelta(x, y))
def test_sympy__functions__special__zeta_functions__dirichlet_eta():
from sympy.functions.special.zeta_functions import dirichlet_eta
assert _test_args(dirichlet_eta(x))
def test_sympy__functions__special__zeta_functions__zeta():
from sympy.functions.special.zeta_functions import zeta
assert _test_args(zeta(101))
def test_sympy__functions__special__zeta_functions__lerchphi():
from sympy.functions.special.zeta_functions import lerchphi
assert _test_args(lerchphi(x, y, z))
def test_sympy__functions__special__zeta_functions__polylog():
from sympy.functions.special.zeta_functions import polylog
assert _test_args(polylog(x, y))
def test_sympy__integrals__integrals__Integral():
from sympy.integrals.integrals import Integral
assert _test_args(Integral(2, (x, 0, 1)))
def test_sympy__integrals__risch__NonElementaryIntegral():
from sympy.integrals.risch import NonElementaryIntegral
assert _test_args(NonElementaryIntegral(exp(-x**2), x))
@SKIP("abstract class")
def test_sympy__integrals__transforms__IntegralTransform():
pass
def test_sympy__integrals__transforms__MellinTransform():
from sympy.integrals.transforms import MellinTransform
assert _test_args(MellinTransform(2, x, y))
def test_sympy__integrals__transforms__InverseMellinTransform():
from sympy.integrals.transforms import InverseMellinTransform
assert _test_args(InverseMellinTransform(2, x, y, 0, 1))
def test_sympy__integrals__transforms__LaplaceTransform():
from sympy.integrals.transforms import LaplaceTransform
assert _test_args(LaplaceTransform(2, x, y))
def test_sympy__integrals__transforms__InverseLaplaceTransform():
from sympy.integrals.transforms import InverseLaplaceTransform
assert _test_args(InverseLaplaceTransform(2, x, y, 0))
@SKIP("abstract class")
def test_sympy__integrals__transforms__FourierTypeTransform():
pass
def test_sympy__integrals__transforms__InverseFourierTransform():
from sympy.integrals.transforms import InverseFourierTransform
assert _test_args(InverseFourierTransform(2, x, y))
def test_sympy__integrals__transforms__FourierTransform():
from sympy.integrals.transforms import FourierTransform
assert _test_args(FourierTransform(2, x, y))
@SKIP("abstract class")
def test_sympy__integrals__transforms__SineCosineTypeTransform():
pass
def test_sympy__integrals__transforms__InverseSineTransform():
from sympy.integrals.transforms import InverseSineTransform
assert _test_args(InverseSineTransform(2, x, y))
def test_sympy__integrals__transforms__SineTransform():
from sympy.integrals.transforms import SineTransform
assert _test_args(SineTransform(2, x, y))
def test_sympy__integrals__transforms__InverseCosineTransform():
from sympy.integrals.transforms import InverseCosineTransform
assert _test_args(InverseCosineTransform(2, x, y))
def test_sympy__integrals__transforms__CosineTransform():
from sympy.integrals.transforms import CosineTransform
assert _test_args(CosineTransform(2, x, y))
@SKIP("abstract class")
def test_sympy__integrals__transforms__HankelTypeTransform():
pass
def test_sympy__integrals__transforms__InverseHankelTransform():
from sympy.integrals.transforms import InverseHankelTransform
assert _test_args(InverseHankelTransform(2, x, y, 0))
def test_sympy__integrals__transforms__HankelTransform():
from sympy.integrals.transforms import HankelTransform
assert _test_args(HankelTransform(2, x, y, 0))
@XFAIL
def test_sympy__liealgebras__cartan_type__CartanType_generator():
from sympy.liealgebras.cartan_type import CartanType_generator
assert _test_args(CartanType_generator("A2"))
@XFAIL
def test_sympy__liealgebras__cartan_type__Standard_Cartan():
from sympy.liealgebras.cartan_type import Standard_Cartan
assert _test_args(Standard_Cartan("A", 2))
@XFAIL
def test_sympy__liealgebras__weyl_group__WeylGroup():
from sympy.liealgebras.weyl_group import WeylGroup
assert _test_args(WeylGroup("B4"))
@XFAIL
def test_sympy__liealgebras__root_system__RootSystem():
from sympy.liealgebras.root_system import RootSystem
assert _test_args(RootSystem("A2"))
@XFAIL
def test_sympy__liealgebras__type_a__TypeA():
from sympy.liealgebras.type_a import TypeA
assert _test_args(TypeA(2))
@XFAIL
def test_sympy__liealgebras__type_b__TypeB():
from sympy.liealgebras.type_b import TypeB
assert _test_args(TypeB(4))
@XFAIL
def test_sympy__liealgebras__type_c__TypeC():
from sympy.liealgebras.type_c import TypeC
assert _test_args(TypeC(4))
@XFAIL
def test_sympy__liealgebras__type_d__TypeD():
from sympy.liealgebras.type_d import TypeD
assert _test_args(TypeD(4))
@XFAIL
def test_sympy__liealgebras__type_e__TypeE():
from sympy.liealgebras.type_e import TypeE
assert _test_args(TypeE(6))
@XFAIL
def test_sympy__liealgebras__type_f__TypeF():
from sympy.liealgebras.type_f import TypeF
assert _test_args(TypeF(4))
@XFAIL
def test_sympy__liealgebras__type_g__TypeG():
from sympy.liealgebras.type_g import TypeG
assert _test_args(TypeG(2))
def test_sympy__logic__boolalg__And():
from sympy.logic.boolalg import And
assert _test_args(And(x, y, 2))
@SKIP("abstract class")
def test_sympy__logic__boolalg__Boolean():
pass
def test_sympy__logic__boolalg__BooleanFunction():
from sympy.logic.boolalg import BooleanFunction
assert _test_args(BooleanFunction(1, 2, 3))
@SKIP("abstract class")
def test_sympy__logic__boolalg__BooleanAtom():
pass
def test_sympy__logic__boolalg__BooleanTrue():
from sympy.logic.boolalg import true
assert _test_args(true)
def test_sympy__logic__boolalg__BooleanFalse():
from sympy.logic.boolalg import false
assert _test_args(false)
def test_sympy__logic__boolalg__Equivalent():
from sympy.logic.boolalg import Equivalent
assert _test_args(Equivalent(x, 2))
def test_sympy__logic__boolalg__ITE():
from sympy.logic.boolalg import ITE
assert _test_args(ITE(x, y, 2))
def test_sympy__logic__boolalg__Implies():
from sympy.logic.boolalg import Implies
assert _test_args(Implies(x, y))
def test_sympy__logic__boolalg__Nand():
from sympy.logic.boolalg import Nand
assert _test_args(Nand(x, y, 2))
def test_sympy__logic__boolalg__Nor():
from sympy.logic.boolalg import Nor
assert _test_args(Nor(x, y))
def test_sympy__logic__boolalg__Not():
from sympy.logic.boolalg import Not
assert _test_args(Not(x))
def test_sympy__logic__boolalg__Or():
from sympy.logic.boolalg import Or
assert _test_args(Or(x, y))
def test_sympy__logic__boolalg__Xor():
from sympy.logic.boolalg import Xor
assert _test_args(Xor(x, y, 2))
def test_sympy__matrices__matrices__DeferredVector():
from sympy.matrices.matrices import DeferredVector
assert _test_args(DeferredVector("X"))
@SKIP("abstract class")
def test_sympy__matrices__expressions__matexpr__MatrixBase():
pass
def test_sympy__matrices__immutable__ImmutableMatrix():
from sympy.matrices.immutable import ImmutableMatrix
m = ImmutableMatrix([[1, 2], [3, 4]])
assert _test_args(m)
assert _test_args(Basic(*list(m)))
m = ImmutableMatrix(1, 1, [1])
assert _test_args(m)
assert _test_args(Basic(*list(m)))
m = ImmutableMatrix(2, 2, lambda i, j: 1)
assert m[0, 0] is S.One
m = ImmutableMatrix(2, 2, lambda i, j: 1/(1 + i) + 1/(1 + j))
assert m[1, 1] is S.One # true div. will give 1.0 if i,j not sympified
assert _test_args(m)
assert _test_args(Basic(*list(m)))
def test_sympy__matrices__immutable__ImmutableSparseMatrix():
from sympy.matrices.immutable import ImmutableSparseMatrix
m = ImmutableSparseMatrix([[1, 2], [3, 4]])
assert _test_args(m)
assert _test_args(Basic(*list(m)))
m = ImmutableSparseMatrix(1, 1, {(0, 0): 1})
assert _test_args(m)
assert _test_args(Basic(*list(m)))
m = ImmutableSparseMatrix(1, 1, [1])
assert _test_args(m)
assert _test_args(Basic(*list(m)))
m = ImmutableSparseMatrix(2, 2, lambda i, j: 1)
assert m[0, 0] is S.One
m = ImmutableSparseMatrix(2, 2, lambda i, j: 1/(1 + i) + 1/(1 + j))
assert m[1, 1] is S.One # true div. will give 1.0 if i,j not sympified
assert _test_args(m)
assert _test_args(Basic(*list(m)))
def test_sympy__matrices__expressions__slice__MatrixSlice():
from sympy.matrices.expressions.slice import MatrixSlice
from sympy.matrices.expressions import MatrixSymbol
X = MatrixSymbol('X', 4, 4)
assert _test_args(MatrixSlice(X, (0, 2), (0, 2)))
def test_sympy__matrices__expressions__blockmatrix__BlockDiagMatrix():
from sympy.matrices.expressions.blockmatrix import BlockDiagMatrix
from sympy.matrices.expressions import MatrixSymbol
X = MatrixSymbol('X', x, x)
Y = MatrixSymbol('Y', y, y)
assert _test_args(BlockDiagMatrix(X, Y))
def test_sympy__matrices__expressions__blockmatrix__BlockMatrix():
from sympy.matrices.expressions.blockmatrix import BlockMatrix
from sympy.matrices.expressions import MatrixSymbol, ZeroMatrix
X = MatrixSymbol('X', x, x)
Y = MatrixSymbol('Y', y, y)
Z = MatrixSymbol('Z', x, y)
O = ZeroMatrix(y, x)
assert _test_args(BlockMatrix([[X, Z], [O, Y]]))
def test_sympy__matrices__expressions__inverse__Inverse():
from sympy.matrices.expressions.inverse import Inverse
from sympy.matrices.expressions import MatrixSymbol
assert _test_args(Inverse(MatrixSymbol('A', 3, 3)))
def test_sympy__matrices__expressions__matadd__MatAdd():
from sympy.matrices.expressions.matadd import MatAdd
from sympy.matrices.expressions import MatrixSymbol
X = MatrixSymbol('X', x, y)
Y = MatrixSymbol('Y', x, y)
assert _test_args(MatAdd(X, Y))
def test_sympy__matrices__expressions__matexpr__Identity():
from sympy.matrices.expressions.matexpr import Identity
assert _test_args(Identity(3))
@SKIP("abstract class")
def test_sympy__matrices__expressions__matexpr__MatrixExpr():
pass
def test_sympy__matrices__expressions__matexpr__MatrixElement():
from sympy.matrices.expressions.matexpr import MatrixSymbol, MatrixElement
from sympy import S
assert _test_args(MatrixElement(MatrixSymbol('A', 3, 5), S(2), S(3)))
@XFAIL
def test_sympy__matrices__expressions__matexpr__MatrixSymbol():
from sympy.matrices.expressions.matexpr import MatrixSymbol
assert _test_args(MatrixSymbol('A', 3, 5))
def test_sympy__matrices__expressions__matexpr__ZeroMatrix():
from sympy.matrices.expressions.matexpr import ZeroMatrix
assert _test_args(ZeroMatrix(3, 5))
def test_sympy__matrices__expressions__matmul__MatMul():
from sympy.matrices.expressions.matmul import MatMul
from sympy.matrices.expressions import MatrixSymbol
X = MatrixSymbol('X', x, y)
Y = MatrixSymbol('Y', y, x)
assert _test_args(MatMul(X, Y))
def test_sympy__matrices__expressions__diagonal__DiagonalMatrix():
from sympy.matrices.expressions.diagonal import DiagonalMatrix
from sympy.matrices.expressions import MatrixSymbol
x = MatrixSymbol('x', 10, 1)
assert _test_args(DiagonalMatrix(x))
def test_sympy__matrices__expressions__diagonal__DiagonalOf():
from sympy.matrices.expressions.diagonal import DiagonalOf
from sympy.matrices.expressions import MatrixSymbol
X = MatrixSymbol('x', 10, 10)
assert _test_args(DiagonalOf(X))
def test_sympy__matrices__expressions__hadamard__HadamardProduct():
from sympy.matrices.expressions.hadamard import HadamardProduct
from sympy.matrices.expressions import MatrixSymbol
X = MatrixSymbol('X', x, y)
Y = MatrixSymbol('Y', x, y)
assert _test_args(HadamardProduct(X, Y))
def test_sympy__matrices__expressions__matpow__MatPow():
from sympy.matrices.expressions.matpow import MatPow
from sympy.matrices.expressions import MatrixSymbol
X = MatrixSymbol('X', x, x)
assert _test_args(MatPow(X, 2))
def test_sympy__matrices__expressions__transpose__Transpose():
from sympy.matrices.expressions.transpose import Transpose
from sympy.matrices.expressions import MatrixSymbol
assert _test_args(Transpose(MatrixSymbol('A', 3, 5)))
def test_sympy__matrices__expressions__adjoint__Adjoint():
from sympy.matrices.expressions.adjoint import Adjoint
from sympy.matrices.expressions import MatrixSymbol
assert _test_args(Adjoint(MatrixSymbol('A', 3, 5)))
def test_sympy__matrices__expressions__trace__Trace():
from sympy.matrices.expressions.trace import Trace
from sympy.matrices.expressions import MatrixSymbol
assert _test_args(Trace(MatrixSymbol('A', 3, 3)))
def test_sympy__matrices__expressions__determinant__Determinant():
from sympy.matrices.expressions.determinant import Determinant
from sympy.matrices.expressions import MatrixSymbol
assert _test_args(Determinant(MatrixSymbol('A', 3, 3)))
def test_sympy__matrices__expressions__funcmatrix__FunctionMatrix():
from sympy.matrices.expressions.funcmatrix import FunctionMatrix
from sympy import Lambda, symbols
i, j = symbols('i,j')
assert _test_args(FunctionMatrix(3, 3, Lambda((i, j), i - j) ))
def test_sympy__matrices__expressions__fourier__DFT():
from sympy.matrices.expressions.fourier import DFT
from sympy import S
assert _test_args(DFT(S(2)))
def test_sympy__matrices__expressions__fourier__IDFT():
from sympy.matrices.expressions.fourier import IDFT
from sympy import S
assert _test_args(IDFT(S(2)))
from sympy.matrices.expressions import MatrixSymbol
X = MatrixSymbol('X', 10, 10)
def test_sympy__matrices__expressions__factorizations__LofLU():
from sympy.matrices.expressions.factorizations import LofLU
assert _test_args(LofLU(X))
def test_sympy__matrices__expressions__factorizations__UofLU():
from sympy.matrices.expressions.factorizations import UofLU
assert _test_args(UofLU(X))
def test_sympy__matrices__expressions__factorizations__QofQR():
from sympy.matrices.expressions.factorizations import QofQR
assert _test_args(QofQR(X))
def test_sympy__matrices__expressions__factorizations__RofQR():
from sympy.matrices.expressions.factorizations import RofQR
assert _test_args(RofQR(X))
def test_sympy__matrices__expressions__factorizations__LofCholesky():
from sympy.matrices.expressions.factorizations import LofCholesky
assert _test_args(LofCholesky(X))
def test_sympy__matrices__expressions__factorizations__UofCholesky():
from sympy.matrices.expressions.factorizations import UofCholesky
assert _test_args(UofCholesky(X))
def test_sympy__matrices__expressions__factorizations__EigenVectors():
from sympy.matrices.expressions.factorizations import EigenVectors
assert _test_args(EigenVectors(X))
def test_sympy__matrices__expressions__factorizations__EigenValues():
from sympy.matrices.expressions.factorizations import EigenValues
assert _test_args(EigenValues(X))
def test_sympy__matrices__expressions__factorizations__UofSVD():
from sympy.matrices.expressions.factorizations import UofSVD
assert _test_args(UofSVD(X))
def test_sympy__matrices__expressions__factorizations__VofSVD():
from sympy.matrices.expressions.factorizations import VofSVD
assert _test_args(VofSVD(X))
def test_sympy__matrices__expressions__factorizations__SofSVD():
from sympy.matrices.expressions.factorizations import SofSVD
assert _test_args(SofSVD(X))
@SKIP("abstract class")
def test_sympy__matrices__expressions__factorizations__Factorization():
pass
def test_sympy__physics__vector__frame__CoordinateSym():
from sympy.physics.vector import CoordinateSym
from sympy.physics.vector import ReferenceFrame
assert _test_args(CoordinateSym('R_x', ReferenceFrame('R'), 0))
def test_sympy__physics__paulialgebra__Pauli():
from sympy.physics.paulialgebra import Pauli
assert _test_args(Pauli(1))
def test_sympy__physics__quantum__anticommutator__AntiCommutator():
from sympy.physics.quantum.anticommutator import AntiCommutator
assert _test_args(AntiCommutator(x, y))
def test_sympy__physics__quantum__cartesian__PositionBra3D():
from sympy.physics.quantum.cartesian import PositionBra3D
assert _test_args(PositionBra3D(x, y, z))
def test_sympy__physics__quantum__cartesian__PositionKet3D():
from sympy.physics.quantum.cartesian import PositionKet3D
assert _test_args(PositionKet3D(x, y, z))
def test_sympy__physics__quantum__cartesian__PositionState3D():
from sympy.physics.quantum.cartesian import PositionState3D
assert _test_args(PositionState3D(x, y, z))
def test_sympy__physics__quantum__cartesian__PxBra():
from sympy.physics.quantum.cartesian import PxBra
assert _test_args(PxBra(x, y, z))
def test_sympy__physics__quantum__cartesian__PxKet():
from sympy.physics.quantum.cartesian import PxKet
assert _test_args(PxKet(x, y, z))
def test_sympy__physics__quantum__cartesian__PxOp():
from sympy.physics.quantum.cartesian import PxOp
assert _test_args(PxOp(x, y, z))
def test_sympy__physics__quantum__cartesian__XBra():
from sympy.physics.quantum.cartesian import XBra
assert _test_args(XBra(x))
def test_sympy__physics__quantum__cartesian__XKet():
from sympy.physics.quantum.cartesian import XKet
assert _test_args(XKet(x))
def test_sympy__physics__quantum__cartesian__XOp():
from sympy.physics.quantum.cartesian import XOp
assert _test_args(XOp(x))
def test_sympy__physics__quantum__cartesian__YOp():
from sympy.physics.quantum.cartesian import YOp
assert _test_args(YOp(x))
def test_sympy__physics__quantum__cartesian__ZOp():
from sympy.physics.quantum.cartesian import ZOp
assert _test_args(ZOp(x))
def test_sympy__physics__quantum__cg__CG():
from sympy.physics.quantum.cg import CG
from sympy import S
assert _test_args(CG(S(3)/2, S(3)/2, S(1)/2, -S(1)/2, 1, 1))
def test_sympy__physics__quantum__cg__Wigner3j():
from sympy.physics.quantum.cg import Wigner3j
assert _test_args(Wigner3j(6, 0, 4, 0, 2, 0))
def test_sympy__physics__quantum__cg__Wigner6j():
from sympy.physics.quantum.cg import Wigner6j
assert _test_args(Wigner6j(1, 2, 3, 2, 1, 2))
def test_sympy__physics__quantum__cg__Wigner9j():
from sympy.physics.quantum.cg import Wigner9j
assert _test_args(Wigner9j(2, 1, 1, S(3)/2, S(1)/2, 1, S(1)/2, S(1)/2, 0))
def test_sympy__physics__quantum__circuitplot__Mz():
from sympy.physics.quantum.circuitplot import Mz
assert _test_args(Mz(0))
def test_sympy__physics__quantum__circuitplot__Mx():
from sympy.physics.quantum.circuitplot import Mx
assert _test_args(Mx(0))
def test_sympy__physics__quantum__commutator__Commutator():
from sympy.physics.quantum.commutator import Commutator
A, B = symbols('A,B', commutative=False)
assert _test_args(Commutator(A, B))
def test_sympy__physics__quantum__constants__HBar():
from sympy.physics.quantum.constants import HBar
assert _test_args(HBar())
def test_sympy__physics__quantum__dagger__Dagger():
from sympy.physics.quantum.dagger import Dagger
from sympy.physics.quantum.state import Ket
assert _test_args(Dagger(Dagger(Ket('psi'))))
def test_sympy__physics__quantum__gate__CGate():
from sympy.physics.quantum.gate import CGate, Gate
assert _test_args(CGate((0, 1), Gate(2)))
def test_sympy__physics__quantum__gate__CGateS():
from sympy.physics.quantum.gate import CGateS, Gate
assert _test_args(CGateS((0, 1), Gate(2)))
def test_sympy__physics__quantum__gate__CNotGate():
from sympy.physics.quantum.gate import CNotGate
assert _test_args(CNotGate(0, 1))
def test_sympy__physics__quantum__gate__Gate():
from sympy.physics.quantum.gate import Gate
assert _test_args(Gate(0))
def test_sympy__physics__quantum__gate__HadamardGate():
from sympy.physics.quantum.gate import HadamardGate
assert _test_args(HadamardGate(0))
def test_sympy__physics__quantum__gate__IdentityGate():
from sympy.physics.quantum.gate import IdentityGate
assert _test_args(IdentityGate(0))
def test_sympy__physics__quantum__gate__OneQubitGate():
from sympy.physics.quantum.gate import OneQubitGate
assert _test_args(OneQubitGate(0))
def test_sympy__physics__quantum__gate__PhaseGate():
from sympy.physics.quantum.gate import PhaseGate
assert _test_args(PhaseGate(0))
def test_sympy__physics__quantum__gate__SwapGate():
from sympy.physics.quantum.gate import SwapGate
assert _test_args(SwapGate(0, 1))
def test_sympy__physics__quantum__gate__TGate():
from sympy.physics.quantum.gate import TGate
assert _test_args(TGate(0))
def test_sympy__physics__quantum__gate__TwoQubitGate():
from sympy.physics.quantum.gate import TwoQubitGate
assert _test_args(TwoQubitGate(0))
def test_sympy__physics__quantum__gate__UGate():
from sympy.physics.quantum.gate import UGate
from sympy.matrices.immutable import ImmutableMatrix
from sympy import Integer, Tuple
assert _test_args(
UGate(Tuple(Integer(1)), ImmutableMatrix([[1, 0], [0, 2]])))
def test_sympy__physics__quantum__gate__XGate():
from sympy.physics.quantum.gate import XGate
assert _test_args(XGate(0))
def test_sympy__physics__quantum__gate__YGate():
from sympy.physics.quantum.gate import YGate
assert _test_args(YGate(0))
def test_sympy__physics__quantum__gate__ZGate():
from sympy.physics.quantum.gate import ZGate
assert _test_args(ZGate(0))
@SKIP("TODO: sympy.physics")
def test_sympy__physics__quantum__grover__OracleGate():
from sympy.physics.quantum.grover import OracleGate
assert _test_args(OracleGate())
def test_sympy__physics__quantum__grover__WGate():
from sympy.physics.quantum.grover import WGate
assert _test_args(WGate(1))
def test_sympy__physics__quantum__hilbert__ComplexSpace():
from sympy.physics.quantum.hilbert import ComplexSpace
assert _test_args(ComplexSpace(x))
def test_sympy__physics__quantum__hilbert__DirectSumHilbertSpace():
from sympy.physics.quantum.hilbert import DirectSumHilbertSpace, ComplexSpace, FockSpace
c = ComplexSpace(2)
f = FockSpace()
assert _test_args(DirectSumHilbertSpace(c, f))
def test_sympy__physics__quantum__hilbert__FockSpace():
from sympy.physics.quantum.hilbert import FockSpace
assert _test_args(FockSpace())
def test_sympy__physics__quantum__hilbert__HilbertSpace():
from sympy.physics.quantum.hilbert import HilbertSpace
assert _test_args(HilbertSpace())
def test_sympy__physics__quantum__hilbert__L2():
from sympy.physics.quantum.hilbert import L2
from sympy import oo, Interval
assert _test_args(L2(Interval(0, oo)))
def test_sympy__physics__quantum__hilbert__TensorPowerHilbertSpace():
from sympy.physics.quantum.hilbert import TensorPowerHilbertSpace, FockSpace
f = FockSpace()
assert _test_args(TensorPowerHilbertSpace(f, 2))
def test_sympy__physics__quantum__hilbert__TensorProductHilbertSpace():
from sympy.physics.quantum.hilbert import TensorProductHilbertSpace, FockSpace, ComplexSpace
c = ComplexSpace(2)
f = FockSpace()
assert _test_args(TensorProductHilbertSpace(f, c))
def test_sympy__physics__quantum__innerproduct__InnerProduct():
from sympy.physics.quantum import Bra, Ket, InnerProduct
b = Bra('b')
k = Ket('k')
assert _test_args(InnerProduct(b, k))
def test_sympy__physics__quantum__operator__DifferentialOperator():
from sympy.physics.quantum.operator import DifferentialOperator
from sympy import Derivative, Function
f = Function('f')
assert _test_args(DifferentialOperator(1/x*Derivative(f(x), x), f(x)))
def test_sympy__physics__quantum__operator__HermitianOperator():
from sympy.physics.quantum.operator import HermitianOperator
assert _test_args(HermitianOperator('H'))
def test_sympy__physics__quantum__operator__IdentityOperator():
from sympy.physics.quantum.operator import IdentityOperator
assert _test_args(IdentityOperator(5))
def test_sympy__physics__quantum__operator__Operator():
from sympy.physics.quantum.operator import Operator
assert _test_args(Operator('A'))
def test_sympy__physics__quantum__operator__OuterProduct():
from sympy.physics.quantum.operator import OuterProduct
from sympy.physics.quantum import Ket, Bra
b = Bra('b')
k = Ket('k')
assert _test_args(OuterProduct(k, b))
def test_sympy__physics__quantum__operator__UnitaryOperator():
from sympy.physics.quantum.operator import UnitaryOperator
assert _test_args(UnitaryOperator('U'))
def test_sympy__physics__quantum__piab__PIABBra():
from sympy.physics.quantum.piab import PIABBra
assert _test_args(PIABBra('B'))
def test_sympy__physics__quantum__boson__BosonOp():
from sympy.physics.quantum.boson import BosonOp
assert _test_args(BosonOp('a'))
assert _test_args(BosonOp('a', False))
def test_sympy__physics__quantum__boson__BosonFockKet():
from sympy.physics.quantum.boson import BosonFockKet
assert _test_args(BosonFockKet(1))
def test_sympy__physics__quantum__boson__BosonFockBra():
from sympy.physics.quantum.boson import BosonFockBra
assert _test_args(BosonFockBra(1))
def test_sympy__physics__quantum__boson__BosonCoherentKet():
from sympy.physics.quantum.boson import BosonCoherentKet
assert _test_args(BosonCoherentKet(1))
def test_sympy__physics__quantum__boson__BosonCoherentBra():
from sympy.physics.quantum.boson import BosonCoherentBra
assert _test_args(BosonCoherentBra(1))
def test_sympy__physics__quantum__fermion__FermionOp():
from sympy.physics.quantum.fermion import FermionOp
assert _test_args(FermionOp('c'))
assert _test_args(FermionOp('c', False))
def test_sympy__physics__quantum__fermion__FermionFockKet():
from sympy.physics.quantum.fermion import FermionFockKet
assert _test_args(FermionFockKet(1))
def test_sympy__physics__quantum__fermion__FermionFockBra():
from sympy.physics.quantum.fermion import FermionFockBra
assert _test_args(FermionFockBra(1))
def test_sympy__physics__quantum__pauli__SigmaOpBase():
from sympy.physics.quantum.pauli import SigmaOpBase
assert _test_args(SigmaOpBase())
def test_sympy__physics__quantum__pauli__SigmaX():
from sympy.physics.quantum.pauli import SigmaX
assert _test_args(SigmaX())
def test_sympy__physics__quantum__pauli__SigmaY():
from sympy.physics.quantum.pauli import SigmaY
assert _test_args(SigmaY())
def test_sympy__physics__quantum__pauli__SigmaZ():
from sympy.physics.quantum.pauli import SigmaZ
assert _test_args(SigmaZ())
def test_sympy__physics__quantum__pauli__SigmaMinus():
from sympy.physics.quantum.pauli import SigmaMinus
assert _test_args(SigmaMinus())
def test_sympy__physics__quantum__pauli__SigmaPlus():
from sympy.physics.quantum.pauli import SigmaPlus
assert _test_args(SigmaPlus())
def test_sympy__physics__quantum__pauli__SigmaZKet():
from sympy.physics.quantum.pauli import SigmaZKet
assert _test_args(SigmaZKet(0))
def test_sympy__physics__quantum__pauli__SigmaZBra():
from sympy.physics.quantum.pauli import SigmaZBra
assert _test_args(SigmaZBra(0))
def test_sympy__physics__quantum__piab__PIABHamiltonian():
from sympy.physics.quantum.piab import PIABHamiltonian
assert _test_args(PIABHamiltonian('P'))
def test_sympy__physics__quantum__piab__PIABKet():
from sympy.physics.quantum.piab import PIABKet
assert _test_args(PIABKet('K'))
def test_sympy__physics__quantum__qexpr__QExpr():
from sympy.physics.quantum.qexpr import QExpr
assert _test_args(QExpr(0))
def test_sympy__physics__quantum__qft__Fourier():
from sympy.physics.quantum.qft import Fourier
assert _test_args(Fourier(0, 1))
def test_sympy__physics__quantum__qft__IQFT():
from sympy.physics.quantum.qft import IQFT
assert _test_args(IQFT(0, 1))
def test_sympy__physics__quantum__qft__QFT():
from sympy.physics.quantum.qft import QFT
assert _test_args(QFT(0, 1))
def test_sympy__physics__quantum__qft__RkGate():
from sympy.physics.quantum.qft import RkGate
assert _test_args(RkGate(0, 1))
def test_sympy__physics__quantum__qubit__IntQubit():
from sympy.physics.quantum.qubit import IntQubit
assert _test_args(IntQubit(0))
def test_sympy__physics__quantum__qubit__IntQubitBra():
from sympy.physics.quantum.qubit import IntQubitBra
assert _test_args(IntQubitBra(0))
def test_sympy__physics__quantum__qubit__IntQubitState():
from sympy.physics.quantum.qubit import IntQubitState, QubitState
assert _test_args(IntQubitState(QubitState(0, 1)))
def test_sympy__physics__quantum__qubit__Qubit():
from sympy.physics.quantum.qubit import Qubit
assert _test_args(Qubit(0, 0, 0))
def test_sympy__physics__quantum__qubit__QubitBra():
from sympy.physics.quantum.qubit import QubitBra
assert _test_args(QubitBra('1', 0))
def test_sympy__physics__quantum__qubit__QubitState():
from sympy.physics.quantum.qubit import QubitState
assert _test_args(QubitState(0, 1))
def test_sympy__physics__quantum__density__Density():
from sympy.physics.quantum.density import Density
from sympy.physics.quantum.state import Ket
assert _test_args(Density([Ket(0), 0.5], [Ket(1), 0.5]))
@SKIP("TODO: sympy.physics.quantum.shor: Cmod Not Implemented")
def test_sympy__physics__quantum__shor__CMod():
from sympy.physics.quantum.shor import CMod
assert _test_args(CMod())
def test_sympy__physics__quantum__spin__CoupledSpinState():
from sympy.physics.quantum.spin import CoupledSpinState
assert _test_args(CoupledSpinState(1, 0, (1, 1)))
assert _test_args(CoupledSpinState(1, 0, (1, S(1)/2, S(1)/2)))
assert _test_args(CoupledSpinState(
1, 0, (1, S(1)/2, S(1)/2), ((2, 3, S(1)/2), (1, 2, 1)) ))
j, m, j1, j2, j3, j12, x = symbols('j m j1:4 j12 x')
assert CoupledSpinState(
j, m, (j1, j2, j3)).subs(j2, x) == CoupledSpinState(j, m, (j1, x, j3))
assert CoupledSpinState(j, m, (j1, j2, j3), ((1, 3, j12), (1, 2, j)) ).subs(j12, x) == \
CoupledSpinState(j, m, (j1, j2, j3), ((1, 3, x), (1, 2, j)) )
def test_sympy__physics__quantum__spin__J2Op():
from sympy.physics.quantum.spin import J2Op
assert _test_args(J2Op('J'))
def test_sympy__physics__quantum__spin__JminusOp():
from sympy.physics.quantum.spin import JminusOp
assert _test_args(JminusOp('J'))
def test_sympy__physics__quantum__spin__JplusOp():
from sympy.physics.quantum.spin import JplusOp
assert _test_args(JplusOp('J'))
def test_sympy__physics__quantum__spin__JxBra():
from sympy.physics.quantum.spin import JxBra
assert _test_args(JxBra(1, 0))
def test_sympy__physics__quantum__spin__JxBraCoupled():
from sympy.physics.quantum.spin import JxBraCoupled
assert _test_args(JxBraCoupled(1, 0, (1, 1)))
def test_sympy__physics__quantum__spin__JxKet():
from sympy.physics.quantum.spin import JxKet
assert _test_args(JxKet(1, 0))
def test_sympy__physics__quantum__spin__JxKetCoupled():
from sympy.physics.quantum.spin import JxKetCoupled
assert _test_args(JxKetCoupled(1, 0, (1, 1)))
def test_sympy__physics__quantum__spin__JxOp():
from sympy.physics.quantum.spin import JxOp
assert _test_args(JxOp('J'))
def test_sympy__physics__quantum__spin__JyBra():
from sympy.physics.quantum.spin import JyBra
assert _test_args(JyBra(1, 0))
def test_sympy__physics__quantum__spin__JyBraCoupled():
from sympy.physics.quantum.spin import JyBraCoupled
assert _test_args(JyBraCoupled(1, 0, (1, 1)))
def test_sympy__physics__quantum__spin__JyKet():
from sympy.physics.quantum.spin import JyKet
assert _test_args(JyKet(1, 0))
def test_sympy__physics__quantum__spin__JyKetCoupled():
from sympy.physics.quantum.spin import JyKetCoupled
assert _test_args(JyKetCoupled(1, 0, (1, 1)))
def test_sympy__physics__quantum__spin__JyOp():
from sympy.physics.quantum.spin import JyOp
assert _test_args(JyOp('J'))
def test_sympy__physics__quantum__spin__JzBra():
from sympy.physics.quantum.spin import JzBra
assert _test_args(JzBra(1, 0))
def test_sympy__physics__quantum__spin__JzBraCoupled():
from sympy.physics.quantum.spin import JzBraCoupled
assert _test_args(JzBraCoupled(1, 0, (1, 1)))
def test_sympy__physics__quantum__spin__JzKet():
from sympy.physics.quantum.spin import JzKet
assert _test_args(JzKet(1, 0))
def test_sympy__physics__quantum__spin__JzKetCoupled():
from sympy.physics.quantum.spin import JzKetCoupled
assert _test_args(JzKetCoupled(1, 0, (1, 1)))
def test_sympy__physics__quantum__spin__JzOp():
from sympy.physics.quantum.spin import JzOp
assert _test_args(JzOp('J'))
def test_sympy__physics__quantum__spin__Rotation():
from sympy.physics.quantum.spin import Rotation
from sympy import pi
assert _test_args(Rotation(pi, 0, pi/2))
def test_sympy__physics__quantum__spin__SpinState():
from sympy.physics.quantum.spin import SpinState
assert _test_args(SpinState(1, 0))
def test_sympy__physics__quantum__spin__WignerD():
from sympy.physics.quantum.spin import WignerD
assert _test_args(WignerD(0, 1, 2, 3, 4, 5))
def test_sympy__physics__quantum__state__Bra():
from sympy.physics.quantum.state import Bra
assert _test_args(Bra(0))
def test_sympy__physics__quantum__state__BraBase():
from sympy.physics.quantum.state import BraBase
assert _test_args(BraBase(0))
def test_sympy__physics__quantum__state__Ket():
from sympy.physics.quantum.state import Ket
assert _test_args(Ket(0))
def test_sympy__physics__quantum__state__KetBase():
from sympy.physics.quantum.state import KetBase
assert _test_args(KetBase(0))
def test_sympy__physics__quantum__state__State():
from sympy.physics.quantum.state import State
assert _test_args(State(0))
def test_sympy__physics__quantum__state__StateBase():
from sympy.physics.quantum.state import StateBase
assert _test_args(StateBase(0))
def test_sympy__physics__quantum__state__TimeDepBra():
from sympy.physics.quantum.state import TimeDepBra
assert _test_args(TimeDepBra('psi', 't'))
def test_sympy__physics__quantum__state__TimeDepKet():
from sympy.physics.quantum.state import TimeDepKet
assert _test_args(TimeDepKet('psi', 't'))
def test_sympy__physics__quantum__state__TimeDepState():
from sympy.physics.quantum.state import TimeDepState
assert _test_args(TimeDepState('psi', 't'))
def test_sympy__physics__quantum__state__Wavefunction():
from sympy.physics.quantum.state import Wavefunction
from sympy.functions import sin
from sympy import Piecewise, pi
n = 1
L = 1
g = Piecewise((0, x < 0), (0, x > L), (sqrt(2//L)*sin(n*pi*x/L), True))
assert _test_args(Wavefunction(g, x))
def test_sympy__physics__quantum__tensorproduct__TensorProduct():
from sympy.physics.quantum.tensorproduct import TensorProduct
assert _test_args(TensorProduct(x, y))
def test_sympy__physics__quantum__identitysearch__GateIdentity():
from sympy.physics.quantum.gate import X
from sympy.physics.quantum.identitysearch import GateIdentity
assert _test_args(GateIdentity(X(0), X(0)))
def test_sympy__physics__quantum__sho1d__SHOOp():
from sympy.physics.quantum.sho1d import SHOOp
assert _test_args(SHOOp('a'))
def test_sympy__physics__quantum__sho1d__RaisingOp():
from sympy.physics.quantum.sho1d import RaisingOp
assert _test_args(RaisingOp('a'))
def test_sympy__physics__quantum__sho1d__LoweringOp():
from sympy.physics.quantum.sho1d import LoweringOp
assert _test_args(LoweringOp('a'))
def test_sympy__physics__quantum__sho1d__NumberOp():
from sympy.physics.quantum.sho1d import NumberOp
assert _test_args(NumberOp('N'))
def test_sympy__physics__quantum__sho1d__Hamiltonian():
from sympy.physics.quantum.sho1d import Hamiltonian
assert _test_args(Hamiltonian('H'))
def test_sympy__physics__quantum__sho1d__SHOState():
from sympy.physics.quantum.sho1d import SHOState
assert _test_args(SHOState(0))
def test_sympy__physics__quantum__sho1d__SHOKet():
from sympy.physics.quantum.sho1d import SHOKet
assert _test_args(SHOKet(0))
def test_sympy__physics__quantum__sho1d__SHOBra():
from sympy.physics.quantum.sho1d import SHOBra
assert _test_args(SHOBra(0))
def test_sympy__physics__secondquant__AnnihilateBoson():
from sympy.physics.secondquant import AnnihilateBoson
assert _test_args(AnnihilateBoson(0))
def test_sympy__physics__secondquant__AnnihilateFermion():
from sympy.physics.secondquant import AnnihilateFermion
assert _test_args(AnnihilateFermion(0))
@SKIP("abstract class")
def test_sympy__physics__secondquant__Annihilator():
pass
def test_sympy__physics__secondquant__AntiSymmetricTensor():
from sympy.physics.secondquant import AntiSymmetricTensor
i, j = symbols('i j', below_fermi=True)
a, b = symbols('a b', above_fermi=True)
assert _test_args(AntiSymmetricTensor('v', (a, i), (b, j)))
def test_sympy__physics__secondquant__BosonState():
from sympy.physics.secondquant import BosonState
assert _test_args(BosonState((0, 1)))
@SKIP("abstract class")
def test_sympy__physics__secondquant__BosonicOperator():
pass
def test_sympy__physics__secondquant__Commutator():
from sympy.physics.secondquant import Commutator
assert _test_args(Commutator(x, y))
def test_sympy__physics__secondquant__CreateBoson():
from sympy.physics.secondquant import CreateBoson
assert _test_args(CreateBoson(0))
def test_sympy__physics__secondquant__CreateFermion():
from sympy.physics.secondquant import CreateFermion
assert _test_args(CreateFermion(0))
@SKIP("abstract class")
def test_sympy__physics__secondquant__Creator():
pass
def test_sympy__physics__secondquant__Dagger():
from sympy.physics.secondquant import Dagger
from sympy import I
assert _test_args(Dagger(2*I))
def test_sympy__physics__secondquant__FermionState():
from sympy.physics.secondquant import FermionState
assert _test_args(FermionState((0, 1)))
def test_sympy__physics__secondquant__FermionicOperator():
from sympy.physics.secondquant import FermionicOperator
assert _test_args(FermionicOperator(0))
def test_sympy__physics__secondquant__FockState():
from sympy.physics.secondquant import FockState
assert _test_args(FockState((0, 1)))
def test_sympy__physics__secondquant__FockStateBosonBra():
from sympy.physics.secondquant import FockStateBosonBra
assert _test_args(FockStateBosonBra((0, 1)))
def test_sympy__physics__secondquant__FockStateBosonKet():
from sympy.physics.secondquant import FockStateBosonKet
assert _test_args(FockStateBosonKet((0, 1)))
def test_sympy__physics__secondquant__FockStateBra():
from sympy.physics.secondquant import FockStateBra
assert _test_args(FockStateBra((0, 1)))
def test_sympy__physics__secondquant__FockStateFermionBra():
from sympy.physics.secondquant import FockStateFermionBra
assert _test_args(FockStateFermionBra((0, 1)))
def test_sympy__physics__secondquant__FockStateFermionKet():
from sympy.physics.secondquant import FockStateFermionKet
assert _test_args(FockStateFermionKet((0, 1)))
def test_sympy__physics__secondquant__FockStateKet():
from sympy.physics.secondquant import FockStateKet
assert _test_args(FockStateKet((0, 1)))
def test_sympy__physics__secondquant__InnerProduct():
from sympy.physics.secondquant import InnerProduct
from sympy.physics.secondquant import FockStateKet, FockStateBra
assert _test_args(InnerProduct(FockStateBra((0, 1)), FockStateKet((0, 1))))
def test_sympy__physics__secondquant__NO():
from sympy.physics.secondquant import NO, F, Fd
assert _test_args(NO(Fd(x)*F(y)))
def test_sympy__physics__secondquant__PermutationOperator():
from sympy.physics.secondquant import PermutationOperator
assert _test_args(PermutationOperator(0, 1))
def test_sympy__physics__secondquant__SqOperator():
from sympy.physics.secondquant import SqOperator
assert _test_args(SqOperator(0))
def test_sympy__physics__secondquant__TensorSymbol():
from sympy.physics.secondquant import TensorSymbol
assert _test_args(TensorSymbol(x))
def test_sympy__physics__units__Unit():
from sympy.physics.units import Unit
assert _test_args(Unit("meter", "m"))
def test_sympy__physics__unitsystems__dimensions__Dimension():
from sympy.physics.unitsystems.dimensions import Dimension
assert _test_args(Dimension(name="length", symbol="L", length=1))
def test_sympy__physics__unitsystems__quantities__Quantity():
from sympy.physics.unitsystems.quantities import Quantity
from sympy.physics.unitsystems.systems import mks
assert _test_args(Quantity(10, mks["m"]))
def test_sympy__physics__unitsystems__units__Constant():
from sympy.physics.unitsystems.units import Constant
from sympy.physics.unitsystems.dimensions import Dimension
length = Dimension(length=1)
assert _test_args(Constant(length, abbrev="u", factor=10))
def test_sympy__physics__unitsystems__units__Unit():
from sympy.physics.unitsystems.units import Unit
from sympy.physics.unitsystems.dimensions import Dimension
length = Dimension(length=1)
assert _test_args(Unit(length, abbrev="u", factor=10))
def test_sympy__core__numbers__AlgebraicNumber():
from sympy.core.numbers import AlgebraicNumber
assert _test_args(AlgebraicNumber(sqrt(2), [1, 2, 3]))
def test_sympy__polys__polytools__GroebnerBasis():
from sympy.polys.polytools import GroebnerBasis
assert _test_args(GroebnerBasis([x, y, z], x, y, z))
def test_sympy__polys__polytools__Poly():
from sympy.polys.polytools import Poly
assert _test_args(Poly(2, x, y))
def test_sympy__polys__polytools__PurePoly():
from sympy.polys.polytools import PurePoly
assert _test_args(PurePoly(2, x, y))
def test_sympy__polys__rootoftools__RootOf():
from sympy.polys.rootoftools import RootOf
assert _test_args(RootOf(x**3 + x + 1, 0))
def test_sympy__polys__rootoftools__RootSum():
from sympy.polys.rootoftools import RootSum
assert _test_args(RootSum(x**3 + x + 1, sin))
def test_sympy__series__limits__Limit():
from sympy.series.limits import Limit
assert _test_args(Limit(x, x, 0, dir='-'))
def test_sympy__series__order__Order():
from sympy.series.order import Order
assert _test_args(Order(1, x, y))
def test_sympy__simplify__hyperexpand__Hyper_Function():
from sympy.simplify.hyperexpand import Hyper_Function
assert _test_args(Hyper_Function([2], [1]))
def test_sympy__simplify__hyperexpand__G_Function():
from sympy.simplify.hyperexpand import G_Function
assert _test_args(G_Function([2], [1], [], []))
def test_sympy__tensor__indexed__Idx():
from sympy.tensor.indexed import Idx
assert _test_args(Idx('test'))
assert _test_args(Idx(1, (0, 10)))
def test_sympy__tensor__indexed__Indexed():
from sympy.tensor.indexed import Indexed, Idx
assert _test_args(Indexed('A', Idx('i'), Idx('j')))
def test_sympy__tensor__indexed__IndexedBase():
from sympy.tensor.indexed import IndexedBase
assert _test_args(IndexedBase('A', shape=(x, y)))
assert _test_args(IndexedBase('A', 1))
assert _test_args(IndexedBase('A')[0, 1])
@XFAIL
def test_sympy__physics__hep__gamma_matrices__GammaMatrixHead():
# This test fails, this class can be reconstructed from the *args
# of an instance using `TensorHead(*args)`
from sympy.physics.hep.gamma_matrices import GammaMatrixHead, Lorentz
from sympy.tensor.tensor import tensor_indices
i = tensor_indices('i', Lorentz)
assert _test_args(GammaMatrixHead())
def test_sympy__tensor__tensor__TensorIndexType():
from sympy.tensor.tensor import TensorIndexType
assert _test_args(TensorIndexType('Lorentz', metric=False))
def test_sympy__tensor__tensor__TensorSymmetry():
from sympy.tensor.tensor import TensorSymmetry, get_symmetric_group_sgs
assert _test_args(TensorSymmetry(get_symmetric_group_sgs(2)))
def test_sympy__tensor__tensor__TensorType():
from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, get_symmetric_group_sgs, TensorType
Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')
sym = TensorSymmetry(get_symmetric_group_sgs(1))
assert _test_args(TensorType([Lorentz], sym))
def test_sympy__tensor__tensor__TensorHead():
from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, TensorHead
Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')
sym = TensorSymmetry(get_symmetric_group_sgs(1))
S1 = TensorType([Lorentz], sym)
assert _test_args(TensorHead('p', S1, 0))
def test_sympy__tensor__tensor__TensorIndex():
from sympy.tensor.tensor import TensorIndexType, TensorIndex
Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')
assert _test_args(TensorIndex('i', Lorentz))
@SKIP("abstract class")
def test_sympy__tensor__tensor__TensExpr():
pass
def test_sympy__tensor__tensor__TensAdd():
from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensAdd
Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')
a, b = tensor_indices('a,b', Lorentz)
sym = TensorSymmetry(get_symmetric_group_sgs(1))
S1 = TensorType([Lorentz], sym)
p, q = S1('p,q')
t1 = p(a)
t2 = q(a)
assert _test_args(TensAdd(t1, t2))
def test_sympy__tensor__tensor__Tensor():
from sympy.core import S
from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensMul, TIDS
Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')
a, b = tensor_indices('a,b', Lorentz)
sym = TensorSymmetry(get_symmetric_group_sgs(1))
S1 = TensorType([Lorentz], sym)
p = S1('p')
assert _test_args(p(a))
def test_sympy__tensor__tensor__TensMul():
from sympy.core import S
from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensMul, TIDS
Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')
a, b = tensor_indices('a,b', Lorentz)
sym = TensorSymmetry(get_symmetric_group_sgs(1))
S1 = TensorType([Lorentz], sym)
p = S1('p')
q = S1('q')
assert _test_args(3*p(a)*q(b))
def test_as_coeff_add():
assert (7, (3*x, 4*x**2)) == (7 + 3*x + 4*x**2).as_coeff_add()
def test_sympy__geometry__curve__Curve():
from sympy.geometry.curve import Curve
assert _test_args(Curve((x, 1), (x, 0, 1)))
def test_sympy__geometry__point__Point():
from sympy.geometry.point import Point
assert _test_args(Point(0, 1))
def test_sympy__geometry__point3d__Point3D():
from sympy.geometry.point3d import Point3D
assert _test_args(Point3D(0, 1, 2))
def test_sympy__geometry__ellipse__Ellipse():
from sympy.geometry.ellipse import Ellipse
assert _test_args(Ellipse((0, 1), 2, 3))
def test_sympy__geometry__ellipse__Circle():
from sympy.geometry.ellipse import Circle
assert _test_args(Circle((0, 1), 2))
@SKIP("abstract class")
def test_sympy__geometry__line__LinearEntity():
pass
def test_sympy__geometry__line__Line():
from sympy.geometry.line import Line
assert _test_args(Line((0, 1), (2, 3)))
def test_sympy__geometry__line__Ray():
from sympy.geometry.line import Ray
assert _test_args(Ray((0, 1), (2, 3)))
def test_sympy__geometry__line__Segment():
from sympy.geometry.line import Segment
assert _test_args(Segment((0, 1), (2, 3)))
@SKIP("abstract class")
def test_sympy__geometry__line3d__LinearEntity3D():
pass
def test_sympy__geometry__line3d__Line3D():
from sympy.geometry.line3d import Line3D
assert _test_args(Line3D((0, 1, 1), (2, 3, 4)))
def test_sympy__geometry__line3d__Segment3D():
from sympy.geometry.line3d import Segment3D
assert _test_args(Segment3D((0, 1, 1), (2, 3, 4)))
def test_sympy__geometry__line3d__Ray3D():
from sympy.geometry.line3d import Ray3D
assert _test_args(Ray3D((0, 1, 1), (2, 3, 4)))
def test_sympy__geometry__plane__Plane():
from sympy.geometry.plane import Plane
assert _test_args(Plane((1, 1, 1), (-3, 4, -2), (1, 2, 3)))
def test_sympy__geometry__polygon__Polygon():
from sympy.geometry.polygon import Polygon
assert _test_args(Polygon((0, 1), (2, 3), (4, 5), (6, 7)))
def test_sympy__geometry__polygon__RegularPolygon():
from sympy.geometry.polygon import RegularPolygon
assert _test_args(RegularPolygon((0, 1), 2, 3, 4))
def test_sympy__geometry__polygon__Triangle():
from sympy.geometry.polygon import Triangle
assert _test_args(Triangle((0, 1), (2, 3), (4, 5)))
def test_sympy__geometry__entity__GeometryEntity():
from sympy.geometry.entity import GeometryEntity
from sympy.geometry.point import Point
assert _test_args(GeometryEntity(Point(1, 0), 1, [1, 2]))
def test_sympy__diffgeom__diffgeom__Manifold():
from sympy.diffgeom import Manifold
assert _test_args(Manifold('name', 3))
def test_sympy__diffgeom__diffgeom__Patch():
from sympy.diffgeom import Manifold, Patch
assert _test_args(Patch('name', Manifold('name', 3)))
def test_sympy__diffgeom__diffgeom__CoordSystem():
from sympy.diffgeom import Manifold, Patch, CoordSystem
assert _test_args(CoordSystem('name', Patch('name', Manifold('name', 3))))
@XFAIL
def test_sympy__diffgeom__diffgeom__Point():
from sympy.diffgeom import Manifold, Patch, CoordSystem, Point
assert _test_args(Point(
CoordSystem('name', Patch('name', Manifold('name', 3))), [x, y]))
def test_sympy__diffgeom__diffgeom__BaseScalarField():
from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField
cs = CoordSystem('name', Patch('name', Manifold('name', 3)))
assert _test_args(BaseScalarField(cs, 0))
def test_sympy__diffgeom__diffgeom__BaseVectorField():
from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorField
cs = CoordSystem('name', Patch('name', Manifold('name', 3)))
assert _test_args(BaseVectorField(cs, 0))
def test_sympy__diffgeom__diffgeom__Differential():
from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential
cs = CoordSystem('name', Patch('name', Manifold('name', 3)))
assert _test_args(Differential(BaseScalarField(cs, 0)))
def test_sympy__diffgeom__diffgeom__Commutator():
from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorField, Commutator
cs = CoordSystem('name', Patch('name', Manifold('name', 3)))
cs1 = CoordSystem('name1', Patch('name', Manifold('name', 3)))
v = BaseVectorField(cs, 0)
v1 = BaseVectorField(cs1, 0)
assert _test_args(Commutator(v, v1))
def test_sympy__diffgeom__diffgeom__TensorProduct():
from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, TensorProduct
cs = CoordSystem('name', Patch('name', Manifold('name', 3)))
d = Differential(BaseScalarField(cs, 0))
assert _test_args(TensorProduct(d, d))
def test_sympy__diffgeom__diffgeom__WedgeProduct():
from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, WedgeProduct
cs = CoordSystem('name', Patch('name', Manifold('name', 3)))
d = Differential(BaseScalarField(cs, 0))
d1 = Differential(BaseScalarField(cs, 1))
assert _test_args(WedgeProduct(d, d1))
def test_sympy__diffgeom__diffgeom__LieDerivative():
from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, BaseVectorField, LieDerivative
cs = CoordSystem('name', Patch('name', Manifold('name', 3)))
d = Differential(BaseScalarField(cs, 0))
v = BaseVectorField(cs, 0)
assert _test_args(LieDerivative(v, d))
@XFAIL
def test_sympy__diffgeom__diffgeom__BaseCovarDerivativeOp():
from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseCovarDerivativeOp
cs = CoordSystem('name', Patch('name', Manifold('name', 3)))
assert _test_args(BaseCovarDerivativeOp(cs, 0, [[[0, ]*3, ]*3, ]*3))
def test_sympy__diffgeom__diffgeom__CovarDerivativeOp():
from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorField, CovarDerivativeOp
cs = CoordSystem('name', Patch('name', Manifold('name', 3)))
v = BaseVectorField(cs, 0)
_test_args(CovarDerivativeOp(v, [[[0, ]*3, ]*3, ]*3))
def test_sympy__categories__baseclasses__Class():
from sympy.categories.baseclasses import Class
assert _test_args(Class())
def test_sympy__categories__baseclasses__Object():
from sympy.categories import Object
assert _test_args(Object("A"))
@XFAIL
def test_sympy__categories__baseclasses__Morphism():
from sympy.categories import Object, Morphism
assert _test_args(Morphism(Object("A"), Object("B")))
def test_sympy__categories__baseclasses__IdentityMorphism():
from sympy.categories import Object, IdentityMorphism
assert _test_args(IdentityMorphism(Object("A")))
def test_sympy__categories__baseclasses__NamedMorphism():
from sympy.categories import Object, NamedMorphism
assert _test_args(NamedMorphism(Object("A"), Object("B"), "f"))
def test_sympy__categories__baseclasses__CompositeMorphism():
from sympy.categories import Object, NamedMorphism, CompositeMorphism
A = Object("A")
B = Object("B")
C = Object("C")
f = NamedMorphism(A, B, "f")
g = NamedMorphism(B, C, "g")
assert _test_args(CompositeMorphism(f, g))
def test_sympy__categories__baseclasses__Diagram():
from sympy.categories import Object, NamedMorphism, Diagram
A = Object("A")
B = Object("B")
C = Object("C")
f = NamedMorphism(A, B, "f")
d = Diagram([f])
assert _test_args(d)
def test_sympy__categories__baseclasses__Category():
from sympy.categories import Object, NamedMorphism, Diagram, Category
A = Object("A")
B = Object("B")
C = Object("C")
f = NamedMorphism(A, B, "f")
g = NamedMorphism(B, C, "g")
d1 = Diagram([f, g])
d2 = Diagram([f])
K = Category("K", commutative_diagrams=[d1, d2])
assert _test_args(K)
def test_sympy__ntheory__factor___totient():
from sympy.ntheory.factor_ import totient
k = symbols('k', integer=True)
t = totient(k)
assert _test_args(t)
def test_sympy__ntheory__factor___divisor_sigma():
from sympy.ntheory.factor_ import divisor_sigma
k = symbols('k', integer=True)
n = symbols('n', integer=True)
t = divisor_sigma(n, k)
assert _test_args(t)
def test_sympy__ntheory__residue_ntheory__mobius():
from sympy.ntheory import mobius
assert _test_args(mobius(2))
def test_sympy__physics__optics__waves__TWave():
from sympy.physics.optics import TWave
A, f, phi = symbols('A, f, phi')
assert _test_args(TWave(A, f, phi))
def test_sympy__physics__optics__gaussopt__BeamParameter():
from sympy.physics.optics import BeamParameter
assert _test_args(BeamParameter(530e-9, 1, w=1e-3))
def test_sympy__physics__optics__medium__Medium():
from sympy.physics.optics import Medium
assert _test_args(Medium('m'))
def test_sympy__printing__codeprinter__Assignment():
from sympy.printing.codeprinter import Assignment
assert _test_args(Assignment(x, y))
def test_sympy__vector__coordsysrect__CoordSysCartesian():
from sympy.vector.coordsysrect import CoordSysCartesian
assert _test_args(CoordSysCartesian('C'))
def test_sympy__vector__point__Point():
from sympy.vector.point import Point
assert _test_args(Point('P'))
def test_sympy__vector__basisdependent__BasisDependent():
from sympy.vector.basisdependent import BasisDependent
#These classes have been created to maintain an OOP hierarchy
#for Vectors and Dyadics. Are NOT meant to be initialized
def test_sympy__vector__basisdependent__BasisDependentMul():
from sympy.vector.basisdependent import BasisDependentMul
#These classes have been created to maintain an OOP hierarchy
#for Vectors and Dyadics. Are NOT meant to be initialized
def test_sympy__vector__basisdependent__BasisDependentAdd():
from sympy.vector.basisdependent import BasisDependentAdd
#These classes have been created to maintain an OOP hierarchy
#for Vectors and Dyadics. Are NOT meant to be initialized
def test_sympy__vector__basisdependent__BasisDependentZero():
from sympy.vector.basisdependent import BasisDependentZero
#These classes have been created to maintain an OOP hierarchy
#for Vectors and Dyadics. Are NOT meant to be initialized
def test_sympy__vector__vector__BaseVector():
from sympy.vector.vector import BaseVector
from sympy.vector.coordsysrect import CoordSysCartesian
C = CoordSysCartesian('C')
assert _test_args(BaseVector('Ci', 0, C, ' ', ' '))
def test_sympy__vector__vector__VectorAdd():
from sympy.vector.vector import VectorAdd, VectorMul
from sympy.vector.coordsysrect import CoordSysCartesian
C = CoordSysCartesian('C')
from sympy.abc import a, b, c, x, y, z
v1 = a*C.i + b*C.j + c*C.k
v2 = x*C.i + y*C.j + z*C.k
assert _test_args(VectorAdd(v1, v2))
assert _test_args(VectorMul(x, v1))
def test_sympy__vector__vector__VectorMul():
from sympy.vector.vector import VectorMul
from sympy.vector.coordsysrect import CoordSysCartesian
C = CoordSysCartesian('C')
from sympy.abc import a
assert _test_args(VectorMul(a, C.i))
def test_sympy__vector__vector__VectorZero():
from sympy.vector.vector import VectorZero
assert _test_args(VectorZero())
def test_sympy__vector__vector__Vector():
from sympy.vector.vector import Vector
#Vector is never to be initialized using args
pass
def test_sympy__vector__dyadic__Dyadic():
from sympy.vector.dyadic import Dyadic
#Dyadic is never to be initialized using args
pass
def test_sympy__vector__dyadic__BaseDyadic():
from sympy.vector.dyadic import BaseDyadic
from sympy.vector.coordsysrect import CoordSysCartesian
C = CoordSysCartesian('C')
assert _test_args(BaseDyadic(C.i, C.j))
def test_sympy__vector__dyadic__DyadicMul():
from sympy.vector.dyadic import BaseDyadic, DyadicMul
from sympy.vector.coordsysrect import CoordSysCartesian
C = CoordSysCartesian('C')
assert _test_args(DyadicMul(3, BaseDyadic(C.i, C.j)))
def test_sympy__vector__dyadic__DyadicAdd():
from sympy.vector.dyadic import BaseDyadic, DyadicAdd
from sympy.vector.coordsysrect import CoordSysCartesian
C = CoordSysCartesian('C')
assert _test_args(2 * DyadicAdd(BaseDyadic(C.i, C.i),
BaseDyadic(C.i, C.j)))
def test_sympy__vector__dyadic__DyadicZero():
from sympy.vector.dyadic import DyadicZero
assert _test_args(DyadicZero())
def test_sympy__vector__deloperator__Del():
from sympy.vector.deloperator import Del
from sympy.vector.coordsysrect import CoordSysCartesian
C = CoordSysCartesian('C')
assert _test_args(Del(C))
def test_sympy__vector__orienters__Orienter():
from sympy.vector.orienters import Orienter
#Not to be initialized
def test_sympy__vector__orienters__ThreeAngleOrienter():
from sympy.vector.orienters import ThreeAngleOrienter
#Not to be initialized
def test_sympy__vector__orienters__AxisOrienter():
from sympy.vector.orienters import AxisOrienter
from sympy.vector.coordsysrect import CoordSysCartesian
C = CoordSysCartesian('C')
assert _test_args(AxisOrienter(x, C.i))
def test_sympy__vector__orienters__BodyOrienter():
from sympy.vector.orienters import BodyOrienter
assert _test_args(BodyOrienter(x, y, z, '123'))
def test_sympy__vector__orienters__SpaceOrienter():
from sympy.vector.orienters import SpaceOrienter
assert _test_args(SpaceOrienter(x, y, z, '123'))
def test_sympy__vector__orienters__QuaternionOrienter():
from sympy.vector.orienters import QuaternionOrienter
a, b, c, d = symbols('a b c d')
assert _test_args(QuaternionOrienter(a, b, c, d))
def test_sympy__vector__scalar__BaseScalar():
from sympy.vector.scalar import BaseScalar
from sympy.vector.coordsysrect import CoordSysCartesian
C = CoordSysCartesian('C')
assert _test_args(BaseScalar('Cx', 0, C, ' ', ' '))
| [
[
[
239,
241
],
[
574,
576
],
[
612,
614
],
[
631,
633
],
[
642,
644
],
[
670,
672
],
[
705,
707
],
[
736,
738
],
[
869,
871
],
[
940,
942
],
[
1156,
1158
]
],
[
[
249,
251
],
[
757,
759
],
[
807,
809
]
],
[
[
259,
267
],
[
2092,
2100
]
],
[
[
275,
277
],
[
1148,
1150
]
],
[
[
297,
302
],
[
2340,
2345
],
[
58535,
58540
],
[
58634,
58639
],
[
58914,
58919
],
[
59153,
59158
],
[
59266,
59271
],
[
59371,
59376
],
[
59663,
59668
],
[
1634,
1639
]
],
[
[
304,
305
],
[
16350,
16351
],
[
18930,
18931
],
[
19110,
19111
],
[
20608,
20609
],
[
20624,
20625
],
[
58719,
58720
],
[
58813,
58814
],
[
59462,
59463
],
[
59562,
59563
],
[
69734,
69735
],
[
69742,
69743
],
[
69753,
69754
],
[
69761,
69762
],
[
81166,
81167
],
[
81174,
81175
],
[
81242,
81243
],
[
81250,
81251
],
[
81267,
81268
]
],
[
[
307,
314
],
[
509,
516
],
[
13294,
13301
],
[
70177,
70184
],
[
81321,
81328
],
[
88420,
88427
],
[
88464,
88471
],
[
105888,
105895
],
[
106072,
106079
],
[
106107,
106114
],
[
106421,
106428
],
[
111622,
111629
]
],
[
[
316,
320
],
[
86242,
86246
],
[
93051,
93055
]
],
[
[
322,
325
],
[
38652,
38655
],
[
93777,
93780
]
],
[
[
327,
329
],
[
14695,
14697
],
[
14699,
14701
],
[
14874,
14876
],
[
14878,
14880
],
[
15065,
15067
],
[
15069,
15071
],
[
15120,
15122
],
[
15381,
15383
],
[
15385,
15387
],
[
15618,
15620
],
[
15622,
15624
],
[
18269,
18271
],
[
18273,
18275
],
[
18314,
18316
]
],
[
[
331,
339
],
[
12965,
12973
],
[
12981,
12989
],
[
14685,
14693
],
[
14864,
14872
],
[
15055,
15063
],
[
15108,
15116
],
[
15371,
15379
],
[
15608,
15616
],
[
18259,
18267
],
[
18302,
18310
]
],
[
[
341,
344
],
[
52295,
52298
]
],
[
[
382,
387
],
[
18601,
18606
]
],
[
[
423,
428
],
[
2705,
2710
],
[
3195,
3200
],
[
4344,
4349
],
[
4664,
4669
],
[
54849,
54854
],
[
55040,
55045
],
[
55218,
55223
],
[
55374,
55379
],
[
55535,
55540
],
[
55668,
55673
],
[
55801,
55806
],
[
55934,
55939
],
[
56067,
56072
],
[
56200,
56205
],
[
56333,
56338
],
[
61636,
61641
],
[
94916,
94921
],
[
101057,
101062
],
[
103453,
103458
],
[
104315,
104320
]
],
[
[
430,
434
],
[
5045,
5049
],
[
5314,
5318
],
[
5577,
5581
],
[
7290,
7294
],
[
9230,
9234
],
[
10298,
10302
],
[
10495,
10499
],
[
10575,
10579
],
[
11173,
11177
],
[
12363,
12367
],
[
16106,
16110
],
[
16600,
16604
],
[
17566,
17570
],
[
21126,
21130
],
[
21216,
21220
],
[
28252,
28256
],
[
32849,
32853
],
[
33415,
33419
],
[
33523,
33527
],
[
35163,
35167
],
[
36180,
36184
],
[
36647,
36651
],
[
36760,
36764
],
[
36883,
36887
],
[
38971,
38975
],
[
39064,
39068
],
[
43221,
43225
],
[
44266,
44270
],
[
46151,
46155
],
[
46252,
46256
],
[
46671,
46675
],
[
48612,
48616
],
[
52314,
52318
],
[
53134,
53138
],
[
53593,
53597
],
[
54392,
54396
],
[
56584,
56588
],
[
56814,
56818
],
[
58237,
58241
],
[
61297,
61301
],
[
66814,
66818
],
[
72890,
72894
],
[
80752,
80756
],
[
88199,
88203
],
[
88713,
88717
],
[
89252,
89256
],
[
96558,
96562
],
[
98750,
98754
],
[
99220,
99224
]
],
[
[
474,
497
],
[
2132,
2155
]
],
[
[
499,
500
],
[
4969,
4970
],
[
4973,
4974
],
[
5015,
5016
],
[
5019,
5020
],
[
5230,
5231
],
[
5234,
5235
],
[
5283,
5284
],
[
5289,
5290
],
[
5496,
5497
],
[
5500,
5501
],
[
5548,
5549
],
[
5554,
5555
],
[
5777,
5778
],
[
5781,
5782
],
[
5833,
5834
],
[
5839,
5840
],
[
5978,
5979
],
[
5982,
5983
],
[
6020,
6021
],
[
6024,
6025
],
[
6144,
6145
],
[
6489,
6490
],
[
6620,
6621
],
[
7277,
7278
],
[
7483,
7484
],
[
7490,
7491
],
[
7612,
7613
],
[
7619,
7620
],
[
7863,
7864
],
[
7970,
7971
],
[
10755,
10756
],
[
10888,
10889
],
[
11030,
11031
],
[
11163,
11164
],
[
11406,
11407
],
[
11557,
11558
],
[
11696,
11697
],
[
12066,
12067
],
[
14346,
14347
],
[
14680,
14681
],
[
14861,
14862
],
[
15052,
15053
],
[
15368,
15369
],
[
15443,
15444
],
[
15605,
15606
],
[
15830,
15831
],
[
16006,
16007
],
[
16347,
16348
],
[
16570,
16571
],
[
16865,
16866
],
[
17056,
17057
],
[
17266,
17267
],
[
17330,
17331
],
[
17492,
17493
],
[
17816,
17817
],
[
17861,
17862
],
[
18042,
18043
],
[
18256,
18257
],
[
19619,
19620
],
[
19627,
19628
],
[
19800,
19801
],
[
19989,
19990
],
[
20261,
20262
],
[
20327,
20328
],
[
20474,
20475
],
[
20544,
20545
],
[
20601,
20602
],
[
20617,
20618
],
[
28244,
28245
],
[
28555,
28556
],
[
28742,
28743
],
[
28935,
28936
],
[
29107,
29108
],
[
29460,
29461
],
[
29635,
29636
],
[
29786,
29787
],
[
29955,
29956
],
[
30115,
30116
],
[
30278,
30279
],
[
30432,
30433
],
[
30598,
30599
],
[
30761,
30762
],
[
30918,
30919
],
[
31064,
31065
],
[
31222,
31223
],
[
31368,
31369
],
[
31532,
31533
],
[
31675,
31676
],
[
31818,
31819
],
[
31967,
31968
],
[
32134,
32135
],
[
32322,
32323
],
[
32510,
32511
],
[
32677,
32678
],
[
35517,
35518
],
[
35667,
35668
],
[
36013,
36014
],
[
36170,
36171
],
[
36623,
36624
],
[
39305,
39306
],
[
39454,
39455
],
[
39603,
39604
],
[
39752,
39753
],
[
39901,
39902
],
[
40050,
40051
],
[
40187,
40188
],
[
40321,
40322
],
[
41165,
41166
],
[
41340,
41341
],
[
41518,
41519
],
[
41546,
41547
],
[
41726,
41727
],
[
41757,
41758
],
[
41936,
41937
],
[
42109,
42110
],
[
45138,
45139
],
[
45296,
45297
],
[
45636,
45637
],
[
45809,
45810
],
[
45985,
45986
],
[
46141,
46142
],
[
46144,
46145
],
[
46492,
46493
],
[
46664,
46665
],
[
46922,
46923
],
[
47093,
47094
],
[
47258,
47259
],
[
47423,
47424
],
[
47588,
47589
],
[
47753,
47754
],
[
47921,
47922
],
[
48092,
48093
],
[
48257,
48258
],
[
48428,
48429
],
[
48602,
48603
],
[
48866,
48867
],
[
49040,
49041
],
[
49211,
49212
],
[
49562,
49563
],
[
49904,
49905
],
[
50066,
50067
],
[
50246,
50247
],
[
50411,
50412
],
[
50591,
50592
],
[
50763,
50764
],
[
50932,
50933
],
[
51110,
51111
],
[
51303,
51304
],
[
51486,
51487
],
[
51806,
51807
],
[
51974,
51975
],
[
52119,
52120
],
[
52300,
52301
],
[
52307,
52308
],
[
52566,
52567
],
[
52754,
52755
],
[
52930,
52931
],
[
53121,
53122
],
[
53413,
53414
],
[
53583,
53584
],
[
53866,
53867
],
[
54027,
54028
],
[
54215,
54216
],
[
54382,
54383
],
[
54667,
54668
],
[
54837,
54838
],
[
56571,
56572
],
[
57257,
57258
],
[
57371,
57372
],
[
57500,
57501
],
[
57617,
57618
],
[
57734,
57735
],
[
57848,
57849
],
[
57956,
57957
],
[
58070,
58071
],
[
60172,
60173
],
[
60175,
60176
],
[
60485,
60486
],
[
60488,
60489
],
[
60549,
60550
],
[
60577,
60578
],
[
61063,
61064
],
[
61095,
61096
],
[
62181,
62182
],
[
62216,
62217
],
[
62993,
62994
],
[
63025,
63026
],
[
63274,
63275
],
[
63277,
63278
],
[
67453,
67454
],
[
67622,
67623
],
[
67794,
67795
],
[
67972,
67973
],
[
68120,
68121
],
[
68268,
68269
],
[
68413,
68414
],
[
68558,
68559
],
[
68697,
68698
],
[
68833,
68834
],
[
68969,
68970
],
[
69105,
69106
],
[
73357,
73358
],
[
75051,
75052
],
[
75066,
75067
],
[
75070,
75071
],
[
75076,
75077
],
[
86221,
86222
],
[
86233,
86234
],
[
86262,
86263
],
[
86313,
86314
],
[
86487,
86488
],
[
88942,
88943
],
[
91345,
91346
],
[
91819,
91820
],
[
93214,
93215
],
[
93224,
93225
],
[
93351,
93352
],
[
93487,
93488
],
[
93618,
93619
],
[
93625,
93626
],
[
93763,
93764
],
[
93770,
93771
],
[
93896,
93897
],
[
93899,
93900
],
[
94029,
94030
],
[
94816,
94817
],
[
98032,
98033
],
[
98037,
98038
],
[
98054,
98055
],
[
98060,
98061
],
[
98197,
98198
],
[
98205,
98206
],
[
101270,
101271
],
[
106925,
106926
],
[
111162,
111163
],
[
111311,
111312
],
[
111471,
111472
]
],
[
[
502,
503
],
[
5025,
5026
],
[
5030,
5031
],
[
5285,
5286
],
[
5301,
5302
],
[
5550,
5551
],
[
5565,
5566
],
[
5835,
5836
],
[
5850,
5851
],
[
6030,
6031
],
[
6035,
6036
],
[
6147,
6148
],
[
6495,
6496
],
[
6492,
6493
],
[
6623,
6624
],
[
7280,
7281
],
[
7486,
7487
],
[
7494,
7495
],
[
7616,
7617
],
[
7973,
7974
],
[
12069,
12070
],
[
15105,
15106
],
[
16044,
16045
],
[
18080,
18081
],
[
18299,
18300
],
[
20033,
20034
],
[
29789,
29790
],
[
32325,
32326
],
[
32513,
32514
],
[
41343,
41344
],
[
41549,
41550
],
[
41729,
41730
],
[
41760,
41761
],
[
45135,
45136
],
[
46925,
46926
],
[
47096,
47097
],
[
47924,
47925
],
[
48095,
48096
],
[
48431,
48432
],
[
48605,
48606
],
[
50252,
50253
],
[
50597,
50598
],
[
50766,
50767
],
[
50935,
50936
],
[
51113,
51114
],
[
51306,
51307
],
[
51809,
51810
],
[
51977,
51978
],
[
52569,
52570
],
[
52757,
52758
],
[
52933,
52934
],
[
53124,
53125
],
[
53416,
53417
],
[
53586,
53587
],
[
53869,
53870
],
[
54030,
54031
],
[
54218,
54219
],
[
54385,
54386
],
[
54670,
54671
],
[
54840,
54841
],
[
56574,
56575
],
[
57374,
57375
],
[
57503,
57504
],
[
57620,
57621
],
[
57737,
57738
],
[
57959,
57960
],
[
58073,
58074
],
[
60204,
60205
],
[
60207,
60208
],
[
60517,
60518
],
[
60520,
60521
],
[
60552,
60553
],
[
60574,
60575
],
[
61066,
61067
],
[
61098,
61099
],
[
62184,
62185
],
[
62213,
62214
],
[
62996,
62997
],
[
63028,
63029
],
[
67456,
67457
],
[
67625,
67626
],
[
67797,
67798
],
[
67975,
67976
],
[
68123,
68124
],
[
68271,
68272
],
[
68416,
68417
],
[
86490,
86491
],
[
88945,
88946
],
[
91350,
91351
],
[
93217,
93218
],
[
93227,
93228
],
[
93354,
93355
],
[
93490,
93491
],
[
94032,
94033
],
[
94819,
94820
],
[
101273,
101274
],
[
106928,
106929
],
[
111314,
111315
],
[
111474,
111475
]
],
[
[
505,
506
],
[
6150,
6151
],
[
6498,
6499
],
[
6626,
6627
],
[
7498,
7499
],
[
7976,
7977
],
[
12072,
12073
],
[
41763,
41764
],
[
51812,
51813
],
[
67628,
67629
],
[
67800,
67801
],
[
67978,
67979
],
[
68126,
68127
],
[
68274,
68275
],
[
68419,
68420
],
[
93220,
93221
],
[
93230,
93231
],
[
111317,
111318
],
[
111477,
111478
]
],
[
[
532,
559
]
],
[
[
2292,
2302
],
[
2507,
2517
],
[
2672,
2682
],
[
2893,
2903
],
[
2941,
2951
],
[
3087,
3097
],
[
3139,
3149
],
[
3333,
3343
],
[
3574,
3584
],
[
4294,
4304
],
[
4460,
4470
],
[
4634,
4644
],
[
4808,
4818
],
[
4950,
4960
],
[
4996,
5006
],
[
5204,
5214
],
[
5257,
5267
],
[
5471,
5481
],
[
5523,
5533
],
[
5748,
5758
],
[
5804,
5814
],
[
5963,
5973
],
[
6005,
6015
],
[
6129,
6139
],
[
6245,
6255
],
[
6354,
6364
],
[
6472,
6482
],
[
6603,
6613
],
[
6731,
6741
],
[
6842,
6852
],
[
6969,
6979
],
[
7112,
7122
],
[
7252,
7262
],
[
7464,
7474
],
[
7596,
7606
],
[
7736,
7746
],
[
7848,
7858
],
[
7952,
7962
],
[
8078,
8088
],
[
8214,
8224
],
[
8348,
8358
],
[
8465,
8475
],
[
8578,
8588
],
[
8708,
8718
],
[
8826,
8836
],
[
8955,
8965
],
[
9083,
9093
],
[
9204,
9214
],
[
9402,
9412
],
[
9536,
9546
],
[
9673,
9683
],
[
9795,
9805
],
[
9928,
9938
],
[
10045,
10055
],
[
10151,
10161
],
[
10268,
10278
],
[
10473,
10483
],
[
10740,
10750
],
[
10868,
10878
],
[
11007,
11017
],
[
11143,
11153
],
[
11377,
11387
],
[
11531,
11541
],
[
11674,
11684
],
[
11797,
11807
],
[
11922,
11932
],
[
12045,
12055
],
[
12170,
12180
],
[
12305,
12315
],
[
12486,
12496
],
[
12616,
12626
],
[
12795,
12805
],
[
12943,
12953
],
[
13125,
13135
],
[
13320,
13330
],
[
13446,
13456
],
[
13574,
13584
],
[
13701,
13711
],
[
13822,
13832
],
[
14006,
14016
],
[
14154,
14164
],
[
14326,
14336
],
[
14647,
14657
],
[
14827,
14837
],
[
15136,
15146
],
[
15401,
15411
],
[
15638,
15648
],
[
15796,
15806
],
[
16062,
16072
],
[
16315,
16325
],
[
16538,
16548
],
[
16831,
16841
],
[
17032,
17042
],
[
17298,
17308
],
[
17536,
17546
],
[
17834,
17844
],
[
18098,
18108
],
[
18330,
18340
],
[
18550,
18560
],
[
18734,
18744
],
[
18897,
18907
],
[
19075,
19085
],
[
19267,
19277
],
[
19450,
19460
],
[
19589,
19599
],
[
19770,
19780
],
[
20060,
20070
],
[
20288,
20298
],
[
20571,
20581
],
[
20778,
20788
],
[
21084,
21094
],
[
21444,
21454
],
[
21668,
21678
],
[
21927,
21937
],
[
22092,
22102
],
[
22255,
22265
],
[
22417,
22427
],
[
22584,
22594
],
[
22750,
22760
],
[
22907,
22917
],
[
23078,
23088
],
[
23255,
23265
],
[
23417,
23427
],
[
23592,
23602
],
[
23771,
23781
],
[
23943,
23953
],
[
24109,
24119
],
[
24288,
24298
],
[
24455,
24465
],
[
24627,
24637
],
[
24796,
24806
],
[
24964,
24974
],
[
25135,
25145
],
[
25303,
25313
],
[
25468,
25478
],
[
25633,
25643
],
[
25796,
25806
],
[
25967,
25977
],
[
26145,
26155
],
[
26317,
26327
],
[
26483,
26493
],
[
26653,
26663
],
[
26826,
26836
],
[
26998,
27008
],
[
27166,
27176
],
[
27333,
27343
],
[
27517,
27527
],
[
27688,
27698
],
[
27854,
27864
],
[
27983,
27993
],
[
28100,
28110
],
[
28214,
28224
],
[
28524,
28534
],
[
28716,
28726
],
[
28905,
28915
],
[
29084,
29094
],
[
29264,
29274
],
[
29439,
29449
],
[
29613,
29623
],
[
29770,
29780
],
[
29934,
29944
],
[
30096,
30106
],
[
30258,
30268
],
[
30415,
30425
],
[
30577,
30587
],
[
30741,
30751
],
[
30901,
30911
],
[
31049,
31059
],
[
31203,
31213
],
[
31353,
31363
],
[
31511,
31521
],
[
31661,
31671
],
[
31804,
31814
],
[
31951,
31961
],
[
32112,
32122
],
[
32293,
32303
],
[
32482,
32492
],
[
32656,
32666
],
[
32822,
32832
],
[
33075,
33085
],
[
33237,
33247
],
[
33393,
33403
],
[
33771,
33781
],
[
33925,
33935
],
[
34079,
34089
],
[
34233,
34243
],
[
34385,
34395
],
[
34536,
34546
],
[
34687,
34697
],
[
34838,
34848
],
[
34989,
34999
],
[
35140,
35150
],
[
35337,
35347
],
[
35498,
35508
],
[
35650,
35660
],
[
35832,
35842
],
[
35998,
36008
],
[
36155,
36165
],
[
36425,
36435
],
[
36598,
36608
],
[
37137,
37147
],
[
37294,
37304
],
[
37451,
37461
],
[
37608,
37618
],
[
37765,
37775
],
[
37922,
37932
],
[
38081,
38091
],
[
38240,
38250
],
[
38394,
38404
],
[
38548,
38558
],
[
38641,
38651
],
[
38795,
38805
],
[
38949,
38959
],
[
39286,
39296
],
[
39435,
39445
],
[
39584,
39594
],
[
39733,
39743
],
[
39882,
39892
],
[
40031,
40041
],
[
40170,
40180
],
[
40304,
40314
],
[
40513,
40523
],
[
40656,
40666
],
[
40809,
40819
],
[
40967,
40977
],
[
41152,
41162
],
[
41327,
41337
],
[
41505,
41515
],
[
41533,
41543
],
[
41713,
41723
],
[
41744,
41754
],
[
41914,
41924
],
[
42088,
42098
],
[
42246,
42256
],
[
42399,
42409
],
[
42553,
42563
],
[
42707,
42717
],
[
42868,
42878
],
[
43030,
43040
],
[
43193,
43203
],
[
43468,
43478
],
[
43635,
43645
],
[
43795,
43805
],
[
43947,
43957
],
[
44096,
44106
],
[
44245,
44255
],
[
44507,
44517
],
[
44656,
44666
],
[
44807,
44817
],
[
44959,
44969
],
[
45117,
45127
],
[
45279,
45289
],
[
45443,
45453
],
[
45614,
45624
],
[
45788,
45798
],
[
45963,
45973
],
[
46125,
46135
],
[
46456,
46466
],
[
46617,
46627
],
[
46895,
46905
],
[
47066,
47076
],
[
47233,
47243
],
[
47397,
47407
],
[
47562,
47572
],
[
47727,
47737
],
[
47894,
47904
],
[
48065,
48075
],
[
48232,
48242
],
[
48400,
48410
],
[
48574,
48584
],
[
48848,
48858
],
[
49018,
49028
],
[
49189,
49199
],
[
49367,
49377
],
[
49540,
49550
],
[
49718,
49728
],
[
49885,
49895
],
[
50046,
50056
],
[
50220,
50230
],
[
50391,
50401
],
[
50565,
50575
],
[
50742,
50752
],
[
50911,
50921
],
[
51088,
51098
],
[
51277,
51287
],
[
51461,
51471
],
[
51623,
51633
],
[
51786,
51796
],
[
51955,
51965
],
[
52095,
52105
],
[
52262,
52272
],
[
52536,
52546
],
[
52717,
52727
],
[
52899,
52909
],
[
53083,
53093
],
[
53375,
53385
],
[
53552,
53562
],
[
53831,
53841
],
[
53999,
54009
],
[
54178,
54188
],
[
54352,
54362
],
[
54630,
54640
],
[
54807,
54817
],
[
54999,
55009
],
[
55180,
55190
],
[
55344,
55354
],
[
55504,
55514
],
[
55645,
55655
],
[
55778,
55788
],
[
55911,
55921
],
[
56044,
56054
],
[
56177,
56187
],
[
56310,
56320
],
[
56443,
56453
],
[
56556,
56566
],
[
56775,
56785
],
[
56993,
57003
],
[
57112,
57122
],
[
57235,
57245
],
[
57356,
57366
],
[
57481,
57491
],
[
57601,
57611
],
[
57719,
57729
],
[
57833,
57843
],
[
57942,
57952
],
[
58055,
58065
],
[
58202,
58212
],
[
58499,
58509
],
[
58524,
58534
],
[
58598,
58608
],
[
58623,
58633
],
[
58878,
58888
],
[
58903,
58913
],
[
59117,
59127
],
[
59142,
59152
],
[
59230,
59240
],
[
59255,
59265
],
[
59335,
59345
],
[
59360,
59370
],
[
59627,
59637
],
[
59652,
59662
],
[
59903,
59913
],
[
60221,
60231
],
[
60591,
60601
],
[
60820,
60830
],
[
61112,
61122
],
[
61270,
61280
],
[
61571,
61581
],
[
61781,
61791
],
[
61954,
61964
],
[
62230,
62240
],
[
62490,
62500
],
[
62748,
62758
],
[
63042,
63052
],
[
63291,
63301
],
[
63511,
63521
],
[
63745,
63755
],
[
63969,
63979
],
[
64214,
64224
],
[
64478,
64488
],
[
64681,
64691
],
[
64851,
64861
],
[
65097,
65107
],
[
65258,
65268
],
[
65419,
65429
],
[
65580,
65590
],
[
65753,
65763
],
[
65932,
65942
],
[
66113,
66123
],
[
66293,
66303
],
[
66462,
66472
],
[
66626,
66636
],
[
66790,
66800
],
[
67090,
67100
],
[
67257,
67267
],
[
67427,
67437
],
[
67597,
67607
],
[
67769,
67779
],
[
67945,
67955
],
[
68103,
68113
],
[
68251,
68261
],
[
68397,
68407
],
[
68542,
68552
],
[
68681,
68691
],
[
68818,
68828
],
[
68954,
68964
],
[
69090,
69100
],
[
69234,
69244
],
[
69401,
69411
],
[
69553,
69563
],
[
69705,
69715
],
[
69891,
69901
],
[
70027,
70037
],
[
70222,
70232
],
[
70370,
70380
],
[
70554,
70564
],
[
70710,
70720
],
[
70864,
70874
],
[
71017,
71027
],
[
71153,
71163
],
[
71298,
71308
],
[
71451,
71461
],
[
71604,
71614
],
[
71751,
71761
],
[
71893,
71903
],
[
72031,
72041
],
[
72177,
72187
],
[
72410,
72420
],
[
72602,
72612
],
[
72734,
72744
],
[
72866,
72876
],
[
73041,
73051
],
[
73181,
73191
],
[
73333,
73343
],
[
73579,
73589
],
[
73744,
73754
],
[
73899,
73909
],
[
74072,
74082
],
[
74288,
74298
],
[
74556,
74566
],
[
74772,
74782
],
[
75017,
75027
],
[
75224,
75234
],
[
75400,
75410
],
[
75557,
75567
],
[
75797,
75807
],
[
75967,
75977
],
[
76115,
76125
],
[
76257,
76267
],
[
76293,
76303
],
[
76452,
76462
],
[
76607,
76617
],
[
76770,
76780
],
[
76937,
76947
],
[
77094,
77104
],
[
77132,
77142
],
[
77301,
77311
],
[
77466,
77476
],
[
77621,
77631
],
[
77762,
77772
],
[
77898,
77908
],
[
78034,
78044
],
[
78178,
78188
],
[
78324,
78334
],
[
78469,
78479
],
[
78615,
78625
],
[
78771,
78781
],
[
78919,
78929
],
[
79057,
79067
],
[
79191,
79201
],
[
79324,
79334
],
[
79452,
79462
],
[
79585,
79595
],
[
79729,
79739
],
[
79878,
79888
],
[
80046,
80056
],
[
80203,
80213
],
[
80349,
80359
],
[
80501,
80511
],
[
80699,
80709
],
[
80922,
80932
],
[
81074,
81084
],
[
81128,
81138
],
[
81195,
81205
],
[
81727,
81737
],
[
81866,
81876
],
[
82007,
82017
],
[
82143,
82153
],
[
82292,
82302
],
[
82442,
82452
],
[
82591,
82601
],
[
82739,
82749
],
[
82872,
82882
],
[
83021,
83031
],
[
83171,
83181
],
[
83320,
83330
],
[
83468,
83478
],
[
83601,
83611
],
[
83750,
83760
],
[
83900,
83910
],
[
84049,
84059
],
[
84197,
84207
],
[
84361,
84371
],
[
84514,
84524
],
[
84657,
84667
],
[
84804,
84814
],
[
84940,
84950
],
[
85072,
85082
],
[
85208,
85218
],
[
85344,
85354
],
[
85486,
85496
],
[
85634,
85644
],
[
85792,
85802
],
[
85954,
85964
],
[
86286,
86296
],
[
86462,
86472
],
[
86684,
86694
],
[
86834,
86844
],
[
86978,
86988
],
[
87128,
87138
],
[
87275,
87285
],
[
87426,
87436
],
[
87574,
87584
],
[
87713,
87723
],
[
87850,
87860
],
[
88000,
88010
],
[
88163,
88173
],
[
88508,
88518
],
[
88679,
88689
],
[
88920,
88930
],
[
89069,
89079
],
[
89220,
89230
],
[
89467,
89477
],
[
89613,
89623
],
[
89778,
89788
],
[
89927,
89937
],
[
90089,
90099
],
[
90259,
90269
],
[
90419,
90429
],
[
90588,
90598
],
[
90764,
90774
],
[
90926,
90936
],
[
91150,
91160
],
[
91328,
91338
],
[
91491,
91501
],
[
91647,
91657
],
[
91795,
91805
],
[
91917,
91927
],
[
92087,
92097
],
[
92337,
92347
],
[
92595,
92605
],
[
92862,
92872
],
[
93024,
93034
],
[
93188,
93198
],
[
93332,
93342
],
[
93464,
93474
],
[
93600,
93610
],
[
93744,
93754
],
[
93879,
93889
],
[
94009,
94019
],
[
94164,
94174
],
[
94321,
94331
],
[
94456,
94466
],
[
94491,
94501
],
[
94626,
94636
],
[
94781,
94791
],
[
94835,
94845
],
[
94878,
94888
],
[
95277,
95287
],
[
95422,
95432
],
[
95614,
95624
],
[
95942,
95952
],
[
96302,
96312
],
[
96518,
96528
],
[
97055,
97065
],
[
97506,
97516
],
[
97963,
97973
],
[
98179,
98189
],
[
98314,
98324
],
[
98444,
98454
],
[
98579,
98589
],
[
98717,
98727
],
[
98924,
98934
],
[
99049,
99059
],
[
99181,
99191
],
[
99406,
99416
],
[
99555,
99565
],
[
99699,
99709
],
[
99837,
99847
],
[
99996,
100006
],
[
100168,
100178
],
[
100320,
100330
],
[
100526,
100536
],
[
100678,
100688
],
[
100815,
100825
],
[
100986,
100996
],
[
101186,
101196
],
[
101488,
101498
],
[
101733,
101743
],
[
101989,
101999
],
[
102386,
102396
],
[
102698,
102708
],
[
103056,
103066
],
[
103418,
103428
],
[
103679,
103689
],
[
103996,
104006
],
[
104164,
104174
],
[
104288,
104298
],
[
104435,
104445
],
[
104614,
104624
],
[
104782,
104792
],
[
105114,
105124
],
[
105393,
105403
],
[
105773,
105783
],
[
105945,
105955
],
[
106173,
106183
],
[
106289,
106299
],
[
106453,
106463
],
[
106606,
106616
],
[
106759,
106769
],
[
106903,
106913
],
[
107064,
107074
],
[
107193,
107203
],
[
108421,
108431
],
[
108777,
108787
],
[
108818,
108828
],
[
109070,
109080
],
[
109206,
109216
],
[
109720,
109730
],
[
109960,
109970
],
[
110214,
110224
],
[
110426,
110436
],
[
110644,
110654
],
[
111138,
111148
],
[
111287,
111297
],
[
111446,
111456
],
[
111652,
111662
],
[
111892,
111902
]
],
[
[
2374,
2423
]
],
[
[
2564,
2606
]
],
[
[
2715,
2760
]
],
[
[
2979,
3021
]
],
[
[
3205,
3257
]
],
[
[
3377,
3433
]
],
[
[
3632,
3681
]
],
[
[
4354,
4395
]
],
[
[
4514,
4562
]
],
[
[
4674,
4729
]
],
[
[
4848,
4887
]
],
[
[
5072,
5126
]
],
[
[
5341,
5394
]
],
[
[
5604,
5664
]
],
[
[
5865,
5902
]
],
[
[
6053,
6079
]
],
[
[
6163,
6192
]
],
[
[
6270,
6300
]
],
[
[
6380,
6414
]
],
[
[
6509,
6544
]
],
[
[
6639,
6673
]
],
[
[
6762,
6790
]
],
[
[
6867,
6906
]
],
[
[
7008,
7048
]
],
[
[
7152,
7190
]
],
[
[
7317,
7353
]
],
[
[
7372,
7406
]
],
[
[
7508,
7540
]
],
[
[
7632,
7672
]
],
[
[
7772,
7798
]
],
[
[
7876,
7902
]
],
[
[
7986,
8020
]
],
[
[
8106,
8148
]
],
[
[
8250,
8287
]
],
[
[
8379,
8410
]
],
[
[
8490,
8522
]
],
[
[
8608,
8646
]
],
[
[
8740,
8771
]
],
[
[
8851,
8891
]
],
[
[
8989,
9024
]
],
[
[
9112,
9146
]
],
[
[
9257,
9299
]
],
[
[
9318,
9348
]
],
[
[
9426,
9469
]
],
[
[
9573,
9611
]
],
[
[
9705,
9738
]
],
[
[
9826,
9865
]
],
[
[
9961,
9991
]
],
[
[
10069,
10098
]
],
[
[
10174,
10209
]
],
[
[
10325,
10368
]
],
[
[
10387,
10418
]
],
[
[
10522,
10559
]
],
[
[
10602,
10641
]
],
[
[
10660,
10688
]
],
[
[
10768,
10806
]
],
[
[
10901,
10942
]
],
[
[
11043,
11081
]
],
[
[
11200,
11240
]
],
[
[
11259,
11306
]
],
[
[
11419,
11463
]
],
[
[
11570,
11610
]
],
[
[
11709,
11741
]
],
[
[
11826,
11862
]
],
[
[
11955,
11988
]
],
[
[
12082,
12114
]
],
[
[
12203,
12237
]
],
[
[
12408,
12435
]
],
[
[
12510,
12546
]
],
[
[
12703,
12732
]
],
[
[
12851,
12885
]
],
[
[
13004,
13047
]
],
[
[
13216,
13243
]
],
[
[
13348,
13385
]
],
[
[
13474,
13512
]
],
[
[
13603,
13640
]
],
[
[
13730,
13764
]
],
[
[
13848,
13885
]
],
[
[
14062,
14096
]
],
[
[
14187,
14223
]
],
[
[
14413,
14431
],
[
14437,
14455
]
],
[
[
14432,
14434
],
[
15669,
15671
],
[
15833,
15835
],
[
16009,
16011
],
[
16047,
16049
],
[
17819,
17821
],
[
18045,
18047
],
[
18083,
18085
]
],
[
[
14496,
14511
],
[
14518,
14533
]
],
[
[
14512,
14515
],
[
17557,
17560
],
[
20821,
20824
],
[
21022,
21025
],
[
21068,
21071
]
],
[
[
14543,
14583
]
],
[
[
14711,
14757
]
],
[
[
14890,
14937
]
],
[
[
15184,
15235
]
],
[
[
15457,
15497
]
],
[
[
15680,
15726
]
],
[
[
15844,
15891
]
],
[
[
16133,
16185
]
],
[
[
16203,
16247
]
],
[
[
16368,
16412
]
],
[
[
16627,
16677
]
],
[
[
16695,
16730
]
],
[
[
16896,
16931
]
],
[
[
17086,
17126
]
],
[
[
17344,
17373
]
],
[
[
17593,
17628
]
],
[
[
17647,
17682
]
],
[
[
17871,
17907
]
],
[
[
18136,
18172
]
],
[
[
18368,
18425
]
],
[
[
18620,
18665
]
],
[
[
18771,
18822
]
],
[
[
18951,
19001
]
],
[
[
19131,
19187
]
],
[
[
19322,
19374
]
],
[
[
19493,
19529
]
],
[
[
19662,
19704
]
],
[
[
19842,
19885
]
],
[
[
20106,
20153
]
],
[
[
20341,
20377
]
],
[
[
20640,
20682
]
],
[
[
20833,
20876
]
],
[
[
21153,
21201
]
],
[
[
21243,
21289
]
],
[
[
21308,
21364
]
],
[
[
21497,
21551
]
],
[
[
21798,
21828
]
],
[
[
21972,
22020
]
],
[
[
22135,
22183
]
],
[
[
22301,
22347
]
],
[
[
22458,
22509
]
],
[
[
22630,
22678
]
],
[
[
22793,
22838
]
],
[
[
22944,
22999
]
],
[
[
23127,
23179
]
],
[
[
23299,
23346
]
],
[
[
23462,
23515
]
],
[
[
23637,
23692
]
],
[
[
23821,
23870
]
],
[
[
23987,
24036
]
],
[
[
24156,
24210
]
],
[
[
24337,
24384
]
],
[
[
24497,
24550
]
],
[
[
24674,
24723
]
],
[
[
24840,
24890
]
],
[
[
25009,
25060
]
],
[
[
25181,
25230
]
],
[
[
25344,
25394
]
],
[
[
25513,
25561
]
],
[
[
25676,
25724
]
],
[
[
25839,
25891
]
],
[
[
26013,
26067
]
],
[
[
26193,
26243
]
],
[
[
26359,
26409
]
],
[
[
26525,
26577
]
],
[
[
26704,
26753
]
],
[
[
26870,
26922
]
],
[
[
27042,
27092
]
],
[
[
27211,
27260
]
],
[
[
27377,
27435
]
],
[
[
27566,
27615
]
],
[
[
27728,
27779
]
],
[
[
27897,
27928
]
],
[
[
28012,
28044
]
],
[
[
28130,
28160
]
],
[
[
28279,
28350
]
],
[
[
28369,
28435
]
],
[
[
28565,
28629
]
],
[
[
28752,
28817
]
],
[
[
28945,
29003
]
],
[
[
29117,
29179
]
],
[
[
29298,
29357
]
],
[
[
29470,
29530
]
],
[
[
29645,
29696
]
],
[
[
29799,
29855
]
],
[
[
29965,
30019
]
],
[
[
30125,
30180
]
],
[
[
30288,
30340
]
],
[
[
30442,
30498
]
],
[
[
30608,
30663
]
],
[
[
30774,
30826
]
],
[
[
30928,
30977
]
],
[
[
31074,
31127
]
],
[
[
31232,
31281
]
],
[
[
31378,
31433
]
],
[
[
31542,
31590
]
],
[
[
31685,
31733
]
],
[
[
31828,
31878
]
],
[
[
31977,
32033
]
],
[
[
32144,
32207
]
],
[
[
32335,
32397
]
],
[
[
32523,
32578
]
],
[
[
32687,
32743
]
],
[
[
32876,
32931
]
],
[
[
32950,
33001
]
],
[
[
33100,
33157
]
],
[
[
33268,
33319
]
],
[
[
33442,
33507
]
],
[
[
33550,
33625
]
],
[
[
33644,
33696
]
],
[
[
33798,
33850
]
],
[
[
33952,
34004
]
],
[
[
34106,
34158
]
],
[
[
34260,
34311
]
],
[
[
34411,
34462
]
],
[
[
34562,
34613
]
],
[
[
34713,
34764
]
],
[
[
34864,
34915
]
],
[
[
35015,
35066
]
],
[
[
35198,
35256
]
],
[
[
35371,
35423
]
],
[
[
35527,
35577
]
],
[
[
35677,
35743
]
],
[
[
35869,
35922
]
],
[
[
36026,
36079
]
],
[
[
36207,
36267
]
],
[
[
36286,
36344
]
],
[
[
36465,
36520
]
],
[
[
36674,
36745
]
],
[
[
36787,
36868
]
],
[
[
36910,
36988
]
],
[
[
37006,
37060
]
],
[
[
37163,
37217
]
],
[
[
37320,
37374
]
],
[
[
37477,
37531
]
],
[
[
37634,
37688
]
],
[
[
37791,
37845
]
],
[
[
37948,
38003
]
],
[
[
38111,
38164
]
],
[
[
38265,
38318
]
],
[
[
38419,
38472
]
],
[
[
38573,
38626
]
],
[
[
38666,
38719
]
],
[
[
38820,
38873
]
],
[
[
38998,
39048
]
],
[
[
39091,
39150
]
],
[
[
39169,
39216
]
],
[
[
39318,
39365
]
],
[
[
39467,
39514
]
],
[
[
39616,
39663
]
],
[
[
39765,
39812
]
],
[
[
39914,
39961
]
],
[
[
40063,
40105
]
],
[
[
40197,
40239
]
],
[
[
40331,
40379
]
],
[
[
40398,
40444
]
],
[
[
40541,
40587
]
],
[
[
40684,
40735
]
],
[
[
40842,
40893
]
],
[
[
41000,
41062
]
],
[
[
41175,
41237
]
],
[
[
41353,
41415
]
],
[
[
41559,
41622
]
],
[
[
41773,
41832
]
],
[
[
41949,
42007
]
],
[
[
42119,
42171
]
],
[
[
42270,
42323
]
],
[
[
42424,
42477
]
],
[
[
42578,
42631
]
],
[
[
42735,
42790
]
],
[
[
42895,
42951
]
],
[
[
43058,
43114
]
],
[
[
43248,
43312
]
],
[
[
43331,
43388
]
],
[
[
43498,
43555
]
],
[
[
43665,
43718
]
],
[
[
43822,
43873
]
],
[
[
43971,
44022
]
],
[
[
44120,
44171
]
],
[
[
44293,
44363
]
],
[
[
44382,
44433
]
],
[
[
44531,
44582
]
],
[
[
44680,
44732
]
],
[
[
44832,
44884
]
],
[
[
44984,
45039
]
],
[
[
45148,
45202
]
],
[
[
45306,
45363
]
],
[
[
45473,
45532
]
],
[
[
45649,
45707
]
],
[
[
45822,
45881
]
],
[
[
45998,
46050
]
],
[
[
46178,
46236
]
],
[
[
46279,
46326
]
],
[
[
46345,
46389
]
],
[
[
46502,
46548
]
],
[
[
46698,
46745
]
],
[
[
46764,
46818
]
],
[
[
46935,
46989
]
],
[
[
47106,
47158
]
],
[
[
47268,
47321
]
],
[
[
47433,
47486
]
],
[
[
47598,
47651
]
],
[
[
47763,
47817
]
],
[
[
47934,
47988
]
],
[
[
48105,
48157
]
],
[
[
48267,
48322
]
],
[
[
48441,
48496
]
],
[
[
48639,
48704
]
],
[
[
48723,
48774
]
],
[
[
48885,
48940
]
],
[
[
49056,
49111
]
],
[
[
49224,
49284
]
],
[
[
49407,
49462
]
],
[
[
49575,
49635
]
],
[
[
49758,
49810
]
],
[
[
49917,
49970
]
],
[
[
50079,
50138
]
],
[
[
50262,
50315
]
],
[
[
50424,
50483
]
],
[
[
50607,
50663
]
],
[
[
50776,
50832
]
],
[
[
50945,
51005
]
],
[
[
51126,
51190
]
],
[
[
51316,
51377
]
],
[
[
51496,
51548
]
],
[
[
51651,
51707
]
],
[
[
51822,
51877
]
],
[
[
51987,
52029
]
],
[
[
52136,
52187
]
],
[
[
52341,
52393
]
],
[
[
52412,
52462
]
],
[
[
52579,
52636
]
],
[
[
52773,
52824
]
],
[
[
52943,
53001
]
],
[
[
53161,
53216
]
],
[
[
53235,
53293
]
],
[
[
53426,
53477
]
],
[
[
53620,
53678
]
],
[
[
53697,
53752
]
],
[
[
53879,
53927
]
],
[
[
54040,
54097
]
],
[
[
54228,
54278
]
],
[
[
54419,
54473
]
],
[
[
54492,
54549
]
],
[
[
54683,
54733
]
],
[
[
54859,
54917
]
],
[
[
55050,
55103
]
],
[
[
55228,
55274
]
],
[
[
55384,
55432
]
],
[
[
55545,
55583
]
],
[
[
55678,
55716
]
],
[
[
55811,
55849
]
],
[
[
55944,
55982
]
],
[
[
56077,
56115
]
],
[
[
56210,
56248
]
],
[
[
56343,
56381
]
],
[
[
56470,
56501
]
],
[
[
56611,
56646
]
],
[
[
56665,
56708
]
],
[
[
56841,
56880
]
],
[
[
56898,
56937
]
],
[
[
57015,
57055
]
],
[
[
57135,
57173
]
],
[
[
57270,
57301
]
],
[
[
57387,
57422
]
],
[
[
57513,
57545
]
],
[
[
57633,
57664
]
],
[
[
57747,
57778
]
],
[
[
57858,
57888
]
],
[
[
57969,
58000
]
],
[
[
58086,
58132
]
],
[
[
58264,
58318
]
],
[
[
58337,
58385
]
],
[
[
58937,
58991
]
],
[
[
59686,
59739
]
],
[
[
59952,
60015
]
],
[
[
60261,
60320
]
],
[
[
60639,
60690
]
],
[
[
60871,
60920
]
],
[
[
61143,
61195
]
],
[
[
61324,
61378
]
],
[
[
61396,
61453
]
],
[
[
61646,
61702
]
],
[
[
61823,
61877
]
],
[
[
61989,
62038
]
],
[
[
62260,
62319
]
],
[
[
62525,
62580
]
],
[
[
62779,
62839
]
],
[
[
63082,
63131
]
],
[
[
63322,
63377
]
],
[
[
63564,
63615
]
],
[
[
63796,
63843
]
],
[
[
64017,
64076
]
],
[
[
64269,
64330
]
],
[
[
64540,
64587
]
],
[
[
64708,
64756
]
],
[
[
64914,
64926
],
[
64931,
64943
]
],
[
[
64927,
64928
],
[
65114,
65115
],
[
65275,
65276
],
[
65436,
65437
],
[
65597,
65598
],
[
65776,
65777
],
[
65955,
65956
],
[
66137,
66138
],
[
66316,
66317
],
[
66480,
66481
],
[
66644,
66645
],
[
66808,
66809
]
],
[
[
64962,
65018
]
],
[
[
65123,
65179
]
],
[
[
65284,
65340
]
],
[
[
65445,
65501
]
],
[
[
65606,
65668
]
],
[
[
65785,
65847
]
],
[
[
65964,
66027
]
],
[
[
66146,
66208
]
],
[
[
66325,
66382
]
],
[
[
66489,
66546
]
],
[
[
66653,
66710
]
],
[
[
66841,
66905
]
],
[
[
66923,
66972
]
],
[
[
67153,
67193
]
],
[
[
67284,
67344
]
],
[
[
67466,
67520
]
],
[
[
67638,
67692
]
],
[
[
67810,
67866
]
],
[
[
67988,
68034
]
],
[
[
68136,
68182
]
],
[
[
68284,
68329
]
],
[
[
68429,
68474
]
],
[
[
68568,
68613
]
],
[
[
68707,
68751
]
],
[
[
68843,
68887
]
],
[
[
68979,
69023
]
],
[
[
69115,
69151
]
],
[
[
69294,
69336
]
],
[
[
69446,
69488
]
],
[
[
69598,
69640
]
],
[
[
69778,
69823
]
],
[
[
69914,
69959
]
],
[
[
70050,
70102
]
],
[
[
70257,
70302
]
],
[
[
70395,
70439
]
],
[
[
70599,
70640
]
],
[
[
70751,
70793
]
],
[
[
70906,
70950
]
],
[
[
71050,
71090
]
],
[
[
71179,
71227
]
],
[
[
71332,
71380
]
],
[
[
71485,
71533
]
],
[
[
71638,
71683
]
],
[
[
71782,
71826
]
],
[
[
71926,
71967
]
],
[
[
72058,
72106
]
],
[
[
72211,
72252
]
],
[
[
72497,
72538
]
],
[
[
72629,
72670
]
],
[
[
72761,
72802
]
],
[
[
72922,
72970
]
],
[
[
73072,
73115
]
],
[
[
73208,
73259
]
],
[
[
73367,
73427
]
],
[
[
73625,
73673
]
],
[
[
73774,
73825
]
],
[
[
73932,
73973
]
],
[
[
74110,
74172
]
],
[
[
74336,
74400
]
],
[
[
74606,
74662
]
],
[
[
74809,
74869
]
],
[
[
75087,
75144
]
],
[
[
75265,
75321
]
],
[
[
75438,
75486
]
],
[
[
75589,
75641
]
],
[
[
75834,
75889
]
],
[
[
76006,
76049
]
],
[
[
76146,
76190
]
],
[
[
76331,
76380
]
],
[
[
76486,
76535
]
],
[
[
76641,
76694
]
],
[
[
76808,
76861
]
],
[
[
76975,
77023
]
],
[
[
77172,
77225
]
],
[
[
77337,
77390
]
],
[
[
77502,
77550
]
],
[
[
77653,
77696
]
],
[
[
77789,
77832
]
],
[
[
77925,
77968
]
],
[
[
78061,
78108
]
],
[
[
78209,
78255
]
],
[
[
78354,
78400
]
],
[
[
78500,
78546
]
],
[
[
78646,
78697
]
],
[
[
78810,
78853
]
],
[
[
78950,
78992
]
],
[
[
79084,
79126
]
],
[
[
79223,
79262
]
],
[
[
79353,
79391
]
],
[
[
79480,
79521
]
],
[
[
79616,
79661
]
],
[
[
79759,
79807
]
],
[
[
79911,
79961
]
],
[
[
80096,
80138
]
],
[
[
80236,
80281
]
],
[
[
80384,
80431
]
],
[
[
80536,
80582
]
],
[
[
80819,
80859
]
],
[
[
80947,
80999
]
],
[
[
81624,
81664
]
],
[
[
81755,
81799
]
],
[
[
81898,
81941
]
],
[
[
82038,
82079
]
],
[
[
82173,
82221
]
],
[
[
82337,
82378
]
],
[
[
82472,
82520
]
],
[
[
82636,
82676
]
],
[
[
82767,
82808
]
],
[
[
82902,
82950
]
],
[
[
83066,
83107
]
],
[
[
83201,
83249
]
],
[
[
83365,
83405
]
],
[
[
83496,
83537
]
],
[
[
83631,
83679
]
],
[
[
83795,
83836
]
],
[
[
83930,
83978
]
],
[
[
84094,
84134
]
],
[
[
84225,
84269
]
],
[
[
84401,
84446
]
],
[
[
84548,
84591
]
],
[
[
84701,
84741
]
],
[
[
84829,
84873
]
],
[
[
84969,
85009
]
],
[
[
85097,
85141
]
],
[
[
85237,
85279
]
],
[
[
85371,
85417
]
],
[
[
85517,
85564
]
],
[
[
85675,
85722
]
],
[
[
85833,
85882
]
],
[
[
85997,
86046
]
],
[
[
86323,
86381
]
],
[
[
86500,
86558
]
],
[
[
86727,
86769
]
],
[
[
86863,
86909
]
],
[
[
87011,
87058
]
],
[
[
87162,
87207
]
],
[
[
87307,
87355
]
],
[
[
87461,
87506
]
],
[
[
87604,
87647
]
],
[
[
87741,
87784
]
],
[
[
87878,
87927
]
],
[
[
88037,
88088
]
],
[
[
88226,
88271
]
],
[
[
88290,
88343
]
],
[
[
88567,
88611
]
],
[
[
88740,
88789
]
],
[
[
88808,
88852
]
],
[
[
88955,
89000
]
],
[
[
89102,
89149
]
],
[
[
89279,
89320
]
],
[
[
89339,
89379
]
],
[
[
89497,
89543
]
],
[
[
89652,
89703
]
],
[
[
89817,
89860
]
],
[
[
89963,
90014
]
],
[
[
90133,
90184
]
],
[
[
90303,
90349
]
],
[
[
90458,
90511
]
],
[
[
90634,
90687
]
],
[
[
90810,
90856
]
],
[
[
90965,
91011
]
],
[
[
91225,
91261
]
],
[
[
91361,
91414
]
],
[
[
91535,
91579
]
],
[
[
91679,
91725
]
],
[
[
91829,
91861
]
],
[
[
91954,
92009
]
],
[
[
92152,
92206
]
],
[
[
92378,
92427
]
],
[
[
92653,
92698
]
],
[
[
92916,
92958
]
],
[
[
93078,
93121
]
],
[
[
93240,
93274
]
],
[
[
93364,
93402
]
],
[
[
93500,
93538
]
],
[
[
93642,
93681
]
],
[
[
93789,
93822
]
],
[
[
93921,
93953
]
],
[
[
94042,
94091
]
],
[
[
94207,
94252
]
],
[
[
94368,
94400
]
],
[
[
94525,
94561
]
],
[
[
94677,
94717
]
],
[
[
94926,
94983
]
],
[
[
95312,
95355
]
],
[
[
95481,
95523
]
],
[
[
95675,
95713
]
],
[
[
95987,
96025
]
],
[
[
96343,
96382
]
],
[
[
96585,
96621
]
],
[
[
96639,
96674
]
],
[
[
97089,
97123
]
],
[
[
97529,
97564
]
],
[
[
97993,
98010
]
],
[
[
98087,
98121
]
],
[
[
98222,
98256
]
],
[
[
98344,
98382
]
],
[
[
98479,
98517
]
],
[
[
98619,
98656
]
],
[
[
98777,
98817
]
],
[
[
98836,
98868
]
],
[
[
98963,
98994
]
],
[
[
99087,
99122
]
],
[
[
99247,
99291
]
],
[
[
99310,
99346
]
],
[
[
99453,
99492
]
],
[
[
99605,
99640
]
],
[
[
99745,
99779
]
],
[
[
99896,
99934
]
],
[
[
100054,
100099
]
],
[
[
100218,
100257
]
],
[
[
100371,
100415
]
],
[
[
100583,
100623
]
],
[
[
100716,
100753
]
],
[
[
100868,
100911
]
],
[
[
101067,
101104
]
],
[
[
101284,
101331
]
],
[
[
101529,
101576
]
],
[
[
101774,
101818
]
],
[
[
102044,
102086
]
],
[
[
102422,
102467
]
],
[
[
102736,
102780
]
],
[
[
103094,
103139
]
],
[
[
103463,
103516
]
],
[
[
103747,
103796
]
],
[
[
104056,
104098
]
],
[
[
104190,
104233
]
],
[
[
104325,
104370
]
],
[
[
104488,
104541
]
],
[
[
104662,
104712
]
],
[
[
104845,
104899
]
],
[
[
105156,
105200
]
],
[
[
105413,
105458
]
],
[
[
105793,
105830
]
],
[
[
105965,
106008
]
],
[
[
106193,
106237
]
],
[
[
106317,
106358
]
],
[
[
106488,
106540
]
],
[
[
106657,
106700
]
],
[
[
106789,
106834
]
],
[
[
106938,
106989
]
],
[
[
107105,
107137
]
],
[
[
107222,
107272
]
],
[
[
107469,
107522
]
],
[
[
107722,
107775
]
],
[
[
107975,
108029
]
],
[
[
108230,
108268
]
],
[
[
108472,
108509
]
],
[
[
108853,
108890
]
],
[
[
109106,
109144
]
],
[
[
109237,
109271
]
],
[
[
109383,
109417
]
],
[
[
109529,
109567
]
],
[
[
109759,
109796
]
],
[
[
110013,
110050
]
],
[
[
110326,
110364
]
],
[
[
110457,
110493
]
],
[
[
110669,
110708
]
],
[
[
110793,
110842
]
],
[
[
110937,
110980
]
],
[
[
111177,
111220
]
],
[
[
111334,
111378
]
],
[
[
111494,
111543
]
],
[
[
111701,
111739
]
]
] |
#ARC076e
def main():
import sys
input=sys.stdin.readline
sys.setrecursionlimit(10**6)
if __name__ == '__main__':
main() | [
[
[
13,
17
],
[
130,
134
]
]
] |
from django.core.management.base import BaseCommand
from django.utils.timezone import now
class Command(BaseCommand):
args = '[event_slug...]'
help = 'Create missing email aliases'
def handle(*args, **opts):
from access.models import InternalEmailAlias
InternalEmailAlias.ensure_internal_email_aliases()
| [
[
[
40,
51
],
[
106,
117
]
],
[
[
86,
89
]
],
[
[
98,
105
]
]
] |
from django.contrib.auth.models import User
from rest_framework.test import APITestCase
class FVHAPITestCase(APITestCase):
def assert_dict_contains(self, superset, subset, path=''):
for key, expected in subset.items():
full_path = path + key
received = superset.get(key, None)
if isinstance(expected, dict) and isinstance(received, dict):
self.assert_dict_contains(superset[key], expected, full_path + '.')
else:
assert received == expected, 'Value mismatch for key {}: {} != {}'.format(
full_path, expected, received
)
def create_user(self):
return User.objects.create(
username='courier', first_name='Coranne', last_name='Courier', email='[email protected]')
def create_and_login_user(self):
user = self.create_user()
self.client.force_login(user)
return user
| [
[
[
39,
43
],
[
693,
697
]
],
[
[
76,
87
],
[
111,
122
]
],
[
[
96,
110
]
]
] |
"ts_project rule"
load("@build_bazel_rules_nodejs//:providers.bzl", "DeclarationInfo", "NpmPackageInfo", "declaration_info", "js_module_info", "run_node")
_DEFAULT_TSC = (
# BEGIN-INTERNAL
"@npm" +
# END-INTERNAL
"//typescript/bin:tsc"
)
_ATTRS = {
"args": attr.string_list(),
"declaration_dir": attr.string(),
"deps": attr.label_list(providers = [DeclarationInfo]),
"extends": attr.label_list(allow_files = [".json"]),
"out_dir": attr.string(),
"root_dir": attr.string(),
# NB: no restriction on extensions here, because tsc sometimes adds type-check support
# for more file kinds (like require('some.json')) and also
# if you swap out the `compiler` attribute (like with ngtsc)
# that compiler might allow more sources than tsc does.
"srcs": attr.label_list(allow_files = True, mandatory = True),
"tsc": attr.label(default = Label(_DEFAULT_TSC), executable = True, cfg = "host"),
"tsconfig": attr.label(mandatory = True, allow_single_file = [".json"]),
}
# tsc knows how to produce the following kinds of output files.
# NB: the macro `ts_project_macro` will set these outputs based on user
# telling us which settings are enabled in the tsconfig for this project.
_OUTPUTS = {
"buildinfo_out": attr.output(),
"js_outs": attr.output_list(),
"map_outs": attr.output_list(),
"typing_maps_outs": attr.output_list(),
"typings_outs": attr.output_list(),
}
_TsConfigInfo = provider(
doc = """Passes tsconfig.json files to downstream compilations so that TypeScript can read them.
This is needed to support Project References""",
fields = {
"tsconfigs": "depset of tsconfig.json files",
},
)
def _join(*elements):
return "/".join([f for f in elements if f])
def _ts_project_impl(ctx):
arguments = ctx.actions.args()
# Add user specified arguments *before* rule supplied arguments
arguments.add_all(ctx.attr.args)
arguments.add_all([
"--project",
ctx.file.tsconfig.path,
"--outDir",
_join(ctx.bin_dir.path, ctx.label.package, ctx.attr.out_dir),
"--rootDir",
_join(ctx.label.package, ctx.attr.root_dir) if ctx.label.package else ".",
])
if len(ctx.outputs.typings_outs) > 0:
declaration_dir = ctx.attr.declaration_dir if ctx.attr.declaration_dir else ctx.attr.out_dir
arguments.add_all([
"--declarationDir",
_join(ctx.bin_dir.path, ctx.label.package, declaration_dir),
])
# When users report problems, we can ask them to re-build with
# --define=VERBOSE_LOGS=1
# so anything that's useful to diagnose rule failures belongs here
if "VERBOSE_LOGS" in ctx.var.keys():
arguments.add_all([
# What files were in the ts.Program
"--listFiles",
# Did tsc write all outputs to the place we expect to find them?
"--listEmittedFiles",
# Why did module resolution fail?
"--traceResolution",
# Why was the build slow?
"--diagnostics",
"--extendedDiagnostics",
])
deps_depsets = []
for dep in ctx.attr.deps:
if _TsConfigInfo in dep:
deps_depsets.append(dep[_TsConfigInfo].tsconfigs)
if NpmPackageInfo in dep:
# TODO: we could maybe filter these to be tsconfig.json or *.d.ts only
# we don't expect tsc wants to read any other files from npm packages.
deps_depsets.append(dep[NpmPackageInfo].sources)
if DeclarationInfo in dep:
deps_depsets.append(dep[DeclarationInfo].transitive_declarations)
inputs = ctx.files.srcs + depset(transitive = deps_depsets).to_list() + [ctx.file.tsconfig]
if ctx.attr.extends:
inputs.extend(ctx.files.extends)
# We do not try to predeclare json_outs, because their output locations generally conflict with their path in the source tree.
# (The exception is when out_dir is used, then the .json output is a different path than the input.)
# However tsc will copy .json srcs to the output tree so we want to declare these outputs to include along with .js Default outs
# NB: We don't have emit_declaration_only setting here, so use presence of any JS outputs as an equivalent.
# tsc will only produce .json if it also produces .js
if len(ctx.outputs.js_outs):
json_outs = [
ctx.actions.declare_file(_join(ctx.attr.out_dir, src.short_path[len(ctx.label.package) + 1:]))
for src in ctx.files.srcs
if src.basename.endswith(".json")
]
else:
json_outs = []
outputs = json_outs + ctx.outputs.js_outs + ctx.outputs.map_outs + ctx.outputs.typings_outs + ctx.outputs.typing_maps_outs
if ctx.outputs.buildinfo_out:
outputs.append(ctx.outputs.buildinfo_out)
runtime_outputs = depset(json_outs + ctx.outputs.js_outs + ctx.outputs.map_outs)
typings_outputs = ctx.outputs.typings_outs + ctx.outputs.typing_maps_outs + [s for s in ctx.files.srcs if s.path.endswith(".d.ts")]
if len(outputs) > 0:
run_node(
ctx,
inputs = inputs,
arguments = [arguments],
outputs = outputs,
executable = "tsc",
progress_message = "Compiling TypeScript project %s [tsc -p %s]" % (
ctx.label,
ctx.file.tsconfig.short_path,
),
)
providers = [
# DefaultInfo is what you see on the command-line for a built library,
# and determines what files are used by a simple non-provider-aware
# downstream library.
# Only the JavaScript outputs are intended for use in non-TS-aware
# dependents.
DefaultInfo(
files = runtime_outputs,
runfiles = ctx.runfiles(
transitive_files = runtime_outputs,
collect_default = True,
),
),
js_module_info(
sources = runtime_outputs,
deps = ctx.attr.deps,
),
_TsConfigInfo(tsconfigs = depset([ctx.file.tsconfig] + ctx.files.extends, transitive = [
dep[_TsConfigInfo].tsconfigs
for dep in ctx.attr.deps
if _TsConfigInfo in dep
])),
]
# Don't provide DeclarationInfo if there are no typings to provide.
# Improves error messaging if a ts_project needs declaration = True
if len(typings_outputs) or len(ctx.attr.deps):
providers.append(declaration_info(depset(typings_outputs), ctx.attr.deps))
providers.append(OutputGroupInfo(types = depset(typings_outputs)))
return providers
ts_project = rule(
implementation = _ts_project_impl,
attrs = dict(_ATTRS, **_OUTPUTS),
)
def _validate_options_impl(ctx):
# Bazel won't run our action unless its output is needed, so make a marker file
# We make it a .d.ts file so we can plumb it to the deps of the ts_project compile.
marker = ctx.actions.declare_file("%s.optionsvalid.d.ts" % ctx.label.name)
arguments = ctx.actions.args()
arguments.add_all([ctx.file.tsconfig.path, marker.path, ctx.attr.target, struct(
declaration = ctx.attr.declaration,
declaration_map = ctx.attr.declaration_map,
composite = ctx.attr.composite,
emit_declaration_only = ctx.attr.emit_declaration_only,
source_map = ctx.attr.source_map,
incremental = ctx.attr.incremental,
).to_json()])
run_node(
ctx,
inputs = [ctx.file.tsconfig] + ctx.files.extends,
outputs = [marker],
arguments = [arguments],
executable = "validator",
)
return [
DeclarationInfo(
transitive_declarations = depset([marker]),
),
]
validate_options = rule(
implementation = _validate_options_impl,
attrs = {
"composite": attr.bool(),
"declaration": attr.bool(),
"declaration_map": attr.bool(),
"emit_declaration_only": attr.bool(),
"extends": attr.label_list(allow_files = [".json"]),
"incremental": attr.bool(),
"source_map": attr.bool(),
"target": attr.string(),
"tsconfig": attr.label(mandatory = True, allow_single_file = [".json"]),
"validator": attr.label(default = Label("//packages/typescript/bin:ts_project_options_validator"), executable = True, cfg = "host"),
},
)
def _out_paths(srcs, outdir, rootdir, ext):
rootdir_replace_pattern = rootdir + "/" if rootdir else ""
return [_join(outdir, f[:f.rindex(".")].replace(rootdir_replace_pattern, "") + ext) for f in srcs if not f.endswith(".d.ts") and not f.endswith(".json")]
def ts_project_macro(
name = "tsconfig",
tsconfig = None,
srcs = None,
args = [],
deps = [],
extends = None,
declaration = False,
source_map = False,
declaration_map = False,
composite = False,
incremental = False,
emit_declaration_only = False,
tsc = None,
validate = True,
declaration_dir = None,
out_dir = None,
root_dir = None,
**kwargs):
"""Compiles one TypeScript project using `tsc --project`
This is a drop-in replacement for the `tsc` rule automatically generated for the "typescript"
package, typically loaded from `@npm//typescript:index.bzl`. Unlike bare `tsc`, this rule understands
the Bazel interop mechanism (Providers) so that this rule works with others that produce or consume
TypeScript typings (`.d.ts` files).
Unlike `ts_library`, this rule is the thinnest possible layer of Bazel interoperability on top
of the TypeScript compiler. It shifts the burden of configuring TypeScript into the tsconfig.json file.
See https://github.com/bazelbuild/rules_nodejs/blob/master/docs/TypeScript.md#alternatives
for more details about the trade-offs between the two rules.
Some TypeScript options affect which files are emitted, and Bazel wants to know these ahead-of-time.
So several options from the tsconfig file must be mirrored as attributes to ts_project.
See https://www.typescriptlang.org/v2/en/tsconfig for a listing of the TypeScript options.
Any code that works with `tsc` should work with `ts_project` with a few caveats:
- Bazel requires that the `outDir` (and `declarationDir`) be set to
`bazel-out/[target architecture]/bin/path/to/package`
so we override whatever settings appear in your tsconfig.
- Bazel expects that each output is produced by a single rule.
Thus if you have two `ts_project` rules with overlapping sources (the same `.ts` file
appears in more than one) then you get an error about conflicting `.js` output
files if you try to build both together.
Worse, if you build them separately then the output directory will contain whichever
one you happened to build most recently. This is highly discouraged.
> Note: in order for TypeScript to resolve relative references to the bazel-out folder,
> we recommend that the base tsconfig contain a rootDirs section that includes all
> possible locations they may appear.
>
> We hope this will not be needed in some future release of TypeScript.
> Follow https://github.com/microsoft/TypeScript/issues/37257 for more info.
>
> For example, if the base tsconfig file relative to the workspace root is
> `path/to/tsconfig.json` then you should configure like:
>
> ```
> "compilerOptions": {
> "rootDirs": [
> ".",
> "../../bazel-out/darwin-fastbuild/bin/path/to",
> "../../bazel-out/k8-fastbuild/bin/path/to",
> "../../bazel-out/x64_windows-fastbuild/bin/path/to",
> "../../bazel-out/darwin-dbg/bin/path/to",
> "../../bazel-out/k8-dbg/bin/path/to",
> "../../bazel-out/x64_windows-dbg/bin/path/to",
> ]
> }
> ```
### Issues when running non-sandboxed
When using a non-sandboxed spawn strategy (which is the default on Windows), you may
observe these problems which require workarounds:
1) Bazel deletes outputs from the previous execution before running `tsc`.
This causes a problem with TypeScript's incremental mode: if the `.tsbuildinfo` file
is not known to be an output of the rule, then Bazel will leave it in the output
directory, and when `tsc` runs, it may see that the outputs written by the prior
invocation are up-to-date and skip the emit of these files. This will cause Bazel
to intermittently fail with an error that some outputs were not written.
This is why we depend on `composite` and/or `incremental` attributes to be provided,
so we can tell Bazel to expect a `.tsbuildinfo` output to ensure it is deleted before a
subsequent compilation.
At present, we don't do anything useful with the `.tsbuildinfo` output, and this rule
does not actually have incremental behavior. Deleting the file is actually
counter-productive in terms of TypeScript compile performance.
Follow https://github.com/bazelbuild/rules_nodejs/issues/1726
2) When using Project References, TypeScript will expect to verify that the outputs of referenced
projects are up-to-date with respect to their inputs.
(This is true even without using the `--build` option).
When using a non-sandboxed spawn strategy, `tsc` can read the sources from other `ts_project`
rules in your project, and will expect that the `tsconfig.json` file for those references will
indicate where the outputs were written. However the `outDir` is determined by this Bazel rule so
it cannot be known from reading the `tsconfig.json` file.
This problem is manifested as a TypeScript diagnostic like
`error TS6305: Output file '/path/to/execroot/a.d.ts' has not been built from source file '/path/to/execroot/a.ts'.`
As a workaround, you can give the Windows "fastbuild" output directory as the `outDir` in your tsconfig file.
On other platforms, the value isn't read so it does no harm.
See https://github.com/bazelbuild/rules_nodejs/tree/stable/packages/typescript/test/ts_project as an example.
We hope this will be fixed in a future release of TypeScript;
follow https://github.com/microsoft/TypeScript/issues/37378
3) When TypeScript encounters an import statement, it adds the source file resolved by that reference
to the program. However you may have included that source file in a different project, so this causes
the problem mentioned above where a source file is in multiple programs.
(Note, if you use Project References this is not the case, TS will know the referenced
file is part of the other program.)
This will result in duplicate emit for the same file, which produces an error
since the files written to the output tree are read-only.
    Workarounds include using Project References, or simply grouping the whole compilation
into one program (if this doesn't exceed your time budget).
Args:
name: A name for the target.
We recommend you use the basename (no `.json` extension) of the tsconfig file that should be compiled.
srcs: List of labels of TypeScript source files to be provided to the compiler.
If absent, defaults to `**/*.ts[x]` (all TypeScript files in the package).
deps: List of labels of other rules that produce TypeScript typings (.d.ts files)
tsconfig: Label of the tsconfig.json file to use for the compilation.
By default, we add `.json` to the `name` attribute.
extends: List of labels of tsconfig file(s) referenced in `extends` section of tsconfig.
Must include any tsconfig files "chained" by extends clauses.
args: List of strings of additional command-line arguments to pass to tsc.
tsc: Label of the TypeScript compiler binary to run.
For example, `tsc = "@my_deps//typescript/bin:tsc"`
Or you can pass a custom compiler binary instead.
validate: boolean; whether to check that the tsconfig settings match the attributes.
        root_dir: a string specifying a subdirectory under the input package which should be considered the
root directory of all the input files.
Equivalent to the TypeScript --rootDir option.
By default it is '.', meaning the source directory where the BUILD file lives.
out_dir: a string specifying a subdirectory under the bazel-out folder where outputs are written.
Equivalent to the TypeScript --outDir option.
Note that Bazel always requires outputs be written under a subdirectory matching the input package,
so if your rule appears in path/to/my/package/BUILD.bazel and out_dir = "foo" then the .js files
will appear in bazel-out/[arch]/bin/path/to/my/package/foo/*.js.
By default the out_dir is '.', meaning the packages folder in bazel-out.
declaration_dir: a string specifying a subdirectory under the bazel-out folder where generated declaration
outputs are written. Equivalent to the TypeScript --declarationDir option.
By default declarations are written to the out_dir.
declaration: if the `declaration` bit is set in the tsconfig.
Instructs Bazel to expect a `.d.ts` output for each `.ts` source.
source_map: if the `sourceMap` bit is set in the tsconfig.
Instructs Bazel to expect a `.js.map` output for each `.ts` source.
declaration_map: if the `declarationMap` bit is set in the tsconfig.
Instructs Bazel to expect a `.d.ts.map` output for each `.ts` source.
composite: if the `composite` bit is set in the tsconfig.
Instructs Bazel to expect a `.tsbuildinfo` output and a `.d.ts` output for each `.ts` source.
incremental: if the `incremental` bit is set in the tsconfig.
Instructs Bazel to expect a `.tsbuildinfo` output.
emit_declaration_only: if the `emitDeclarationOnly` bit is set in the tsconfig.
Instructs Bazel *not* to expect `.js` or `.js.map` outputs for `.ts` sources.
        **kwargs: passed through to the underlying rule; allows e.g. visibility, tags
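
    Example: a minimal invocation might look like the following (the target name, sources
    and dependency label are hypothetical, and `ts_project` should be loaded from wherever
    your workspace exposes this macro):

    ```
    ts_project(
        name = "tsconfig",                  # looks for ./tsconfig.json by default
        srcs = glob(["src/**/*.ts"]),
        declaration = True,
        source_map = True,
        deps = ["//some/other:ts_lib"],     # rules that produce .d.ts typings
    )
    ```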
"""
if srcs == None:
srcs = native.glob(["**/*.ts", "**/*.tsx"])
if tsconfig == None:
tsconfig = name + ".json"
extra_deps = []
if validate:
validate_options(
name = "_validate_%s_options" % name,
target = "//%s:%s" % (native.package_name(), name),
declaration = declaration,
source_map = source_map,
declaration_map = declaration_map,
composite = composite,
incremental = incremental,
emit_declaration_only = emit_declaration_only,
tsconfig = tsconfig,
extends = extends,
)
extra_deps.append("_validate_%s_options" % name)
typings_out_dir = declaration_dir if declaration_dir else out_dir
ts_project(
name = name,
srcs = srcs,
args = args,
deps = deps + extra_deps,
tsconfig = tsconfig,
extends = extends,
declaration_dir = declaration_dir,
out_dir = out_dir,
root_dir = root_dir,
js_outs = _out_paths(srcs, out_dir, root_dir, ".js") if not emit_declaration_only else [],
map_outs = _out_paths(srcs, out_dir, root_dir, ".js.map") if source_map and not emit_declaration_only else [],
typings_outs = _out_paths(srcs, typings_out_dir, root_dir, ".d.ts") if declaration or composite else [],
typing_maps_outs = _out_paths(srcs, typings_out_dir, root_dir, ".d.ts.map") if declaration_map else [],
buildinfo_out = tsconfig[:-5] + ".tsbuildinfo" if composite or incremental else None,
tsc = tsc,
**kwargs
)
| [
[
[
157,
169
],
[
900,
912
]
],
[
[
257,
263
],
[
6752,
6758
]
],
[
[
1239,
1247
],
[
6762,
6770
]
],
[
[
1446,
1459
],
[
3196,
3209
],
[
3254,
3267
],
[
6078,
6091
],
[
6260,
6273
],
[
6183,
6196
]
],
[
[
1713,
1718
],
[
2054,
2059
],
[
2145,
2150
],
[
2442,
2447
],
[
4449,
4454
],
[
8540,
8545
]
],
[
[
1784,
1800
],
[
6717,
6733
]
],
[
[
6677,
6687
],
[
19246,
19256
]
],
[
[
6780,
6802
],
[
7830,
7852
]
],
[
[
7784,
7800
],
[
18651,
18667
]
],
[
[
8425,
8435
],
[
19528,
19538
],
[
19628,
19638
],
[
19751,
19761
],
[
19868,
19878
]
],
[
[
8691,
8707
]
]
] |
# encoding: utf-8
from __future__ import unicode_literals
class TranslationError(Exception):
"""Failure to translate source."""
pass
| [
[
[
42,
58
]
],
[
[
67,
83
]
]
] |
# Write a Python program to get execution time for a Python method.
import time
def sum_of_n_numbers(x):
start_time = time.time()
s = 0
for i in range(1, x + 1):
s = s + i
end_time = time.time()
return s, end_time - start_time
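# Note: time.time() measures wall-clock time; for timing short code sections,
# time.perf_counter() would typically give a more precise measurement.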
n = 5
print("\nTime to sum of 1 to ", n, " and required time to calculate is :", sum_of_n_numbers(n))
| [
[
[
76,
80
],
[
125,
129
],
[
210,
214
]
],
[
[
87,
103
],
[
341,
357
]
],
[
[
260,
261
],
[
298,
299
],
[
358,
359
]
]
] |
'''
Native support for Multitouch devices on Linux, using libmtdev.
===============================================================
The Mtdev project is a part of the Ubuntu Maverick multitouch architecture.
You can read more on http://wiki.ubuntu.com/Multitouch
To configure MTDev, it's preferable to use probesysfs providers.
Check :py:class:`~kivy.input.providers.probesysfs` for more information.
Otherwise, add this to your configuration::
[input]
# devicename = hidinput,/dev/input/eventXX
acert230h = mtdev,/dev/input/event2
.. note::
You must have read access to the input event.
You can use a custom range for the X, Y and pressure values.
On some drivers, the range reported is invalid.
To fix that, you can add these options to the argument line (see the example after this list):
* invert_x : 1 to invert X axis
* invert_y : 1 to invert Y axis
* min_position_x : X minimum
* max_position_x : X maximum
* min_position_y : Y minimum
* max_position_y : Y maximum
* min_pressure : pressure minimum
* max_pressure : pressure maximum
* min_touch_major : width shape minimum
* max_touch_major : width shape maximum
* min_touch_minor : width shape minimum
* max_touch_minor : height shape maximum
* rotation : 0,90,180 or 270 to rotate
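
Several options can be combined after the device path on the same line, for
example (the device path and range values here are purely illustrative)::

    mytouchscreen = mtdev,/dev/input/event2,max_position_x=4095,max_position_y=4095,invert_y=1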
'''
__all__ = ('MTDMotionEventProvider', 'MTDMotionEvent')
import os
from kivy.input.motionevent import MotionEvent
from kivy.input.shape import ShapeRect
class MTDMotionEvent(MotionEvent):
def depack(self, args):
self.is_touch = True
if 'x' in args:
self.sx = args['x']
else:
self.sx = -1
if 'y' in args:
self.sy = args['y']
else:
self.sy = -1
self.profile = ['pos']
if 'size_w' in args and 'size_h' in args:
self.shape = ShapeRect()
self.shape.width = args['size_w']
self.shape.height = args['size_h']
self.profile.append('shape')
if 'pressure' in args:
self.pressure = args['pressure']
self.profile.append('pressure')
super(MTDMotionEvent, self).depack(args)
def __str__(self):
i, sx, sy, d = (self.id, self.sx, self.sy, self.device)
return '<MTDMotionEvent id=%d pos=(%f, %f) device=%s>' % (i, sx, sy, d)
if 'KIVY_DOC' in os.environ:
# documentation hack
MTDMotionEventProvider = None
else:
import threading
import collections
from kivy.lib.mtdev import Device, \
MTDEV_TYPE_EV_ABS, MTDEV_CODE_SLOT, MTDEV_CODE_POSITION_X, \
MTDEV_CODE_POSITION_Y, MTDEV_CODE_PRESSURE, \
MTDEV_CODE_TOUCH_MAJOR, MTDEV_CODE_TOUCH_MINOR, \
MTDEV_CODE_TRACKING_ID, MTDEV_ABS_POSITION_X, \
MTDEV_ABS_POSITION_Y, MTDEV_ABS_TOUCH_MINOR, \
MTDEV_ABS_TOUCH_MAJOR
from kivy.input.provider import MotionEventProvider
from kivy.input.factory import MotionEventFactory
from kivy.logger import Logger
class MTDMotionEventProvider(MotionEventProvider):
options = ('min_position_x', 'max_position_x',
'min_position_y', 'max_position_y',
'min_pressure', 'max_pressure',
'min_touch_major', 'max_touch_major',
'min_touch_minor', 'max_touch_minor',
'invert_x', 'invert_y',
'rotation')
def __init__(self, device, args):
super(MTDMotionEventProvider, self).__init__(device, args)
self._device = None
self.input_fn = None
self.default_ranges = dict()
# split arguments
args = args.split(',')
if not args:
Logger.error('MTD: No filename pass to MTD configuration')
Logger.error('MTD: Use /dev/input/event0 for example')
return
# read filename
self.input_fn = args[0]
Logger.info('MTD: Read event from <%s>' % self.input_fn)
# read parameters
for arg in args[1:]:
if arg == '':
continue
arg = arg.split('=')
# ensure it's a key = value
if len(arg) != 2:
err = 'MTD: Bad parameter %s: Not in key=value format' %\
arg
Logger.error(err)
continue
# ensure the key exist
key, value = arg
if key not in MTDMotionEventProvider.options:
Logger.error('MTD: unknown %s option' % key)
continue
# ensure the value
try:
self.default_ranges[key] = int(value)
except ValueError:
err = 'MTD: invalid value %s for option %s' % (key, value)
Logger.error(err)
continue
# all good!
Logger.info('MTD: Set custom %s to %d' % (key, int(value)))
if 'rotation' not in self.default_ranges:
self.default_ranges['rotation'] = 0
elif self.default_ranges['rotation'] not in (0, 90, 180, 270):
Logger.error('HIDInput: invalid rotation value ({})'.format(
self.default_ranges['rotation']))
self.default_ranges['rotation'] = 0
def start(self):
if self.input_fn is None:
return
self.uid = 0
self.queue = collections.deque()
self.thread = threading.Thread(
target=self._thread_run,
kwargs=dict(
queue=self.queue,
input_fn=self.input_fn,
device=self.device,
default_ranges=self.default_ranges))
self.thread.daemon = True
self.thread.start()
def _thread_run(self, **kwargs):
input_fn = kwargs.get('input_fn')
queue = kwargs.get('queue')
device = kwargs.get('device')
drs = kwargs.get('default_ranges').get
touches = {}
touches_sent = []
point = {}
l_points = {}
def assign_coord(point, value, invert, coords):
cx, cy = coords
if invert:
value = 1. - value
if rotation == 0:
point[cx] = value
elif rotation == 90:
point[cy] = value
elif rotation == 180:
point[cx] = 1. - value
elif rotation == 270:
point[cy] = 1. - value
def process(points):
for args in points:
# this can happen if we have a touch going on already at
# the start of the app
if 'id' not in args:
continue
tid = args['id']
try:
touch = touches[tid]
except KeyError:
touch = MTDMotionEvent(device, tid, args)
touches[touch.id] = touch
touch.move(args)
action = 'update'
if tid not in touches_sent:
action = 'begin'
touches_sent.append(tid)
if 'delete' in args:
action = 'end'
del args['delete']
del touches[touch.id]
touches_sent.remove(tid)
touch.update_time_end()
queue.append((action, touch))
def normalize(value, vmin, vmax):
return (value - vmin) / float(vmax - vmin)
# open mtdev device
_fn = input_fn
_slot = 0
try:
_device = Device(_fn)
except OSError as e:
if e.errno == 13: # Permission denied
Logger.warn(
'MTD: Unable to open device "{0}". Please ensure you'
' have the appropriate permissions.'.format(_fn))
return
else:
raise
_changes = set()
# prepare some vars to get limit of some component
ab = _device.get_abs(MTDEV_ABS_POSITION_X)
range_min_position_x = drs('min_position_x', ab.minimum)
range_max_position_x = drs('max_position_x', ab.maximum)
Logger.info('MTD: <%s> range position X is %d - %d' %
(_fn, range_min_position_x, range_max_position_x))
ab = _device.get_abs(MTDEV_ABS_POSITION_Y)
range_min_position_y = drs('min_position_y', ab.minimum)
range_max_position_y = drs('max_position_y', ab.maximum)
Logger.info('MTD: <%s> range position Y is %d - %d' %
(_fn, range_min_position_y, range_max_position_y))
ab = _device.get_abs(MTDEV_ABS_TOUCH_MAJOR)
range_min_major = drs('min_touch_major', ab.minimum)
range_max_major = drs('max_touch_major', ab.maximum)
Logger.info('MTD: <%s> range touch major is %d - %d' %
(_fn, range_min_major, range_max_major))
ab = _device.get_abs(MTDEV_ABS_TOUCH_MINOR)
range_min_minor = drs('min_touch_minor', ab.minimum)
range_max_minor = drs('max_touch_minor', ab.maximum)
Logger.info('MTD: <%s> range touch minor is %d - %d' %
(_fn, range_min_minor, range_max_minor))
range_min_pressure = drs('min_pressure', 0)
range_max_pressure = drs('max_pressure', 255)
Logger.info('MTD: <%s> range pressure is %d - %d' %
(_fn, range_min_pressure, range_max_pressure))
invert_x = int(bool(drs('invert_x', 0)))
invert_y = int(bool(drs('invert_y', 0)))
            Logger.info('MTD: <%s> axes inversion: X is %d, Y is %d' %
(_fn, invert_x, invert_y))
rotation = drs('rotation', 0)
Logger.info('MTD: <%s> rotation set to %d' %
(_fn, rotation))
while _device:
# idle as much as we can.
while _device.idle(1000):
continue
# got data, read all without redoing idle
while True:
data = _device.get()
if data is None:
break
# set the working slot
if data.type == MTDEV_TYPE_EV_ABS and \
data.code == MTDEV_CODE_SLOT:
_slot = data.value
continue
# fill the slot
if _slot not in l_points:
l_points[_slot] = dict()
point = l_points[_slot]
ev_value = data.value
ev_code = data.code
if ev_code == MTDEV_CODE_POSITION_X:
val = normalize(ev_value,
range_min_position_x,
range_max_position_x)
assign_coord(point, val, invert_x, 'xy')
elif ev_code == MTDEV_CODE_POSITION_Y:
val = 1. - normalize(ev_value,
range_min_position_y,
range_max_position_y)
assign_coord(point, val, invert_y, 'yx')
elif ev_code == MTDEV_CODE_PRESSURE:
point['pressure'] = normalize(ev_value,
range_min_pressure,
range_max_pressure)
elif ev_code == MTDEV_CODE_TOUCH_MAJOR:
point['size_w'] = normalize(ev_value,
range_min_major,
range_max_major)
elif ev_code == MTDEV_CODE_TOUCH_MINOR:
point['size_h'] = normalize(ev_value,
range_min_minor,
range_max_minor)
elif ev_code == MTDEV_CODE_TRACKING_ID:
if ev_value == -1:
point['delete'] = True
# force process of changes here, as the slot can be
# reused.
_changes.add(_slot)
process([l_points[x] for x in _changes])
_changes.clear()
continue
else:
point['id'] = ev_value
else:
# unrecognized command, ignore.
continue
_changes.add(_slot)
# push all changes
if _changes:
process([l_points[x] for x in _changes])
_changes.clear()
def update(self, dispatch_fn):
# dispatch all event from threads
try:
while True:
event_type, touch = self.queue.popleft()
dispatch_fn(event_type, touch)
except:
pass
MotionEventFactory.register('mtdev', MTDMotionEventProvider)
| [
[
[
1234,
1241
]
],
[
[
1297,
1299
],
[
2279,
2281
]
],
[
[
1335,
1346
],
[
1409,
1420
]
],
[
[
1376,
1385
],
[
1777,
1786
]
],
[
[
1394,
1408
],
[
2057,
2071
],
[
7090,
7104
]
],
[
[
2321,
2343
]
],
[
[
2369,
2378
],
[
5520,
5529
]
],
[
[
2390,
2401
],
[
5474,
5485
]
],
[
[
2433,
2439
],
[
7934,
7940
]
],
[
[
2451,
2468
],
[
10730,
10747
]
],
[
[
2470,
2485
],
[
10790,
10805
]
],
[
[
2487,
2508
],
[
11175,
11196
]
],
[
[
2520,
2541
],
[
11473,
11494
]
],
[
[
2543,
2562
],
[
11786,
11805
]
],
[
[
2574,
2596
],
[
12055,
12077
]
],
[
[
2598,
2620
],
[
12315,
12337
]
],
[
[
2632,
2654
],
[
12575,
12597
]
],
[
[
2656,
2676
],
[
8420,
8440
]
],
[
[
2688,
2708
],
[
8755,
8775
]
],
[
[
2710,
2731
],
[
9409,
9430
]
],
[
[
2743,
2764
],
[
9090,
9111
]
],
[
[
2801,
2820
],
[
2944,
2963
]
],
[
[
2856,
2874
],
[
13698,
13716
]
],
[
[
2903,
2909
],
[
3643,
3649
],
[
3718,
3724
],
[
3873,
3879
],
[
4295,
4301
],
[
4497,
4503
],
[
4820,
4826
],
[
4912,
4918
],
[
5170,
5176
],
[
8054,
8060
],
[
8592,
8598
],
[
8927,
8933
],
[
9255,
9261
],
[
9574,
9580
],
[
9821,
9827
],
[
10063,
10069
],
[
10228,
10234
]
],
[
[
2921,
2943
],
[
13735,
13757
],
[
3377,
3399
],
[
4445,
4467
]
]
] |
import unittest
from unittest.mock import MagicMock
import pandas as pd
from pandas.testing import assert_frame_equal
from data_export.pipeline.dataset import Dataset
class TestDataset(unittest.TestCase):
def setUp(self):
example = MagicMock()
example.to_dict.return_value = {"data": "example"}
self.examples = MagicMock()
self.examples.__iter__.return_value = [example]
label = MagicMock()
label.find_by.return_value = {"labels": ["label"]}
self.labels = MagicMock()
self.labels.__iter__.return_value = [label]
def test_to_dataframe(self):
dataset = Dataset(self.examples, self.labels)
df = dataset.to_dataframe()
expected = pd.DataFrame([{"data": "example", "labels": ["label"]}])
assert_frame_equal(df, expected)
| [
[
[
7,
15
],
[
189,
197
]
],
[
[
42,
51
],
[
248,
257
],
[
343,
352
],
[
427,
436
],
[
520,
529
]
],
[
[
60,
72
],
[
727,
729
]
],
[
[
100,
118
],
[
792,
810
]
],
[
[
161,
168
],
[
636,
643
]
],
[
[
177,
188
]
]
] |
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_channelz/grpc_version.py.template`!!!
VERSION = '1.23.0.dev0'
| [
[
[
682,
689
]
]
] |
from ..proto import *
from ..graph_io import *
import paddle.fluid as fluid
import numpy as np
from paddle.fluid.core import VarDesc, AttrType
def union(list_a, list_b):
return list(set(list_a).union(set(list_b)))
def difference(list_a, list_b):
return list(set(list_a).difference(set(list_b)))
class Edge_for_fluid:
def __init__(self, param, target, var):
self.param = param
self.target = target
self.var = var
class Fluid_edger:
def __init__(self, param = None, target = None, var = None):
self.edges = []
if param is not None and target is not None:
edge = Edge_for_fluid(param, target, var)
self.edges.append(edge)
def __call__(self):
return self.all_targets()
def add(self, param, target, var = None):
edge = Edge_for_fluid(param, target, var)
self.edges.append(edge)
def rm_edges_by_param(self, param):
for edge in self.edges:
if edge.param == param:
edge_idx = self.edges.index(edge)
del self.edges[edge_idx]
def rm(self, target):
res = -1
for edge in self.edges:
if target == edge.target:
edge_idx = self.edges.index(edge)
del self.edges[edge_idx]
res = res + 1
if res != 0:
pass
def mv(self, old_target, new_target):
res = -1
for edge in self.edges:
if old_target == edge.target:
edge.target = new_target
res = res + 1
if res != 0:
pass
def all_params(self):
params = []
for edge in self.edges:
if edge.param not in params:
params.append(edge.param)
return params
def all_targets(self):
targets = []
for edge in self.edges:
targets.append(edge.target)
return targets
def targets(self, param):
targets = []
for edge in self.edges:
if edge.param == param:
targets.append(edge.target)
return targets
def target(self, param, idx = 0):
return self.targets(param)[idx]
def clear(self):
targets_list = self.all_targets()
for target in targets_list:
self.rm(target)
def targets_with_params(self):
list_of_targets_and_params = []
for edge in self.edges:
target_and_param = [edge.target, edge.param]
list_of_targets_and_params.append(target_and_param)
return list_of_targets_and_params
def vars_by_target(self, target):
vars = []
for edge in self.edges:
if edge.target == target and edge.var is not None:
vars.append(edge.var)
return vars
def __getitem__(self, idx):
if idx < len(self.edges):
return self.edges[idx]
return None
class Fluid_helper:
def __init__(self, scope, block):
self.scope = scope
self.block = block
def args_by_input_param(self, op, param_name):
if param_name in op.input_names:
return op.input(param_name)
else:
            raise NameError('ERROR: param_name %s does not exist.' % ( param_name ) )
def args_by_output_param(self, op, param_name):
if param_name in op.output_names:
return op.output(param_name)
else:
            raise NameError('ERROR: param_name %s does not exist.' % ( param_name ) )
def var_by_input_param(self, op, param_name, var_idx = 0):
var_name = self.args_by_input_param(op, param_name)[var_idx]
var = self.block.var(var_name)
return var
def var_by_output_param(self, op, param_name, var_idx = 0):
var_name = self.args_by_output_param(op, param_name)[var_idx]
var = self.block.var(var_name)
return var
def var_name_by_param(self, op, param_name, var_idx = 0):
if param_name not in op.input_names + op.output_names:
            raise NameError('ERROR: param_name %s does not exist.' % ( param_name ) )
elif param_name in op.input_names:
if len(op.input(param_name)) > 0:
var_name_unicode = op.input(param_name)[var_idx]
else:
                raise NameError('ERROR: param %s has no var.' % ( param_name ) )
elif param_name in op.output_names:
if len(op.output(param_name)) > 0:
var_name_unicode = op.output(param_name)[var_idx]
else:
                raise NameError('ERROR: param %s has no var.' % ( param_name ) )
var = self.block.var(var_name_unicode)
var_name = var.name
return var_name
def var_by_param(self, op, param_name, var_idx = 0):
var_name = self.var_name_by_param(op, param_name, var_idx)
var = self.block.var(var_name)
return var
def shape_by_var_name(self, var_name, layout = 'NCHW'):
var = self.block.var(var_name)
long_tuple = var.shape
long_list = list(long_tuple)
if layout == 'NCHW':
int_list_4d = map(int, [1]*(4-len(long_list)) + long_list)
return int_list_4d
elif layout == 'UNMODIFIED':
return long_list
else:
raise NameError('ERROR: layout %s is not implemented yet.' % ( layout ) )
def np_data_by_var_name(self, var_name):
numpy_array = fluid.executor.fetch_var(var_name, self.scope, True)
return numpy_array
def dtype_by_var_name(self, var_name):
var = self.block.var(var_name)
fluid_var_type = var.dtype
dtype = ANAKIN_TENSOR_DTYPE[fluid_var_type]
return dtype
def is_persistable_param(self, op, param_name, var_idx = 0):
var = self.var_by_param(op, param_name, var_idx)
is_persistable_var = var.persistable
return is_persistable_var
def var_shape_by_param(self, transpose, op, param_name, var_idx = 0, layout = 'NCHW'):
if transpose is True:
raise NameError('ERROR: var_shape transpose is not implemented yet.')
else:
var_name = self.var_name_by_param(op, param_name, var_idx)
shape = self.shape_by_var_name(var_name, layout)
return shape
def data_with_shape_by_param(self,
op,
param_name,
transpose = False,
axes = None,
var_idx = 0,
is_flat_list = True,
layout = 'NCHW'):
np.set_printoptions(threshold=np.inf, suppress=True)
var_name = self.var_name_by_param(op, param_name, var_idx)
np_array = self.np_data_by_var_name(var_name)
if transpose is True:
np_array = np.transpose(np_array, axes)
np_shape = np.shape(np_array)
if layout == 'NCHW':
np_shape = map(int, [1]*(4-len(np_shape)) + list(np_shape))
if is_flat_list is True:
flat_list = list(np_array.flatten())
return [flat_list, np_shape]
else:
return [np_array, np_shape]
def np_param(self,
op,
param_name,
transpose = False,
axes = None,
var_idx = 0):
[data, np_shape] = self.data_with_shape_by_param(op, param_name, transpose, \
axes, var_idx, False)
return data
def dtype_by_param(self, op, param_name, var_idx = 0):
var_name = self.var_name_by_param(op, param_name, var_idx)
dtype = self.dtype_by_var_name(var_name)
return dtype
def is_list_type(self, op, attr_name):
if op.has_attr(attr_name):
fluid_attr_type = op.attr_type(attr_name)
if fluid_attr_type in ANAKIN_ATTR_IS_LIST.keys():
return ANAKIN_ATTR_IS_LIST[fluid_attr_type]
else:
return False # AttrType.LONG
else:
            raise NameError('ERROR: attr_name %s does not exist.' % ( attr_name ) )
def dtype_of_attr(self, op, attr_name):
if op.has_attr(attr_name):
fluid_attr_type = op.attr_type(attr_name)
if fluid_attr_type in ANAKIN_ATTR_DTYPE.keys():
return ANAKIN_ATTR_DTYPE[fluid_attr_type]
else:
return INT32 # AttrType.LONG
else:
            raise NameError('ERROR: attr_name %s does not exist.' % ( attr_name ) )
def attr_data_required(self, op, attr_name):
data = op.attr(attr_name)
is_list = self.is_list_type(op, attr_name)
dtype = self.dtype_of_attr(op, attr_name)
if dtype not in [INT32, FLOAT, STR]:
return data
elif dtype == INT32:
return map(int, data) if is_list else int(data)
elif dtype == FLOAT:
return map(float, data) if is_list else float(data)
elif dtype == STR:
return bytes(data)
def attr_data(self, op, attr_name, default_value = 0, type = None):
if op.has_attr(attr_name):
return self.attr_data_required(op, attr_name)
else:
#raise NameError('ERROR: attr_name %s is not exists.' % ( attr_name ) )
return default_value
def param_tensor_sh(self,
op,
param_name,
transpose = False,
axes = None,
reshape = None,
var_idx = 0,
layout = 'NCHW'):
tensor = TensorProtoIO()
[flat_data, shape] = self.data_with_shape_by_param(op, param_name, transpose, \
axes, var_idx, True, layout)
dtype = self.dtype_by_param(op, param_name, var_idx)
tensor.set_data_type(dtype)
if dtype in ANAKIN_TENSOR_DTYPESTR.keys():
tensor.set_data(flat_data, ANAKIN_TENSOR_DTYPESTR[dtype])
#pass #debug
else:
raise NameError('ERROR: Unknown data type (%s)' % ( dtype ) )
if reshape is not None:
tensor.set_shape(reshape)
else:
tensor.set_shape(shape)
return [tensor, shape]
def param_tensor(self,
op,
param_name,
transpose = False,
axes = None,
reshape = None,
var_idx = 0,
layout = 'NCHW'):
[tensor, shape] = self.param_tensor_sh(op, param_name, transpose, axes, \
reshape, var_idx, layout)
return tensor
def create_tensor(self, data_list, data_shape, dtype):
tensor = TensorProtoIO()
tensor.set_data_type(dtype)
tensor.set_data(data_list, ANAKIN_TENSOR_DTYPESTR[dtype])
tensor.set_shape(data_shape)
return tensor
def gru_tensor_convert(self, origin_h2h, origin_i2h, origin_b, offset=[2, 1, 0]):
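        # Reorder the GRU gate blocks according to `offset` (default [2, 1, 0]), then pack
        # the input-to-hidden and hidden-to-hidden weights into one flattened 4-D tensor;
        # the bias is reordered the same way (the layout is presumably what the target
        # Anakin runtime expects).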
hidden_size = int(origin_b.size // 3)
word_size = int(origin_i2h.size // hidden_size // 3)
tar_h2h=np.array(origin_h2h.flatten().tolist()[2*hidden_size*hidden_size:]\
+np.array(origin_h2h.flatten().tolist()[:2*hidden_size*hidden_size])\
.reshape(hidden_size,2,hidden_size)[:,[1,0],:].flatten().tolist())\
.reshape(1,1,hidden_size,3*hidden_size)
tar_i2h=origin_i2h.reshape(word_size,3,hidden_size)[:,offset,:]\
.reshape(1,1,word_size,3*hidden_size)
tar_b=origin_b.reshape(3, hidden_size)[offset, :].reshape(1,1,1,3 * hidden_size)
tar_i2h_h2h=np.concatenate([tar_i2h.flatten(),tar_h2h.flatten()])\
.reshape(1,1,1,3*hidden_size*hidden_size+3*word_size*hidden_size)
return tar_i2h_h2h, tar_b
def lstm_fc_tensor_merge_convert(self, origin_hidden_size, origin_lstm_w, origin_lstm_b, origin_fc_w, origin_fc_b):
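        # Merge the FC and LSTM parameters into single tensors: the per-gate column
        # blocks are reordered (2nd, 3rd, 1st, 4th quarter of origin_hidden_size), the
        # FC and LSTM biases are summed gate-wise, and the results are reshaped to 4-D.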
layer_size = int (origin_hidden_size // 4)
input_size = int (origin_fc_w.size // origin_hidden_size)
lstm_bias_num = int (origin_lstm_b.size // layer_size)
tar_w = np.vstack((np.hstack((origin_fc_w[:, 1 * layer_size : 2 * layer_size],
origin_fc_w[:, 2 * layer_size : 3 * layer_size],
origin_fc_w[:, : 1 * layer_size],
origin_fc_w[:, 3 * layer_size :])),
np.hstack((origin_lstm_w[:, 1 * layer_size : 2 * layer_size],
origin_lstm_w[:, 2 * layer_size : 3 * layer_size],
origin_lstm_w[:, : 1 * layer_size],
origin_lstm_w[:, 3 * layer_size : ]))))
if origin_fc_b is not None:
split_fc_bc = origin_fc_b.flatten()[: 1 * layer_size]
split_fc_bi = origin_fc_b.flatten()[1 * layer_size : 2 * layer_size]
split_fc_bf = origin_fc_b.flatten()[2 * layer_size : 3 * layer_size]
split_fc_bo = origin_fc_b.flatten()[3 * layer_size : 4 * layer_size]
else:
split_fc_bc = np.zeros(layer_size)
split_fc_bi = np.zeros(layer_size)
split_fc_bf = np.zeros(layer_size)
split_fc_bo = np.zeros(layer_size)
split_lstm_bc = origin_lstm_b.flatten()[: 1 * layer_size]
split_lstm_bi = origin_lstm_b.flatten()[1 * layer_size: 2 * layer_size]
split_lstm_bf = origin_lstm_b.flatten()[2 * layer_size: 3 * layer_size]
split_lstm_bo = origin_lstm_b.flatten()[3 * layer_size: 4 * layer_size]
split_lstm_bc = np.add(split_lstm_bc, split_fc_bc)
split_lstm_bi = np.add(split_lstm_bi, split_fc_bi)
split_lstm_bf = np.add(split_lstm_bf, split_fc_bf)
split_lstm_bo = np.add(split_lstm_bo, split_fc_bo)
if lstm_bias_num == 4:
tar_b = np.array(split_lstm_bi.flatten().tolist()
+ split_lstm_bf.flatten().tolist()
+ split_lstm_bc.flatten().tolist()
+ split_lstm_bo.flatten().tolist())
else:
split_lstm_wic = origin_lstm_b.flatten()[4 * layer_size : 5 * layer_size]
split_lstm_wfc = origin_lstm_b.flatten()[5 * layer_size : 6 * layer_size]
split_lstm_woc = origin_lstm_b.flatten()[6 * layer_size :]
tar_b = np.array(split_lstm_bi.flatten().tolist()
+ split_lstm_bf.flatten().tolist()
+ split_lstm_bc.flatten().tolist()
+ split_lstm_bo.flatten().tolist()
+ split_lstm_wic.flatten().tolist()
+ split_lstm_wfc.flatten().tolist()
+ split_lstm_woc.flatten().tolist())
return tar_w.reshape(input_size+ layer_size, 4 * layer_size, 1, 1),\
tar_b.reshape(1, origin_lstm_b.size, 1, 1)
class Fluid_comparator:
def __init__(self, helper):
self.helper = helper
self.only_list = ['feed', 'fetch']
def compare_by_param(self, op_a, op_b, param):
is_weight_a = self.helper.is_persistable_param(op_a, param)
is_weight_b = self.helper.is_persistable_param(op_b, param)
if is_weight_a and is_weight_b:
np_a = self.helper.np_param(op_a, param)
np_b = self.helper.np_param(op_b, param)
if (np_a == np_b).all() == True:
return True
else:
return False
elif is_weight_a is is_weight_b:
return True
else:
return False
def have_same_weights(self, op_a, op_b):
is_same = True
if op_a.input_names == op_b.input_names:
params = op_a.input_names
for param in params:
if self.compare_by_param(op_a, op_b, param) is False:
is_same = False
return is_same
else:
return False
def compare_by_attr(self, op_a, op_b, attr_name):
data_a = self.helper.attr_data(op_a, attr_name)
data_b = self.helper.attr_data(op_b, attr_name)
return data_a == data_b
def have_same_attrs(self, op_a, op_b):
is_same = True
if op_a.attr_names == op_b.attr_names:
attrs = op_a.attr_names
for attr in attrs:
if self.compare_by_attr(op_a, op_b, attr) is False:
is_same = False
return is_same
else:
return False
def brothers(self, op_list):
is_same = True
if len(op_list) > 1:
idx = 0
for op_b in op_list[1:]:
if op_b.type not in self.only_list:
idx = op_list.index(op_b)
op_a = op_list[idx - 1]
if op_a.type not in self.only_list:
same_weights = self.have_same_weights(op_a, op_b)
same_attrs = self.have_same_attrs(op_a, op_b)
if (same_weights and same_attrs) is False:
is_same = False
else:
raise NameError('ERROR: %s is in only_list.' % ( op_a.type ))
else:
raise NameError('ERROR: %s is in only_list.' % ( op_b.type ))
return is_same
else:
raise NameError('ERROR: Members of op_list must be greater than 2.')
ANAKIN_TENSOR_DTYPE = {
VarDesc.VarType.BOOL: BOOLEN,
VarDesc.VarType.INT32: INT32,
VarDesc.VarType.FP16: FLOAT16,
VarDesc.VarType.FP32: FLOAT,
VarDesc.VarType.FP64: DOUBLE,
}
ANAKIN_TENSOR_DTYPESTR = {
STR: "string",
INT32: "int",
FLOAT: "float",
BOOLEN: "bool",
}
ANAKIN_ATTR_DTYPE = {
AttrType.INT: INT32,
AttrType.INTS: INT32,
AttrType.FLOAT: FLOAT,
AttrType.FLOATS: FLOAT,
AttrType.STRING: STR,
AttrType.STRINGS: STR,
AttrType.BOOL: BOOLEN,
AttrType.BOOLS: BOOLEN,
}
ANAKIN_ATTR_IS_LIST = {
AttrType.INT: False,
AttrType.INTS: True,
AttrType.FLOAT: False,
AttrType.FLOATS: True,
AttrType.STRING: False,
AttrType.STRINGS: True,
AttrType.BOOL: False,
AttrType.BOOLS: True,
}
APPEND_BIAS_OP_TYPE = [
'FC',
'mul',
'sequence_conv',
'conv2d',
'conv2d_transpose',
'depthwise_conv2d',
'elementwise_mul',
]
APPEND_ACT_OP_TYPE = [
'FC',
'mul',
'sequence_conv',
'conv2d',
'conv2d_transpose',
'batch_norm',
'layer_norm',
'row_conv',
'reshape',
]
| [
[
[
20,
21
]
],
[
[
45,
46
],
[
14341,
14347
],
[
14373,
14378
],
[
14403,
14410
],
[
14435,
14440
],
[
14465,
14471
],
[
14504,
14507
],
[
14520,
14525
],
[
14535,
14540
],
[
14552,
14558
],
[
14608,
14613
],
[
14631,
14636
],
[
14655,
14660
],
[
14680,
14685
],
[
14705,
14708
],
[
14729,
14732
],
[
14750,
14756
],
[
14775,
14781
],
[
6973,
6978
],
[
7260,
7265
],
[
7267,
7272
],
[
7274,
7277
],
[
7311,
7316
],
[
7385,
7390
],
[
7463,
7466
],
[
7922,
7935
],
[
8799,
8812
]
],
[
[
54,
75
],
[
4564,
4569
]
],
[
[
83,
94
],
[
5510,
5512
],
[
5540,
5542
],
[
5711,
5713
],
[
5753,
5755
],
[
9141,
9143
],
[
9213,
9215
],
[
9599,
9601
],
[
10041,
10043
],
[
10052,
10054
],
[
10273,
10275
],
[
10824,
10826
],
[
10862,
10864
],
[
10900,
10902
],
[
10938,
10940
],
[
11260,
11262
],
[
11313,
11315
],
[
11366,
11368
],
[
11419,
11421
],
[
11491,
11493
],
[
11898,
11900
]
],
[
[
125,
132
],
[
14319,
14326
],
[
14350,
14357
],
[
14381,
14388
],
[
14413,
14420
],
[
14443,
14450
]
],
[
[
134,
142
],
[
14594,
14602
],
[
14616,
14624
],
[
14639,
14647
],
[
14663,
14671
],
[
14688,
14696
],
[
14711,
14719
],
[
14735,
14743
],
[
14759,
14767
],
[
14811,
14819
],
[
14833,
14841
],
[
14855,
14863
],
[
14879,
14887
],
[
14903,
14911
],
[
14928,
14936
],
[
14953,
14961
],
[
14976,
14984
]
],
[
[
149,
154
]
],
[
[
222,
232
]
],
[
[
308,
322
],
[
586,
600
],
[
751,
765
]
],
[
[
435,
446
]
],
[
[
2429,
2441
]
],
[
[
12330,
12346
]
],
[
[
14294,
14313
],
[
4751,
4770
]
],
[
[
14476,
14498
],
[
8151,
8173
],
[
8212,
8234
],
[
8874,
8896
]
],
[
[
14571,
14588
],
[
6881,
6898
],
[
6918,
6935
]
],
[
[
14786,
14805
],
[
6540,
6559
],
[
6579,
6598
]
],
[
[
15001,
15020
]
],
[
[
15134,
15152
]
]
] |
import logging
from django.core.mail import EmailMultiAlternatives, EmailMessage
from django.utils.encoding import smart_text
from django.core.urlresolvers import reverse
from django.conf import settings
from disturbance.components.emails.emails import TemplateEmailBase
from ledger.accounts.models import EmailUser
logger = logging.getLogger(__name__)
SYSTEM_NAME = settings.SYSTEM_NAME_SHORT + ' Automated Message'
class ApprovalExpireNotificationEmail(TemplateEmailBase):
subject = 'Your Approval has expired.'
html_template = 'disturbance/emails/approval_expire_notification.html'
txt_template = 'disturbance/emails/approval_expire_notification.txt'
class ApprovalCancelNotificationEmail(TemplateEmailBase):
subject = 'Your Approval has been cancelled.'
html_template = 'disturbance/emails/approval_cancel_notification.html'
txt_template = 'disturbance/emails/approval_cancel_notification.txt'
class ApprovalSuspendNotificationEmail(TemplateEmailBase):
subject = 'Your Approval has been suspended.'
html_template = 'disturbance/emails/approval_suspend_notification.html'
txt_template = 'disturbance/emails/approval_suspend_notification.txt'
class ApprovalSurrenderNotificationEmail(TemplateEmailBase):
subject = 'Your Approval has been surrendered.'
html_template = 'disturbance/emails/approval_surrender_notification.html'
txt_template = 'disturbance/emails/approval_surrender_notification.txt'
class ApprovalReinstateNotificationEmail(TemplateEmailBase):
subject = 'Your Approval has been reinstated.'
html_template = 'disturbance/emails/approval_reinstate_notification.html'
txt_template = 'disturbance/emails/approval_reinstate_notification.txt'
class ApprovalRenewalNotificationEmail(TemplateEmailBase):
subject = 'Your Approval is due for renewal.'
html_template = 'disturbance/emails/approval_renewal_notification.html'
txt_template = 'disturbance/emails/approval_renewal_notification.txt'
def send_approval_expire_email_notification(approval):
email = ApprovalExpireNotificationEmail()
proposal = approval.current_proposal
context = {
'approval': approval,
'proposal': proposal
}
all_ccs = []
if proposal.applicant.email:
cc_list = proposal.applicant.email
if cc_list:
all_ccs = [cc_list]
msg = email.send(proposal.submitter.email,cc=all_ccs, context=context)
sender = settings.DEFAULT_FROM_EMAIL
try:
sender_user = EmailUser.objects.get(email__icontains=sender)
except:
EmailUser.objects.create(email=sender, password='')
sender_user = EmailUser.objects.get(email__icontains=sender)
_log_approval_email(msg, approval, sender=sender_user)
_log_org_email(msg, proposal.applicant, proposal.submitter, sender=sender_user)
def send_approval_cancel_email_notification(approval, future_cancel=False):
email = ApprovalCancelNotificationEmail()
proposal = approval.current_proposal
context = {
'approval': approval,
'future_cancel': future_cancel
}
all_ccs = []
if proposal.applicant.email:
cc_list = proposal.applicant.email
if cc_list:
all_ccs = [cc_list]
sender = settings.DEFAULT_FROM_EMAIL
try:
sender_user = EmailUser.objects.get(email__icontains=sender)
except:
EmailUser.objects.create(email=sender, password='')
sender_user = EmailUser.objects.get(email__icontains=sender)
msg = email.send(proposal.submitter.email, cc=all_ccs, context=context)
sender = settings.DEFAULT_FROM_EMAIL
_log_approval_email(msg, approval, sender=sender_user)
_log_org_email(msg, proposal.applicant, proposal.submitter, sender=sender_user)
def send_approval_suspend_email_notification(approval, future_suspend=False):
email = ApprovalSuspendNotificationEmail()
proposal = approval.current_proposal
context = {
'approval': approval,
'details': approval.suspension_details['details'],
'from_date': approval.suspension_details['from_date'],
'to_date': approval.suspension_details['to_date'],
'future_suspend': future_suspend
}
all_ccs = []
if proposal.applicant.email:
cc_list = proposal.applicant.email
if cc_list:
all_ccs = [cc_list]
sender = settings.DEFAULT_FROM_EMAIL
try:
sender_user = EmailUser.objects.get(email__icontains=sender)
except:
EmailUser.objects.create(email=sender, password='')
sender_user = EmailUser.objects.get(email__icontains=sender)
msg = email.send(proposal.submitter.email, cc=all_ccs, context=context)
sender = settings.DEFAULT_FROM_EMAIL
_log_approval_email(msg, approval, sender=sender_user)
_log_org_email(msg, proposal.applicant, proposal.submitter, sender=sender_user)
def send_approval_surrender_email_notification(approval, future_surrender=False):
email = ApprovalSurrenderNotificationEmail()
proposal = approval.current_proposal
context = {
'approval': approval,
'details': approval.surrender_details['details'],
'surrender_date': approval.surrender_details['surrender_date'],
'future_surrender': future_surrender
}
all_ccs = []
if proposal.applicant.email:
cc_list = proposal.applicant.email
if cc_list:
all_ccs = [cc_list]
sender = settings.DEFAULT_FROM_EMAIL
try:
sender_user = EmailUser.objects.get(email__icontains=sender)
except:
EmailUser.objects.create(email=sender, password='')
sender_user = EmailUser.objects.get(email__icontains=sender)
msg = email.send(proposal.submitter.email, cc=all_ccs, context=context)
_log_approval_email(msg, approval, sender=sender_user)
_log_org_email(msg, proposal.applicant, proposal.submitter, sender=sender_user)
#approval renewal notice
def send_approval_renewal_email_notification(approval):
email = ApprovalRenewalNotificationEmail()
proposal = approval.current_proposal
context = {
'approval': approval,
'proposal': approval.current_proposal
}
all_ccs = []
if proposal.applicant.email:
cc_list = proposal.applicant.email
if cc_list:
all_ccs = [cc_list]
sender = settings.DEFAULT_FROM_EMAIL
try:
sender_user = EmailUser.objects.get(email__icontains=sender)
except:
EmailUser.objects.create(email=sender, password='')
sender_user = EmailUser.objects.get(email__icontains=sender)
#attach renewal notice
renewal_document= approval.renewal_document._file
if renewal_document is not None:
file_name = approval.renewal_document.name
attachment = (file_name, renewal_document.file.read(), 'application/pdf')
attachment = [attachment]
else:
attachment = []
msg = email.send(proposal.submitter.email, cc=all_ccs, attachments=attachment, context=context)
sender = settings.DEFAULT_FROM_EMAIL
_log_approval_email(msg, approval, sender=sender_user)
_log_org_email(msg, proposal.applicant, proposal.submitter, sender=sender_user)
def send_approval_reinstate_email_notification(approval, request):
email = ApprovalReinstateNotificationEmail()
proposal = approval.current_proposal
context = {
'approval': approval,
}
all_ccs = []
if proposal.applicant.email:
cc_list = proposal.applicant.email
if cc_list:
all_ccs = [cc_list]
msg = email.send(proposal.submitter.email,cc=all_ccs, context=context)
sender = request.user if request else settings.DEFAULT_FROM_EMAIL
_log_approval_email(msg, approval, sender=sender)
_log_org_email(msg, proposal.applicant, proposal.submitter, sender=sender)
def _log_approval_email(email_message, approval, sender=None):
from disturbance.components.approvals.models import ApprovalLogEntry
if isinstance(email_message, (EmailMultiAlternatives, EmailMessage,)):
# TODO this will log the plain text body, should we log the html instead
text = email_message.body
subject = email_message.subject
fromm = smart_text(sender) if sender else smart_text(email_message.from_email)
# the to email is normally a list
if isinstance(email_message.to, list):
to = ','.join(email_message.to)
else:
to = smart_text(email_message.to)
# we log the cc and bcc in the same cc field of the log entry as a ',' comma separated string
all_ccs = []
if email_message.cc:
all_ccs += list(email_message.cc)
if email_message.bcc:
all_ccs += list(email_message.bcc)
all_ccs = ','.join(all_ccs)
else:
text = smart_text(email_message)
subject = ''
to = approval.current_proposal.submitter.email
fromm = smart_text(sender) if sender else SYSTEM_NAME
all_ccs = ''
customer = approval.current_proposal.submitter
staff = sender
kwargs = {
'subject': subject,
'text': text,
'approval': approval,
'customer': customer,
'staff': staff,
'to': to,
'fromm': fromm,
'cc': all_ccs
}
email_entry = ApprovalLogEntry.objects.create(**kwargs)
return email_entry
def _log_org_email(email_message, organisation, customer ,sender=None):
from disturbance.components.organisations.models import OrganisationLogEntry
if isinstance(email_message, (EmailMultiAlternatives, EmailMessage,)):
# TODO this will log the plain text body, should we log the html instead
text = email_message.body
subject = email_message.subject
fromm = smart_text(sender) if sender else smart_text(email_message.from_email)
# the to email is normally a list
if isinstance(email_message.to, list):
to = ','.join(email_message.to)
else:
to = smart_text(email_message.to)
# we log the cc and bcc in the same cc field of the log entry as a ',' comma separated string
all_ccs = []
if email_message.cc:
all_ccs += list(email_message.cc)
if email_message.bcc:
all_ccs += list(email_message.bcc)
all_ccs = ','.join(all_ccs)
else:
text = smart_text(email_message)
subject = ''
to = customer
fromm = smart_text(sender) if sender else SYSTEM_NAME
all_ccs = ''
customer = customer
staff = sender
kwargs = {
'subject': subject,
'text': text,
'organisation': organisation,
'customer': customer,
'staff': staff,
'to': to,
'fromm': fromm,
'cc': all_ccs
}
email_entry = OrganisationLogEntry.objects.create(**kwargs)
return email_entry
| [
[
[
7,
14
],
[
328,
335
]
],
[
[
45,
67
],
[
8077,
8099
],
[
9643,
9665
]
],
[
[
69,
81
],
[
8101,
8113
],
[
9667,
9679
]
],
[
[
116,
126
],
[
8289,
8299
],
[
8323,
8333
],
[
8524,
8534
],
[
8890,
8900
],
[
9008,
9018
],
[
9855,
9865
],
[
9889,
9899
],
[
10090,
10100
],
[
10456,
10466
],
[
10541,
10551
]
],
[
[
164,
171
]
],
[
[
196,
204
],
[
371,
379
],
[
2443,
2451
],
[
3255,
3263
],
[
3595,
3603
],
[
4380,
4388
],
[
4719,
4727
],
[
5467,
5475
],
[
6385,
6393
],
[
7067,
7075
],
[
7739,
7747
]
],
[
[
255,
272
],
[
459,
476
],
[
710,
727
],
[
968,
985
],
[
1230,
1247
],
[
1498,
1515
],
[
1763,
1780
]
],
[
[
308,
317
],
[
2499,
2508
],
[
2566,
2575
],
[
2640,
2649
],
[
3314,
3323
],
[
3381,
3390
],
[
3455,
3464
],
[
4439,
4448
],
[
4506,
4515
],
[
4580,
4589
],
[
5526,
5535
],
[
5593,
5602
],
[
5667,
5676
],
[
6444,
6453
],
[
6511,
6520
],
[
6585,
6594
]
],
[
[
319,
325
]
],
[
[
357,
368
],
[
9042,
9053
],
[
10575,
10586
]
],
[
[
427,
458
],
[
2051,
2082
]
],
[
[
678,
709
],
[
2919,
2950
]
],
[
[
935,
967
],
[
3862,
3894
]
],
[
[
1195,
1229
],
[
4990,
5024
]
],
[
[
1463,
1497
],
[
7324,
7358
]
],
[
[
1730,
1762
],
[
6030,
6062
]
],
[
[
1988,
2027
]
],
[
[
2835,
2874
]
],
[
[
3776,
3816
]
],
[
[
4900,
4942
]
],
[
[
5966,
6006
]
],
[
[
7249,
7291
]
],
[
[
7911,
7930
],
[
2691,
2710
],
[
3631,
3650
],
[
4755,
4774
],
[
5797,
5816
],
[
7103,
7122
],
[
7775,
7794
]
],
[
[
9460,
9474
],
[
2750,
2764
],
[
3690,
3704
],
[
4814,
4828
],
[
5856,
5870
],
[
7162,
7176
],
[
7829,
7843
]
]
] |
# -*- coding: utf-8 -
from iso8601 import parse_date
from datetime import datetime, date, time, timedelta
import dateutil.parser
from pytz import timezone
import os
from decimal import Decimal
import re
TZ = timezone(os.environ['TZ'] if 'TZ' in os.environ else 'Europe/Kiev')
def get_all_etender_dates(initial_tender_data):
tender_period = initial_tender_data.tenderPeriod
start_dt = dateutil.parser.parse(tender_period['startDate'])
end_dt = dateutil.parser.parse(tender_period['endDate'])
data = type('periods', (), { # dynamically creating objects instead of another dict
'tenderStart': type('date', (), {'date': start_dt.strftime("%d-%m-%Y"),
'time': start_dt.strftime("%H:%M")}),
'tenderEnd': type('date', (), {'date': end_dt.strftime("%d-%m-%Y"),
'time': end_dt.strftime("%H:%M")})})
if 'enquiryPeriod' in initial_tender_data:
end_period = dateutil.parser.parse(initial_tender_data.enquiryPeriod['endDate'])
data.enquiryEnd = type('date', (), {'date': end_period.strftime("%d-%m-%Y"),
'time': end_period.strftime("%H:%M")})
return data
def get_procedure_type(methodType):
return {
'aboveThresholdUA': 'Відкриті торги',
'belowThreshold': 'Допорогові закупівлі',
'negotiation': 'Переговорна процедура',
'aboveThresholdEU': 'Відкриті торги з публікацією англійською мовою',
'aboveThresholdUA.defense': 'Переговорна процедура для потреб оборони',
'reporting': 'Звіт про укладений договір',
'competitiveDialogueEU': 'Конкурентний діалог з публікацією англійською мовою 1-ий етап',
'competitiveDialogueUA': 'Конкурентний діалог 1-ий етап',
'open_esco': 'Відкриті торги для закупівлі енергосервісу',
'esco': 'Відкриті торги для закупівлі енергосервісу',
'closeFrameworkAgreementUA': 'Відкриті торги для укладання рамкової угоди',
'open_framework': 'Відкриті торгии для укладання рамкової угоди'
}[methodType].decode('utf-8')
def get_method_type(procedure_name):
return {
u'переговорна процедура для потреб оборони': 'aboveThresholdUA.defense',
u'допорогові закупівлі': 'belowThreshold',
u'відкриті торги з публікацією англійською мовою': 'aboveThresholdEU',
u'переговорна процедура': 'negotiation',
u'відкриті торги': 'aboveThresholdUA',
u'конкурентний діалог 1-ий етап': 'competitiveDialogueUA',
u'конкурентний діалог 2-ий етап': 'competitiveDialogueUA.stage2',
u'звіт про укладений договір': 'reporting',
u'відкриті торги для закупівлі енергосервісу': 'open_esco',
u'відкриті торги для закупівлі енергосервісу': 'esco',
u'конкурентний діалог з публікацією англійською мовою 1-ий етап': 'competitiveDialogueEU',
u'конкурентний діалог з публікацією англійською мовою 2-ий етап': 'competitiveDialogueEU.stage2',
u'відкриті торги для укладання рамкової угоди': 'closeFrameworkAgreementUA',
u'відкриті торгии для укладання рамкової угоди': 'open_framework'
}[procedure_name]
def parse_etender_date(date, as_string=False):
# converts date from ui to datetime
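    # e.g. parse_etender_date('01-12-2017, 14:30') -> datetime(2017, 12, 1, 14, 30)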
d = datetime.strptime(date, '%d-%m-%Y, %H:%M')
if as_string:
return str(d)
return d
def cut_letters_and_parse_etender_date(date, as_string=True):
# converts date from ui
d = datetime.strptime(date.split(' ')[1], '%d-%m-%Y')
if as_string:
return str(d)
return d
def prepare_locator_to_scroll(locator):
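    # e.g. 'id=saveButton'        -> '//*[@id="saveButton"]'
    #      "xpath=//div[@id='x']" -> '//div[@id="x"]'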
if locator[:3] == 'id=':
return '//*[@id="{}"]'.format(locator[3:])
return locator[6:].replace("'", '"') # 6 for xpath=
def to_iso(date):
return date.isoformat()
def convert_etender_date_to_iso_format(date):
return TZ.localize(parse_etender_date(date)).isoformat()
def convet_fra_to_variable(raw):
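    # e.g. convet_fra_to_variable('P2Y3M10D') -> ('2', '3', '10')  (years, months, days as strings)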
b = re.findall(r'P(\d+)Y(\d+)M(\d+)D.*', raw)
c, d, e = b[0]
return c, d, e
def convet_raw_to_chack(raw):
raw = raw.replace(' ', '')
b = re.findall(r'(\d+)р(\d+)м(\d+)д', raw)
c, d, e = b[0]
return c, d, e
def get_year_from_full_date(string):
data_as_str = string.split('T')[0]
data_as_datetime = datetime.strptime(data_as_str, '%Y-%m-%d')
return str(data_as_datetime.year)
def convert_date_to_etender_format(isodate):
iso_dt = parse_date(isodate)
date_string = iso_dt.strftime("%d-%m-%Y")
return date_string
def convert_datetime_for_delivery(isodate):
iso_dt = parse_date(isodate)
date_string = iso_dt.strftime("%Y-%m-%d %H:%M")
return date_string
def convert_time_to_etender_format(isodate):
iso_dt = parse_date(isodate)
time_string = iso_dt.strftime("%H:%M")
return time_string
def float_to_string_2f(value):
return '{:.2f}'.format(value)
def float_to_string_3f(value):
return '{:.3f}'.format(value)
def string_to_float(string):
return float(string)
def change_data(initial_data):
#TODO: remove redundant hardcoded values
# initial_data['data']['procuringEntity']['identifier']['legalName'] = u"TenderOwner#"
# initial_data['data']['procuringEntity']['identifier']['id'] = u"88008800"
# initial_data['data']['procuringEntity']['name'] = u"TenderOwner#"
initial_data['data']['items'][0]['deliveryAddress']['locality'] = u"м. Київ"
initial_data['data']['items'][0]['deliveryAddress']['region'] = u"Київська область"
initial_data['data']['procuringEntity']['address']['locality'] = u"Алупка"
initial_data['data']['procuringEntity']['address']['postalCode'] = u"13531"
initial_data['data']['procuringEntity']['address']['region'] = u"АР Крим"
initial_data['data']['procuringEntity']['address']['streetAddress'] = u"Фрунзе, 666"
initial_data['data']['procuringEntity']['contactPoint']['name'] = u"Владелец Этого Тендера"
initial_data['data']['procuringEntity']['contactPoint']['telephone'] = u"613371488228"
initial_data['data']['procuringEntity']['contactPoint']['url'] = u"http://e-tender.ua/"
return initial_data
def change_data_for_tender_owner(initial_data):
initial_data['data']['procuringEntity']['identifier']['legalName'] = u"TenderOwner#"
initial_data['data']['procuringEntity']['identifier']['id'] = u"88008800"
initial_data['data']['procuringEntity']['name'] = u"TenderOwner#"
return initial_data
def change_buyers_data(initial_data):
initial_data['data']['buyers'][0]['name'] = u"TenderOwner#"
initial_data['data']['buyers'][0]['identifier']['id'] = u"88008800"
initial_data['data']['buyers'][0]['identifier']['legalName'] = u"TenderOwner#"
initial_data['data']['procuringEntity']['name'] = initial_data['data']['buyers'][0]['name']
initial_data['data']['procuringEntity']['identifier']['id'] = initial_data['data']['buyers'][0]['identifier']['id']
initial_data['data']['procuringEntity']['identifier']['legalName'] = \
initial_data['data']['buyers'][0]['identifier']['legalName']
return initial_data
def convert_etender_date_to_iso_format_and_add_timezone(date):
return TZ.localize(parse_etender_date(date)).isoformat()
def get_time_now():
time_string = datetime.now().strftime("%H:%M")
return time_string
def get_date_now():
date_string = datetime.now().strftime("%d-%m-%Y")
return date_string
def get_date_10d_future():
date_string = (datetime.now() + timedelta(days=10)).strftime("%d-%m-%Y")
return date_string
def get_time_offset(add_minutes=17):
_now = datetime.now() + timedelta(minutes=add_minutes)
return _now.time().strftime('%H:%M')
def convert_common_string_to_etender_string(string):
dict = get_helper_dictionary()
for key, val in dict.iteritems():
if val == string:
return key
return string
def parse_currency_value_with_spaces(raw):
# to convert raw values like '2 216 162,83 UAH' to string which is ready for conversion to float
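    # e.g. '2 216 162,83 UAH' -> '2216162.83'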
return ''.join(raw.split(' ')[:-1]).replace(',', '.')
def get_minimalStep_currency(raw_value):
# to get currency 'UAH' from raw values like '2 216 162,83 UAH'
result_dic = raw_value.split(' ')
result = result_dic[-1]
return result
def parse_currency_value_with_spaces_percentage(raw):
# to convert raw values like '1,3244%' to string which is ready for conversion to float
result = raw.replace('%', '')
result = Decimal(result)
result = (result / 100)
result = float(result)
return result
def parse_currency_value_with_spaces_percentage_NBU(raw):
# to convert raw values like 'Hi – 1,3244%' to string which is ready for conversion to float
result = raw.split(' ', 4)[4]
result = result.replace('%', '')
result = Decimal(result)
result = (result / 100)
result = float(result)
return result
def convert_etender_string_to_common_string(string):
return get_helper_dictionary().get(string, string)
def get_helper_dictionary():
return {
u"КЛАСИФІКАТОР ДК 021:2015 (CPV)": u"ДК021",
u"кг.": u"кілограм",
u"грн.": u"UAH",
u"(з ПДВ)": True,
u"з ПДВ": True,
u"без ПДВ": False,
# TODO: remove this temporary workaround, consult with quinta team about input data
u"Дніпро": u"Дніпропетровськ",
#tender statuses
u'період уточнень': u'active.enquiries',
u'очікування пропозицій': u'active.tendering',
u'прекваліфікація': u'active.pre-qualification',
u'оцінка пропозицій': u'active.pre-qualification',
u'блокування перед аукціоном': u'active.pre-qualification.stand-still',
u'проведення переговорів': u'active.pre-qualification.stand-still',
u'перший проміжний етап': u'active.stage2.pending',
u'період аукціону': u'active.auction',
u'кваліфікація переможця': u'active.qualification',
u'пропозиції розглянуто': u'active.awarded',
u'завершена закупівля': u'complete',
u'перший етап завершено': u'complete',
u'закупівля не відбулась': u'unsuccessful',
u'відмінена закупівля': u'cancelled',
#bid statuses
u'Пропозиція не дійсна': u'invalid',
u"ст.35 ч. 2 п. 1": u"artContestIP",
u"ст.35 ч. 2 п. 2": u"noCompetition",
u"ст.35 ч. 2 п. 4": u"twiceUnsuccessful",
u"ст.35 ч. 2 п. 5": u"additionalPurchase",
u"ст.35 ч. 2 п. 6": u"additionalConstruction",
u"ст.35 ч. 2 п. 7": u"stateLegalServices",
u"Договір поки що не опубліковано": u"pending",
u"Договір опубліковано": u"active",
u"Переможець торгів": u"active",
u"учасник виграв закупівлю": u"active",
u'вимога': u'claim',
u'відповідь надана': u'answered',
u'задоволено': u'resolved',
u'не задоволено': u'declined',
u'скасована скаржником': u'cancelled',
u'відхилено': u'invalid',
u'залишена без відповіді': u'ignored',
u'очікується кваліфікація': u'pending',
u'відкликається скаржником': u'stopping',
u'очікує розгляду органом оскарження': u'pending',
u'Співфінансування з бюджетних коштів': u'budget',
u'на розгляді': u'pending',
u'Пропозиція не активована': u'invalid'
}
def get_feature_index(i):
return {0.05: '1',
0.01: '2',
0: '3'}[i]
def get_doc_type_index(i):
return {'financial_documents': '1',
'qualification_documents': '2',
'eligibility_documents': '3'}.get(i, i)
def convert_unit_name_to_unit_code(string):
return {
u"блок": u"D64",
u"гектар": u"HAR",
u"кілограми": u"KGM",
u"кілометри": u"KMT",
u"літр": u"LTR",
u"лот": u"LO",
u"метри квадратні": u"MTK",
u"метри кубічні": u"MTQ",
u"метри": u"MTR",
u"місяць": u"MON",
u"набір": u"SET",
u"пара": u"PR",
u"пачка": u"RM",
u"пачок": u"NMP",
u"послуга": u"E48",
u"рейс": u"E54",
u"тони": u"TNE",
u"упаковка": u"PK",
u"Флакон": u"VI",
u"штуки": u"H87",
u"ящик": u"BX",
}.get(string, string)
def convert_milestone_from_text_to_code(string):
return {
u"Аванс": u"prepayment",
u"Пiсляоплата": u"postpayment"
}.get(string, string)
def convert_milestone_from_text_to_title(string):
return {
u"Виконання робіт": "executionOfWorks",
u"Поставка товару": "deliveryOfGoods",
u"Надання послуг": "submittingServices",
u"Підписання договору": "signingTheContract",
u"Дата подання заявки": "submissionDateOfApplications",
u"Дата виставлення рахунку": "dateOfInvoicing",
u"Дата закінчення звітного періоду": "endDateOfTheReportingPeriod",
u"Інша подія": "anotherEvent",
}.get(string, string)
def convert_milestone_from_text_to_day_type(string):
return {
u"Робочі": "working",
u"Банківські": "banking",
u"Календарні": "calendar"
}.get(string, string)
def convert_main_procurement_category(string):
return {
u"Товари": "goods",
u"Послуги": "services",
u"Роботи": "works"
}.get(string, string)
def get_modulus_from_number(number):
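# Accept an int or a numeric string and return its absolute value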
if isinstance(number, int):
pass
elif isinstance(number, str):
number = int(number)
elif isinstance(number, unicode):
number = int(number)
return abs(number)
| [
[
[
43,
53
],
[
4430,
4440
],
[
4578,
4588
],
[
4733,
4743
]
],
[
[
75,
83
],
[
3283,
3291
],
[
3479,
3487
],
[
4290,
4298
],
[
7292,
7300
],
[
7388,
7396
],
[
7495,
7503
],
[
7626,
7634
]
],
[
[
85,
89
]
],
[
[
91,
95
]
],
[
[
97,
106
],
[
7512,
7521
],
[
7643,
7652
]
],
[
[
114,
129
],
[
396,
404
],
[
459,
467
],
[
975,
983
]
],
[
[
147,
155
],
[
211,
219
]
],
[
[
163,
165
],
[
248,
250
],
[
220,
222
]
],
[
[
186,
193
],
[
8502,
8509
],
[
8832,
8839
]
],
[
[
201,
203
],
[
3962,
3964
],
[
4112,
4114
]
],
[
[
206,
208
],
[
3870,
3872
],
[
7202,
7204
]
],
[
[
284,
305
]
],
[
[
1233,
1251
]
],
[
[
2120,
2135
]
],
[
[
3192,
3210
],
[
3882,
3900
],
[
7214,
7232
]
],
[
[
3385,
3419
]
],
[
[
3588,
3613
]
],
[
[
3767,
3773
]
],
[
[
3817,
3851
]
],
[
[
3925,
3947
]
],
[
[
4047,
4066
]
],
[
[
4195,
4218
]
],
[
[
4376,
4406
]
],
[
[
4525,
4554
]
],
[
[
4679,
4709
]
],
[
[
4825,
4843
]
],
[
[
4891,
4909
]
],
[
[
4957,
4972
]
],
[
[
5013,
5024
]
],
[
[
6170,
6198
]
],
[
[
6487,
6505
]
],
[
[
7132,
7183
]
],
[
[
7258,
7270
]
],
[
[
7354,
7366
]
],
[
[
7453,
7472
]
],
[
[
7582,
7597
]
],
[
[
7721,
7760
]
],
[
[
7916,
7948
]
],
[
[
8119,
8143
]
],
[
[
8313,
8356
]
],
[
[
8597,
8644
]
],
[
[
8928,
8967
]
],
[
[
9038,
9059
],
[
7781,
7802
],
[
8988,
9009
]
],
[
[
11347,
11364
]
],
[
[
11443,
11461
]
],
[
[
11607,
11637
]
],
[
[
12258,
12293
]
],
[
[
12420,
12456
]
],
[
[
12944,
12983
]
],
[
[
13136,
13169
]
],
[
[
13311,
13334
]
]
] |
import os
import pytest
import sys
import random
import tempfile
import requests
from pathlib import Path
import ray
from ray.test_utils import (run_string_as_driver,
run_string_as_driver_nonblocking)
from ray._private.utils import (get_wheel_filename, get_master_wheel_url,
get_release_wheel_url)
import ray.experimental.internal_kv as kv
from time import sleep
driver_script = """
from time import sleep
import sys
import logging
sys.path.insert(0, "{working_dir}")
import ray
import ray.util
import os
try:
import test_module
except:
pass
try:
job_config = ray.job_config.JobConfig(
runtime_env={runtime_env}
)
if not job_config.runtime_env:
job_config=None
if os.environ.get("USE_RAY_CLIENT"):
ray.client("{address}").env({runtime_env}).namespace("").connect()
else:
ray.init(address="{address}",
job_config=job_config,
logging_level=logging.DEBUG,
namespace=""
)
except ValueError:
print("ValueError")
sys.exit(0)
except TypeError:
print("TypeError")
sys.exit(0)
except:
print("ERROR")
sys.exit(0)
if os.environ.get("EXIT_AFTER_INIT"):
sys.exit(0)
@ray.remote
def run_test():
return test_module.one()
@ray.remote
def check_file(name):
try:
with open(name) as f:
return f.read()
except:
return "FAILED"
@ray.remote
class TestActor(object):
@ray.method(num_returns=1)
def one(self):
return test_module.one()
{execute_statement}
if os.environ.get("USE_RAY_CLIENT"):
ray.util.disconnect()
else:
ray.shutdown()
sleep(10)
"""
def create_file(p):
if not p.parent.exists():
p.parent.mkdir()
with p.open("w") as f:
f.write("Test")
@pytest.fixture(scope="function")
def working_dir():
with tempfile.TemporaryDirectory() as tmp_dir:
path = Path(tmp_dir)
module_path = path / "test_module"
module_path.mkdir(parents=True)
init_file = module_path / "__init__.py"
test_file = module_path / "test.py"
with test_file.open(mode="w") as f:
f.write("""
def one():
return 1
""")
with init_file.open(mode="w") as f:
f.write("""
from test_module.test import one
""")
old_dir = os.getcwd()
os.chdir(tmp_dir)
yield tmp_dir
os.chdir(old_dir)
def start_client_server(cluster, client_mode):
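# Return (address, env, PKG_DIR); in client mode, start a Ray client server on port 10003 first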
from ray._private.runtime_env import PKG_DIR
if not client_mode:
return (cluster.address, {}, PKG_DIR)
ray.worker._global_node._ray_params.ray_client_server_port = "10003"
ray.worker._global_node.start_ray_client_server()
return ("localhost:10003", {"USE_RAY_CLIENT": "1"}, PKG_DIR)
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_travel():
import uuid
with tempfile.TemporaryDirectory() as tmp_dir:
dir_paths = set()
file_paths = set()
item_num = 0
excludes = []
root = Path(tmp_dir) / "test"
def construct(path, excluded=False, depth=0):
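# Recursively build a random tree of dirs and files, tracking the expected paths and which ones should be excluded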
nonlocal item_num
path.mkdir(parents=True)
if not excluded:
dir_paths.add(str(path))
if depth > 8:
return
if item_num > 500:
return
dir_num = random.randint(0, 10)
file_num = random.randint(0, 10)
for _ in range(dir_num):
uid = str(uuid.uuid4()).split("-")[0]
dir_path = path / uid
exclud_sub = random.randint(0, 5) == 0
if not excluded and exclud_sub:
excludes.append(str(dir_path.relative_to(root)))
if not excluded:
construct(dir_path, exclud_sub or excluded, depth + 1)
item_num += 1
if item_num > 1000:
return
for _ in range(file_num):
uid = str(uuid.uuid4()).split("-")[0]
with (path / uid).open("w") as f:
v = random.randint(0, 1000)
f.write(str(v))
if not excluded:
if random.randint(0, 5) == 0:
excludes.append(
str((path / uid).relative_to(root)))
else:
file_paths.add((str(path / uid), str(v)))
item_num += 1
construct(root)
exclude_spec = ray._private.runtime_env._get_excludes(root, excludes)
visited_dir_paths = set()
visited_file_paths = set()
def handler(path):
if path.is_dir():
visited_dir_paths.add(str(path))
else:
with open(path) as f:
visited_file_paths.add((str(path), f.read()))
ray._private.runtime_env._dir_travel(root, [exclude_spec], handler)
assert file_paths == visited_file_paths
assert dir_paths == visited_dir_paths
"""
The following test cases are related to the runtime env. They follow these steps:
1) Create a temporary dir with the fixture working_dir
2) Use a template named driver_script defined globally
3) Overwrite runtime_env and execute_statement in the template
4) Execute it as a separate driver and return the result
"""
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_empty_working_dir(ray_start_cluster_head, client_mode):
cluster = ray_start_cluster_head
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
env["EXIT_AFTER_INIT"] = "1"
with tempfile.TemporaryDirectory() as working_dir:
runtime_env = f"""{{
"working_dir": r"{working_dir}",
"py_modules": [r"{working_dir}"]
}}"""
# Execute the following cmd in driver with runtime_env
execute_statement = "sys.exit(0)"
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out != "ERROR"
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_invalid_working_dir(ray_start_cluster_head, working_dir, client_mode):
cluster = ray_start_cluster_head
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
env["EXIT_AFTER_INIT"] = "1"
runtime_env = "{ 'working_dir': 10 }"
# Execute the following cmd in driver with runtime_env
execute_statement = ""
script = driver_script.format(**locals())
out = run_string_as_driver(script, env).strip().split()[-1]
assert out == "TypeError"
runtime_env = "{ 'py_modules': [10] }"
# Execute the following cmd in driver with runtime_env
execute_statement = ""
script = driver_script.format(**locals())
out = run_string_as_driver(script, env).strip().split()[-1]
assert out == "TypeError"
runtime_env = f"{{ 'working_dir': os.path.join(r'{working_dir}', 'na') }}"
# Execute the following cmd in driver with runtime_env
execute_statement = ""
script = driver_script.format(**locals())
out = run_string_as_driver(script, env).strip().split()[-1]
assert out == "ValueError"
runtime_env = f"{{ 'py_modules': [os.path.join(r'{working_dir}', 'na')] }}"
# Execute the following cmd in driver with runtime_env
execute_statement = ""
script = driver_script.format(**locals())
out = run_string_as_driver(script, env).strip().split()[-1]
assert out == "ValueError"
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_single_node(ray_start_cluster_head, working_dir, client_mode):
cluster = ray_start_cluster_head
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
# Setup runtime env here
runtime_env = f"""{{ "working_dir": "{working_dir}" }}"""
# Execute the following cmd in driver with runtime_env
execute_statement = "print(sum(ray.get([run_test.remote()] * 1000)))"
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out.strip().split()[-1] == "1000"
assert len(list(Path(PKG_DIR).iterdir())) == 1
assert len(kv._internal_kv_list("gcs://")) == 0
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_two_node(two_node_cluster, working_dir, client_mode):
cluster, _ = two_node_cluster
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
# Testing runtime env with working_dir
runtime_env = f"""{{ "working_dir": "{working_dir}" }}"""
# Execute the following cmd in driver with runtime_env
execute_statement = "print(sum(ray.get([run_test.remote()] * 1000)))"
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out.strip().split()[-1] == "1000"
assert len(list(Path(PKG_DIR).iterdir())) == 1
assert len(kv._internal_kv_list("gcs://")) == 0
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_two_node_module(two_node_cluster, working_dir, client_mode):
cluster, _ = two_node_cluster
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
# Test runtime_env with py_modules
runtime_env = """{ "py_modules": [test_module.__path__[0]] }"""
# Execute the following cmd in driver with runtime_env
execute_statement = "print(sum(ray.get([run_test.remote()] * 1000)))"
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out.strip().split()[-1] == "1000"
assert len(list(Path(PKG_DIR).iterdir())) == 1
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_two_node_local_file(two_node_cluster, working_dir, client_mode):
with open(os.path.join(working_dir, "test_file"), "w") as f:
f.write("1")
cluster, _ = two_node_cluster
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
# Test runtime_env with working_dir
runtime_env = f"""{{ "working_dir": "{working_dir}" }}"""
# Execute the following cmd in driver with runtime_env
execute_statement = """
vals = ray.get([check_file.remote('test_file')] * 1000)
print(sum([int(v) for v in vals]))
"""
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out.strip().split()[-1] == "1000"
assert len(list(Path(PKG_DIR).iterdir())) == 1
assert len(kv._internal_kv_list("gcs://")) == 0
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_exclusion(ray_start_cluster_head, working_dir, client_mode):
cluster = ray_start_cluster_head
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
working_path = Path(working_dir)
create_file(working_path / "tmp_dir" / "test_1")
create_file(working_path / "tmp_dir" / "test_2")
create_file(working_path / "tmp_dir" / "test_3")
create_file(working_path / "tmp_dir" / "sub_dir" / "test_1")
create_file(working_path / "tmp_dir" / "sub_dir" / "test_2")
create_file(working_path / "test1")
create_file(working_path / "test2")
create_file(working_path / "test3")
tmp_dir_test_3 = str((working_path / "tmp_dir" / "test_3").absolute())
runtime_env = f"""{{
"working_dir": r"{working_dir}",
}}"""
execute_statement = """
vals = ray.get([
check_file.remote('test1'),
check_file.remote('test2'),
check_file.remote('test3'),
check_file.remote(os.path.join('tmp_dir', 'test_1')),
check_file.remote(os.path.join('tmp_dir', 'test_2')),
check_file.remote(os.path.join('tmp_dir', 'test_3')),
check_file.remote(os.path.join('tmp_dir', 'sub_dir', 'test_1')),
check_file.remote(os.path.join('tmp_dir', 'sub_dir', 'test_2')),
])
print(','.join(vals))
"""
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
# Test it works before
assert out.strip().split("\n")[-1] == \
"Test,Test,Test,Test,Test,Test,Test,Test"
runtime_env = f"""{{
"working_dir": r"{working_dir}",
"excludes": [
# exclude by relative path
r"test2",
# exclude by dir
r"{str(Path("tmp_dir") / "sub_dir")}",
# exclude part of the dir
r"{str(Path("tmp_dir") / "test_1")}",
# exclude part of the dir
r"{str(Path("tmp_dir") / "test_2")}",
]
}}"""
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out.strip().split("\n")[-1] == \
"Test,FAILED,Test,FAILED,FAILED,Test,FAILED,FAILED"
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_exclusion_2(ray_start_cluster_head, working_dir, client_mode):
cluster = ray_start_cluster_head
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
working_path = Path(working_dir)
def create_file(p):
if not p.parent.exists():
p.parent.mkdir(parents=True)
with p.open("w") as f:
f.write("Test")
create_file(working_path / "tmp_dir" / "test_1")
create_file(working_path / "tmp_dir" / "test_2")
create_file(working_path / "tmp_dir" / "test_3")
create_file(working_path / "tmp_dir" / "sub_dir" / "test_1")
create_file(working_path / "tmp_dir" / "sub_dir" / "test_2")
create_file(working_path / "test1")
create_file(working_path / "test2")
create_file(working_path / "test3")
create_file(working_path / "cache" / "test_1")
create_file(working_path / "tmp_dir" / "cache" / "test_1")
create_file(working_path / "another_dir" / "cache" / "test_1")
tmp_dir_test_3 = str((working_path / "tmp_dir" / "test_3").absolute())
runtime_env = f"""{{
"working_dir": r"{working_dir}",
}}"""
execute_statement = """
vals = ray.get([
check_file.remote('test1'),
check_file.remote('test2'),
check_file.remote('test3'),
check_file.remote(os.path.join('tmp_dir', 'test_1')),
check_file.remote(os.path.join('tmp_dir', 'test_2')),
check_file.remote(os.path.join('tmp_dir', 'test_3')),
check_file.remote(os.path.join('tmp_dir', 'sub_dir', 'test_1')),
check_file.remote(os.path.join('tmp_dir', 'sub_dir', 'test_2')),
check_file.remote(os.path.join("cache", "test_1")),
check_file.remote(os.path.join("tmp_dir", "cache", "test_1")),
check_file.remote(os.path.join("another_dir", "cache", "test_1")),
])
print(','.join(vals))
"""
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
# Test it works before
assert out.strip().split("\n")[-1] == \
"Test,Test,Test,Test,Test,Test,Test,Test,Test,Test,Test"
with open(f"{working_dir}/.gitignore", "w") as f:
f.write("""
# Comment
test_[12]
/test1
!/tmp_dir/sub_dir/test_1
cache/
""")
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
t = out.strip().split("\n")[-1]
assert out.strip().split("\n")[-1] == \
"FAILED,Test,Test,FAILED,FAILED,Test,Test,FAILED,FAILED,FAILED,FAILED"
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_runtime_env_getter(ray_start_cluster_head, working_dir, client_mode):
cluster = ray_start_cluster_head
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
runtime_env = f"""{{ "working_dir": "{working_dir}" }}"""
# Execute the following cmd in driver with runtime_env
execute_statement = """
print(ray.get_runtime_context().runtime_env["working_dir"])
"""
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out.strip().split()[-1] == working_dir
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_two_node_uri(two_node_cluster, working_dir, client_mode):
cluster, _ = two_node_cluster
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
import ray._private.runtime_env as runtime_env
import tempfile
with tempfile.NamedTemporaryFile(suffix="zip") as tmp_file:
pkg_name = runtime_env.get_project_package_name(working_dir, [], [])
pkg_uri = runtime_env.Protocol.PIN_GCS.value + "://" + pkg_name
runtime_env.create_project_package(working_dir, [], [], tmp_file.name)
runtime_env.push_package(pkg_uri, tmp_file.name)
runtime_env = f"""{{ "uris": ["{pkg_uri}"] }}"""
# Execute the following cmd in driver with runtime_env
execute_statement = "print(sum(ray.get([run_test.remote()] * 1000)))"
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out.strip().split()[-1] == "1000"
assert len(list(Path(PKG_DIR).iterdir())) == 1
# pinned uri will not be deleted
print(list(kv._internal_kv_list("")))
assert len(kv._internal_kv_list("pingcs://")) == 1
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_regular_actors(ray_start_cluster_head, working_dir, client_mode):
cluster = ray_start_cluster_head
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
runtime_env = f"""{{ "working_dir": "{working_dir}" }}"""
# Execute the following cmd in driver with runtime_env
execute_statement = """
test_actor = TestActor.options(name="test_actor").remote()
print(sum(ray.get([test_actor.one.remote()] * 1000)))
"""
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out.strip().split()[-1] == "1000"
assert len(list(Path(PKG_DIR).iterdir())) == 1
assert len(kv._internal_kv_list("gcs://")) == 0
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_detached_actors(ray_start_cluster_head, working_dir, client_mode):
cluster = ray_start_cluster_head
(address, env, PKG_DIR) = start_client_server(cluster, client_mode)
runtime_env = f"""{{ "working_dir": "{working_dir}" }}"""
# Execute the following cmd in driver with runtime_env
execute_statement = """
test_actor = TestActor.options(name="test_actor", lifetime="detached").remote()
print(sum(ray.get([test_actor.one.remote()] * 1000)))
"""
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out.strip().split()[-1] == "1000"
# It's a detached actor, so it should still be there
assert len(kv._internal_kv_list("gcs://")) == 1
assert len(list(Path(PKG_DIR).iterdir())) == 2
pkg_dir = [f for f in Path(PKG_DIR).glob("*") if f.is_dir()][0]
import sys
sys.path.insert(0, str(pkg_dir))
test_actor = ray.get_actor("test_actor")
assert sum(ray.get([test_actor.one.remote()] * 1000)) == 1000
ray.kill(test_actor)
from time import sleep
sleep(5)
assert len(list(Path(PKG_DIR).iterdir())) == 1
assert len(kv._internal_kv_list("gcs://")) == 0
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_jobconfig_compatible_1(ray_start_cluster_head, working_dir):
# start job_config=None
# start job_config=something
cluster = ray_start_cluster_head
(address, env, PKG_DIR) = start_client_server(cluster, True)
runtime_env = None
# Make the first driver hang there
execute_statement = """
sleep(600)
"""
script = driver_script.format(**locals())
# Have one running with job config = None
proc = run_string_as_driver_nonblocking(script, env)
# wait for it to come up
sleep(5)
runtime_env = f"""{{ "working_dir": "{working_dir}" }}"""
# Execute the second one, which should work because it goes through the Ray Client server.
execute_statement = "print(sum(ray.get([run_test.remote()] * 1000)))"
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out.strip().split()[-1] == "1000"
proc.kill()
proc.wait()
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_jobconfig_compatible_2(ray_start_cluster_head, working_dir):
# start job_config=something
# start job_config=None
cluster = ray_start_cluster_head
(address, env, PKG_DIR) = start_client_server(cluster, True)
runtime_env = """{ "py_modules": [test_module.__path__[0]] }"""
# Make the first driver hang there
execute_statement = """
sleep(600)
"""
script = driver_script.format(**locals())
proc = run_string_as_driver_nonblocking(script, env)
sleep(5)
runtime_env = None
# Execute the following in the second one which should
# succeed
execute_statement = "print('OK')"
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
assert out.strip().split()[-1] == "OK", out
proc.kill()
proc.wait()
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_jobconfig_compatible_3(ray_start_cluster_head, working_dir):
# start job_config=something
# start job_config=something else
cluster = ray_start_cluster_head
(address, env, PKG_DIR) = start_client_server(cluster, True)
runtime_env = """{ "py_modules": [test_module.__path__[0]] }"""
# Make the first driver hang there
execute_statement = """
sleep(600)
"""
script = driver_script.format(**locals())
proc = run_string_as_driver_nonblocking(script, env)
sleep(5)
runtime_env = f"""
{{ "working_dir": test_module.__path__[0] }}""" # noqa: F541
# Execute the following cmd in the second one and ensure that
# it is able to run.
execute_statement = "print('OK')"
script = driver_script.format(**locals())
out = run_string_as_driver(script, env)
proc.kill()
proc.wait()
assert out.strip().split()[-1] == "OK"
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_util_without_job_config(shutdown_only):
from ray.cluster_utils import Cluster
with tempfile.TemporaryDirectory() as tmp_dir:
with (Path(tmp_dir) / "lib.py").open("w") as f:
f.write("""
def one():
return 1
""")
old_dir = os.getcwd()
os.chdir(tmp_dir)
cluster = Cluster()
cluster.add_node(num_cpus=1)
ray.init(address=cluster.address)
(address, env, PKG_DIR) = start_client_server(cluster, True)
script = f"""
import ray
import ray.util
import os
ray.util.connect("{address}", job_config=None)
@ray.remote
def run():
from lib import one
return one()
print(ray.get([run.remote()])[0])
"""
out = run_string_as_driver(script, env)
print(out)
os.chdir(old_dir)
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_init(shutdown_only):
with tempfile.TemporaryDirectory() as tmp_dir:
old_dir = os.getcwd()
os.chdir(tmp_dir)
with open("hello", "w") as f:
f.write("world")
job_config = ray.job_config.JobConfig(runtime_env={"working_dir": "."})
ray.init(job_config=job_config)
@ray.remote
class Test:
def test(self):
with open("hello") as f:
return f.read()
t = Test.remote()
assert ray.get(t.test.remote()) == "world"
os.chdir(old_dir)
def test_get_wheel_filename():
ray_version = "2.0.0.dev0"
for sys_platform in ["darwin", "linux", "win32"]:
for py_version in ["36", "37", "38"]:
filename = get_wheel_filename(sys_platform, ray_version,
py_version)
prefix = "https://s3-us-west-2.amazonaws.com/ray-wheels/latest/"
url = f"{prefix}{filename}"
assert requests.head(url).status_code == 200
def test_get_master_wheel_url():
ray_version = "2.0.0.dev0"
test_commit = "ba6cebe30fab6925e5b2d9e859ad064d53015246"
for sys_platform in ["darwin", "linux", "win32"]:
for py_version in ["36", "37", "38"]:
url = get_master_wheel_url(test_commit, sys_platform, ray_version,
py_version)
assert requests.head(url).status_code == 200, url
def test_get_release_wheel_url():
test_commits = {
"1.4.0rc1": "e7c7f6371a69eb727fa469e4cd6f4fbefd143b4c",
"1.3.0": "0b4b444fadcdc23226e11fef066b982175804232",
"1.2.0": "1b1a2496ca51b745c07c79fb859946d3350d471b"
}
for sys_platform in ["darwin", "linux", "win32"]:
for py_version in ["36", "37", "38"]:
for version, commit in test_commits.items():
url = get_release_wheel_url(commit, sys_platform, version,
py_version)
assert requests.head(url).status_code == 200, url
@pytest.mark.skipif(
sys.platform == "win32", reason="runtime_env unsupported on Windows.")
def test_decorator_task(ray_start_cluster_head):
@ray.remote(runtime_env={"env_vars": {"foo": "bar"}})
def f():
return os.environ.get("foo")
assert ray.get(f.remote()) == "bar"
@pytest.mark.skipif(
sys.platform == "win32", reason="runtime_env unsupported on Windows.")
def test_decorator_actor(ray_start_cluster_head):
@ray.remote(runtime_env={"env_vars": {"foo": "bar"}})
class A:
def g(self):
return os.environ.get("foo")
a = A.remote()
assert ray.get(a.g.remote()) == "bar"
@pytest.mark.skipif(
sys.platform == "win32", reason="runtime_env unsupported on Windows.")
def test_decorator_complex(shutdown_only):
ray.init(
job_config=ray.job_config.JobConfig(
runtime_env={"env_vars": {
"foo": "job"
}}))
@ray.remote
def env_from_job():
return os.environ.get("foo")
assert ray.get(env_from_job.remote()) == "job"
@ray.remote(runtime_env={"env_vars": {"foo": "task"}})
def f():
return os.environ.get("foo")
assert ray.get(f.remote()) == "task"
@ray.remote(runtime_env={"env_vars": {"foo": "actor"}})
class A:
def g(self):
return os.environ.get("foo")
a = A.remote()
assert ray.get(a.g.remote()) == "actor"
# Test that runtime_env can be overridden by specifying .options().
assert ray.get(
f.options(runtime_env={
"env_vars": {
"foo": "new"
}
}).remote()) == "new"
a = A.options(runtime_env={"env_vars": {"foo": "new2"}}).remote()
assert ray.get(a.g.remote()) == "new2"
def test_container_option_serialize():
runtime_env = {
"container": {
"image": "ray:latest",
"run_options": ["--name=test"]
}
}
job_config = ray.job_config.JobConfig(runtime_env=runtime_env)
job_config_serialized = job_config.serialize()
# job_config_serialized is JobConfig protobuf serialized string,
# job_config.runtime_env.raw_json has container_option info
# job_config.serialized_runtime_env also has container_option info
assert job_config_serialized.count(b"image") == 2
def test_working_dir_override_failure(shutdown_only):
ray.init()
@ray.remote(runtime_env={"working_dir": "."})
def f():
pass
with pytest.raises(NotImplementedError):
f.remote()
@ray.remote
def g():
pass
with pytest.raises(NotImplementedError):
g.options(runtime_env={"working_dir": "."}).remote()
@ray.remote(runtime_env={"working_dir": "."})
class A:
pass
with pytest.raises(NotImplementedError):
A.remote()
@ray.remote
class B:
pass
with pytest.raises(NotImplementedError):
B.options(runtime_env={"working_dir": "."}).remote()
if __name__ == "__main__":
import sys
sys.exit(pytest.main(["-sv", __file__]))
| [
[
[
7,
9
],
[
2359,
2361
],
[
2379,
2381
],
[
2427,
2429
],
[
10230,
10232
],
[
23085,
23087
],
[
23105,
23107
],
[
23586,
23588
],
[
23785,
23787
],
[
23805,
23807
],
[
24242,
24244
],
[
25975,
25977
],
[
26297,
26299
],
[
26722,
26724
],
[
26884,
26886
],
[
27062,
27064
]
],
[
[
17,
23
],
[
1831,
1837
],
[
2808,
2814
],
[
5450,
5456
],
[
5530,
5536
],
[
6191,
6197
],
[
6271,
6277
],
[
7704,
7710
],
[
7784,
7790
],
[
8485,
8491
],
[
8565,
8571
],
[
9268,
9274
],
[
9348,
9354
],
[
10008,
10014
],
[
10088,
10094
],
[
10934,
10940
],
[
11014,
11020
],
[
13204,
13210
],
[
13284,
13290
],
[
15805,
15811
],
[
15885,
15891
],
[
16484,
16490
],
[
16564,
16570
],
[
17732,
17738
],
[
17812,
17818
],
[
18558,
18564
],
[
18638,
18644
],
[
19863,
19869
],
[
20853,
20859
],
[
21742,
21748
],
[
22716,
22722
],
[
23607,
23613
],
[
25745,
25751
],
[
26041,
26047
],
[
26384,
26390
],
[
28753,
28759
],
[
28199,
28205
],
[
28307,
28313
],
[
28491,
28497
],
[
28599,
28605
]
],
[
[
31,
34
],
[
2827,
2830
],
[
5469,
5472
],
[
6210,
6213
],
[
7723,
7726
],
[
8504,
8507
],
[
9287,
9290
],
[
10027,
10030
],
[
10953,
10956
],
[
13223,
13226
],
[
15824,
15827
],
[
16503,
16506
],
[
17751,
17754
],
[
18577,
18580
],
[
19882,
19885
],
[
20872,
20875
],
[
21761,
21764
],
[
22735,
22738
],
[
23626,
23629
],
[
25769,
25772
],
[
26065,
26068
],
[
26408,
26411
]
],
[
[
42,
48
],
[
3424,
3430
],
[
3469,
3475
],
[
3649,
3655
],
[
4152,
4158
],
[
4276,
4282
]
],
[
[
56,
64
],
[
1892,
1900
],
[
2931,
2939
],
[
5800,
5808
],
[
22896,
22904
],
[
23725,
23733
]
],
[
[
72,
80
],
[
24683,
24691
],
[
25097,
25105
],
[
25699,
25707
]
],
[
[
101,
105
],
[
1949,
1953
],
[
3084,
3088
],
[
8399,
8403
],
[
9182,
9186
],
[
9974,
9978
],
[
10848,
10852
],
[
11266,
11270
],
[
12779,
12783
],
[
12868,
12872
],
[
12956,
12960
],
[
13538,
13542
],
[
17564,
17568
],
[
18472,
18476
],
[
19430,
19434
],
[
19487,
19491
],
[
19777,
19781
],
[
22952,
22956
]
],
[
[
113,
116
],
[
2617,
2620
],
[
2690,
2693
],
[
4595,
4598
],
[
4957,
4960
],
[
19598,
19601
],
[
19641,
19644
],
[
19696,
19699
],
[
23196,
23199
],
[
23911,
23914
],
[
23978,
23981
],
[
24020,
24023
],
[
24198,
24201
],
[
25894,
25897
],
[
26009,
26012
],
[
26191,
26194
],
[
26350,
26353
],
[
26526,
26529
],
[
26555,
26558
],
[
26672,
26675
],
[
26756,
26759
],
[
26802,
26805
],
[
26918,
26921
],
[
26954,
26957
],
[
27115,
27118
],
[
27233,
27236
],
[
27455,
27458
],
[
27682,
27685
],
[
28101,
28104
],
[
28118,
28121
],
[
28260,
28263
],
[
28410,
28413
],
[
28552,
28555
]
],
[
[
145,
165
],
[
6124,
6144
],
[
6732,
6752
],
[
7002,
7022
],
[
7308,
7328
],
[
7616,
7636
],
[
8300,
8320
],
[
9083,
9103
],
[
9875,
9895
],
[
10749,
10769
],
[
12427,
12447
],
[
13063,
13083
],
[
15245,
15265
],
[
15609,
15629
],
[
16397,
16417
],
[
17465,
17485
],
[
18373,
18393
],
[
19221,
19241
],
[
20739,
20759
],
[
21625,
21645
],
[
22604,
22624
],
[
23525,
23545
]
],
[
[
195,
227
],
[
20386,
20418
],
[
21376,
21408
],
[
22274,
22306
]
],
[
[
261,
279
],
[
24447,
24465
]
],
[
[
281,
301
],
[
24966,
24986
]
],
[
[
335,
356
],
[
25567,
25588
]
],
[
[
365,
399
],
[
8445,
8447
],
[
9228,
9230
],
[
10894,
10896
],
[
17647,
17649
],
[
17689,
17691
],
[
18518,
18520
],
[
19373,
19375
],
[
19823,
19825
]
],
[
[
417,
422
],
[
20462,
20467
],
[
21426,
21431
],
[
22324,
22329
]
],
[
[
423,
436
],
[
6077,
6090
],
[
6689,
6702
],
[
6959,
6972
],
[
7265,
7278
],
[
7573,
7586
],
[
8257,
8270
],
[
9040,
9053
],
[
9832,
9845
],
[
10706,
10719
],
[
12384,
12397
],
[
13020,
13033
],
[
15202,
15215
],
[
15566,
15579
],
[
16354,
16367
],
[
17422,
17435
],
[
18330,
18343
],
[
19178,
19191
],
[
20296,
20309
],
[
20696,
20709
],
[
21332,
21345
],
[
21582,
21595
],
[
22230,
22243
],
[
22561,
22574
]
],
[
[
1706,
1717
],
[
11289,
11300
],
[
11342,
11353
],
[
11395,
11406
],
[
11448,
11459
],
[
11513,
11524
],
[
11578,
11589
],
[
11618,
11629
],
[
11658,
11669
]
],
[
[
1868,
1879
]
],
[
[
2451,
2470
],
[
5716,
5735
],
[
6472,
6491
],
[
7977,
7996
],
[
8746,
8765
],
[
9536,
9555
],
[
10366,
10385
],
[
11205,
11224
],
[
13477,
13496
],
[
16085,
16104
],
[
16749,
16768
],
[
18008,
18027
],
[
18835,
18854
],
[
20140,
20159
],
[
21130,
21149
],
[
22029,
22048
],
[
23264,
23283
]
],
[
[
2891,
2902
]
],
[
[
5588,
5610
]
],
[
[
6329,
6353
]
],
[
[
7842,
7858
]
],
[
[
8623,
8636
]
],
[
[
9406,
9426
]
],
[
[
10146,
10170
]
],
[
[
11072,
11086
]
],
[
[
13342,
13358
]
],
[
[
15943,
15966
]
],
[
[
16622,
16639
]
],
[
[
17870,
17889
]
],
[
[
18696,
18716
]
],
[
[
19946,
19973
]
],
[
[
20936,
20963
]
],
[
[
21825,
21852
]
],
[
[
22799,
22827
]
],
[
[
23690,
23699
]
],
[
[
24266,
24289
]
],
[
[
24727,
24752
]
],
[
[
25146,
25172
]
],
[
[
25844,
25863
]
],
[
[
26140,
26160
]
],
[
[
26483,
26505
]
],
[
[
27493,
27524
]
],
[
[
28047,
28080
]
],
[
[
28736,
28739
],
[
28744,
28747
]
]
] |
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///posts.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# app.config['FLASK_RUN_PORT'] = 5002
db = SQLAlchemy(app)
| [
[
[
18,
23
],
[
72,
77
]
],
[
[
53,
63
],
[
246,
256
]
],
[
[
66,
69
],
[
88,
91
],
[
149,
152
],
[
257,
260
]
],
[
[
241,
243
]
]
] |
# coding: utf-8
"""
SendinBlue API
SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | # noqa: E501
OpenAPI spec version: 3.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import sib_api_v3_sdk
from sib_api_v3_sdk.models.get_account_plan import GetAccountPlan # noqa: E501
from sib_api_v3_sdk.rest import ApiException
class TestGetAccountPlan(unittest.TestCase):
"""GetAccountPlan unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testGetAccountPlan(self):
"""Test GetAccountPlan"""
# FIXME: construct object with mandatory attributes with example values
# model = sib_api_v3_sdk.models.get_account_plan.GetAccountPlan() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
[
[
1029,
1044
]
],
[
[
1053,
1061
],
[
1237,
1245
],
[
1654,
1662
]
],
[
[
1070,
1084
]
],
[
[
1136,
1150
]
],
[
[
1197,
1209
]
],
[
[
1218,
1236
]
]
] |
# coding: utf-8
import math
import numpy as np
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
def conv3x3(in_planes, out_planes, stride=1):
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
def conv1x1(in_planes, out_planes, stride=1):
return nn.Conv2d(in_planes, out_planes, kernel_size=1)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None, se=False):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm2d(planes)
self.downsample = downsample
self.stride = stride
self.se = se
if(self.se):
self.gap = nn.AdaptiveAvgPool2d(1)
self.conv3 = conv1x1(planes, planes//16)
self.conv4 = conv1x1(planes//16, planes)
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
if(self.se):
w = self.gap(out)
w = self.conv3(w)
w = self.relu(w)
w = self.conv4(w).sigmoid()
out = out * w
out = out + residual
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, block, layers, se=False):
self.inplanes = 64
super(ResNet, self).__init__()
self.se = se
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
self.avgpool = nn.AdaptiveAvgPool2d(1)
self.bn = nn.BatchNorm1d(512)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm1d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample, se=self.se))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes, se=self.se))
return nn.Sequential(*layers)
def forward(self, x):
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.bn(x)
return x
class VideoCNN(nn.Module):
def __init__(self, se=False):
super(VideoCNN, self).__init__()
# frontend3D
self.frontend3D = nn.Sequential(
nn.Conv3d(1, 64, kernel_size=(5, 7, 7), stride=(1, 2, 2), padding=(2, 3, 3), bias=False),
nn.BatchNorm3d(64),
nn.ReLU(True),
nn.MaxPool3d(kernel_size=(1, 3, 3), stride=(1, 2, 2), padding=(0, 1, 1))
)
# resnet
self.resnet18 = ResNet(BasicBlock, [2, 2, 2, 2], se=se)
self.dropout = nn.Dropout(p=0.5)
# backend_gru
# initialize
self._initialize_weights()
def visual_frontend_forward(self, x):
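# Run the 3D conv frontend across the time dimension, then apply the 2D ResNet to each frame independently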
x = x.transpose(1, 2)
x = self.frontend3D(x)
x = x.transpose(1, 2)
x = x.contiguous()
x = x.view(-1, 64, x.size(3), x.size(4))
x = self.resnet18(x)
return x
def forward(self, x):
b, t = x.size()[:2]
x = self.visual_frontend_forward(x)
#x = self.dropout(x)
feat = x.view(b, -1, 512)
x = x.view(b, -1, 512)
return x
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv3d):
n = m.kernel_size[0] * m.kernel_size[1] * m.kernel_size[2] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.Conv1d):
n = m.kernel_size[0] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm3d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm1d):
m.weight.data.fill_(1)
m.bias.data.zero_()
| [
[
[
23,
27
],
[
2405,
2409
],
[
5014,
5018
],
[
5270,
5274
],
[
5507,
5511
]
],
[
[
35,
46
]
],
[
[
56,
61
]
],
[
[
69,
83
],
[
444,
446
],
[
1679,
1681
],
[
3632,
3634
],
[
211,
213
],
[
377,
379
],
[
671,
673
],
[
714,
716
],
[
800,
802
],
[
963,
965
],
[
2146,
2148
],
[
2197,
2199
],
[
2279,
2281
],
[
2455,
2457
],
[
2578,
2580
],
[
2851,
2853
],
[
2882,
2884
],
[
3018,
3020
],
[
3347,
3349
],
[
3775,
3777
],
[
3806,
3808
],
[
3912,
3914
],
[
3948,
3950
],
[
3979,
3981
],
[
4174,
4176
],
[
4869,
4871
],
[
5144,
5146
],
[
5400,
5402
],
[
5637,
5639
],
[
5761,
5763
],
[
5885,
5887
]
],
[
[
111,
119
]
],
[
[
127,
151
]
],
[
[
158,
165
],
[
618,
625
],
[
757,
764
]
],
[
[
324,
331
],
[
1012,
1019
],
[
1065,
1072
]
],
[
[
433,
443
],
[
568,
578
],
[
4118,
4128
]
],
[
[
1672,
1678
],
[
1782,
1788
],
[
4111,
4117
]
],
[
[
3623,
3631
],
[
3692,
3700
]
]
] |
# Tweepy
# Copyright 2009-2010 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy Twitter API library
"""
__version__ = '3.2.0'
__author__ = 'Joshua Roesslein'
__license__ = 'MIT'
from tweepy.models import Status, User, DirectMessage, Friendship, SavedSearch, SearchResults, ModelFactory, Category
from tweepy.error import TweepError
from tweepy.api import API
from tweepy.cache import Cache, MemoryCache, FileCache
from tweepy.auth import OAuthHandler, AppAuthHandler
from tweepy.streaming import Stream, StreamListener
from tweepy.cursor import Cursor
# Global, unauthenticated instance of API
api = API()
def debug(enable=True, level=1):
from six.moves.http_client import HTTPConnection
HTTPConnection.debuglevel = level
| [
[
[
111,
122
]
],
[
[
133,
143
]
],
[
[
165,
176
]
],
[
[
212,
218
]
],
[
[
220,
224
]
],
[
[
226,
239
]
],
[
[
241,
251
]
],
[
[
253,
264
]
],
[
[
266,
279
]
],
[
[
281,
293
]
],
[
[
295,
303
]
],
[
[
329,
339
]
],
[
[
363,
366
],
[
609,
612
]
],
[
[
392,
397
]
],
[
[
399,
410
]
],
[
[
412,
421
]
],
[
[
446,
458
]
],
[
[
460,
474
]
],
[
[
504,
510
]
],
[
[
512,
526
]
],
[
[
553,
559
]
],
[
[
603,
606
]
],
[
[
620,
625
]
]
] |
# Copyright (c) 2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import shutil
import subprocess
import uuid
from nose import SkipTest
import six
from six.moves.urllib.parse import quote
from swift.common import direct_client, utils
from swift.common.manager import Manager
from swift.common.memcached import MemcacheRing
from swift.common.utils import ShardRange, parse_db_filename, get_db_files, \
quorum_size, config_true_value, Timestamp, md5
from swift.container.backend import ContainerBroker, UNSHARDED, SHARDING, \
SHARDED
from swift.container.sharder import CleavingContext, ContainerSharder
from swift.container.replicator import ContainerReplicator
from swiftclient import client, get_auth, ClientException
from swift.proxy.controllers.base import get_cache_key
from swift.proxy.controllers.obj import num_container_updates
from test import annotate_failure
from test.probe import PROXY_BASE_URL
from test.probe.brain import BrainSplitter
from test.probe.common import ReplProbeTest, get_server_number, \
wait_for_server_to_hangup
import mock
MIN_SHARD_CONTAINER_THRESHOLD = 4
MAX_SHARD_CONTAINER_THRESHOLD = 100
class ShardCollector(object):
"""
Returns a map of node id to the (headers, shard ranges) tuple returned from that node
"""
def __init__(self):
self.ranges = {}
def __call__(self, cnode, cpart, account, container):
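# Fetch shard ranges directly from the given container node and store them keyed by node id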
self.ranges[cnode['id']] = direct_client.direct_get_container(
cnode, cpart, account, container,
headers={'X-Backend-Record-Type': 'shard'})
class BaseTestContainerSharding(ReplProbeTest):
DELIM = '-'
def _maybe_skip_test(self):
try:
cont_configs = [
utils.readconf(p, 'container-sharder')
for p in self.configs['container-sharder'].values()]
except ValueError:
raise SkipTest('No [container-sharder] section found in '
'container-server configs')
skip_reasons = []
auto_shard = all(config_true_value(c.get('auto_shard', False))
for c in cont_configs)
if not auto_shard:
skip_reasons.append(
'auto_shard must be true in all container_sharder configs')
self.max_shard_size = max(
int(c.get('shard_container_threshold', '1000000'))
for c in cont_configs)
if not (MIN_SHARD_CONTAINER_THRESHOLD <= self.max_shard_size
<= MAX_SHARD_CONTAINER_THRESHOLD):
skip_reasons.append(
'shard_container_threshold %d must be between %d and %d' %
(self.max_shard_size, MIN_SHARD_CONTAINER_THRESHOLD,
MAX_SHARD_CONTAINER_THRESHOLD))
def skip_check(reason_list, option, required):
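# Record a skip reason unless every container-sharder config sets the option to the required value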
values = {int(c.get(option, required)) for c in cont_configs}
if values != {required}:
reason_list.append('%s must be %s' % (option, required))
skip_check(skip_reasons, 'shard_scanner_batch_size', 10)
skip_check(skip_reasons, 'shard_batch_size', 2)
if skip_reasons:
raise SkipTest(', '.join(skip_reasons))
def _load_rings_and_configs(self):
super(BaseTestContainerSharding, self)._load_rings_and_configs()
# perform checks for skipping test before starting services
self._maybe_skip_test()
def _make_object_names(self, number, start=0):
return ['obj%s%04d' % (self.DELIM, x)
for x in range(start, start + number)]
def _setup_container_name(self):
# Container where we're PUTting objects
self.container_name = 'container%s%s' % (self.DELIM, uuid.uuid4())
def setUp(self):
client.logger.setLevel(client.logging.WARNING)
client.requests.logging.getLogger().setLevel(
client.requests.logging.WARNING)
super(BaseTestContainerSharding, self).setUp()
_, self.admin_token = get_auth(
PROXY_BASE_URL + '/auth/v1.0', 'admin:admin', 'admin')
self._setup_container_name()
self.init_brain(self.container_name)
self.sharders = Manager(['container-sharder'])
self.internal_client = self.make_internal_client()
self.memcache = MemcacheRing(['127.0.0.1:11211'])
def init_brain(self, container_name):
self.container_to_shard = container_name
self.brain = BrainSplitter(
self.url, self.token, self.container_to_shard,
None, 'container')
self.brain.put_container(policy_index=int(self.policy))
def stop_container_servers(self, node_numbers=None):
if node_numbers:
ipports = []
server2ipport = {v: k for k, v in self.ipport2server.items()}
for number in self.brain.node_numbers[node_numbers]:
self.brain.servers.stop(number=number)
server = 'container%d' % number
ipports.append(server2ipport[server])
else:
ipports = [k for k, v in self.ipport2server.items()
if v.startswith('container')]
self.brain.servers.stop()
for ipport in ipports:
wait_for_server_to_hangup(ipport)
def put_objects(self, obj_names, contents=None):
conn = client.Connection(preauthurl=self.url, preauthtoken=self.token)
results = []
for obj in obj_names:
rdict = {}
conn.put_object(self.container_name, obj,
contents=contents, response_dict=rdict)
results.append((obj, rdict['headers'].get('x-object-version-id')))
return results
def delete_objects(self, obj_names_and_versions):
conn = client.Connection(preauthurl=self.url, preauthtoken=self.token)
for obj in obj_names_and_versions:
if isinstance(obj, tuple):
obj, version = obj
conn.delete_object(self.container_name, obj,
query_string='version-id=%s' % version)
else:
conn.delete_object(self.container_name, obj)
def get_container_shard_ranges(self, account=None, container=None,
include_deleted=False):
account = account if account else self.account
container = container if container else self.container_to_shard
path = self.internal_client.make_path(account, container)
headers = {'X-Backend-Record-Type': 'shard'}
if include_deleted:
headers['X-Backend-Include-Deleted'] = 'true'
resp = self.internal_client.make_request(
'GET', path + '?format=json', headers, [200])
return [ShardRange.from_dict(sr) for sr in json.loads(resp.body)]
def direct_get_container_shard_ranges(self, account=None, container=None,
expect_failure=False):
collector = ShardCollector()
self.direct_container_op(
collector, account, container, expect_failure)
return collector.ranges
def get_storage_dir(self, part, node, account=None, container=None):
account = account or self.brain.account
container = container or self.container_name
server_type, config_number = get_server_number(
(node['ip'], node['port']), self.ipport2server)
assert server_type == 'container'
repl_server = '%s-replicator' % server_type
conf = utils.readconf(self.configs[repl_server][config_number],
section_name=repl_server)
datadir = os.path.join(conf['devices'], node['device'], 'containers')
container_hash = utils.hash_path(account, container)
return (utils.storage_directory(datadir, part, container_hash),
container_hash)
def get_db_file(self, part, node, account=None, container=None):
container_dir, container_hash = self.get_storage_dir(
part, node, account=account, container=container)
db_file = os.path.join(container_dir, container_hash + '.db')
self.assertTrue(get_db_files(db_file)) # sanity check
return db_file
def get_broker(self, part, node, account=None, container=None):
return ContainerBroker(
self.get_db_file(part, node, account, container))
def get_shard_broker(self, shard_range, node_index=0):
shard_part, shard_nodes = self.brain.ring.get_nodes(
shard_range.account, shard_range.container)
return self.get_broker(
shard_part, shard_nodes[node_index], shard_range.account,
shard_range.container)
def categorize_container_dir_content(self, account=None, container=None):
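# Inspect each node's storage dir and bucket its files into shard DBs, normal DBs, pendings, locks and other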
account = account or self.brain.account
container = container or self.container_name
part, nodes = self.brain.ring.get_nodes(account, container)
storage_dirs = [
self.get_storage_dir(part, node, account=account,
container=container)[0]
for node in nodes]
result = {
'shard_dbs': [],
'normal_dbs': [],
'pendings': [],
'locks': [],
'other': [],
}
for storage_dir in storage_dirs:
for f in os.listdir(storage_dir):
path = os.path.join(storage_dir, f)
if path.endswith('.db'):
hash_, epoch, ext = parse_db_filename(path)
if epoch:
result['shard_dbs'].append(path)
else:
result['normal_dbs'].append(path)
elif path.endswith('.db.pending'):
result['pendings'].append(path)
elif path.endswith('/.lock'):
result['locks'].append(path)
else:
result['other'].append(path)
if result['other']:
self.fail('Found unexpected files in storage directory:\n %s' %
'\n '.join(result['other']))
return result
def assert_dict_contains(self, expected_items, actual_dict):
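# Assert that actual_dict contains all of expected_items, ignoring any extra keys in actual_dict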
ignored = set(expected_items) ^ set(actual_dict)
filtered_actual = {k: actual_dict[k]
for k in actual_dict if k not in ignored}
self.assertEqual(expected_items, filtered_actual)
def assert_shard_ranges_contiguous(self, expected_number, shard_ranges,
first_lower='', last_upper=''):
if shard_ranges and isinstance(shard_ranges[0], ShardRange):
actual_shard_ranges = sorted(shard_ranges)
else:
actual_shard_ranges = sorted(ShardRange.from_dict(d)
for d in shard_ranges)
self.assertLengthEqual(actual_shard_ranges, expected_number)
if expected_number:
with annotate_failure('Ranges %s.' % actual_shard_ranges):
self.assertEqual(first_lower, actual_shard_ranges[0].lower_str)
for x, y in zip(actual_shard_ranges, actual_shard_ranges[1:]):
self.assertEqual(x.upper, y.lower)
self.assertEqual(last_upper, actual_shard_ranges[-1].upper_str)
def assert_shard_range_equal(self, expected, actual, excludes=None):
excludes = excludes or []
expected_dict = dict(expected)
actual_dict = dict(actual)
for k in excludes:
expected_dict.pop(k, None)
actual_dict.pop(k, None)
self.assertEqual(expected_dict, actual_dict)
def assert_shard_range_lists_equal(self, expected, actual, excludes=None):
self.assertEqual(len(expected), len(actual))
for expected, actual in zip(expected, actual):
self.assert_shard_range_equal(expected, actual, excludes=excludes)
def assert_shard_range_state(self, expected_state, shard_ranges):
if shard_ranges and not isinstance(shard_ranges[0], ShardRange):
shard_ranges = [ShardRange.from_dict(data)
for data in shard_ranges]
self.assertEqual([expected_state] * len(shard_ranges),
[sr.state for sr in shard_ranges])
def assert_total_object_count(self, expected_object_count, shard_ranges):
actual = sum(sr['object_count'] for sr in shard_ranges)
self.assertEqual(expected_object_count, actual)
def assert_container_listing(self, expected_listing, req_hdrs=None):
req_hdrs = req_hdrs if req_hdrs else {}
headers, actual_listing = client.get_container(
self.url, self.token, self.container_name, headers=req_hdrs)
self.assertIn('x-container-object-count', headers)
expected_obj_count = len(expected_listing)
self.assertEqual(expected_listing, [
x['name'].encode('utf-8') if six.PY2 else x['name']
for x in actual_listing])
self.assertEqual(str(expected_obj_count),
headers['x-container-object-count'])
return headers, actual_listing
def assert_container_object_count(self, expected_obj_count):
headers = client.head_container(
self.url, self.token, self.container_name)
self.assertIn('x-container-object-count', headers)
self.assertEqual(str(expected_obj_count),
headers['x-container-object-count'])
def assert_container_post_ok(self, meta_value):
key = 'X-Container-Meta-Assert-Post-Works'
headers = {key: meta_value}
client.post_container(
self.url, self.token, self.container_name, headers=headers)
resp_headers = client.head_container(
self.url, self.token, self.container_name)
self.assertEqual(meta_value, resp_headers.get(key.lower()))
def assert_container_post_fails(self, meta_value):
key = 'X-Container-Meta-Assert-Post-Works'
headers = {key: meta_value}
with self.assertRaises(ClientException) as cm:
client.post_container(
self.url, self.token, self.container_name, headers=headers)
self.assertEqual(404, cm.exception.http_status)
def assert_container_delete_fails(self):
with self.assertRaises(ClientException) as cm:
client.delete_container(self.url, self.token, self.container_name)
self.assertEqual(409, cm.exception.http_status)
def assert_container_not_found(self):
with self.assertRaises(ClientException) as cm:
client.get_container(self.url, self.token, self.container_name)
self.assertEqual(404, cm.exception.http_status)
# check for headers leaking out while deleted
resp_headers = cm.exception.http_response_headers
self.assertNotIn('X-Container-Object-Count', resp_headers)
self.assertNotIn('X-Container-Bytes-Used', resp_headers)
self.assertNotIn('X-Timestamp', resp_headers)
self.assertNotIn('X-PUT-Timestamp', resp_headers)
def assert_container_has_shard_sysmeta(self):
node_headers = self.direct_head_container()
for node_id, headers in node_headers.items():
with annotate_failure('%s in %s' % (node_id, node_headers.keys())):
for k, v in headers.items():
if k.lower().startswith('x-container-sysmeta-shard'):
break
else:
self.fail('No shard sysmeta found in %s' % headers)
def assert_container_state(self, node, expected_state, num_shard_ranges):
headers, shard_ranges = direct_client.direct_get_container(
node, self.brain.part, self.account, self.container_to_shard,
headers={'X-Backend-Record-Type': 'shard'})
self.assertEqual(num_shard_ranges, len(shard_ranges))
self.assertIn('X-Backend-Sharding-State', headers)
self.assertEqual(
expected_state, headers['X-Backend-Sharding-State'])
return [ShardRange.from_dict(sr) for sr in shard_ranges]
def assert_subprocess_success(self, cmd_args):
try:
subprocess.check_output(cmd_args, stderr=subprocess.STDOUT)
except Exception as exc:
# why not 'except CalledProcessError'? because in my py3.6 tests
# the CalledProcessError wasn't caught by that! despite type(exc)
# being a CalledProcessError, isinstance(exc, CalledProcessError)
# is False and the type has a different hash - could be
# related to https://github.com/eventlet/eventlet/issues/413
try:
# assume this is a CalledProcessError
self.fail('%s with output:\n%s' % (exc, exc.output))
except AttributeError:
raise exc
def get_part_and_node_numbers(self, shard_range):
"""Return the partition and node numbers for a shard range."""
part, nodes = self.brain.ring.get_nodes(
shard_range.account, shard_range.container)
return part, [n['id'] + 1 for n in nodes]
def run_sharders(self, shard_ranges):
"""Run the sharder on partitions for given shard ranges."""
if not isinstance(shard_ranges, (list, tuple, set)):
shard_ranges = (shard_ranges,)
partitions = ','.join(str(self.get_part_and_node_numbers(sr)[0])
for sr in shard_ranges)
self.sharders.once(additional_args='--partitions=%s' % partitions)
def run_sharder_sequentially(self, shard_range=None):
"""Run sharder node by node on partition for given shard range."""
if shard_range:
part, node_numbers = self.get_part_and_node_numbers(shard_range)
else:
part, node_numbers = self.brain.part, self.brain.node_numbers
for node_number in node_numbers:
self.sharders.once(number=node_number,
additional_args='--partitions=%s' % part)
def run_custom_sharder(self, conf_index, custom_conf, **kwargs):
return self.run_custom_daemon(ContainerSharder, 'container-sharder',
conf_index, custom_conf, **kwargs)
class TestContainerShardingNonUTF8(BaseTestContainerSharding):
def test_sharding_listing(self):
# verify parameterised listing of a container during sharding
all_obj_names = self._make_object_names(4 * self.max_shard_size)
obj_names = all_obj_names[::2]
obj_content = 'testing'
self.put_objects(obj_names, contents=obj_content)
# choose some names approx in middle of each expected shard range
markers = [
obj_names[i] for i in range(self.max_shard_size // 4,
2 * self.max_shard_size,
self.max_shard_size // 2)]
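# e.g. with an illustrative max_shard_size of 100, obj_names has 200
# entries and the marker indices are 25, 75, 125 and 175 - one near
# the middle of each of the four expected shard ranges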
def check_listing(objects, req_hdrs=None, **params):
req_hdrs = req_hdrs if req_hdrs else {}
qs = '&'.join('%s=%s' % (k, quote(str(v)))
for k, v in params.items())
headers, listing = client.get_container(
self.url, self.token, self.container_name, query_string=qs,
headers=req_hdrs)
listing = [x['name'].encode('utf-8') if six.PY2 else x['name']
for x in listing]
if params.get('reverse'):
marker = params.get('marker', ShardRange.MAX)
end_marker = params.get('end_marker', ShardRange.MIN)
expected = [o for o in objects if end_marker < o < marker]
expected.reverse()
else:
marker = params.get('marker', ShardRange.MIN)
end_marker = params.get('end_marker', ShardRange.MAX)
expected = [o for o in objects if marker < o < end_marker]
if 'limit' in params:
expected = expected[:params['limit']]
self.assertEqual(expected, listing)
self.assertIn('x-timestamp', headers)
self.assertIn('last-modified', headers)
self.assertIn('x-trans-id', headers)
self.assertEqual('bytes', headers.get('accept-ranges'))
self.assertEqual('application/json; charset=utf-8',
headers.get('content-type'))
def check_listing_fails(exp_status, **params):
qs = '&'.join(['%s=%s' % param for param in params.items()])
with self.assertRaises(ClientException) as cm:
client.get_container(
self.url, self.token, self.container_name, query_string=qs)
self.assertEqual(exp_status, cm.exception.http_status)
return cm.exception
def do_listing_checks(objs, hdrs=None):
hdrs = hdrs if hdrs else {}
check_listing(objs, hdrs)
check_listing(objs, hdrs, marker=markers[0], end_marker=markers[1])
check_listing(objs, hdrs, marker=markers[0], end_marker=markers[2])
check_listing(objs, hdrs, marker=markers[1], end_marker=markers[3])
check_listing(objs, hdrs, marker=markers[1], end_marker=markers[3],
limit=self.max_shard_size // 4)
check_listing(objs, hdrs, marker=markers[1], end_marker=markers[3],
limit=self.max_shard_size // 4)
check_listing(objs, hdrs, marker=markers[1], end_marker=markers[2],
limit=self.max_shard_size // 2)
check_listing(objs, hdrs, marker=markers[1], end_marker=markers[1])
check_listing(objs, hdrs, reverse=True)
check_listing(objs, hdrs, reverse=True, end_marker=markers[1])
check_listing(objs, hdrs, reverse=True, marker=markers[3],
end_marker=markers[1],
limit=self.max_shard_size // 4)
check_listing(objs, hdrs, reverse=True, marker=markers[3],
end_marker=markers[1], limit=0)
check_listing([], hdrs, marker=markers[0], end_marker=markers[0])
check_listing([], hdrs, marker=markers[0], end_marker=markers[1],
reverse=True)
check_listing(objs, hdrs, prefix='obj')
check_listing([], hdrs, prefix='zzz')
# delimiter
headers, listing = client.get_container(
self.url, self.token, self.container_name,
query_string='delimiter=' + quote(self.DELIM), headers=hdrs)
self.assertEqual([{'subdir': 'obj' + self.DELIM}], listing)
headers, listing = client.get_container(
self.url, self.token, self.container_name,
query_string='delimiter=j' + quote(self.DELIM), headers=hdrs)
self.assertEqual([{'subdir': 'obj' + self.DELIM}], listing)
limit = self.cluster_info['swift']['container_listing_limit']
exc = check_listing_fails(412, limit=limit + 1)
self.assertIn(b'Maximum limit', exc.http_response_content)
exc = check_listing_fails(400, delimiter='%ff')
self.assertIn(b'not valid UTF-8', exc.http_response_content)
# sanity checks
do_listing_checks(obj_names)
# Shard the container
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
# First run the 'leader' in charge of scanning, which finds all shard
# ranges and cleaves first two
self.sharders.once(number=self.brain.node_numbers[0],
additional_args='--partitions=%s' % self.brain.part)
# Then run sharder on other nodes which will also cleave first two
# shard ranges
for n in self.brain.node_numbers[1:]:
self.sharders.once(
number=n, additional_args='--partitions=%s' % self.brain.part)
# sanity check shard range states
for node in self.brain.nodes:
self.assert_container_state(node, 'sharding', 4)
shard_ranges = self.get_container_shard_ranges()
self.assertLengthEqual(shard_ranges, 4)
self.assert_shard_range_state(ShardRange.CLEAVED, shard_ranges[:2])
self.assert_shard_range_state(ShardRange.CREATED, shard_ranges[2:])
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta() # confirm no sysmeta deleted
self.assert_container_post_ok('sharding')
do_listing_checks(obj_names)
# put some new objects spread through entire namespace; object updates
# should be directed to the shard container (both the cleaved and the
# created shards)
new_obj_names = all_obj_names[1::4]
self.put_objects(new_obj_names, obj_content)
# new objects that fell into the first two cleaved shard ranges are
# reported in the listing; new objects in the yet-to-be-cleaved
# shard ranges are not yet included in the listing because listings
# prefer the root over the final two shards that have not yet been
# cleaved
exp_obj_names = [o for o in obj_names + new_obj_names
if o <= shard_ranges[1].upper]
exp_obj_names += [o for o in obj_names
if o > shard_ranges[1].upper]
exp_obj_names.sort()
do_listing_checks(exp_obj_names)
# run all the sharders again and the last two shard ranges get cleaved
self.sharders.once(additional_args='--partitions=%s' % self.brain.part)
for node in self.brain.nodes:
self.assert_container_state(node, 'sharded', 4)
shard_ranges = self.get_container_shard_ranges()
self.assert_shard_range_state(ShardRange.ACTIVE, shard_ranges)
# listings are now gathered from all four shard ranges so should have
# all the specified objects
exp_obj_names = obj_names + new_obj_names
exp_obj_names.sort()
do_listing_checks(exp_obj_names)
# shard ranges may now be cached by proxy so do listings checks again
# forcing backend request
do_listing_checks(exp_obj_names, hdrs={'X-Newest': 'true'})
# post more metadata to the container and check that it is read back
# correctly from backend (using x-newest) and cache
test_headers = {'x-container-meta-test': 'testing',
'x-container-read': 'read_acl',
'x-container-write': 'write_acl',
'x-container-sync-key': 'sync_key',
# 'x-container-sync-to': 'sync_to',
'x-versions-location': 'versions',
'x-container-meta-access-control-allow-origin': 'aa',
'x-container-meta-access-control-expose-headers': 'bb',
'x-container-meta-access-control-max-age': '123'}
client.post_container(self.url, self.admin_token, self.container_name,
headers=test_headers)
headers, listing = client.get_container(
self.url, self.token, self.container_name,
headers={'X-Newest': 'true'})
exp_headers = dict(test_headers)
exp_headers.update({
'x-container-object-count': str(len(exp_obj_names)),
'x-container-bytes-used':
str(len(exp_obj_names) * len(obj_content))
})
for k, v in exp_headers.items():
self.assertIn(k, headers)
self.assertEqual(v, headers[k], dict(headers))
cache_headers, listing = client.get_container(
self.url, self.token, self.container_name)
for k, v in exp_headers.items():
self.assertIn(k, cache_headers)
self.assertEqual(v, cache_headers[k], dict(exp_headers))
# we don't expect any of these headers to be equal...
for k in ('x-timestamp', 'last-modified', 'date', 'x-trans-id',
'x-openstack-request-id'):
headers.pop(k, None)
cache_headers.pop(k, None)
self.assertEqual(headers, cache_headers)
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
self.assert_container_post_ok('sharded')
# delete original objects
self.delete_objects(obj_names)
do_listing_checks(new_obj_names)
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
self.assert_container_post_ok('sharded')
class TestContainerShardingFunkyNames(TestContainerShardingNonUTF8):
DELIM = '\n'
def _make_object_names(self, number):
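# names contain a newline (this class's DELIM) and a literal '%Ff',
# so listings and shard range bounds have to cope with awkward
# characters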
return ['obj\n%04d%%Ff' % x for x in range(number)]
def _setup_container_name(self):
self.container_name = 'container\n%%Ff\n%s' % uuid.uuid4()
class TestContainerShardingUTF8(TestContainerShardingNonUTF8):
def _make_object_names(self, number, start=0):
# override default with names that include non-ascii chars
name_length = self.cluster_info['swift']['max_object_name_length']
obj_names = []
for x in range(start, start + number):
name = (u'obj-\u00e4\u00ea\u00ec\u00f2\u00fb\u1234-%04d' % x)
name = name.encode('utf8').ljust(name_length, b'o')
if not six.PY2:
name = name.decode('utf8')
obj_names.append(name)
return obj_names
def _setup_container_name(self):
# override default with max length name that includes non-ascii chars
super(TestContainerShardingUTF8, self)._setup_container_name()
name_length = self.cluster_info['swift']['max_container_name_length']
cont_name = \
self.container_name + u'-\u00e4\u00ea\u00ec\u00f2\u00fb\u1234'
self.container_name = cont_name.encode('utf8').ljust(name_length, b'x')
if not six.PY2:
self.container_name = self.container_name.decode('utf8')
class TestContainerShardingObjectVersioning(BaseTestContainerSharding):
def _maybe_skip_test(self):
super(TestContainerShardingObjectVersioning, self)._maybe_skip_test()
try:
vw_config = utils.readconf(self.configs['proxy-server'],
'filter:versioned_writes')
except ValueError:
raise SkipTest('No [filter:versioned_writes] section found in '
'proxy-server configs')
allow_object_versioning = config_true_value(
vw_config.get('allow_object_versioning', False))
if not allow_object_versioning:
raise SkipTest('allow_object_versioning must be true '
'in all versioned_writes configs')
def init_brain(self, container_name):
client.put_container(self.url, self.token, container_name, headers={
'X-Storage-Policy': self.policy.name,
'X-Versions-Enabled': 'true',
})
self.container_to_shard = '\x00versions\x00' + container_name
self.brain = BrainSplitter(
self.url, self.token, self.container_to_shard,
None, 'container')
def test_sharding_listing(self):
# verify parameterised listing of a container during sharding
all_obj_names = self._make_object_names(3) * self.max_shard_size
all_obj_names.extend(self._make_object_names(self.max_shard_size,
start=3))
obj_names = all_obj_names[::2]
obj_names_and_versions = self.put_objects(obj_names)
def sort_key(obj_and_ver):
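# versioned listings are ordered by object name and then newest
# version first; inverting the Timestamp reproduces that ordering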
obj, ver = obj_and_ver
return obj, ~Timestamp(ver)
obj_names_and_versions.sort(key=sort_key)
# choose some names approx in middle of each expected shard range
markers = [
obj_names_and_versions[i]
for i in range(self.max_shard_size // 4,
2 * self.max_shard_size,
self.max_shard_size // 2)]
def check_listing(objects, **params):
params['versions'] = ''
qs = '&'.join('%s=%s' % param for param in params.items())
headers, listing = client.get_container(
self.url, self.token, self.container_name, query_string=qs)
listing = [(x['name'].encode('utf-8') if six.PY2 else x['name'],
x['version_id'])
for x in listing]
if params.get('reverse'):
marker = (
params.get('marker', ShardRange.MAX),
~Timestamp(params['version_marker'])
if 'version_marker' in params else ~Timestamp('0'),
)
end_marker = (
params.get('end_marker', ShardRange.MIN),
Timestamp('0'),
)
expected = [o for o in objects
if end_marker < sort_key(o) < marker]
expected.reverse()
else:
marker = (
params.get('marker', ShardRange.MIN),
~Timestamp(params['version_marker'])
if 'version_marker' in params else Timestamp('0'),
)
end_marker = (
params.get('end_marker', ShardRange.MAX),
~Timestamp('0'),
)
expected = [o for o in objects
if marker < sort_key(o) < end_marker]
if 'limit' in params:
expected = expected[:params['limit']]
self.assertEqual(expected, listing)
def check_listing_fails(exp_status, **params):
params['versions'] = ''
qs = '&'.join('%s=%s' % param for param in params.items())
with self.assertRaises(ClientException) as cm:
client.get_container(
self.url, self.token, self.container_name, query_string=qs)
self.assertEqual(exp_status, cm.exception.http_status)
return cm.exception
def do_listing_checks(objects):
check_listing(objects)
check_listing(objects,
marker=markers[0][0], version_marker=markers[0][1])
check_listing(objects,
marker=markers[0][0], version_marker=markers[0][1],
limit=self.max_shard_size // 10)
check_listing(objects,
marker=markers[0][0], version_marker=markers[0][1],
limit=self.max_shard_size // 4)
check_listing(objects,
marker=markers[0][0], version_marker=markers[0][1],
limit=self.max_shard_size // 2)
check_listing(objects,
marker=markers[1][0], version_marker=markers[1][1])
check_listing(objects,
marker=markers[1][0], version_marker=markers[1][1],
limit=self.max_shard_size // 10)
check_listing(objects,
marker=markers[2][0], version_marker=markers[2][1],
limit=self.max_shard_size // 4)
check_listing(objects,
marker=markers[2][0], version_marker=markers[2][1],
limit=self.max_shard_size // 2)
check_listing(objects, reverse=True)
check_listing(objects, reverse=True,
marker=markers[1][0], version_marker=markers[1][1])
check_listing(objects, prefix='obj')
check_listing([], prefix='zzz')
# delimiter
headers, listing = client.get_container(
self.url, self.token, self.container_name,
query_string='delimiter=-')
self.assertEqual([{'subdir': 'obj-'}], listing)
headers, listing = client.get_container(
self.url, self.token, self.container_name,
query_string='delimiter=j-')
self.assertEqual([{'subdir': 'obj-'}], listing)
limit = self.cluster_info['swift']['container_listing_limit']
exc = check_listing_fails(412, limit=limit + 1)
self.assertIn(b'Maximum limit', exc.http_response_content)
exc = check_listing_fails(400, delimiter='%ff')
self.assertIn(b'not valid UTF-8', exc.http_response_content)
# sanity checks
do_listing_checks(obj_names_and_versions)
# Shard the container. Use an internal_client so we get an implicit
# X-Backend-Allow-Reserved-Names header
self.internal_client.set_container_metadata(
self.account, self.container_to_shard, {
'X-Container-Sysmeta-Sharding': 'True',
})
# First run the 'leader' in charge of scanning, which finds all shard
# ranges and cleaves first two
self.sharders.once(number=self.brain.node_numbers[0],
additional_args='--partitions=%s' % self.brain.part)
# Then run sharder on other nodes which will also cleave first two
# shard ranges
for n in self.brain.node_numbers[1:]:
self.sharders.once(
number=n, additional_args='--partitions=%s' % self.brain.part)
# sanity check shard range states
for node in self.brain.nodes:
self.assert_container_state(node, 'sharding', 4)
shard_ranges = self.get_container_shard_ranges()
self.assertLengthEqual(shard_ranges, 4)
self.assert_shard_range_state(ShardRange.CLEAVED, shard_ranges[:2])
self.assert_shard_range_state(ShardRange.CREATED, shard_ranges[2:])
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta() # confirm no sysmeta deleted
self.assert_container_post_ok('sharding')
do_listing_checks(obj_names_and_versions)
# put some new objects spread through entire namespace
new_obj_names = all_obj_names[1::4]
new_obj_names_and_versions = self.put_objects(new_obj_names)
# new objects that fell into the first two cleaved shard ranges are
# reported in the listing; new objects in the yet-to-be-cleaved
# shard ranges are not yet included in the listing
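# note: the versions container stores objects under reserved names,
# so prepend the null byte before comparing a listed name against
# the shard range bounds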
exp_obj_names_and_versions = [
o for o in obj_names_and_versions + new_obj_names_and_versions
if '\x00' + o[0] <= shard_ranges[1].upper]
exp_obj_names_and_versions += [
o for o in obj_names_and_versions
if '\x00' + o[0] > shard_ranges[1].upper]
exp_obj_names_and_versions.sort(key=sort_key)
do_listing_checks(exp_obj_names_and_versions)
# run all the sharders again and the last two shard ranges get cleaved
self.sharders.once(additional_args='--partitions=%s' % self.brain.part)
for node in self.brain.nodes:
self.assert_container_state(node, 'sharded', 4)
shard_ranges = self.get_container_shard_ranges()
self.assert_shard_range_state(ShardRange.ACTIVE, shard_ranges)
exp_obj_names_and_versions = \
obj_names_and_versions + new_obj_names_and_versions
exp_obj_names_and_versions.sort(key=sort_key)
do_listing_checks(exp_obj_names_and_versions)
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
self.assert_container_post_ok('sharded')
# delete original objects
self.delete_objects(obj_names_and_versions)
new_obj_names_and_versions.sort(key=sort_key)
do_listing_checks(new_obj_names_and_versions)
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
self.assert_container_post_ok('sharded')
class TestContainerSharding(BaseTestContainerSharding):
def _test_sharded_listing(self, run_replicators=False):
obj_names = self._make_object_names(self.max_shard_size)
self.put_objects(obj_names)
# Verify that we start out with normal DBs, no shards
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['normal_dbs'], 3)
self.assertLengthEqual(found['shard_dbs'], 0)
for db_file in found['normal_dbs']:
broker = ContainerBroker(db_file)
self.assertIs(True, broker.is_root_container())
self.assertEqual('unsharded', broker.get_db_state())
self.assertLengthEqual(broker.get_shard_ranges(), 0)
headers, pre_sharding_listing = client.get_container(
self.url, self.token, self.container_name)
self.assertEqual(obj_names, [
x['name'].encode('utf-8') if six.PY2 else x['name']
for x in pre_sharding_listing]) # sanity
# Shard it
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
pre_sharding_headers = client.head_container(
self.url, self.admin_token, self.container_name)
self.assertEqual('True',
pre_sharding_headers.get('x-container-sharding'))
# Only run the one in charge of scanning
self.sharders.once(number=self.brain.node_numbers[0],
additional_args='--partitions=%s' % self.brain.part)
# Verify that we have one sharded db -- though the other normal DBs
# received the shard ranges that got defined
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['shard_dbs'], 1)
broker = self.get_broker(self.brain.part, self.brain.nodes[0])
# sanity check - the shard db is on replica 0
self.assertEqual(found['shard_dbs'][0], broker.db_file)
self.assertIs(True, broker.is_root_container())
self.assertEqual('sharded', broker.get_db_state())
orig_root_shard_ranges = [dict(sr) for sr in broker.get_shard_ranges()]
self.assertLengthEqual(orig_root_shard_ranges, 2)
self.assert_total_object_count(len(obj_names), orig_root_shard_ranges)
self.assert_shard_ranges_contiguous(2, orig_root_shard_ranges)
self.assertEqual([ShardRange.ACTIVE, ShardRange.ACTIVE],
[sr['state'] for sr in orig_root_shard_ranges])
# Contexts should still be there, and should be complete
contexts = set([ctx.done()
for ctx, _ in CleavingContext.load_all(broker)])
self.assertEqual({True}, contexts)
self.direct_delete_container(expect_failure=True)
self.assertLengthEqual(found['normal_dbs'], 2)
for db_file in found['normal_dbs']:
broker = ContainerBroker(db_file)
self.assertIs(True, broker.is_root_container())
self.assertEqual('unsharded', broker.get_db_state())
shard_ranges = [dict(sr) for sr in broker.get_shard_ranges()]
self.assertEqual([ShardRange.CREATED, ShardRange.CREATED],
[sr['state'] for sr in shard_ranges])
# the sharded db had shard range meta_timestamps and state updated
# during cleaving, so we do not expect those to be equal on other
# nodes
self.assert_shard_range_lists_equal(
orig_root_shard_ranges, shard_ranges,
excludes=['meta_timestamp', 'state', 'state_timestamp'])
contexts = list(CleavingContext.load_all(broker))
self.assertEqual([], contexts) # length check
if run_replicators:
Manager(['container-replicator']).once()
# replication doesn't change the db file names
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['shard_dbs'], 1)
self.assertLengthEqual(found['normal_dbs'], 2)
# Now that everyone has shard ranges, run *everyone*
self.sharders.once(additional_args='--partitions=%s' % self.brain.part)
# Verify that we only have shard dbs now
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['shard_dbs'], 3)
self.assertLengthEqual(found['normal_dbs'], 0)
# Shards stayed the same
for db_file in found['shard_dbs']:
broker = ContainerBroker(db_file)
self.assertIs(True, broker.is_root_container())
self.assertEqual('sharded', broker.get_db_state())
# Well, except for meta_timestamps, since the shards each reported
self.assert_shard_range_lists_equal(
orig_root_shard_ranges, broker.get_shard_ranges(),
excludes=['meta_timestamp', 'state_timestamp'])
for orig, updated in zip(orig_root_shard_ranges,
broker.get_shard_ranges()):
self.assertGreaterEqual(updated.state_timestamp,
orig['state_timestamp'])
self.assertGreaterEqual(updated.meta_timestamp,
orig['meta_timestamp'])
# Contexts should still be there, and should be complete
contexts = set([ctx.done()
for ctx, _ in CleavingContext.load_all(broker)])
self.assertEqual({True}, contexts)
# Check that entire listing is available
headers, actual_listing = self.assert_container_listing(obj_names)
# ... and check some other container properties
self.assertEqual(headers['last-modified'],
pre_sharding_headers['last-modified'])
# It even works in reverse!
headers, listing = client.get_container(self.url, self.token,
self.container_name,
query_string='reverse=on')
self.assertEqual(pre_sharding_listing[::-1], listing)
# and repeat checks to use shard ranges now cached in proxy
headers, actual_listing = self.assert_container_listing(obj_names)
self.assertEqual(headers['last-modified'],
pre_sharding_headers['last-modified'])
headers, listing = client.get_container(self.url, self.token,
self.container_name,
query_string='reverse=on')
self.assertEqual(pre_sharding_listing[::-1], listing)
# Now put some new objects into the first shard, taking its object
# count to 3 shard ranges' worth
more_obj_names = [
'beta%03d' % x for x in range(self.max_shard_size)]
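# 'beta...' names sort before all the existing 'obj...' names, so
# every new object lands in the first shard range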
self.put_objects(more_obj_names)
# The listing includes new objects (shard ranges haven't changed, just
# their object content, so cached shard ranges are still correct)...
headers, listing = self.assert_container_listing(
more_obj_names + obj_names)
self.assertEqual(pre_sharding_listing, listing[len(more_obj_names):])
# ...but root object count is out of date until the sharders run and
# update the root
self.assert_container_object_count(len(obj_names))
# run sharders on the shard to get root updated
shard_1 = ShardRange.from_dict(orig_root_shard_ranges[0])
self.run_sharders(shard_1)
self.assert_container_object_count(len(more_obj_names + obj_names))
# we've added objects enough that we need to shard the first shard
# *again* into three new sub-shards, but nothing happens until the root
# leader identifies shard candidate...
root_shard_ranges = self.direct_get_container_shard_ranges()
for node, (hdrs, root_shards) in root_shard_ranges.items():
self.assertLengthEqual(root_shards, 2)
with annotate_failure('node %s. ' % node):
self.assertEqual(
[ShardRange.ACTIVE] * 2,
[sr['state'] for sr in root_shards])
# orig shards 0, 1 should be contiguous
self.assert_shard_ranges_contiguous(2, root_shards)
# Now run the root leader to identify shard candidate...while one of
# the shard container servers is down
shard_1_part, shard_1_nodes = self.get_part_and_node_numbers(shard_1)
self.brain.servers.stop(number=shard_1_nodes[2])
self.sharders.once(number=self.brain.node_numbers[0],
additional_args='--partitions=%s' % self.brain.part)
# ... so the third replica of the first shard is not moved to sharding state
found_for_shard = self.categorize_container_dir_content(
shard_1.account, shard_1.container)
self.assertLengthEqual(found_for_shard['normal_dbs'], 3)
self.assertEqual(
[ShardRange.SHARDING, ShardRange.SHARDING, ShardRange.ACTIVE],
[ContainerBroker(db_file).get_own_shard_range().state
for db_file in found_for_shard['normal_dbs']])
# ...then run first cycle of first shard sharders in order, leader
# first, to get to predictable state where all nodes have cleaved 2 out
# of 3 ranges...starting with first two nodes
for node_number in shard_1_nodes[:2]:
self.sharders.once(
number=node_number,
additional_args='--partitions=%s' % shard_1_part)
# ... first two replicas start sharding to sub-shards
found_for_shard = self.categorize_container_dir_content(
shard_1.account, shard_1.container)
self.assertLengthEqual(found_for_shard['shard_dbs'], 2)
for db_file in found_for_shard['shard_dbs'][:2]:
broker = ContainerBroker(db_file)
with annotate_failure('shard db file %s. ' % db_file):
self.assertIs(False, broker.is_root_container())
self.assertEqual('sharding', broker.get_db_state())
self.assertEqual(
ShardRange.SHARDING, broker.get_own_shard_range().state)
shard_shards = broker.get_shard_ranges()
self.assertEqual(
[ShardRange.CLEAVED, ShardRange.CLEAVED,
ShardRange.CREATED],
[sr.state for sr in shard_shards])
self.assert_shard_ranges_contiguous(
3, shard_shards,
first_lower=orig_root_shard_ranges[0]['lower'],
last_upper=orig_root_shard_ranges[0]['upper'])
contexts = list(CleavingContext.load_all(broker))
self.assertEqual(len(contexts), 1)
context, _lm = contexts[0]
self.assertIs(context.cleaving_done, False)
self.assertIs(context.misplaced_done, True)
self.assertEqual(context.ranges_done, 2)
self.assertEqual(context.ranges_todo, 1)
self.assertEqual(context.max_row,
self.max_shard_size * 3 // 2)
# but third replica still has no idea it should be sharding
self.assertLengthEqual(found_for_shard['normal_dbs'], 3)
self.assertEqual(
ShardRange.ACTIVE,
ContainerBroker(
found_for_shard['normal_dbs'][2]).get_own_shard_range().state)
# ...but once sharder runs on third replica it will learn its state;
# note that any root replica on the stopped container server also won't
# know about the shards being in sharding state, so leave that server
# stopped for now so that the shard fetches its state from an
# up-to-date root replica
self.sharders.once(
number=shard_1_nodes[2],
additional_args='--partitions=%s' % shard_1_part)
# third replica is sharding but has no sub-shard ranges yet...
found_for_shard = self.categorize_container_dir_content(
shard_1.account, shard_1.container)
self.assertLengthEqual(found_for_shard['shard_dbs'], 2)
self.assertLengthEqual(found_for_shard['normal_dbs'], 3)
broker = ContainerBroker(found_for_shard['normal_dbs'][2])
self.assertEqual('unsharded', broker.get_db_state())
self.assertEqual(
ShardRange.SHARDING, broker.get_own_shard_range().state)
self.assertFalse(broker.get_shard_ranges())
contexts = list(CleavingContext.load_all(broker))
self.assertEqual([], contexts) # length check
# ...until sub-shard ranges are replicated from another shard replica;
# there may also be a sub-shard replica missing so run replicators on
# all nodes to fix that if necessary
self.brain.servers.start(number=shard_1_nodes[2])
self.replicators.once()
# Now that the replicators have all run, third replica sees cleaving
# contexts for the first two
contexts = list(CleavingContext.load_all(broker))
self.assertEqual(len(contexts), 2)
# now run sharder again on third replica
self.sharders.once(
number=shard_1_nodes[2],
additional_args='--partitions=%s' % shard_1_part)
sharding_broker = ContainerBroker(found_for_shard['normal_dbs'][2])
self.assertEqual('sharding', sharding_broker.get_db_state())
broker_id = broker.get_info()['id']
# Old, unsharded DB doesn't have the context...
contexts = list(CleavingContext.load_all(broker))
self.assertEqual(len(contexts), 2)
self.assertNotIn(broker_id, [ctx[0].ref for ctx in contexts])
# ...but the sharding one does
contexts = list(CleavingContext.load_all(sharding_broker))
self.assertEqual(len(contexts), 3)
self.assertIn(broker_id, [ctx[0].ref for ctx in contexts])
# check original first shard range state and sub-shards - all replicas
# should now be in consistent state
found_for_shard = self.categorize_container_dir_content(
shard_1.account, shard_1.container)
self.assertLengthEqual(found_for_shard['shard_dbs'], 3)
self.assertLengthEqual(found_for_shard['normal_dbs'], 3)
for db_file in found_for_shard['shard_dbs']:
broker = ContainerBroker(db_file)
with annotate_failure('shard db file %s. ' % db_file):
self.assertIs(False, broker.is_root_container())
self.assertEqual('sharding', broker.get_db_state())
self.assertEqual(
ShardRange.SHARDING, broker.get_own_shard_range().state)
shard_shards = broker.get_shard_ranges()
self.assertEqual(
[ShardRange.CLEAVED, ShardRange.CLEAVED,
ShardRange.CREATED],
[sr.state for sr in shard_shards])
self.assert_shard_ranges_contiguous(
3, shard_shards,
first_lower=orig_root_shard_ranges[0]['lower'],
last_upper=orig_root_shard_ranges[0]['upper'])
# check third sub-shard is in created state
sub_shard = shard_shards[2]
found_for_sub_shard = self.categorize_container_dir_content(
sub_shard.account, sub_shard.container)
self.assertFalse(found_for_sub_shard['shard_dbs'])
self.assertLengthEqual(found_for_sub_shard['normal_dbs'], 3)
for db_file in found_for_sub_shard['normal_dbs']:
broker = ContainerBroker(db_file)
with annotate_failure('sub shard db file %s. ' % db_file):
self.assertIs(False, broker.is_root_container())
self.assertEqual('unsharded', broker.get_db_state())
self.assertEqual(
ShardRange.CREATED, broker.get_own_shard_range().state)
self.assertFalse(broker.get_shard_ranges())
# check root shard ranges
root_shard_ranges = self.direct_get_container_shard_ranges()
for node, (hdrs, root_shards) in root_shard_ranges.items():
self.assertLengthEqual(root_shards, 5)
with annotate_failure('node %s. ' % node):
# shard ranges are sorted by upper, state, lower, so expect:
# sub-shards, orig shard 0, orig shard 1
self.assertEqual(
[ShardRange.CLEAVED, ShardRange.CLEAVED,
ShardRange.CREATED, ShardRange.SHARDING,
ShardRange.ACTIVE],
[sr['state'] for sr in root_shards])
# sub-shards 0, 1, 2, orig shard 1 should be contiguous
self.assert_shard_ranges_contiguous(
4, root_shards[:3] + root_shards[4:])
# orig shards 0, 1 should be contiguous
self.assert_shard_ranges_contiguous(2, root_shards[3:])
self.assert_container_listing(more_obj_names + obj_names)
self.assert_container_object_count(len(more_obj_names + obj_names))
# Before writing, kill the cache
self.memcache.delete(get_cache_key(
self.account, self.container_name, shard='updating'))
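# (dropping the cached 'updating' shard ranges forces the proxy to
# fetch fresh ones for the PUT below)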
# add another object that lands in the first of the new sub-shards
self.put_objects(['alpha'])
# check that alpha object is in the first new shard
shard_listings = self.direct_get_container(shard_shards[0].account,
shard_shards[0].container)
for node, (hdrs, listing) in shard_listings.items():
with annotate_failure(node):
self.assertIn('alpha', [o['name'] for o in listing])
self.assert_container_listing(['alpha'] + more_obj_names + obj_names)
# Run sharders again so things settle.
self.run_sharders(shard_1)
# Also run replicators to settle cleaving contexts
self.replicators.once()
# check original first shard range shards
for db_file in found_for_shard['shard_dbs']:
broker = ContainerBroker(db_file)
with annotate_failure('shard db file %s. ' % db_file):
self.assertIs(False, broker.is_root_container())
self.assertEqual('sharded', broker.get_db_state())
self.assertEqual(
[ShardRange.ACTIVE] * 3,
[sr.state for sr in broker.get_shard_ranges()])
# Contexts should still be there, and should be complete
contexts = set([ctx.done()
for ctx, _
in CleavingContext.load_all(broker)])
self.assertEqual({True}, contexts)
# check root shard ranges
root_shard_ranges = self.direct_get_container_shard_ranges()
for node, (hdrs, root_shards) in root_shard_ranges.items():
# old first shard range should have been deleted
self.assertLengthEqual(root_shards, 4)
with annotate_failure('node %s. ' % node):
self.assertEqual(
[ShardRange.ACTIVE] * 4,
[sr['state'] for sr in root_shards])
self.assert_shard_ranges_contiguous(4, root_shards)
headers, final_listing = self.assert_container_listing(
['alpha'] + more_obj_names + obj_names)
# check root
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['shard_dbs'], 3)
self.assertLengthEqual(found['normal_dbs'], 0)
new_shard_ranges = None
for db_file in found['shard_dbs']:
broker = ContainerBroker(db_file)
self.assertIs(True, broker.is_root_container())
self.assertEqual('sharded', broker.get_db_state())
if new_shard_ranges is None:
new_shard_ranges = broker.get_shard_ranges(
include_deleted=True)
self.assertLengthEqual(new_shard_ranges, 5)
# Second half is still there, and unchanged
self.assertIn(
dict(orig_root_shard_ranges[1], meta_timestamp=None,
state_timestamp=None),
[dict(sr, meta_timestamp=None, state_timestamp=None)
for sr in new_shard_ranges])
# But the first half split in three, then deleted
by_name = {sr.name: sr for sr in new_shard_ranges}
self.assertIn(orig_root_shard_ranges[0]['name'], by_name)
old_shard_range = by_name.pop(
orig_root_shard_ranges[0]['name'])
self.assertTrue(old_shard_range.deleted)
self.assert_shard_ranges_contiguous(4, list(by_name.values()))
else:
# Everyone's on the same page. Well, except for
# meta_timestamps, since the shards each reported
other_shard_ranges = broker.get_shard_ranges(
include_deleted=True)
self.assert_shard_range_lists_equal(
new_shard_ranges, other_shard_ranges,
excludes=['meta_timestamp', 'state_timestamp'])
for orig, updated in zip(orig_root_shard_ranges,
other_shard_ranges):
self.assertGreaterEqual(updated.meta_timestamp,
orig['meta_timestamp'])
self.assert_container_delete_fails()
for obj in final_listing:
client.delete_object(
self.url, self.token, self.container_name, obj['name'])
# the objects won't be listed anymore
self.assert_container_listing([])
# but root container stats will not yet be aware of the deletions
self.assert_container_delete_fails()
# One server was down while the shard sharded its first two sub-shards,
# so there may be undeleted handoff db(s) for sub-shard(s) that were
# not fully replicated; run replicators now to clean up so they no
# longer report bogus stats to root.
self.replicators.once()
# Run sharder so that shard containers update the root. Do not run
# sharder on root container because that triggers shrinks which can
# cause root object count to temporarily be non-zero and prevent the
# final delete.
self.run_sharders(self.get_container_shard_ranges())
# then root is empty and can be deleted
self.assert_container_listing([])
self.assert_container_object_count(0)
client.delete_container(self.url, self.token, self.container_name)
def test_sharded_listing_no_replicators(self):
self._test_sharded_listing()
def test_sharded_listing_with_replicators(self):
self._test_sharded_listing(run_replicators=True)
def test_async_pendings(self):
obj_names = self._make_object_names(self.max_shard_size * 2)
# There are some updates *everyone* gets
self.put_objects(obj_names[::5])
# But roll some outages so each container only gets ~2/5 more object
# records, i.e. a total of 3/5 of the updates per container, and
# async pendings pile up
for i, n in enumerate(self.brain.node_numbers, start=1):
self.brain.servers.stop(number=n)
self.put_objects(obj_names[i::5])
self.brain.servers.start(number=n)
# But there are also 1/5 updates *no one* gets
self.brain.servers.stop()
self.put_objects(obj_names[4::5])
self.brain.servers.start()
# Shard it
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
headers = client.head_container(self.url, self.admin_token,
self.container_name)
self.assertEqual('True', headers.get('x-container-sharding'))
# sanity check
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['shard_dbs'], 0)
self.assertLengthEqual(found['normal_dbs'], 3)
for db_file in found['normal_dbs']:
broker = ContainerBroker(db_file)
self.assertIs(True, broker.is_root_container())
self.assertEqual(len(obj_names) * 3 // 5,
broker.get_info()['object_count'])
# Only run the 'leader' in charge of scanning.
# Each container has ~2 * max * 3/5 objects
# which are distributed from obj000 to obj<2 * max - 1>,
# so expect 3 shard ranges to be found: the first two will be complete
# shards with max/2 objects and lower/upper bounds spaced by approx:
# (2 * max - 1)/(2 * max * 3/5) * (max/2) =~ 5/6 * max
#
# Note that during this shard cycle the leader replicates to other
# nodes so they will end up with ~2 * max * 4/5 objects.
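# For illustration, with max_shard_size = 100: each DB holds ~120 of
# the 200 object records, the first two shard ranges each get 50
# objects, and their bounds are spaced ~83 names apart
# (199/120 * 50 =~ 83).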
self.sharders.once(number=self.brain.node_numbers[0],
additional_args='--partitions=%s' % self.brain.part)
# Verify that we have one shard db -- though the other normal DBs
# received the shard ranges that got defined
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['shard_dbs'], 1)
node_index_zero_db = found['shard_dbs'][0]
broker = ContainerBroker(node_index_zero_db)
self.assertIs(True, broker.is_root_container())
self.assertEqual(SHARDING, broker.get_db_state())
expected_shard_ranges = broker.get_shard_ranges()
self.assertLengthEqual(expected_shard_ranges, 3)
self.assertEqual(
[ShardRange.CLEAVED, ShardRange.CLEAVED, ShardRange.CREATED],
[sr.state for sr in expected_shard_ranges])
# Still have all three big DBs -- we've only cleaved 2 of the 3 shard
# ranges that got defined
self.assertLengthEqual(found['normal_dbs'], 3)
db_states = []
for db_file in found['normal_dbs']:
broker = ContainerBroker(db_file)
self.assertIs(True, broker.is_root_container())
db_states.append(broker.get_db_state())
# the sharded db had shard range meta_timestamps updated during
# cleaving, so we do not expect those to be equal on other nodes
self.assert_shard_range_lists_equal(
expected_shard_ranges, broker.get_shard_ranges(),
excludes=['meta_timestamp', 'state_timestamp', 'state'])
self.assertEqual(len(obj_names) * 3 // 5,
broker.get_info()['object_count'])
self.assertEqual([SHARDING, UNSHARDED, UNSHARDED], sorted(db_states))
# Run the other sharders so we're all in (roughly) the same state
for n in self.brain.node_numbers[1:]:
self.sharders.once(
number=n,
additional_args='--partitions=%s' % self.brain.part)
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['shard_dbs'], 3)
self.assertLengthEqual(found['normal_dbs'], 3)
for db_file in found['normal_dbs']:
broker = ContainerBroker(db_file)
self.assertEqual(SHARDING, broker.get_db_state())
# no new rows
self.assertEqual(len(obj_names) * 3 // 5,
broker.get_info()['object_count'])
# Run updaters to clear the async pendings
Manager(['object-updater']).once()
# Our "big" dbs didn't take updates
for db_file in found['normal_dbs']:
broker = ContainerBroker(db_file)
self.assertEqual(len(obj_names) * 3 // 5,
broker.get_info()['object_count'])
# confirm that the async pending updates got redirected to the shards
for sr in expected_shard_ranges:
shard_listings = self.direct_get_container(sr.account,
sr.container)
for node, (hdrs, listing) in shard_listings.items():
shard_listing_names = [
o['name'].encode('utf-8') if six.PY2 else o['name']
for o in listing]
for obj in obj_names[4::5]:
if obj in sr:
self.assertIn(obj, shard_listing_names)
else:
self.assertNotIn(obj, shard_listing_names)
# The entire listing is not yet available - we have two cleaved shard
# ranges, complete with async updates, but for the remainder of the
# namespace only what landed in the original container
headers, listing = client.get_container(self.url, self.token,
self.container_name)
start_listing = [
o for o in obj_names if o <= expected_shard_ranges[1].upper]
self.assertEqual(
[x['name'].encode('utf-8') if six.PY2 else x['name']
for x in listing[:len(start_listing)]],
start_listing)
# we can't assert much about the remaining listing, other than that
# there should be something
self.assertTrue(
[x['name'].encode('utf-8') if six.PY2 else x['name']
for x in listing[len(start_listing):]])
self.assertIn('x-container-object-count', headers)
self.assertEqual(str(len(listing)),
headers['x-container-object-count'])
headers, listing = client.get_container(self.url, self.token,
self.container_name,
query_string='reverse=on')
self.assertEqual([x['name'].encode('utf-8') if six.PY2 else x['name']
for x in listing[-len(start_listing):]],
list(reversed(start_listing)))
self.assertIn('x-container-object-count', headers)
self.assertEqual(str(len(listing)),
headers['x-container-object-count'])
self.assertTrue(
[x['name'].encode('utf-8') if six.PY2 else x['name']
for x in listing[:-len(start_listing)]])
# Run the sharders again to get everything to settle
self.sharders.once()
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['shard_dbs'], 3)
self.assertLengthEqual(found['normal_dbs'], 0)
# now all shards have been cleaved we should get the complete listing
headers, listing = client.get_container(self.url, self.token,
self.container_name)
self.assertEqual([x['name'].encode('utf-8') if six.PY2 else x['name']
for x in listing],
obj_names)
def test_shrinking(self):
int_client = self.make_internal_client()
def check_node_data(node_data, exp_hdrs, exp_obj_count, exp_shards,
exp_sharded_root_range=False):
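# check a single node's response: the expected headers are present,
# a sharded root range appears only if expected, and the remaining
# shard ranges are contiguous and account for exp_obj_count objects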
hdrs, range_data = node_data
self.assert_dict_contains(exp_hdrs, hdrs)
sharded_root_range = False
other_range_data = []
for data in range_data:
sr = ShardRange.from_dict(data)
if (sr.account == self.account and
sr.container == self.container_name and
sr.state == ShardRange.SHARDED):
# only expect one root range
self.assertFalse(sharded_root_range, range_data)
sharded_root_range = True
self.assertEqual(ShardRange.MIN, sr.lower, sr)
self.assertEqual(ShardRange.MAX, sr.upper, sr)
else:
# include active root range in further assertions
other_range_data.append(data)
self.assertEqual(exp_sharded_root_range, sharded_root_range)
self.assert_shard_ranges_contiguous(exp_shards, other_range_data)
self.assert_total_object_count(exp_obj_count, other_range_data)
def check_shard_nodes_data(node_data, expected_state='unsharded',
expected_shards=0, exp_obj_count=0,
exp_sharded_root_range=False):
# checks that shard range is consistent on all nodes
root_path = '%s/%s' % (self.account, self.container_name)
exp_shard_hdrs = {
'X-Container-Sysmeta-Shard-Quoted-Root': quote(root_path),
'X-Backend-Sharding-State': expected_state}
object_counts = []
bytes_used = []
for node_id, node_data in node_data.items():
with annotate_failure('Node id %s.' % node_id):
check_node_data(
node_data, exp_shard_hdrs, exp_obj_count,
expected_shards, exp_sharded_root_range)
hdrs = node_data[0]
object_counts.append(int(hdrs['X-Container-Object-Count']))
bytes_used.append(int(hdrs['X-Container-Bytes-Used']))
if len(set(object_counts)) != 1:
self.fail('Inconsistent object counts: %s' % object_counts)
if len(set(bytes_used)) != 1:
self.fail('Inconsistent bytes used: %s' % bytes_used)
return object_counts[0], bytes_used[0]
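# one-element list so the nested do_shard_then_shrink() closure can
# mutate the repeat count (avoids nonlocal, which py2 lacks)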
repeat = [0]
def do_shard_then_shrink():
repeat[0] += 1
obj_names = ['obj-%s-%03d' % (repeat[0], x)
for x in range(self.max_shard_size)]
self.put_objects(obj_names)
# these two object names will fall at start of first shard range...
alpha = 'alpha-%s' % repeat[0]
beta = 'beta-%s' % repeat[0]
# Enable sharding
client.post_container(
self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
# sanity check
self.assert_container_listing(obj_names)
# Only run the one in charge of scanning
self.sharders.once(
number=self.brain.node_numbers[0],
additional_args='--partitions=%s' % self.brain.part)
# check root container
root_nodes_data = self.direct_get_container_shard_ranges()
self.assertEqual(3, len(root_nodes_data))
# nodes on which sharder has not run are still in unsharded state
# but have had shard ranges replicated to them
exp_obj_count = len(obj_names)
exp_hdrs = {'X-Backend-Sharding-State': 'unsharded',
'X-Container-Object-Count': str(exp_obj_count)}
node_id = self.brain.node_numbers[1] - 1
check_node_data(
root_nodes_data[node_id], exp_hdrs, exp_obj_count, 2)
node_id = self.brain.node_numbers[2] - 1
check_node_data(
root_nodes_data[node_id], exp_hdrs, exp_obj_count, 2)
# only one that ran sharder is in sharded state
exp_hdrs['X-Backend-Sharding-State'] = 'sharded'
node_id = self.brain.node_numbers[0] - 1
check_node_data(
root_nodes_data[node_id], exp_hdrs, exp_obj_count, 2)
orig_range_data = root_nodes_data[node_id][1]
orig_shard_ranges = [ShardRange.from_dict(r)
for r in orig_range_data]
# check first shard
shard_nodes_data = self.direct_get_container_shard_ranges(
orig_shard_ranges[0].account, orig_shard_ranges[0].container)
obj_count, bytes_used = check_shard_nodes_data(shard_nodes_data)
total_shard_object_count = obj_count
# check second shard
shard_nodes_data = self.direct_get_container_shard_ranges(
orig_shard_ranges[1].account, orig_shard_ranges[1].container)
obj_count, bytes_used = check_shard_nodes_data(shard_nodes_data)
total_shard_object_count += obj_count
self.assertEqual(exp_obj_count, total_shard_object_count)
# Now that everyone has shard ranges, run *everyone*
self.sharders.once(
additional_args='--partitions=%s' % self.brain.part)
# all root container nodes should now be in sharded state
root_nodes_data = self.direct_get_container_shard_ranges()
self.assertEqual(3, len(root_nodes_data))
for node_id, node_data in root_nodes_data.items():
with annotate_failure('Node id %s.' % node_id):
check_node_data(node_data, exp_hdrs, exp_obj_count, 2)
# run updaters to update the .shards account; shard containers have
# not updated the account since having objects replicated to them
self.updaters.once()
shard_cont_count, shard_obj_count = int_client.get_account_info(
orig_shard_ranges[0].account, [204])
self.assertEqual(2 * repeat[0], shard_cont_count)
# the shards account should always have zero object count to avoid
# double accounting
self.assertEqual(0, shard_obj_count)
# checking the listing also refreshes proxy container info cache so
# that the proxy becomes aware that container is sharded and will
# now look up the shard target for subsequent updates
self.assert_container_listing(obj_names)
# Before writing, kill the cache
self.memcache.delete(get_cache_key(
self.account, self.container_name, shard='updating'))
# delete objects from first shard range
first_shard_objects = [obj_name for obj_name in obj_names
if obj_name <= orig_shard_ranges[0].upper]
for obj in first_shard_objects:
client.delete_object(
self.url, self.token, self.container_name, obj)
with self.assertRaises(ClientException):
client.get_object(
self.url, self.token, self.container_name, obj)
second_shard_objects = [obj_name for obj_name in obj_names
if obj_name > orig_shard_ranges[1].lower]
self.assert_container_listing(second_shard_objects)
# put a new object 'alpha' in first shard range
self.put_objects([alpha])
second_shard_objects = [obj_name for obj_name in obj_names
if obj_name > orig_shard_ranges[1].lower]
self.assert_container_listing([alpha] + second_shard_objects)
# while the container servers are down, but the proxy still has
# container info cached from the recent listing, put another object;
# this update will lurk in an async pending until the updaters run
# again; because all the root container servers are down and
# therefore cannot respond to a GET for a redirect target, the
# object update will default to being targeted at the root container
self.stop_container_servers()
# Before writing, kill the cache
self.memcache.delete(get_cache_key(
self.account, self.container_name, shard='updating'))
self.put_objects([beta])
self.brain.servers.start()
async_pendings = self.gather_async_pendings(
self.get_all_object_nodes())
num_container_replicas = len(self.brain.nodes)
num_obj_replicas = self.policy.object_ring.replica_count
expected_num_updates = num_container_updates(
num_container_replicas, quorum_size(num_container_replicas),
num_obj_replicas, self.policy.quorum)
expected_num_pendings = min(expected_num_updates, num_obj_replicas)
# sanity check
with annotate_failure('policy %s. ' % self.policy):
self.assertLengthEqual(async_pendings, expected_num_pendings)
# root object count is not updated...
self.assert_container_object_count(len(obj_names))
self.assert_container_listing([alpha] + second_shard_objects)
root_nodes_data = self.direct_get_container_shard_ranges()
self.assertEqual(3, len(root_nodes_data))
for node_id, node_data in root_nodes_data.items():
with annotate_failure('Node id %s.' % node_id):
check_node_data(node_data, exp_hdrs, exp_obj_count, 2)
range_data = node_data[1]
self.assert_shard_range_lists_equal(
orig_range_data, range_data,
excludes=['meta_timestamp', 'state_timestamp'])
# ...until the sharders run and update root; reclaim tombstones so
# that the shard is shrinkable
shard_0_part = self.get_part_and_node_numbers(
orig_shard_ranges[0])[0]
for conf_index in self.configs['container-sharder'].keys():
self.run_custom_sharder(conf_index, {'reclaim_age': 0},
override_partitions=[shard_0_part])
exp_obj_count = len(second_shard_objects) + 1
self.assert_container_object_count(exp_obj_count)
self.assert_container_listing([alpha] + second_shard_objects)
# root sharder finds donor, acceptor pair and pushes changes
self.sharders.once(
additional_args='--partitions=%s' % self.brain.part)
self.assert_container_listing([alpha] + second_shard_objects)
# run sharder on donor to shrink and replicate to acceptor
self.run_sharders(orig_shard_ranges[0])
self.assert_container_listing([alpha] + second_shard_objects)
# run sharder on acceptor to update root with stats
self.run_sharders(orig_shard_ranges[1])
self.assert_container_listing([alpha] + second_shard_objects)
self.assert_container_object_count(len(second_shard_objects) + 1)
# check root container
root_nodes_data = self.direct_get_container_shard_ranges()
self.assertEqual(3, len(root_nodes_data))
exp_hdrs['X-Container-Object-Count'] = str(exp_obj_count)
for node_id, node_data in root_nodes_data.items():
with annotate_failure('Node id %s.' % node_id):
# NB now only *one* shard range in root
check_node_data(node_data, exp_hdrs, exp_obj_count, 1)
# the acceptor shard is intact..
shard_nodes_data = self.direct_get_container_shard_ranges(
orig_shard_ranges[1].account, orig_shard_ranges[1].container)
obj_count, bytes_used = check_shard_nodes_data(shard_nodes_data)
# all objects should now be in this shard
self.assertEqual(exp_obj_count, obj_count)
# the donor shard is also still intact
donor = orig_shard_ranges[0]
shard_nodes_data = self.direct_get_container_shard_ranges(
donor.account, donor.container)
# the donor's shard range will have the acceptor's projected stats;
# the donor also has a copy of the root shard range, which will be
# ignored; note: expected_shards does not include the sharded root
# range
obj_count, bytes_used = check_shard_nodes_data(
shard_nodes_data, expected_state='sharded', expected_shards=1,
exp_obj_count=len(second_shard_objects) + 1,
exp_sharded_root_range=True)
# but the donor is empty and so reports zero stats
self.assertEqual(0, obj_count)
self.assertEqual(0, bytes_used)
# check the donor own shard range state
part, nodes = self.brain.ring.get_nodes(
donor.account, donor.container)
for node in nodes:
with annotate_failure(node):
broker = self.get_broker(
part, node, donor.account, donor.container)
own_sr = broker.get_own_shard_range()
self.assertEqual(ShardRange.SHRUNK, own_sr.state)
self.assertTrue(own_sr.deleted)
# delete all the second shard's object apart from 'alpha'
for obj in second_shard_objects:
client.delete_object(
self.url, self.token, self.container_name, obj)
self.assert_container_listing([alpha])
# run sharders: second range should not shrink away yet because it
# has tombstones
self.sharders.once() # second shard updates root stats
self.assert_container_listing([alpha])
self.sharders.once() # root finds shrinkable shard
self.assert_container_listing([alpha])
self.sharders.once() # shards shrink themselves
self.assert_container_listing([alpha])
# the acceptor shard is intact...
shard_nodes_data = self.direct_get_container_shard_ranges(
orig_shard_ranges[1].account, orig_shard_ranges[1].container)
obj_count, bytes_used = check_shard_nodes_data(shard_nodes_data)
self.assertEqual(1, obj_count)
# run sharders to reclaim tombstones so that the second shard is
# shrinkable
shard_1_part = self.get_part_and_node_numbers(
orig_shard_ranges[1])[0]
for conf_index in self.configs['container-sharder'].keys():
self.run_custom_sharder(conf_index, {'reclaim_age': 0},
override_partitions=[shard_1_part])
self.assert_container_listing([alpha])
# run sharders so the second range shrinks away; this requires up
# to 2 cycles
self.sharders.once() # root finds shrinkable shard
self.assert_container_listing([alpha])
self.sharders.once() # shards shrink themselves
self.assert_container_listing([alpha])
# the second shard range has sharded and is empty
shard_nodes_data = self.direct_get_container_shard_ranges(
orig_shard_ranges[1].account, orig_shard_ranges[1].container)
check_shard_nodes_data(
shard_nodes_data, expected_state='sharded', expected_shards=1,
exp_obj_count=1)
# check root container
root_nodes_data = self.direct_get_container_shard_ranges()
self.assertEqual(3, len(root_nodes_data))
exp_hdrs = {'X-Backend-Sharding-State': 'collapsed',
# just the alpha object
'X-Container-Object-Count': '1'}
for node_id, node_data in root_nodes_data.items():
with annotate_failure('Node id %s.' % node_id):
# NB now no shard ranges in root
check_node_data(node_data, exp_hdrs, 0, 0)
# delete the alpha object
client.delete_object(
self.url, self.token, self.container_name, alpha)
# should now be able to delete the *apparently* empty container
client.delete_container(self.url, self.token, self.container_name)
self.assert_container_not_found()
self.direct_head_container(expect_failure=True)
# and the container stays deleted even after sharders run and shard
# send updates
self.sharders.once()
self.assert_container_not_found()
self.direct_head_container(expect_failure=True)
# now run updaters to deal with the async pending for the beta
# object
self.updaters.once()
# and the container is revived!
self.assert_container_listing([beta])
# finally, clear out the container
client.delete_object(
self.url, self.token, self.container_name, beta)
do_shard_then_shrink()
# repeat from starting point of a collapsed and previously deleted
# container
do_shard_then_shrink()
def test_delete_root_reclaim(self):
all_obj_names = self._make_object_names(self.max_shard_size)
self.put_objects(all_obj_names)
# Shard the container
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
for n in self.brain.node_numbers:
self.sharders.once(
number=n, additional_args='--partitions=%s' % self.brain.part)
# sanity checks
for node in self.brain.nodes:
self.assert_container_state(node, 'sharded', 2)
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
self.assert_container_post_ok('sharded')
self.assert_container_listing(all_obj_names)
# delete all objects - updates redirected to shards
self.delete_objects(all_obj_names)
self.assert_container_listing([])
self.assert_container_post_ok('has objects')
# root not yet updated with shard stats
self.assert_container_object_count(len(all_obj_names))
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
# run sharder on shard containers to update root stats
shard_ranges = self.get_container_shard_ranges()
self.assertLengthEqual(shard_ranges, 2)
self.run_sharders(shard_ranges)
self.assert_container_listing([])
self.assert_container_post_ok('empty')
self.assert_container_object_count(0)
# and now we can delete it!
client.delete_container(self.url, self.token, self.container_name)
self.assert_container_post_fails('deleted')
self.assert_container_not_found()
# see if it will reclaim
Manager(['container-updater']).once()
for conf_file in self.configs['container-replicator'].values():
conf = utils.readconf(conf_file, 'container-replicator')
conf['reclaim_age'] = 0
ContainerReplicator(conf).run_once()
# we don't expect warnings from sharder root audits
for conf_index in self.configs['container-sharder'].keys():
sharder = self.run_custom_sharder(conf_index, {})
self.assertEqual([], sharder.logger.get_lines_for_level('warning'))
# until the root wants to start reclaiming but we haven't shrunk yet!
found_warning = False
for conf_index in self.configs['container-sharder'].keys():
sharder = self.run_custom_sharder(conf_index, {'reclaim_age': 0})
warnings = sharder.logger.get_lines_for_level('warning')
if warnings:
self.assertTrue(warnings[0].startswith(
'Reclaimable db stuck waiting for shrinking'))
self.assertEqual(1, len(warnings))
found_warning = True
self.assertTrue(found_warning)
# TODO: shrink empty shards and assert everything reclaims
def _setup_replication_scenario(self, num_shards, extra_objs=('alpha',)):
# Get cluster to state where 2 replicas are sharding or sharded but 3rd
# replica is unsharded and has an object that the first 2 are missing.
# put objects while all servers are up
obj_names = self._make_object_names(
num_shards * self.max_shard_size // 2)
self.put_objects(obj_names)
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
node_numbers = self.brain.node_numbers
# run replicators first time to get sync points set
self.replicators.once()
# stop the leader node and one other server
self.stop_container_servers(slice(0, 2))
# ...then put one more object in first shard range namespace
self.put_objects(extra_objs)
# start leader and first other server, stop third server
for number in node_numbers[:2]:
self.brain.servers.start(number=number)
self.brain.servers.stop(number=node_numbers[2])
self.assert_container_listing(obj_names) # sanity check
# shard the container - first two shard ranges are cleaved
for number in node_numbers[:2]:
self.sharders.once(
number=number,
additional_args='--partitions=%s' % self.brain.part)
self.assert_container_listing(obj_names) # sanity check
return obj_names
def test_replication_to_sharding_container(self):
# verify that replication from an unsharded replica to a sharding
# replica does not replicate rows but does replicate shard ranges
obj_names = self._setup_replication_scenario(3)
for node in self.brain.nodes[:2]:
self.assert_container_state(node, 'sharding', 3)
# bring third server back up, run replicator
node_numbers = self.brain.node_numbers
self.brain.servers.start(number=node_numbers[2])
# sanity check...
self.assert_container_state(self.brain.nodes[2], 'unsharded', 0)
self.replicators.once(number=node_numbers[2])
# check db files unchanged
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['shard_dbs'], 2)
self.assertLengthEqual(found['normal_dbs'], 3)
# the 'alpha' object is NOT replicated to the two sharded nodes
for node in self.brain.nodes[:2]:
broker = self.get_broker(self.brain.part, node)
with annotate_failure(
'Node id %s in %s' % (node['id'], self.brain.nodes[:2])):
self.assertFalse(broker.get_objects())
self.assert_container_state(node, 'sharding', 3)
self.brain.servers.stop(number=node_numbers[2])
self.assert_container_listing(obj_names)
# all nodes now have shard ranges
self.brain.servers.start(number=node_numbers[2])
node_data = self.direct_get_container_shard_ranges()
for node, (hdrs, shard_ranges) in node_data.items():
with annotate_failure(node):
self.assert_shard_ranges_contiguous(3, shard_ranges)
# complete cleaving third shard range on first two nodes
self.brain.servers.stop(number=node_numbers[2])
for number in node_numbers[:2]:
self.sharders.once(
number=number,
additional_args='--partitions=%s' % self.brain.part)
# ...and now they are in sharded state
self.assert_container_state(self.brain.nodes[0], 'sharded', 3)
self.assert_container_state(self.brain.nodes[1], 'sharded', 3)
# ...still no 'alpha' object in listing
self.assert_container_listing(obj_names)
# run the sharder on the third server, alpha object is included in
# shards that it cleaves
self.brain.servers.start(number=node_numbers[2])
self.assert_container_state(self.brain.nodes[2], 'unsharded', 3)
self.sharders.once(number=node_numbers[2],
additional_args='--partitions=%s' % self.brain.part)
self.assert_container_state(self.brain.nodes[2], 'sharding', 3)
self.sharders.once(number=node_numbers[2],
additional_args='--partitions=%s' % self.brain.part)
self.assert_container_state(self.brain.nodes[2], 'sharded', 3)
self.assert_container_listing(['alpha'] + obj_names)
def test_replication_to_sharded_container(self):
# verify that replication from an unsharded replica to a sharded
# replica does not replicate rows but does replicate shard ranges
obj_names = self._setup_replication_scenario(2)
for node in self.brain.nodes[:2]:
self.assert_container_state(node, 'sharded', 2)
# sanity check
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['shard_dbs'], 2)
self.assertLengthEqual(found['normal_dbs'], 1)
for node in self.brain.nodes[:2]:
broker = self.get_broker(self.brain.part, node)
info = broker.get_info()
with annotate_failure(
'Node id %s in %s' % (node['id'], self.brain.nodes[:2])):
self.assertEqual(len(obj_names), info['object_count'])
self.assertFalse(broker.get_objects())
# bring third server back up, run replicator
node_numbers = self.brain.node_numbers
self.brain.servers.start(number=node_numbers[2])
# sanity check...
self.assert_container_state(self.brain.nodes[2], 'unsharded', 0)
self.replicators.once(number=node_numbers[2])
# check db files unchanged
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['shard_dbs'], 2)
self.assertLengthEqual(found['normal_dbs'], 1)
# the 'alpha' object is NOT replicated to the two sharded nodes
for node in self.brain.nodes[:2]:
broker = self.get_broker(self.brain.part, node)
with annotate_failure(
'Node id %s in %s' % (node['id'], self.brain.nodes[:2])):
self.assertFalse(broker.get_objects())
self.assert_container_state(node, 'sharded', 2)
self.brain.servers.stop(number=node_numbers[2])
self.assert_container_listing(obj_names)
# all nodes now have shard ranges
self.brain.servers.start(number=node_numbers[2])
node_data = self.direct_get_container_shard_ranges()
for node, (hdrs, shard_ranges) in node_data.items():
with annotate_failure(node):
self.assert_shard_ranges_contiguous(2, shard_ranges)
# run the sharder on the third server, alpha object is included in
# shards that it cleaves
self.assert_container_state(self.brain.nodes[2], 'unsharded', 2)
self.sharders.once(number=node_numbers[2],
additional_args='--partitions=%s' % self.brain.part)
self.assert_container_state(self.brain.nodes[2], 'sharded', 2)
self.assert_container_listing(['alpha'] + obj_names)
def test_sharding_requires_sufficient_replication(self):
# verify that cleaving only progresses if each cleaved shard range is
# sufficiently replicated
# put enough objects for 4 shard ranges
obj_names = self._make_object_names(2 * self.max_shard_size)
self.put_objects(obj_names)
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
node_numbers = self.brain.node_numbers
leader_node = self.brain.nodes[0]
leader_num = node_numbers[0]
# run replicators first time to get sync points set
self.replicators.once()
# start sharding on the leader node
self.sharders.once(number=leader_num,
additional_args='--partitions=%s' % self.brain.part)
shard_ranges = self.assert_container_state(leader_node, 'sharding', 4)
self.assertEqual([ShardRange.CLEAVED] * 2 + [ShardRange.CREATED] * 2,
[sr.state for sr in shard_ranges])
# Check the current progress. It shouldn't be complete.
recon = direct_client.direct_get_recon(leader_node, "sharding")
expected_in_progress = {'all': [{'account': 'AUTH_test',
'active': 0,
'cleaved': 2,
'created': 2,
'found': 0,
'db_state': 'sharding',
'state': 'sharding',
'error': None,
'file_size': mock.ANY,
'meta_timestamp': mock.ANY,
'node_index': 0,
'object_count': len(obj_names),
'container': mock.ANY,
'path': mock.ANY,
'root': mock.ANY}]}
actual = recon['sharding_stats']['sharding']['sharding_in_progress']
self.assertEqual(expected_in_progress, actual)
# stop *all* container servers for third shard range
sr_part, sr_node_nums = self.get_part_and_node_numbers(shard_ranges[2])
for node_num in sr_node_nums:
self.brain.servers.stop(number=node_num)
# attempt to continue sharding on the leader node
self.sharders.once(number=leader_num,
additional_args='--partitions=%s' % self.brain.part)
# no cleaving progress was made
for node_num in sr_node_nums:
self.brain.servers.start(number=node_num)
shard_ranges = self.assert_container_state(leader_node, 'sharding', 4)
self.assertEqual([ShardRange.CLEAVED] * 2 + [ShardRange.CREATED] * 2,
[sr.state for sr in shard_ranges])
# stop two of the servers for third shard range, not including any
# server that happens to be the leader node
stopped = []
for node_num in sr_node_nums:
if node_num != leader_num:
self.brain.servers.stop(number=node_num)
stopped.append(node_num)
if len(stopped) >= 2:
break
self.assertLengthEqual(stopped, 2) # sanity check
# attempt to continue sharding on the leader node
self.sharders.once(number=leader_num,
additional_args='--partitions=%s' % self.brain.part)
# no cleaving progress was made
for node_num in stopped:
self.brain.servers.start(number=node_num)
shard_ranges = self.assert_container_state(leader_node, 'sharding', 4)
self.assertEqual([ShardRange.CLEAVED] * 2 + [ShardRange.CREATED] * 2,
[sr.state for sr in shard_ranges])
# stop just one of the servers for third shard range
stopped = []
for node_num in sr_node_nums:
if node_num != leader_num:
self.brain.servers.stop(number=node_num)
stopped.append(node_num)
break
self.assertLengthEqual(stopped, 1) # sanity check
# attempt to continue sharding the container
self.sharders.once(number=leader_num,
additional_args='--partitions=%s' % self.brain.part)
# this time cleaving completed
self.brain.servers.start(number=stopped[0])
shard_ranges = self.assert_container_state(leader_node, 'sharded', 4)
self.assertEqual([ShardRange.ACTIVE] * 4,
[sr.state for sr in shard_ranges])
# Check the leader's progress again, this time it should be complete
recon = direct_client.direct_get_recon(leader_node, "sharding")
expected_in_progress = {'all': [{'account': 'AUTH_test',
'active': 4,
'cleaved': 0,
'created': 0,
'found': 0,
'db_state': 'sharded',
'state': 'sharded',
'error': None,
'file_size': mock.ANY,
'meta_timestamp': mock.ANY,
'node_index': 0,
'object_count': len(obj_names),
'container': mock.ANY,
'path': mock.ANY,
'root': mock.ANY}]}
actual = recon['sharding_stats']['sharding']['sharding_in_progress']
self.assertEqual(expected_in_progress, actual)
def test_sharded_delete(self):
all_obj_names = self._make_object_names(self.max_shard_size)
self.put_objects(all_obj_names)
# Shard the container
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
for n in self.brain.node_numbers:
self.sharders.once(
number=n, additional_args='--partitions=%s' % self.brain.part)
# sanity checks
for node in self.brain.nodes:
self.assert_container_state(node, 'sharded', 2)
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
self.assert_container_post_ok('sharded')
self.assert_container_listing(all_obj_names)
# delete all objects - updates redirected to shards
self.delete_objects(all_obj_names)
self.assert_container_listing([])
self.assert_container_post_ok('has objects')
# root not yet updated with shard stats
self.assert_container_object_count(len(all_obj_names))
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
# run sharder on shard containers to update root stats
shard_ranges = self.get_container_shard_ranges()
self.assertLengthEqual(shard_ranges, 2)
self.run_sharders(shard_ranges)
self.assert_container_listing([])
self.assert_container_post_ok('empty')
self.assert_container_object_count(0)
# put a new object - update redirected to shard
self.put_objects(['alpha'])
self.assert_container_listing(['alpha'])
self.assert_container_object_count(0)
# before root learns about new object in shard, delete the container
client.delete_container(self.url, self.token, self.container_name)
self.assert_container_post_fails('deleted')
self.assert_container_not_found()
# run the sharders to update root with shard stats
self.run_sharders(shard_ranges)
self.assert_container_listing(['alpha'])
self.assert_container_object_count(1)
self.assert_container_delete_fails()
self.assert_container_post_ok('revived')
def test_object_update_redirection(self):
all_obj_names = self._make_object_names(self.max_shard_size)
self.put_objects(all_obj_names)
# Shard the container
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
for n in self.brain.node_numbers:
self.sharders.once(
number=n, additional_args='--partitions=%s' % self.brain.part)
# sanity checks
for node in self.brain.nodes:
self.assert_container_state(node, 'sharded', 2)
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
self.assert_container_post_ok('sharded')
self.assert_container_listing(all_obj_names)
# delete all objects - updates redirected to shards
self.delete_objects(all_obj_names)
self.assert_container_listing([])
self.assert_container_post_ok('has objects')
# run sharder on shard containers to update root stats; reclaim
# the tombstones so that the shards appear to be shrinkable
shard_ranges = self.get_container_shard_ranges()
self.assertLengthEqual(shard_ranges, 2)
shard_partitions = [self.get_part_and_node_numbers(sr)[0]
for sr in shard_ranges]
for conf_index in self.configs['container-sharder'].keys():
self.run_custom_sharder(conf_index, {'reclaim_age': 0},
override_partitions=shard_partitions)
self.assert_container_object_count(0)
# First, test a misplaced object moving from one shard to another.
# with one shard server down, put a new 'alpha' object...
shard_part, shard_nodes = self.get_part_and_node_numbers(
shard_ranges[0])
self.brain.servers.stop(number=shard_nodes[2])
self.put_objects(['alpha'])
self.assert_container_listing(['alpha'])
self.assert_container_object_count(0)
self.assertLengthEqual(
self.gather_async_pendings(self.get_all_object_nodes()), 1)
self.brain.servers.start(number=shard_nodes[2])
# run sharder on root to discover first shrink candidate
self.sharders.once(additional_args='--partitions=%s' % self.brain.part)
# then run sharder on the shard node without the alpha object
self.sharders.once(additional_args='--partitions=%s' % shard_part,
number=shard_nodes[2])
# root sees first shard has shrunk
self.assertLengthEqual(self.get_container_shard_ranges(), 1)
# cached shard ranges still show first shard range as active so listing
# will include 'alpha' if the shard listing is fetched from node (0,1)
# but not if fetched from node 2; to achieve predictability we use
# x-newest to use shard ranges from the root so that only the second
# shard range is used for listing, so alpha object not in listing
self.assert_container_listing([], req_hdrs={'x-newest': 'true'})
self.assert_container_object_count(0)
# run the updaters: the async pending update will be redirected from
# shrunk shard to second shard
self.updaters.once()
self.assert_container_listing(['alpha'])
self.assert_container_object_count(0) # root not yet updated
# then run sharder on other shard nodes to complete shrinking
for number in shard_nodes[:2]:
self.sharders.once(additional_args='--partitions=%s' % shard_part,
number=number)
# and get root updated
self.run_sharders(shard_ranges[1])
self.assert_container_listing(['alpha'])
self.assert_container_object_count(1)
self.assertLengthEqual(self.get_container_shard_ranges(), 1)
# Now we have just one active shard, test a misplaced object moving
# from that shard to the root.
# with one shard server down, delete 'alpha' and put a 'beta' object...
shard_part, shard_nodes = self.get_part_and_node_numbers(
shard_ranges[1])
self.brain.servers.stop(number=shard_nodes[2])
# Before writing, kill the cache
self.memcache.delete(get_cache_key(
self.account, self.container_name, shard='updating'))
self.delete_objects(['alpha'])
self.put_objects(['beta'])
self.assert_container_listing(['beta'])
self.assert_container_object_count(1)
self.assertLengthEqual(
self.gather_async_pendings(self.get_all_object_nodes()), 2)
self.brain.servers.start(number=shard_nodes[2])
# run sharder on root to discover second shrink candidate - root is not
# yet aware of the beta object
self.sharders.once(additional_args='--partitions=%s' % self.brain.part)
# then run sharder on the shard node without the beta object, to shrink
# it to root - note this moves stale copy of alpha to the root db
self.sharders.once(additional_args='--partitions=%s' % shard_part,
number=shard_nodes[2])
# now there are no active shards
self.assertFalse(self.get_container_shard_ranges())
# with other two shard servers down, listing won't find beta object
for number in shard_nodes[:2]:
self.brain.servers.stop(number=number)
self.assert_container_listing(['alpha'])
self.assert_container_object_count(1)
# run the updaters: the async pending update will be redirected from
# shrunk shard to the root
self.updaters.once()
self.assert_container_listing(['beta'])
self.assert_container_object_count(1)
def test_misplaced_object_movement(self):
def merge_object(shard_range, name, deleted=0):
# it's hard to get a test to put a misplaced object into a shard,
# so this hack is used to force an object record directly into a shard
# container db. Note: the actual object won't exist, we're just
# using this to test object records in container dbs.
shard_part, shard_nodes = self.brain.ring.get_nodes(
shard_range.account, shard_range.container)
shard_broker = self.get_broker(
shard_part, shard_nodes[0], shard_range.account,
shard_range.container)
shard_broker.merge_items(
[{'name': name, 'created_at': Timestamp.now().internal,
'size': 0, 'content_type': 'text/plain',
'etag': md5(usedforsecurity=False).hexdigest(),
'deleted': deleted,
'storage_policy_index': shard_broker.storage_policy_index}])
return shard_nodes[0]
all_obj_names = self._make_object_names(self.max_shard_size)
self.put_objects(all_obj_names)
# Shard the container
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
for n in self.brain.node_numbers:
self.sharders.once(
number=n, additional_args='--partitions=%s' % self.brain.part)
# sanity checks
for node in self.brain.nodes:
self.assert_container_state(node, 'sharded', 2)
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
self.assert_container_post_ok('sharded')
self.assert_container_listing(all_obj_names)
# delete all objects in first shard range - updates redirected to shard
shard_ranges = self.get_container_shard_ranges()
self.assertLengthEqual(shard_ranges, 2)
shard_0_objects = [name for name in all_obj_names
if name in shard_ranges[0]]
shard_1_objects = [name for name in all_obj_names
if name in shard_ranges[1]]
self.delete_objects(shard_0_objects)
self.assert_container_listing(shard_1_objects)
self.assert_container_post_ok('has objects')
# run sharder on first shard container to update root stats; reclaim
# the tombstones so that the shard appears to be shrinkable
shard_0_part = self.get_part_and_node_numbers(shard_ranges[0])[0]
for conf_index in self.configs['container-sharder'].keys():
self.run_custom_sharder(conf_index, {'reclaim_age': 0},
override_partitions=[shard_0_part])
self.assert_container_object_count(len(shard_1_objects))
# First, test a misplaced object moving from one shard to another.
# run sharder on root to discover first shrink candidate
self.sharders.once(additional_args='--partitions=%s' % self.brain.part)
# then run sharder on first shard range to shrink it
self.run_sharders(shard_ranges[0])
# force a misplaced object into the shrunken shard range to simulate
# a client put that was in flight when it started to shrink
misplaced_node = merge_object(shard_ranges[0], 'alpha', deleted=0)
# root sees first shard has shrunk, only second shard range used for
# listing so alpha object not in listing
self.assertLengthEqual(self.get_container_shard_ranges(), 1)
self.assert_container_listing(shard_1_objects)
self.assert_container_object_count(len(shard_1_objects))
# until sharder runs on that node to move the misplaced object to the
# second shard range
shard_part, shard_nodes_numbers = self.get_part_and_node_numbers(
shard_ranges[0])
self.sharders.once(additional_args='--partitions=%s' % shard_part,
number=misplaced_node['id'] + 1)
self.assert_container_listing(['alpha'] + shard_1_objects)
# root not yet updated
self.assert_container_object_count(len(shard_1_objects))
# run sharder to get root updated
self.run_sharders(shard_ranges[1])
self.assert_container_listing(['alpha'] + shard_1_objects)
self.assert_container_object_count(len(shard_1_objects) + 1)
self.assertLengthEqual(self.get_container_shard_ranges(), 1)
# Now we have just one active shard, test a misplaced object moving
# from that shard to the root.
# delete most objects from second shard range, reclaim the tombstones,
# and run sharder on root to discover second shrink candidate
self.delete_objects(shard_1_objects)
shard_1_part = self.get_part_and_node_numbers(shard_ranges[1])[0]
for conf_index in self.configs['container-sharder'].keys():
self.run_custom_sharder(conf_index, {'reclaim_age': 0},
override_partitions=[shard_1_part])
self.sharders.once(additional_args='--partitions=%s' % self.brain.part)
# then run sharder on the shard node to shrink it to root - note this
# moves alpha to the root db
self.run_sharders(shard_ranges[1])
# now there are no active shards
self.assertFalse(self.get_container_shard_ranges())
# force some misplaced object updates into second shrunk shard range
merge_object(shard_ranges[1], 'alpha', deleted=1)
misplaced_node = merge_object(shard_ranges[1], 'beta', deleted=0)
# root is not yet aware of them
self.assert_container_listing(['alpha'])
self.assert_container_object_count(1)
# until sharder runs on that node to move the misplaced object
shard_part, shard_nodes_numbers = self.get_part_and_node_numbers(
shard_ranges[1])
self.sharders.once(additional_args='--partitions=%s' % shard_part,
number=misplaced_node['id'] + 1)
self.assert_container_listing(['beta'])
self.assert_container_object_count(1)
self.assert_container_delete_fails()
def test_misplaced_object_movement_from_deleted_shard(self):
def merge_object(shard_range, name, deleted=0):
# it's hard to get a test to put a misplaced object into a shard,
# so this hack is used to force an object record directly into a shard
# container db. Note: the actual object won't exist, we're just
# using this to test object records in container dbs.
shard_part, shard_nodes = self.brain.ring.get_nodes(
shard_range.account, shard_range.container)
shard_broker = self.get_shard_broker(shard_range)
# In this test we want to merge into a deleted container shard
shard_broker.delete_db(Timestamp.now().internal)
shard_broker.merge_items(
[{'name': name, 'created_at': Timestamp.now().internal,
'size': 0, 'content_type': 'text/plain',
'etag': md5(usedforsecurity=False).hexdigest(),
'deleted': deleted,
'storage_policy_index': shard_broker.storage_policy_index}])
return shard_nodes[0]
all_obj_names = self._make_object_names(self.max_shard_size)
self.put_objects(all_obj_names)
# Shard the container
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
for n in self.brain.node_numbers:
self.sharders.once(
number=n, additional_args='--partitions=%s' % self.brain.part)
# sanity checks
for node in self.brain.nodes:
self.assert_container_state(node, 'sharded', 2)
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
self.assert_container_post_ok('sharded')
self.assert_container_listing(all_obj_names)
# delete all objects in first shard range - updates redirected to shard
shard_ranges = self.get_container_shard_ranges()
self.assertLengthEqual(shard_ranges, 2)
shard_0_objects = [name for name in all_obj_names
if name in shard_ranges[0]]
shard_1_objects = [name for name in all_obj_names
if name in shard_ranges[1]]
self.delete_objects(shard_0_objects)
self.assert_container_listing(shard_1_objects)
self.assert_container_post_ok('has objects')
# run sharder on first shard container to update root stats
shard_0_part = self.get_part_and_node_numbers(shard_ranges[0])[0]
for conf_index in self.configs['container-sharder'].keys():
self.run_custom_sharder(conf_index, {'reclaim_age': 0},
override_partitions=[shard_0_part])
self.assert_container_object_count(len(shard_1_objects))
# First, test a misplaced object moving from one shard to another.
# run sharder on root to discover first shrink candidate
self.sharders.once(additional_args='--partitions=%s' % self.brain.part)
# then run sharder on first shard range to shrink it
self.run_sharders(shard_ranges[0])
# force a misplaced object into the shrunken shard range to simulate
# a client put that was in flight when it started to shrink
misplaced_node = merge_object(shard_ranges[0], 'alpha', deleted=0)
# root sees first shard has shrunk, only second shard range used for
# listing so alpha object not in listing
self.assertLengthEqual(self.get_container_shard_ranges(), 1)
self.assert_container_listing(shard_1_objects)
self.assert_container_object_count(len(shard_1_objects))
# until sharder runs on that node to move the misplaced object to the
# second shard range
shard_part, shard_nodes_numbers = self.get_part_and_node_numbers(
shard_ranges[0])
self.sharders.once(additional_args='--partitions=%s' % shard_part,
number=misplaced_node['id'] + 1)
self.assert_container_listing(['alpha'] + shard_1_objects)
# root not yet updated
self.assert_container_object_count(len(shard_1_objects))
# check the deleted shard did not push the wrong root path into the
# other container
for replica in 0, 1, 2:
shard_x_broker = self.get_shard_broker(shard_ranges[1], replica)
self.assertEqual("%s/%s" % (self.account, self.container_name),
shard_x_broker.root_path)
# run the sharder on the existing shard to update the root stats
# to prove the misplaced object was moved to the other shard _and_
# the other shard still has the correct root because it updates root's
# stats
self.run_sharders(shard_ranges[1])
self.assert_container_object_count(len(shard_1_objects) + 1)
def test_replication_to_sharded_container_from_unsharded_old_primary(self):
primary_ids = [n['id'] for n in self.brain.nodes]
handoff_node = next(n for n in self.brain.ring.devs
if n['id'] not in primary_ids)
# start with two sharded replicas and one unsharded with extra object
obj_names = self._setup_replication_scenario(2)
for node in self.brain.nodes[:2]:
self.assert_container_state(node, 'sharded', 2)
# Fake a ring change - copy unsharded db which has no shard ranges to a
# handoff to create illusion of a new unpopulated primary node
node_numbers = self.brain.node_numbers
new_primary_node = self.brain.nodes[2]
new_primary_node_number = node_numbers[2]
new_primary_dir, container_hash = self.get_storage_dir(
self.brain.part, new_primary_node)
old_primary_dir, container_hash = self.get_storage_dir(
self.brain.part, handoff_node)
utils.mkdirs(os.path.dirname(old_primary_dir))
shutil.move(new_primary_dir, old_primary_dir)
# make the cluster more or less "healthy" again
self.brain.servers.start(number=new_primary_node_number)
# get a db on every node...
client.put_container(self.url, self.token, self.container_name)
self.assertTrue(os.path.exists(os.path.join(
new_primary_dir, container_hash + '.db')))
found = self.categorize_container_dir_content()
self.assertLengthEqual(found['normal_dbs'], 1) # "new" primary
self.assertLengthEqual(found['shard_dbs'], 2) # existing primaries
# catastrophic failure! drive dies and is replaced on unchanged primary
failed_node = self.brain.nodes[0]
failed_dir, _container_hash = self.get_storage_dir(
self.brain.part, failed_node)
shutil.rmtree(failed_dir)
# replicate the "old primary" to everybody except the "new primary"
self.brain.servers.stop(number=new_primary_node_number)
self.replicators.once(number=handoff_node['id'] + 1)
# We're willing to rsync the retiring db to the failed primary.
# This may or may not have shard ranges, depending on the order in
# which we hit the primaries, but it definitely *doesn't* have an
# epoch in its name yet. All objects are replicated.
self.assertTrue(os.path.exists(os.path.join(
failed_dir, container_hash + '.db')))
self.assertLengthEqual(os.listdir(failed_dir), 1)
broker = self.get_broker(self.brain.part, failed_node)
self.assertLengthEqual(broker.get_objects(), len(obj_names) + 1)
# The other out-of-date primary is within usync range but objects are
# not replicated to it because the handoff db learns about shard ranges
broker = self.get_broker(self.brain.part, self.brain.nodes[1])
self.assertLengthEqual(broker.get_objects(), 0)
# Handoff db still exists and now has shard ranges!
self.assertTrue(os.path.exists(os.path.join(
old_primary_dir, container_hash + '.db')))
broker = self.get_broker(self.brain.part, handoff_node)
shard_ranges = broker.get_shard_ranges()
self.assertLengthEqual(shard_ranges, 2)
self.assert_container_state(handoff_node, 'unsharded', 2)
# Replicate again, this time *including* "new primary"
self.brain.servers.start(number=new_primary_node_number)
self.replicators.once(number=handoff_node['id'] + 1)
# Ordinarily, we would have rsync_then_merge'd to "new primary"
# but instead we wait
broker = self.get_broker(self.brain.part, new_primary_node)
self.assertLengthEqual(broker.get_objects(), 0)
shard_ranges = broker.get_shard_ranges()
self.assertLengthEqual(shard_ranges, 2)
# so the next time the sharder comes along, it can push rows out
# and delete the big db
self.sharders.once(number=handoff_node['id'] + 1,
additional_args='--partitions=%s' % self.brain.part)
self.assert_container_state(handoff_node, 'sharded', 2)
self.assertFalse(os.path.exists(os.path.join(
old_primary_dir, container_hash + '.db')))
# the sharded db hangs around until replication confirms durability
# first attempt is not sufficiently successful
self.brain.servers.stop(number=node_numbers[0])
self.replicators.once(number=handoff_node['id'] + 1)
self.assertTrue(os.path.exists(old_primary_dir))
self.assert_container_state(handoff_node, 'sharded', 2)
# second attempt is successful and handoff db is deleted
self.brain.servers.start(number=node_numbers[0])
self.replicators.once(number=handoff_node['id'] + 1)
self.assertFalse(os.path.exists(old_primary_dir))
# run all the sharders, get us into a consistent state
self.sharders.once(additional_args='--partitions=%s' % self.brain.part)
self.assert_container_listing(['alpha'] + obj_names)
def test_replication_to_empty_new_primary_from_sharding_old_primary(self):
primary_ids = [n['id'] for n in self.brain.nodes]
handoff_node = next(n for n in self.brain.ring.devs
if n['id'] not in primary_ids)
num_shards = 3
obj_names = self._make_object_names(
num_shards * self.max_shard_size // 2)
self.put_objects(obj_names)
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
# run replicators first time to get sync points set
self.replicators.once()
# start sharding on only the leader node
leader_node = self.brain.nodes[0]
leader_node_number = self.brain.node_numbers[0]
self.sharders.once(number=leader_node_number)
self.assert_container_state(leader_node, 'sharding', 3)
for node in self.brain.nodes[1:]:
self.assert_container_state(node, 'unsharded', 3)
# Fake a ring change - copy leader node db to a handoff to create
# illusion of a new unpopulated primary leader node
new_primary_dir, container_hash = self.get_storage_dir(
self.brain.part, leader_node)
old_primary_dir, container_hash = self.get_storage_dir(
self.brain.part, handoff_node)
utils.mkdirs(os.path.dirname(old_primary_dir))
shutil.move(new_primary_dir, old_primary_dir)
self.assert_container_state(handoff_node, 'sharding', 3)
# run replicator on handoff node to create a fresh db on new primary
self.assertFalse(os.path.exists(new_primary_dir))
self.replicators.once(number=handoff_node['id'] + 1)
self.assertTrue(os.path.exists(new_primary_dir))
self.assert_container_state(leader_node, 'sharded', 3)
broker = self.get_broker(self.brain.part, leader_node)
shard_ranges = broker.get_shard_ranges()
self.assertLengthEqual(shard_ranges, 3)
self.assertEqual(
[ShardRange.CLEAVED, ShardRange.CLEAVED, ShardRange.CREATED],
[sr.state for sr in shard_ranges])
# db still exists on handoff
self.assertTrue(os.path.exists(old_primary_dir))
self.assert_container_state(handoff_node, 'sharding', 3)
# continue sharding it...
self.sharders.once(number=handoff_node['id'] + 1)
self.assert_container_state(leader_node, 'sharded', 3)
# now that the handoff is fully sharded, the replicator will delete it
self.replicators.once(number=handoff_node['id'] + 1)
self.assertFalse(os.path.exists(old_primary_dir))
# all primaries now have active shard ranges but only one is in sharded
# state
self.assert_container_state(leader_node, 'sharded', 3)
for node in self.brain.nodes[1:]:
self.assert_container_state(node, 'unsharded', 3)
node_data = self.direct_get_container_shard_ranges()
for node_id, (hdrs, shard_ranges) in node_data.items():
with annotate_failure(
'node id %s from %s' % (node_id, node_data.keys)):
self.assert_shard_range_state(ShardRange.ACTIVE, shard_ranges)
# check handoff cleaved all objects before it was deleted - stop all
# but leader node so that listing is fetched from shards
for number in self.brain.node_numbers[1:3]:
self.brain.servers.stop(number=number)
self.assert_container_listing(obj_names)
for number in self.brain.node_numbers[1:3]:
self.brain.servers.start(number=number)
self.sharders.once()
self.assert_container_state(leader_node, 'sharded', 3)
for node in self.brain.nodes[1:]:
self.assert_container_state(node, 'sharding', 3)
self.sharders.once()
for node in self.brain.nodes:
self.assert_container_state(node, 'sharded', 3)
self.assert_container_listing(obj_names)
def test_sharded_account_updates(self):
# verify that .shards account updates have zero object count and bytes
# to avoid double accounting
all_obj_names = self._make_object_names(self.max_shard_size)
self.put_objects(all_obj_names, contents='xyz')
# Shard the container into 2 shards
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
for n in self.brain.node_numbers:
self.sharders.once(
number=n, additional_args='--partitions=%s' % self.brain.part)
# sanity checks
for node in self.brain.nodes:
shard_ranges = self.assert_container_state(node, 'sharded', 2)
self.assert_container_delete_fails()
self.assert_container_has_shard_sysmeta()
self.assert_container_post_ok('sharded')
self.assert_container_listing(all_obj_names)
# run the updaters to get account stats updated
self.updaters.once()
# check user account stats
metadata = self.internal_client.get_account_metadata(self.account)
self.assertEqual(1, int(metadata.get('x-account-container-count')))
self.assertEqual(self.max_shard_size,
int(metadata.get('x-account-object-count')))
self.assertEqual(3 * self.max_shard_size,
int(metadata.get('x-account-bytes-used')))
# check hidden .shards account stats
metadata = self.internal_client.get_account_metadata(
shard_ranges[0].account)
self.assertEqual(2, int(metadata.get('x-account-container-count')))
self.assertEqual(0, int(metadata.get('x-account-object-count')))
self.assertEqual(0, int(metadata.get('x-account-bytes-used')))
class TestContainerShardingMoreUTF8(TestContainerSharding):
def _make_object_names(self, number):
# override default with names that include non-ascii chars
name_length = self.cluster_info['swift']['max_object_name_length']
obj_names = []
for x in range(number):
name = (u'obj-\u00e4\u00ea\u00ec\u00f2\u00fb-%04d' % x)
name = name.encode('utf8').ljust(name_length, b'o')
if not six.PY2:
name = name.decode('utf8')
obj_names.append(name)
return obj_names
def _setup_container_name(self):
# override default with max length name that includes non-ascii chars
super(TestContainerShardingMoreUTF8, self)._setup_container_name()
name_length = self.cluster_info['swift']['max_container_name_length']
cont_name = \
self.container_name + u'-\u00e4\u00ea\u00ec\u00f2\u00fb\u1234'
self.container_name = cont_name.encode('utf8').ljust(name_length, b'x')
if not six.PY2:
self.container_name = self.container_name.decode('utf8')
class TestManagedContainerSharding(BaseTestContainerSharding):
'''Test sharding using swift-manage-shard-ranges'''
def sharders_once(self, **kwargs):
# inhibit auto_sharding regardless of the config setting
additional_args = kwargs.get('additional_args', [])
if not isinstance(additional_args, list):
additional_args = [additional_args]
additional_args.append('--no-auto-shard')
kwargs['additional_args'] = additional_args
self.sharders.once(**kwargs)
def test_manage_shard_ranges(self):
obj_names = self._make_object_names(7)
self.put_objects(obj_names)
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
# run replicators first time to get sync points set
self.replicators.once()
# sanity check: we don't have nearly enough objects for this to shard
# automatically
self.sharders_once(number=self.brain.node_numbers[0],
additional_args='--partitions=%s' % self.brain.part)
self.assert_container_state(self.brain.nodes[0], 'unsharded', 0)
self.assert_subprocess_success([
'swift-manage-shard-ranges',
self.get_db_file(self.brain.part, self.brain.nodes[0]),
'find_and_replace', '3', '--enable', '--minimum-shard-size', '2'])
self.assert_container_state(self.brain.nodes[0], 'unsharded', 2)
# "Run container-replicator to replicate them to other nodes."
self.replicators.once()
# "Run container-sharder on all nodes to shard the container."
self.sharders_once(additional_args='--partitions=%s' % self.brain.part)
# Everybody's settled
self.assert_container_state(self.brain.nodes[0], 'sharded', 2)
self.assert_container_state(self.brain.nodes[1], 'sharded', 2)
self.assert_container_state(self.brain.nodes[2], 'sharded', 2)
self.assert_container_listing(obj_names)
def test_manage_shard_ranges_compact(self):
# verify shard range compaction using swift-manage-shard-ranges
obj_names = self._make_object_names(8)
self.put_objects(obj_names)
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
# run replicators first time to get sync points set, and get container
# sharded into 4 shards
self.replicators.once()
self.assert_subprocess_success([
'swift-manage-shard-ranges',
self.get_db_file(self.brain.part, self.brain.nodes[0]),
'find_and_replace', '2', '--enable'])
self.assert_container_state(self.brain.nodes[0], 'unsharded', 4)
self.replicators.once()
# run sharders twice to cleave all 4 shard ranges
self.sharders_once(additional_args='--partitions=%s' % self.brain.part)
self.sharders_once(additional_args='--partitions=%s' % self.brain.part)
self.assert_container_state(self.brain.nodes[0], 'sharded', 4)
self.assert_container_state(self.brain.nodes[1], 'sharded', 4)
self.assert_container_state(self.brain.nodes[2], 'sharded', 4)
self.assert_container_listing(obj_names)
# now compact some ranges; use --max-shrinking to allow 2 shrinking
# shards
self.assert_subprocess_success([
'swift-manage-shard-ranges',
self.get_db_file(self.brain.part, self.brain.nodes[0]),
'compact', '--max-expanding', '1', '--max-shrinking', '2',
'--yes'])
shard_ranges = self.assert_container_state(
self.brain.nodes[0], 'sharded', 4)
self.assertEqual([ShardRange.SHRINKING] * 2 + [ShardRange.ACTIVE] * 2,
[sr.state for sr in shard_ranges])
self.replicators.once()
self.sharders_once()
# check there's now just 2 remaining shard ranges
shard_ranges = self.assert_container_state(
self.brain.nodes[0], 'sharded', 2)
self.assertEqual([ShardRange.ACTIVE] * 2,
[sr.state for sr in shard_ranges])
self.assert_container_listing(obj_names, req_hdrs={'X-Newest': 'True'})
# root container's own shard range should still be SHARDED
for i, node in enumerate(self.brain.nodes):
with annotate_failure('node[%d]' % i):
broker = self.get_broker(self.brain.part, self.brain.nodes[0])
self.assertEqual(ShardRange.SHARDED,
broker.get_own_shard_range().state)
# now compact the final two shard ranges to the root; use
# --max-shrinking to allow 2 shrinking shards
self.assert_subprocess_success([
'swift-manage-shard-ranges',
self.get_db_file(self.brain.part, self.brain.nodes[0]),
'compact', '--yes', '--max-shrinking', '2'])
shard_ranges = self.assert_container_state(
self.brain.nodes[0], 'sharded', 2)
self.assertEqual([ShardRange.SHRINKING] * 2,
[sr.state for sr in shard_ranges])
self.replicators.once()
self.sharders_once()
self.assert_container_state(self.brain.nodes[0], 'collapsed', 0)
self.assert_container_listing(obj_names, req_hdrs={'X-Newest': 'True'})
# root container's own shard range should now be ACTIVE
for i, node in enumerate(self.brain.nodes):
with annotate_failure('node[%d]' % i):
broker = self.get_broker(self.brain.part, self.brain.nodes[0])
self.assertEqual(ShardRange.ACTIVE,
broker.get_own_shard_range().state)
def test_manage_shard_ranges_repair_root(self):
# provoke overlaps in root container and repair
obj_names = self._make_object_names(16)
self.put_objects(obj_names)
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
# run replicators first time to get sync points set
self.replicators.once()
# find 4 shard ranges on nodes[0] - let's denote these ranges 0.0, 0.1,
# 0.2 and 0.3 that are installed with epoch_0
self.assert_subprocess_success([
'swift-manage-shard-ranges',
self.get_db_file(self.brain.part, self.brain.nodes[0]),
'find_and_replace', '4', '--enable'])
shard_ranges_0 = self.assert_container_state(self.brain.nodes[0],
'unsharded', 4)
# *Also* go find 3 shard ranges on *another node*, like a dumb-dumb -
# let's denote these ranges 1.0, 1.1 and 1.2 that are installed with
# epoch_1
self.assert_subprocess_success([
'swift-manage-shard-ranges',
self.get_db_file(self.brain.part, self.brain.nodes[1]),
'find_and_replace', '7', '--enable'])
shard_ranges_1 = self.assert_container_state(self.brain.nodes[1],
'unsharded', 3)
# Run sharder in specific order so that the replica with the older
# epoch_0 starts sharding first - this will prove problematic later!
# On first pass the first replica passes audit, creates shards and then
# syncs shard ranges with the other replicas, so it has a mix of 0.*
# shard ranges in CLEAVED state and 1.* ranges in FOUND state. It
# proceeds to cleave shard 0.0, but after 0.0 cleaving stalls because
# next in iteration is shard range 1.0 in FOUND state from the other
# replica that it cannot yet cleave.
self.sharders_once(number=self.brain.node_numbers[0],
additional_args='--partitions=%s' % self.brain.part)
# On first pass the second replica passes audit (it has its own found
# ranges and the first replica's created shard ranges but none in the
# same state overlap), creates its shards and then syncs shard ranges
# with the other replicas. All of the 7 shard ranges on this replica
# are now in CREATED state so it proceeds to cleave the first two shard
# ranges, 0.1 and 1.0.
self.sharders_once(number=self.brain.node_numbers[1],
additional_args='--partitions=%s' % self.brain.part)
self.replicators.once()
# Uh-oh
self.assert_container_state(self.brain.nodes[0], 'sharding', 7)
self.assert_container_state(self.brain.nodes[1], 'sharding', 7)
# There's a race: the third replica may be sharding or may be unsharded
# Try it again a few times
self.sharders_once(additional_args='--partitions=%s' % self.brain.part)
self.replicators.once()
self.sharders_once(additional_args='--partitions=%s' % self.brain.part)
# It's not really fixing itself... the sharder audit will detect
# overlapping ranges which prevents cleaving from proceeding; expect the
# shard ranges to be mostly still in created state, with one or two
# possibly cleaved during first pass before the sharding got stalled
shard_ranges = self.assert_container_state(self.brain.nodes[0],
'sharding', 7)
self.assertEqual([ShardRange.CLEAVED] * 2 + [ShardRange.CREATED] * 5,
[sr.state for sr in shard_ranges])
shard_ranges = self.assert_container_state(self.brain.nodes[1],
'sharding', 7)
self.assertEqual([ShardRange.CLEAVED] * 2 + [ShardRange.CREATED] * 5,
[sr.state for sr in shard_ranges])
# But hey, at least listings still work! They're just going to get
# horribly out of date as more objects are added
self.assert_container_listing(obj_names)
# 'swift-manage-shard-ranges repair' will choose the second set of 3
# shard ranges (1.*) over the first set of 4 (0.*) because that's the
# path with most cleaving progress, and so shrink shard ranges 0.*.
db_file = self.get_db_file(self.brain.part, self.brain.nodes[0])
self.assert_subprocess_success(
['swift-manage-shard-ranges', db_file, 'repair', '--yes'])
# make sure all root replicas now sync their shard ranges
self.replicators.once()
# Run sharder on the shrinking shards. This should not change the state
# of any of the acceptors, particularly the ones that have yet to have
# objects cleaved from the roots, because we don't want the as yet
# uncleaved acceptors becoming prematurely active and creating 'holes'
# in listings. The shrinking shard ranges should however get deleted in
# root container table.
self.run_sharders(shard_ranges_0)
shard_ranges = self.assert_container_state(self.brain.nodes[1],
'sharding', 3)
self.assertEqual([ShardRange.CLEAVED] * 1 + [ShardRange.CREATED] * 2,
[sr.state for sr in shard_ranges])
self.assert_container_listing(obj_names)
# check the unwanted shards did shrink away...
for shard_range in shard_ranges_0:
with annotate_failure(shard_range):
found_for_shard = self.categorize_container_dir_content(
shard_range.account, shard_range.container)
self.assertLengthEqual(found_for_shard['shard_dbs'], 3)
actual = []
for shard_db in found_for_shard['shard_dbs']:
broker = ContainerBroker(shard_db)
own_sr = broker.get_own_shard_range()
actual.append(
(broker.get_db_state(), own_sr.state, own_sr.deleted))
self.assertEqual([(SHARDED, ShardRange.SHRUNK, True)] * 3,
actual)
# At this point one of the first two replicas may have done some useful
# cleaving of 1.* shards, the other may have only cleaved 0.* shards,
# and the third replica may have cleaved no shards. We therefore need
# two more passes of the sharder to get to a predictable state where
# all replicas have cleaved all three 0.* shards.
self.sharders_once()
self.sharders_once()
# now we expect all replicas to have just the three 1.* shards, with
# the 0.* shards all deleted
brokers = {}
exp_shard_ranges = sorted(
[sr.copy(state=ShardRange.SHRUNK, deleted=True)
for sr in shard_ranges_0] +
[sr.copy(state=ShardRange.ACTIVE)
for sr in shard_ranges_1],
key=ShardRange.sort_key)
for node in (0, 1, 2):
with annotate_failure('node %s' % node):
broker = self.get_broker(self.brain.part,
self.brain.nodes[node])
brokers[node] = broker
shard_ranges = broker.get_shard_ranges()
self.assertEqual(shard_ranges_1, shard_ranges)
shard_ranges = broker.get_shard_ranges(include_deleted=True)
self.assertLengthEqual(shard_ranges, len(exp_shard_ranges))
self.maxDiff = None
self.assertEqual(exp_shard_ranges, shard_ranges)
self.assertEqual(ShardRange.SHARDED,
broker._own_shard_range().state)
# Sadly, the first replica to start sharding is still reporting its db
# state to be 'unsharded' because, although it has sharded, its shard
# db epoch (epoch_0) does not match its own shard range epoch
# (epoch_1), and that is because the second replica (with epoch_1)
# updated the own shard range and replicated it to all other replicas.
# If we had run the sharder on the second replica before the first
# replica, then by the time the first replica started sharding it would
# have learnt the newer epoch_1 and we wouldn't see this inconsistency.
self.assertEqual(UNSHARDED, brokers[0].get_db_state())
self.assertEqual(SHARDED, brokers[1].get_db_state())
self.assertEqual(SHARDED, brokers[2].get_db_state())
epoch_1 = brokers[1].db_epoch
self.assertEqual(epoch_1, brokers[2].db_epoch)
self.assertLess(brokers[0].db_epoch, epoch_1)
# the root replica that thinks it is unsharded is problematic - it will
# not return shard ranges for listings, but has no objects, so it's
# luck of the draw whether we get a listing or not at this point :(
# Run the sharders again: the first replica that is still 'unsharded'
# because of the older epoch_0 in its db filename will now start to
# shard again with a newer epoch_1 db, and will start to re-cleave the
# 3 active shards, albeit with zero objects to cleave.
self.sharders_once()
for node in (0, 1, 2):
with annotate_failure('node %s' % node):
broker = self.get_broker(self.brain.part,
self.brain.nodes[node])
brokers[node] = broker
shard_ranges = broker.get_shard_ranges()
self.assertEqual(shard_ranges_1, shard_ranges)
shard_ranges = broker.get_shard_ranges(include_deleted=True)
self.assertLengthEqual(shard_ranges, len(exp_shard_ranges))
self.assertEqual(exp_shard_ranges, shard_ranges)
self.assertEqual(ShardRange.SHARDED,
broker._own_shard_range().state)
self.assertEqual(epoch_1, broker.db_epoch)
self.assertIn(brokers[0].get_db_state(), (SHARDING, SHARDED))
self.assertEqual(SHARDED, brokers[1].get_db_state())
self.assertEqual(SHARDED, brokers[2].get_db_state())
# This cycle of the sharders also guarantees that all shards have had
# their state updated to ACTIVE from the root; this was not necessarily
# true at end of the previous sharder pass because a shard audit (when
# the shard is updated from a root) may have happened before all roots
# have had their shard ranges transitioned to ACTIVE.
for shard_range in shard_ranges_1:
with annotate_failure(shard_range):
found_for_shard = self.categorize_container_dir_content(
shard_range.account, shard_range.container)
self.assertLengthEqual(found_for_shard['normal_dbs'], 3)
actual = []
for shard_db in found_for_shard['normal_dbs']:
broker = ContainerBroker(shard_db)
own_sr = broker.get_own_shard_range()
actual.append(
(broker.get_db_state(), own_sr.state, own_sr.deleted))
self.assertEqual([(UNSHARDED, ShardRange.ACTIVE, False)] * 3,
actual)
# We may need one more pass of the sharder before all three shard
# ranges are cleaved (2 per pass) and all the root replicas are
# predictably in sharded state. Note: the accelerated cleaving of >2
# zero-object shard ranges per cycle is defeated if a shard happens
# to exist on the same node as the root because the root's cleaving
# process doesn't think that it created the shard db and will therefore
# replicate it as per a normal cleave.
self.sharders_once()
for node in (0, 1, 2):
with annotate_failure('node %s' % node):
broker = self.get_broker(self.brain.part,
self.brain.nodes[node])
brokers[node] = broker
shard_ranges = broker.get_shard_ranges()
self.assertEqual(shard_ranges_1, shard_ranges)
shard_ranges = broker.get_shard_ranges(include_deleted=True)
self.assertLengthEqual(shard_ranges, len(exp_shard_ranges))
self.assertEqual(exp_shard_ranges, shard_ranges)
self.assertEqual(ShardRange.SHARDED,
broker._own_shard_range().state)
self.assertEqual(epoch_1, broker.db_epoch)
self.assertEqual(SHARDED, broker.get_db_state())
# Finally, with all root replicas in a consistent state, the listing
# will be predictably correct
self.assert_container_listing(obj_names)
def test_manage_shard_ranges_repair_shard(self):
# provoke overlaps in a shard container and repair them
obj_names = self._make_object_names(24)
initial_obj_names = obj_names[::2]
# put 12 objects in container
self.put_objects(initial_obj_names)
client.post_container(self.url, self.admin_token, self.container_name,
headers={'X-Container-Sharding': 'on'})
# run replicators first time to get sync points set
self.replicators.once()
# find 3 shard ranges on root nodes[0] and get the root sharded
self.assert_subprocess_success([
'swift-manage-shard-ranges',
self.get_db_file(self.brain.part, self.brain.nodes[0]),
'find_and_replace', '4', '--enable'])
self.replicators.once()
# cleave first two shards
self.sharders_once(additional_args='--partitions=%s' % self.brain.part)
# cleave third shard
self.sharders_once(additional_args='--partitions=%s' % self.brain.part)
# ensure all shards learn their ACTIVE state from root
self.sharders_once()
for node in (0, 1, 2):
with annotate_failure('node %d' % node):
shard_ranges = self.assert_container_state(
self.brain.nodes[node], 'sharded', 3)
for sr in shard_ranges:
self.assertEqual(ShardRange.ACTIVE, sr.state)
self.assert_container_listing(initial_obj_names)
# add objects to second shard range so it has 8 objects; this range
# has bounds (obj-0006,obj-0014]
root_shard_ranges = self.get_container_shard_ranges()
self.assertEqual(3, len(root_shard_ranges))
shard_1 = root_shard_ranges[1]
self.assertEqual(obj_names[6], shard_1.lower)
self.assertEqual(obj_names[14], shard_1.upper)
more_obj_names = obj_names[7:15:2]
self.put_objects(more_obj_names)
expected_obj_names = sorted(initial_obj_names + more_obj_names)
self.assert_container_listing(expected_obj_names)
shard_1_part, shard_1_nodes = self.brain.ring.get_nodes(
shard_1.account, shard_1.container)
# find 3 sub-shards on one shard node; use --force-commits to ensure
# the recently PUT objects are included when finding the shard range
# pivot points
self.assert_subprocess_success([
'swift-manage-shard-ranges', '--force-commits',
self.get_db_file(shard_1_part, shard_1_nodes[1], shard_1.account,
shard_1.container),
'find_and_replace', '3', '--enable'])
# ... and mistakenly find 4 shard ranges on a different shard node :(
self.assert_subprocess_success([
'swift-manage-shard-ranges', '--force-commits',
self.get_db_file(shard_1_part, shard_1_nodes[2], shard_1.account,
shard_1.container),
'find_and_replace', '2', '--enable'])
# replicate the muddle of shard ranges between shard replicas, merged
# result is:
# '' - 6 shard ACTIVE
# 6 - 8 sub-shard FOUND
# 6 - 9 sub-shard FOUND
# 8 - 10 sub-shard FOUND
# 9 - 12 sub-shard FOUND
# 10 - 12 sub-shard FOUND
# 12 - 14 sub-shard FOUND
# 12 - 14 sub-shard FOUND
# 6 - 14 shard SHARDING
# 14 - '' shard ACTIVE
self.replicators.once()
# try hard to shard the shard...
self.sharders_once(additional_args='--partitions=%s' % shard_1_part)
self.sharders_once(additional_args='--partitions=%s' % shard_1_part)
self.sharders_once(additional_args='--partitions=%s' % shard_1_part)
# sharding hasn't completed and there are overlaps in the shard and root:
# the sub-shards will have been cleaved in the order listed above, but
# sub-shards (10 - 12) and one of (12 - 14) will be overlooked because
# the cleave cursor will have moved past their namespace before they
# were yielded by the shard range iterator, so we now have:
# '' - 6 shard ACTIVE
# 6 - 8 sub-shard ACTIVE
# 6 - 9 sub-shard ACTIVE
# 8 - 10 sub-shard ACTIVE
# 10 - 12 sub-shard CREATED
# 9 - 12 sub-shard ACTIVE
# 12 - 14 sub-shard CREATED
# 12 - 14 sub-shard ACTIVE
# 14 - '' shard ACTIVE
sub_shard_ranges = self.get_container_shard_ranges(
shard_1.account, shard_1.container)
self.assertEqual(7, len(sub_shard_ranges), sub_shard_ranges)
root_shard_ranges = self.get_container_shard_ranges()
self.assertEqual(9, len(root_shard_ranges), root_shard_ranges)
self.assertEqual([ShardRange.ACTIVE] * 4 +
[ShardRange.CREATED, ShardRange.ACTIVE] * 2 +
[ShardRange.ACTIVE],
[sr.state for sr in root_shard_ranges])
# fix the overlaps - a set of 3 ACTIVE sub-shards will be chosen and 4
# other sub-shards will be shrunk away; apply the fix at the root
# container
db_file = self.get_db_file(self.brain.part, self.brain.nodes[0])
self.assert_subprocess_success(
['swift-manage-shard-ranges', db_file, 'repair', '--yes'])
self.replicators.once()
self.sharders_once()
self.sharders_once()
# check root now has just 5 shard ranges
root_shard_ranges = self.get_container_shard_ranges()
self.assertEqual(5, len(root_shard_ranges), root_shard_ranges)
self.assertEqual([ShardRange.ACTIVE] * 5,
[sr.state for sr in root_shard_ranges])
# check there is 1 sharded shard range and 4 shrunk sub-shard ranges in the
# root (note, shard_1's shard ranges aren't updated once it has sharded
# because the sub-shards report their state to the root; we cannot make
# assertions about shrunk states in shard_1's shard range table)
root_shard_ranges = self.get_container_shard_ranges(
include_deleted=True)
self.assertEqual(10, len(root_shard_ranges), root_shard_ranges)
shrunk_shard_ranges = [sr for sr in root_shard_ranges
if sr.state == ShardRange.SHRUNK]
self.assertEqual(4, len(shrunk_shard_ranges), root_shard_ranges)
self.assertEqual([True] * 4,
[sr.deleted for sr in shrunk_shard_ranges])
sharded_shard_ranges = [sr for sr in root_shard_ranges
if sr.state == ShardRange.SHARDED]
self.assertEqual(1, len(sharded_shard_ranges), root_shard_ranges)
self.assert_container_listing(expected_obj_names)
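The nested lists that follow are the row's def_use_chains field. Below is a minimal sketch of how such a row might be consumed, assuming each inner [start, end] pair is a character-offset span into the code string above (typically the first span marking a name's definition and the remaining spans its uses); the helper name resolve_chains is illustrative only and not part of the dataset:

def resolve_chains(code, def_use_chains):
    # Map every [start, end] offset pair to the substring of `code` it
    # covers, keeping the per-name grouping of the original field.
    return [[code[start:end] for start, end in chain]
            for chain in def_use_chains]

# e.g. resolve_chains(code, [[[596, 600], [7286, 7290]]]) returns one chain
# whose entries are the text at offsets 596..600 and 7286..7290 of `code`.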
| [
[
[
596,
600
],
[
7286,
7290
]
],
[
[
608,
610
],
[
8146,
8148
],
[
8583,
8585
],
[
9849,
9851
],
[
9897,
9899
],
[
126061,
126063
],
[
126404,
126406
],
[
126419,
126421
],
[
127460,
127462
],
[
127475,
127477
],
[
127570,
127572
],
[
128104,
128106
],
[
128119,
128121
],
[
129262,
129264
],
[
129277,
129279
],
[
129618,
129620
],
[
129923,
129925
],
[
131553,
131555
],
[
131809,
131811
],
[
131927,
131929
],
[
132392,
132394
],
[
132800,
132802
]
],
[
[
618,
624
],
[
126103,
126109
],
[
126925,
126931
],
[
131595,
131601
]
],
[
[
632,
642
],
[
16704,
16714
],
[
16745,
16755
]
],
[
[
650,
654
],
[
4229,
4233
],
[
29476,
29480
]
],
[
[
673,
681
],
[
2406,
2414
],
[
3682,
3690
],
[
30998,
31006
],
[
31279,
31287
]
],
[
[
689,
692
],
[
13449,
13452
],
[
29974,
29977
],
[
30543,
30546
],
[
41483,
41486
],
[
69091,
69094
],
[
69911,
69914
],
[
70193,
70196
],
[
70703,
70706
],
[
71081,
71084
],
[
71686,
71689
],
[
136468,
136471
],
[
137041,
137044
],
[
19882,
19885
],
[
33016,
33019
]
],
[
[
728,
733
],
[
19598,
19603
],
[
23106,
23111
],
[
23368,
23373
],
[
73532,
73537
]
],
[
[
760,
773
],
[
1958,
1971
],
[
16184,
16197
],
[
100912,
100925
],
[
104651,
104664
]
],
[
[
775,
780
],
[
2253,
2258
],
[
8015,
8020
],
[
8231,
8236
],
[
8283,
8288
],
[
30842,
30847
],
[
91417,
91422
],
[
126048,
126053
],
[
131540,
131545
]
],
[
[
814,
821
],
[
4687,
4694
],
[
44387,
44394
],
[
68393,
68400
],
[
91288,
91295
]
],
[
[
857,
869
],
[
4801,
4813
]
],
[
[
901,
911
],
[
7251,
7261
],
[
11149,
11159
],
[
11272,
11282
],
[
12558,
12568
],
[
12599,
12609
],
[
16578,
16588
],
[
24853,
24863
],
[
24929,
24939
],
[
26399,
26409
],
[
38344,
38354
],
[
38420,
38430
],
[
39831,
39841
],
[
43002,
43012
],
[
43021,
43031
],
[
43763,
43773
],
[
43783,
43793
],
[
48093,
48103
],
[
48753,
48763
],
[
49654,
49664
],
[
49675,
49685
],
[
49696,
49706
],
[
50829,
50839
],
[
50998,
51008
],
[
51018,
51028
],
[
51059,
51069
],
[
52040,
52050
],
[
53110,
53120
],
[
55371,
55381
],
[
55540,
55550
],
[
55560,
55570
],
[
55601,
55611
],
[
56603,
56613
],
[
57186,
57196
],
[
57206,
57216
],
[
57247,
57257
],
[
57267,
57277
],
[
57309,
57319
],
[
59144,
59154
],
[
59911,
59921
],
[
66577,
66587
],
[
66597,
66607
],
[
66617,
66627
],
[
100719,
100729
],
[
100746,
100756
],
[
102669,
102679
],
[
102696,
102706
],
[
103646,
103656
],
[
103673,
103683
],
[
104473,
104483
],
[
132222,
132232
],
[
132242,
132252
],
[
132262,
132272
],
[
133374,
133384
],
[
140921,
140931
],
[
140950,
140960
],
[
141278,
141288
],
[
141723,
141733
],
[
142265,
142275
],
[
142844,
142854
],
[
146620,
146630
],
[
146647,
146657
],
[
146896,
146906
],
[
146923,
146933
],
[
148335,
148345
],
[
148362,
148372
],
[
149212,
149222
],
[
149912,
149922
],
[
150013,
150023
],
[
150088,
150098
],
[
150762,
150772
],
[
152972,
152982
],
[
154353,
154363
],
[
155574,
155584
],
[
157382,
157392
],
[
160773,
160783
],
[
160824,
160834
],
[
160844,
160854
],
[
160895,
160905
],
[
161637,
161647
],
[
162313,
162323
],
[
162621,
162631
],
[
20030,
20040
],
[
20100,
20110
],
[
20290,
20300
],
[
20360,
20370
],
[
33228,
33238
],
[
33468,
33478
],
[
33773,
33783
],
[
34012,
34022
],
[
72231,
72241
],
[
72409,
72419
],
[
72631,
72641
],
[
72698,
72708
],
[
76456,
76466
],
[
85453,
85463
]
],
[
[
913,
930
],
[
10007,
10024
]
],
[
[
932,
944
],
[
8659,
8671
]
],
[
[
952,
963
],
[
80884,
80895
]
],
[
[
965,
982
],
[
2565,
2582
],
[
31141,
31158
]
],
[
[
984,
993
],
[
32323,
32332
],
[
33266,
33275
],
[
33358,
33367
],
[
33505,
33514
],
[
33811,
33820
],
[
33902,
33911
],
[
34050,
34059
],
[
114587,
114596
],
[
120810,
120819
],
[
120920,
120929
]
],
[
[
995,
998
],
[
114698,
114701
],
[
121031,
121034
]
],
[
[
1035,
1050
],
[
8805,
8820
],
[
41071,
41086
],
[
43509,
43524
],
[
45118,
45133
],
[
49729,
49744
],
[
50550,
50565
],
[
52071,
52086
],
[
52961,
52976
],
[
54045,
54060
],
[
55092,
55107
],
[
56319,
56334
],
[
58865,
58880
],
[
60460,
60475
],
[
65076,
65091
],
[
66273,
66288
],
[
66950,
66965
],
[
68102,
68117
],
[
68538,
68553
],
[
148970,
148985
],
[
154109,
154124
]
],
[
[
1052,
1061
],
[
67582,
67591
],
[
67593,
67602
],
[
151490,
151499
],
[
154342,
154351
]
],
[
[
1063,
1071
],
[
66390,
66398
],
[
67572,
67580
],
[
68156,
68164
],
[
153167,
153175
]
],
[
[
1079,
1086
],
[
149203,
149210
],
[
151553,
151560
],
[
151614,
151621
],
[
153177,
153184
],
[
153212,
153219
],
[
153273,
153280
],
[
155752,
155759
]
],
[
[
1123,
1138
],
[
43252,
43267
],
[
44253,
44268
],
[
46059,
46074
],
[
51393,
51408
],
[
53244,
53259
],
[
53765,
53780
],
[
54289,
54304
],
[
54499,
54514
],
[
59431,
59446
]
],
[
[
1140,
1156
],
[
18666,
18682
]
],
[
[
1196,
1215
],
[
91515,
91534
]
],
[
[
1240,
1246
],
[
4273,
4279
],
[
4296,
4302
],
[
4328,
4334
],
[
4386,
4392
],
[
5836,
5842
],
[
6268,
6274
],
[
13158,
13164
],
[
13745,
13751
],
[
14142,
14148
],
[
14260,
14266
],
[
14616,
14622
],
[
14884,
14890
],
[
15117,
15123
],
[
23913,
23919
],
[
27578,
27584
],
[
27728,
27734
],
[
28258,
28264
],
[
31441,
31447
],
[
41327,
41333
],
[
41588,
41594
],
[
41760,
41766
],
[
46500,
46506
],
[
47035,
47041
],
[
62378,
62384
],
[
63448,
63454
],
[
64482,
64488
],
[
64641,
64647
],
[
69632,
69638
],
[
70461,
70467
],
[
71519,
71525
],
[
89678,
89684
],
[
91085,
91091
],
[
92917,
92923
],
[
100083,
100089
],
[
105933,
105939
],
[
107569,
107575
],
[
108214,
108220
],
[
115037,
115043
],
[
121370,
121376
],
[
126316,
126322
],
[
130581,
130587
],
[
134519,
134525
],
[
137775,
137781
],
[
139390,
139396
],
[
143134,
143140
],
[
156251,
156257
],
[
19698,
19704
],
[
21132,
21138
],
[
22981,
22987
],
[
23242,
23248
],
[
32865,
32871
],
[
34571,
34577
],
[
36427,
36433
],
[
36643,
36649
],
[
74876,
74882
],
[
79032,
79038
],
[
79199,
79205
],
[
85670,
85676
],
[
88365,
88371
],
[
88541,
88547
],
[
89245,
89251
]
],
[
[
1248,
1256
],
[
4504,
4512
]
],
[
[
1258,
1273
],
[
14580,
14595
],
[
14848,
14863
],
[
15081,
15096
],
[
21092,
21107
],
[
34531,
34546
],
[
79161,
79176
]
],
[
[
1316,
1329
],
[
57911,
57924
],
[
112340,
112353
],
[
78686,
78699
],
[
80394,
80407
]
],
[
[
1370,
1391
],
[
80821,
80842
]
],
[
[
1409,
1425
],
[
11474,
11490
],
[
15767,
15783
],
[
48660,
48676
],
[
50592,
50608
],
[
55134,
55150
],
[
56361,
56377
],
[
56959,
56975
],
[
58396,
58412
],
[
58907,
58923
],
[
59818,
59834
],
[
95082,
95098
],
[
95642,
95658
],
[
97724,
97740
],
[
98649,
98665
],
[
99208,
99224
],
[
133239,
133255
],
[
141577,
141593
],
[
142698,
142714
],
[
148611,
148627
],
[
150157,
150173
],
[
152403,
152419
],
[
153748,
153764
],
[
155005,
155021
],
[
157151,
157167
],
[
73747,
73763
],
[
77674,
77690
],
[
81099,
81115
],
[
81621,
81637
],
[
83617,
83633
],
[
85220,
85236
],
[
88155,
88171
]
],
[
[
1449,
1463
],
[
4526,
4540
]
],
[
[
1493,
1506
],
[
4948,
4961
],
[
31704,
31717
]
],
[
[
1537,
1550
],
[
2130,
2143
]
],
[
[
1552,
1569
],
[
7827,
7844
]
],
[
[
1577,
1602
],
[
5733,
5758
]
],
[
[
1610,
1614
],
[
101487,
101491
],
[
101556,
101560
],
[
101751,
101755
],
[
101810,
101814
],
[
101869,
101873
],
[
105224,
105228
],
[
105293,
105297
],
[
105488,
105492
],
[
105547,
105551
],
[
105606,
105610
]
],
[
[
1617,
1646
],
[
2946,
2975
],
[
3196,
3225
]
],
[
[
1651,
1680
],
[
3018,
3047
],
[
3244,
3273
]
],
[
[
1695,
1709
],
[
7473,
7487
]
],
[
[
2104,
2129
],
[
18815,
18840
],
[
30667,
30692
],
[
40589,
40614
],
[
137156,
137181
],
[
3770,
3795
],
[
4433,
4458
]
],
[
[
18786,
18814
],
[
29233,
29261
],
[
29523,
29551
]
],
[
[
29201,
29232
]
],
[
[
29497,
29522
],
[
30216,
30241
]
],
[
[
30629,
30666
],
[
30741,
30778
]
],
[
[
40567,
40588
],
[
136054,
136075
]
],
[
[
136024,
136053
],
[
136710,
136739
]
],
[
[
137127,
137155
]
]
] |
import unittest
from code.google_search import get_people_also_ask_links
class TestGoogleSearch(unittest.TestCase):
def setUp(self) -> None:
pass
def test_get_people_also_ask_links(self):
"""Test the get_people_also_ask_links method"""
test = "principal components"
result = get_people_also_ask_links(test)
self.assertEqual(list, type(result))
| [
[
[
7,
15
],
[
97,
105
]
],
[
[
47,
72
],
[
317,
342
]
],
[
[
80,
96
]
]
] |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import json
import uuid
from xml.dom import minidom
import webob
from cinder.api import common
from cinder.api.openstack.wsgi import MetadataXMLDeserializer
from cinder.api.openstack.wsgi import XMLDeserializer
from cinder import db
from cinder import test
from cinder.tests.api import fakes
from cinder import volume
def fake_volume_get(*args, **kwargs):
return {
'id': 'fake',
'host': 'host001',
'status': 'available',
'size': 5,
'availability_zone': 'somewhere',
'created_at': datetime.datetime.now(),
'attach_status': None,
'display_name': 'anothervolume',
'display_description': 'Just another volume!',
'volume_type_id': None,
'snapshot_id': None,
'project_id': 'fake',
}
def fake_volume_get_all(*args, **kwargs):
return [fake_volume_get()]
fake_image_metadata = {
'image_id': 'someid',
'image_name': 'fake',
'kernel_id': 'somekernel',
'ramdisk_id': 'someramdisk',
}
def fake_get_volume_image_metadata(*args, **kwargs):
return fake_image_metadata
def fake_get_volumes_image_metadata(*args, **kwargs):
return {'fake': fake_image_metadata}
class VolumeImageMetadataTest(test.TestCase):
content_type = 'application/json'
def setUp(self):
super(VolumeImageMetadataTest, self).setUp()
self.stubs.Set(volume.API, 'get', fake_volume_get)
self.stubs.Set(volume.API, 'get_all', fake_volume_get_all)
self.stubs.Set(volume.API, 'get_volume_image_metadata',
fake_get_volume_image_metadata)
self.stubs.Set(volume.API, 'get_volumes_image_metadata',
fake_get_volumes_image_metadata)
self.stubs.Set(db, 'volume_get', fake_volume_get)
self.UUID = uuid.uuid4()
def _make_request(self, url):
req = webob.Request.blank(url)
req.accept = self.content_type
res = req.get_response(fakes.wsgi_app())
return res
def _get_image_metadata(self, body):
return json.loads(body)['volume']['volume_image_metadata']
def _get_image_metadata_list(self, body):
return [
volume['volume_image_metadata']
for volume in json.loads(body)['volumes']
]
def test_get_volume(self):
res = self._make_request('/v2/fake/volumes/%s' % self.UUID)
self.assertEqual(res.status_int, 200)
self.assertEqual(self._get_image_metadata(res.body),
fake_image_metadata)
def test_list_detail_volumes(self):
res = self._make_request('/v2/fake/volumes/detail')
self.assertEqual(res.status_int, 200)
self.assertEqual(self._get_image_metadata_list(res.body)[0],
fake_image_metadata)
class ImageMetadataXMLDeserializer(common.MetadataXMLDeserializer):
metadata_node_name = "volume_image_metadata"
class VolumeImageMetadataXMLTest(VolumeImageMetadataTest):
content_type = 'application/xml'
def _get_image_metadata(self, body):
deserializer = XMLDeserializer()
volume = deserializer.find_first_child_named(
minidom.parseString(body), 'volume')
image_metadata = deserializer.find_first_child_named(
volume, 'volume_image_metadata')
return MetadataXMLDeserializer().extract_metadata(image_metadata)
def _get_image_metadata_list(self, body):
deserializer = XMLDeserializer()
volumes = deserializer.find_first_child_named(
minidom.parseString(body), 'volumes')
volume_list = deserializer.find_children_named(volumes, 'volume')
image_metadata_list = [
deserializer.find_first_child_named(
volume, 'volume_image_metadata'
)
for volume in volume_list]
return map(MetadataXMLDeserializer().extract_metadata,
image_metadata_list)
| [
[
[
657,
665
],
[
1202,
1210
]
],
[
[
673,
677
],
[
2709,
2713
],
[
2895,
2899
]
],
[
[
685,
689
],
[
2458,
2462
]
],
[
[
710,
717
],
[
3814,
3821
],
[
4187,
4194
]
],
[
[
726,
731
],
[
2520,
2525
]
],
[
[
756,
762
],
[
3485,
3491
]
],
[
[
801,
824
],
[
3973,
3996
],
[
4500,
4523
]
],
[
[
863,
878
],
[
3730,
3745
],
[
4102,
4117
]
],
[
[
898,
900
],
[
2403,
2405
]
],
[
[
920,
924
],
[
1885,
1889
]
],
[
[
954,
959
],
[
2615,
2620
]
],
[
[
979,
985
],
[
2037,
2043
],
[
2096,
2102
],
[
2163,
2169
],
[
2282,
2288
]
],
[
[
992,
1007
],
[
1507,
1522
],
[
2056,
2071
],
[
2421,
2436
]
],
[
[
1457,
1476
],
[
2119,
2138
]
],
[
[
1528,
1547
],
[
1736,
1755
],
[
1832,
1851
],
[
3165,
3184
],
[
3427,
3446
]
],
[
[
1676,
1706
],
[
2227,
2257
]
],
[
[
1762,
1793
],
[
2347,
2378
]
],
[
[
1861,
1884
],
[
3602,
3625
],
[
1975,
1998
]
],
[
[
3456,
3484
]
],
[
[
3575,
3601
]
]
] |
from datetime import datetime
from typing import List, Dict, Optional
from pydantic import BaseModel, validator, root_validator
class ItemModel(BaseModel):
cve: Dict
configurations: Optional[Dict]
impact: Optional[Dict]
publishedDate: datetime
lastModifiedDate: datetime
class ResultModel(BaseModel):
CVE_data_timestamp: datetime
CVE_data_type: str
CVE_Items: List[ItemModel]
@validator('CVE_data_type')
def fixed_type(cls, v):
assert v == 'CVE', 'Must be of type CVE'
return v
class ResponseModel(BaseModel):
resultsPerPage: int
startIndex: int
totalResults: int
result: ResultModel
| [
[
[
21,
29
],
[
253,
261
],
[
284,
292
],
[
349,
357
]
],
[
[
49,
53
],
[
396,
400
]
],
[
[
55,
59
],
[
167,
171
],
[
201,
205
],
[
228,
232
]
],
[
[
61,
69
],
[
192,
200
],
[
219,
227
]
],
[
[
91,
100
],
[
146,
155
],
[
313,
322
],
[
561,
570
]
],
[
[
102,
111
],
[
418,
427
]
],
[
[
113,
127
]
],
[
[
136,
145
],
[
401,
410
]
],
[
[
301,
312
],
[
651,
662
]
],
[
[
547,
560
]
]
] |
import easygui as g
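# The dialog below uses Chinese labels; roughly: title='Account Center',
# msg='[*Username] is required  [*Real name] is required  [*Mobile number] is required  [*E-mail] is required',
# fields=['*Username', '*Real name', 'Landline phone', '*Mobile number', 'QQ', '*E-mail']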
user_info=g.multenterbox(title='账号中心',msg='【*用户名】为必填项\t【*真实姓名】为必填项\t【*手机号码】为必填项\t【*E-mail】为必填项',
fields=['*用户名','*真实姓名','固定电话','*手机号码','QQ','*E-mail']
) | [
[
[
7,
19
],
[
31,
32
]
],
[
[
21,
30
]
]
] |
import re
import os
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
""" Holds all the custom exceptions raised by the api """
class OrderNotFound(StandardError):
"""Error raised when an order is not found"""
def __init__(self, orderid):
"""Create new OrderNotFound
Args:
orderid (str): The orderid that was not found
"""
super(OrderNotFound, self).__init__(orderid)
class ItemNotFound(StandardError):
"""Error raised when an item is not found"""
def __init__(self, orderid, itemid):
"""Create new ItemNotFound
Args:
orderid (str): The orderid of the item
itemid (str): The id of the item that was not found
"""
super(ItemNotFound, self).__init__(orderid, itemid)
class ProductNotImplemented(NotImplementedError):
"""Exception to be thrown when trying to instantiate an unsupported
product"""
def __init__(self, product_id):
"""Constructor for the product not implemented
Keyword args:
product_id -- The product id of that is not implemented
Return:
None
"""
self.product_id = product_id
super(ProductNotImplemented, self).__init__(product_id)
class ValidationException(Exception):
"""Exceptions when there is an error with validating an order
example:
"3 validation errors": [
"Value u'' for field '<obj>.tm5.products[0]' cannot be blank'",
"Value u'' for field '<obj>.tm5.products[0]' is not in the enumeration: ['source_metadata', 'l1', 'toa', 'bt', 'cloud', 'sr', 'lst', 'swe', 'sr_ndvi', 'sr_evi', 'sr_savi', 'sr_msavi', 'sr_ndmi', 'sr_nbr', 'sr_nbr2', 'stats']",
"Value [u''] for field '<obj>.tm5.products' Requested products are not available"
]
"""
def __init__(self, msg):
err_ls = msg.split('\n')
err_key = err_ls[0].replace(':', '')
self.response = {err_key: []}
for err in err_ls[1:]:
if err:
err = re.sub(r'<obj>.', '', err)
self.response[err_key].append(err)
super(ValidationException, self).__init__(str(self.response))
class InventoryException(Exception):
"""Exception for handling problems with inventory handling"""
def __init__(self, msg):
super(InventoryException, self).__init__(msg)
self.response = {'Inputs Not Available': msg}
class InventoryConnectionException(Exception):
"""Exception handling if input data pool is down"""
def __init__(self, msg):
super(InventoryConnectionException, self).__init__(msg) | [
[
[
7,
9
],
[
2063,
2065
]
],
[
[
17,
19
],
[
36,
38
],
[
53,
55
],
[
66,
68
],
[
79,
81
]
],
[
[
21,
33
]
],
[
[
173,
186
],
[
424,
437
]
],
[
[
471,
483
],
[
782,
794
]
],
[
[
836,
857
],
[
1239,
1260
]
],
[
[
1296,
1315
],
[
2156,
2175
]
],
[
[
2220,
2238
],
[
2360,
2378
]
],
[
[
2463,
2491
],
[
2603,
2631
]
]
] |
#Import modules and libraries
from random import randint
from string import ascii_uppercase, ascii_lowercase
from itertools import permutations
from copy import deepcopy
from tail_recursion import tail_recursive, recurse
#Define board mapping function
def mapBoard(col, row, value):
board = [[value for x in range(col)] for y in range(row)]
return board
#Define metaboard mapping function
def mapMetaBoard(col, row):
metaboard = [[[[0, 0, 0, 0], [0, 0, 0, 0]] for x in range(col)] for y in range(row)]
return metaboard
#Define view board function
def viewBoard(board):
alphabet = ascii_uppercase
col = len(board[0])
row = len(board)
border = ""
topBorder = "#||"
for i in range(col):
border += "_" * 2
topBorder += alphabet[i]
topBorder += " "
border += "___"
print(topBorder)
print(border)
for i in range(row):
print(alphabet[i] + "||" + " ".join(board[i]) + "|")
#Define mark function
def mark(board, signature):
alphabet = ascii_uppercase
alphabet1 = ascii_lowercase
dimensionY = len(board)
dimensionX = len(board[0])
valid = False
while (not valid):
print("\n\nWhere do you want to mark?\n\n")
x = input(f"Column (A - {alphabet[dimensionX - 1]})? ")
y = input(f"Row (A - {alphabet[dimensionY - 1]})? ")
try:
x = alphabet.index(x)
except ValueError:
x = alphabet1.index(x)
try:
y = alphabet.index(y)
except:
y = alphabet1.index(y)
if (board[y][x] == ' '):
valid = True
else:
print('That position has already been marked. Please try again.\n')
board[y][x] = signature
print('\n')
viewBoard(board)
#Define function to find all occurrences of 'X'
#Value is [opponentSignature]
#Return [[col1, row1], [col2, row2], ...]
def locate(value, board):
dimensionY = len(board)
dimensionX = len(board[0])
returnList = []
for row in range(dimensionY):
for col in range(dimensionX):
if (board[row][col] in value): returnList.append([col, row])
return returnList
#Define computer's turn -- recursive
@tail_recursive
def play(boardHistory, depleted, checked, iteration, winCond, forecasted, possibilities, board, selfSignature, opponentSignature, difficulty, first = True):
#AI
#Each of metaboard's position is a list [danger, opportunity]
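    #Concretely, each free tile holds [[d0, d1, d2, d3], [o0, o1, o2, o3]]: danger and
    #opportunity scores per direction, indexed 0: horizontal, 1: vertical, 2: NW-SE, 3: NE-SW
    #(the 'pos' index used in sweep() and screen() below); a claimed tile is instead
    #replaced by the owner's signature string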
#Define function to update metaboard
#TODO: refine to improve efficiency at detecting risks and opportunities of non-continuous streak & multi-directional streaks
#REQUIREMENTS 1: resonant effect on a tile immediately next to a continuous winCond - 1 streak == risk/opportunity factor of interrupted resonance on a tile conjoining 2 aligning sub-streaks whose sum >= winCond - 1
#REQUIREMENTS 2: implement weighted resonance system on a tile conjoining multiple directional streaks > resonance system for linear streaks
def meta(board, opponentSignature, selfSignature, winCond, difficulty):
#Define function to sweep perimeter of a position's coordinates and add attributes to them
#coord = [col, row]
def sweep(metaboard, coord, keyword, opponentSignature, selfSignature, winCond):
if (keyword == 'danger'):
type = 0
otherType = 1
signature = opponentSignature
else:
type = 1
otherType = 0
signature = selfSignature
coordVars = list(permutations([-1, 0, 1], 2))
coordVars.extend(((-1, -1), (1, 1)))
for coordVar in coordVars:
try:
if (coordVar in [(-1, -1), (1, 1)]):
pos = 2
elif (coordVar in [(0, -1), (0, 1)]):
pos = 0
elif (coordVar in [(-1, 0), (1, 0)]):
pos = 1
else:
pos = 3
row = coord[1] + coordVar[0]
if (row < 0 or row > len(metaboard)): raise IndexError
col = coord[0] + coordVar[1]
if (col < 0 or col > len(metaboard[0])): raise IndexError
#Ripple effect
if (not isinstance(metaboard[row][col], str)):
for i in range(winCond - 1):
if (not isinstance(metaboard[row][col], str)):
metaboard[row][col][type][pos] += (1 - i/(winCond - 1))
metaboard[row][col][otherType][pos] -= (1 - i/(winCond - 1))
row += coordVar[0]
if (row < 0 or row > len(metaboard)): raise IndexError
col += coordVar[1]
if (col < 0 or col > len(metaboard[0])): raise IndexError
elif (metaboard[row][col] == signature):
row += coordVar[0]
if (row < 0 or row > len(metaboard)): raise IndexError
col += coordVar[1]
if (col < 0 or col > len(metaboard[0])): raise IndexError
else:
raise IndexError
#alphabet = ascii_uppercase
#print(f'Metaboard at column {alphabet[col]} and row {alphabet[row]} has a {keyword} level of {metaboard[row][col][type]}.')
#Resonance effect
if (metaboard[row][col] == signature):
alignment = 0
while (metaboard[row][col] == signature):
row += coordVar[0]
if (row < 0 or row > len(metaboard)): raise IndexError
col += coordVar[1]
if (col < 0 or col > len(metaboard[0])): raise IndexError
alignment += 1
if (isinstance(metaboard[row][col], list)):
metaboard[row][col][type][pos] += alignment
except IndexError: pass
#Define function to screen entire metaboard for invalidation
def screen(metaboard, selfSignature, opponentSignature, winCond):
#Define function to rotate board 90 degree counter-clockwise with perspective to keeping OG board intact
def rotate(board):
#Define function to inverse board vertically
def invertY(board):
invertYBoard = []
dimensionY = len(board)
for row in range(dimensionY):
invertYBoard.append(board[dimensionY - row - 1])
return invertYBoard
rotateBoard = []
dimensionY = len(board)
dimensionX = len(board[0])
for col in range(dimensionX):
column = [board[row][col] for row in range(dimensionY)]
rotateBoard.append(column)
return invertY(rotateBoard)
#Define function to screen the top left corner of the board
def screenTopLeftCorner(metaboard, winCond, pos, name):
for row in range(winCond - 1):
for col in range(winCond - 1 - row):
if (isinstance(metaboard[row][col], list)):
#print(f'nullify {row}:{col}\'s danger and potential in the {name} diagonal')
metaboard[row][col][0][pos] = 0
metaboard[row][col][1][pos] = 0
#Define function to screen metaboard to invalidate 'type' from signature (e.g, invalidate dangers between two blocked self) horizontally
def screenHorizontal(metaboard, signature, type, winCond, pos):
dimensionX = len(metaboard[0])
if type == 'danger': type = 0
else: type = 1
#Format all selfSignature's coords found in each row
#sus = [susRow1, susRow3, ...]
#susRow1 = [[col1, row], [col3, row], ...]
sus = []
for row in metaboard:
susEachRow = []
for col in row:
if (col == signature): susEachRow.append([row.index(col), metaboard.index(row)])
sus.append(susEachRow)
sus = [susEachRow for susEachRow in sus if len(susEachRow) != 0]
#Filter out all invalid segments between two blocked self horizontally
for susEachRow in sus:
for i in range(len(susEachRow) - 1):
if (2 <= susEachRow[i + 1][0] - susEachRow[i][0] <= winCond):
for k in range(0, susEachRow[i + 1][0] - susEachRow[i][0]):
if (isinstance(metaboard[susEachRow[i][1]][susEachRow[i][0] + k], list)):
#print(f'Due to being blocked on both ends by {signature} at coordinates {susEachRow[i][0]}:{susEachRow[i][1]} and {susEachRow[i + 1][0]}:{susEachRow[i + 1][1]}, the position with the coordinates {susEachRow[i][1]}:{susEachRow[i][0] + k} has been nullified of its {type}\'s {pos}.')
metaboard[susEachRow[i][1]][susEachRow[i][0] + k][type][pos] = 0
#Filter out all invalid segments between self and border
for susEachRow in sus:
start = susEachRow[0]
end = susEachRow[-1]
if (1 <= start[0] < winCond):
for k in range(0, start[0]):
if (isinstance(metaboard[start[1]][k], list)):
#print(f'Due to being blocked on both ends by {signature} at coordinates {start[0]}:{start[1]} and the border, the position with the coordinates {start[1]}:{k} has been nullified of its {type}\'s {pos}.')
metaboard[start[1]][k][type][pos] = 0
if (1 <= dimensionX - end[0] - 1 < winCond):
for k in range(0, dimensionX - end[0] - 1):
if (isinstance(metaboard[end[1]][end[0] + k], list)):
#print(f'Due to being blocked on both ends by {signature} at coordinates {end[0]}:{end[1]} and the border, the position with the coordinates {end[1]}:{end[0] + k} has been nullified of its {type}\'s {pos}.')
metaboard[end[1]][end[0] + k][type][pos] = 0
return metaboard
#Define function to screen metaboard to invalidate 'type' from signature (e.g, invalidate dangers between two blocked self) diagonally
def screenDiagonal(metaboard, signature, type, winCond, pos):
dimensionY = len(metaboard)
dimensionX = len(metaboard[0])
if type == 'danger': type = 0
else: type = 1
#Format all selfSignature's coords found in each diagonal
#susDiagDown, Up, sus = [susDiag1, susDiag3, ...]
#susDiag1 = [[col1, row1], [col3, row3], ...]
sus = []
susDiagDown = []
lenSusDiagDown = []
susDiagUp = []
lenSusDiagUp = []
susDuplicate = []
for i in range(dimensionY):
susEachDiagDown = []
originalDiagLen = 0
for j in range(dimensionY):
try:
if (metaboard[i + j][j] == signature): susEachDiagDown.append([i + j, j])
originalDiagLen += 1
except IndexError:
pass
susDiagDown.append(susEachDiagDown)
if (len(susEachDiagDown) != 0):
lenSusDiagDown.append(originalDiagLen)
else: lenSusDiagDown.append(0)
for i in range(dimensionX):
susEachDiagUp = []
originalDiagLen = 0
for j in range(dimensionX):
try:
if (metaboard[j][i + j] == signature): susEachDiagUp.append([j, i + j])
originalDiagLen += 1
except IndexError: pass
susDiagUp.append(susEachDiagUp)
if (len(susEachDiagUp) != 0):
lenSusDiagUp.append(originalDiagLen)
else: lenSusDiagUp.append(0)
sus.extend(susDiagDown)
sus.extend(susDiagUp)
for i in range(min(dimensionX, dimensionY)):
if (metaboard[i][i] == signature): susDuplicate.append([i, i])
sus.remove(susDuplicate)
susDiagUp = [susEachDiag for susEachDiag in susDiagUp if len(susEachDiag) != 0]
lenSusDiagUp = [eachLen for eachLen in lenSusDiagUp if eachLen != 0]
susDiagDown = [susEachDiag for susEachDiag in susDiagDown if len(susEachDiag) != 0]
lenSusDiagDown = [eachLen for eachLen in lenSusDiagDown if eachLen != 0]
#Filter out all invalid segments between two blocked self diagontally
for susEachDiag in sus:
for i in range(len(susEachDiag) - 1):
if (2 <= susEachDiag[i + 1][0] - susEachDiag[i][0] <= winCond):
for k in range(0, susEachDiag[i + 1][0] - susEachDiag[i][0]):
if (isinstance(metaboard[susEachDiag[i][0] + k][susEachDiag[i][1] + k], list)):
#print(f'Due to being blocked on both ends by {signature} at coordinates {susEachDiag[i][0]}:{susEachDiag[i][1]} and {susEachDiag[i + 1][0]}:{susEachDiag[i + 1][1]}, the position with the coordinates {susEachDiag[i][0] + k}:{susEachDiag[i][1] + k} has been nullified of its {type}\'s {pos}.')
metaboard[susEachDiag[i][0] + k][susEachDiag[i][1] + k][type][pos] = 0
#Filter out all invalid segments between self and border for susDiagUp
for susEachDiag in susDiagUp:
start = susEachDiag[0]
end = susEachDiag[-1]
if (1 <= min(start[0], start[1]) < winCond):
for k in range(0, min(start[0], start[1]) + 1):
if (isinstance(metaboard[start[0] - k][start[1] - k], list)):
#print(f'Due to being blocked on both ends by {signature} at coordinates {start[0]}:{start[1]} and the corner, the position with the coordinates {start[0] + k}:{start[1] + k} has been nullified of its {type}\'s {pos}.')
metaboard[start[0] - k][start[1] - k][type][pos] = 0
if (1 <= lenSusDiagUp[susDiagUp.index(susEachDiag)] - min(end[0], end[1]) <= winCond):
for k in range(0, lenSusDiagUp[susDiagUp.index(susEachDiag)] - min(end[0], end[1])):
if (isinstance(metaboard[end[0] + k][end[1] + k], list)):
#print(f'Due to being blocked on both ends by {signature} at coordinates {end[0]}:{end[1]} and the corner, the position with the coordinates {end[0] + k}:{end[1] + k} has been nullified of its {type}\'s {pos}.')
metaboard[end[0] + k][end[1] + k][type][pos] = 0
#Filter out all invalid segments between self and border for susDiagDown
for susEachDiag in susDiagDown:
start = susEachDiag[0]
end = susEachDiag[-1]
if (1 <= min(start[0], start[1]) < winCond):
for k in range(0, min(start[0], start[1]) + 1):
if (isinstance(metaboard[start[0] - k][start[1] - k], list)):
#print(f'Due to being blocked on both ends by {signature} at coordinates {start[0]}:{start[1]} and the corner, the position with the coordinates {start[0] + k}:{start[1] + k} has been nullified of its {type}\'s {pos}.')
metaboard[start[0] - k][start[1] - k][type][pos] = 0
if (1 <= lenSusDiagDown[susDiagDown.index(susEachDiag)] - min(end[0], end[1]) <= winCond):
for k in range(0, lenSusDiagDown[susDiagDown.index(susEachDiag)] - min(end[0], end[1])):
if (isinstance(metaboard[end[0] + k][end[1] + k], list)):
#print(f'Due to being blocked on both ends by {signature} at coordinates {end[0]}:{end[1]} and the corner, the position with the coordinates {end[0] + k}:{end[1] + k} has been nullified of its {type}\'s {pos}.')
metaboard[end[0] + k][end[1] + k][type][pos] = 0
return metaboard
#pos: index of relevant value (0: horizontal, 1: vertical, 2: NW - SE, 3: NE - SW)
#Screen top left corner
screenTopLeftCorner(metaboard, winCond, 3, 'top left')
metaboard = rotate(metaboard)
#Screen top right corner
screenTopLeftCorner(metaboard, winCond, 2, 'top right')
metaboard = rotate(metaboard)
#Screen bottom right corner
screenTopLeftCorner(metaboard, winCond, 3, 'bottom right')
metaboard = rotate(metaboard)
#Screen bottom left corner
screenTopLeftCorner(metaboard, winCond, 2, 'bottom left')
metaboard = rotate(metaboard)
#Screen horizontally
screenHorizontal(metaboard, selfSignature, 'danger' , winCond, 0)
screenHorizontal(metaboard, opponentSignature, 'opportunity' , winCond, 0)
metaboard = rotate(metaboard)
#Screen vertically
screenHorizontal(metaboard, selfSignature, 'danger' , winCond, 1)
screenHorizontal(metaboard, opponentSignature, 'opportunity' , winCond, 1)
for i in range(3): metaboard = rotate(metaboard)
#Screen NW-SE diagonally
screenDiagonal(metaboard, selfSignature, 'danger' , winCond, 2)
screenDiagonal(metaboard, opponentSignature, 'opportunity' , winCond, 2)
metaboard = rotate(metaboard)
#Screen NE-SW diagonally
screenDiagonal(metaboard, selfSignature, 'danger' , winCond, 3)
screenDiagonal(metaboard, opponentSignature, 'opportunity' , winCond, 3)
for i in range(3): metaboard = rotate(metaboard)
metaboard = mapMetaBoard(len(board[0]), len(board))
dangerCoords = locate([opponentSignature], board)
opportunityCoords = locate([selfSignature], board)
for coord in dangerCoords:
metaboard[coord[1]][coord[0]] = opponentSignature
for coord in opportunityCoords:
metaboard[coord[1]][coord[0]] = selfSignature
for coord in dangerCoords:
sweep(metaboard, coord, 'danger', opponentSignature, selfSignature, winCond)
for coord in opportunityCoords:
sweep(metaboard, coord, 'opportunity', opponentSignature, selfSignature, winCond)
#Screening applies for difficulty 2 and up
if (difficulty >= 2):
screen(metaboard, selfSignature, opponentSignature, winCond)
return metaboard
    #Define function to choose between aggressive or defensive
def stance(metaboard, difficulty):
dangerList = []
opportunityList = []
for row in metaboard:
for col in row:
if (isinstance(col, list)):
dangerList.append(max(col[0]))
opportunityList.append(max(col[1]))
pressingDanger = max(dangerList)
pressingOpportunity = max(opportunityList)
#print(f'Highest danger is {pressingDanger}, whilst highest opportunity is {pressingOpportunity}.')
#'Tactical' playstyle applies only for difficulty 3
if (difficulty >= 3):
if (pressingOpportunity > pressingDanger):
return 'aggressive', pressingOpportunity
elif (pressingOpportunity == pressingDanger):
return 'tactical', pressingOpportunity
else:
return 'defensive', pressingDanger
else:
if (pressingOpportunity >= pressingDanger):
return 'aggressive', pressingOpportunity
else:
return 'defensive', pressingDanger
#Define function to make a play
@tail_recursive
def decide(forecasted, checked, style, value, metaboard, difficulty):
if style == 'aggressive': type = 1
elif style == 'defensive': type = 0
else: type = 2
if (style in ['aggressive', 'defensive']):
for row in metaboard:
for col in row:
if (isinstance(col, list)):
if max(col[type]) == value:
#print(col[type].index(value))
x, y = row.index(col), metaboard.index(row)
else:
returnList = []
maxTracker = []
for row in range(len(metaboard)):
for col in range(len(metaboard[0])):
if (isinstance(metaboard[row][col], list)):
if (max(metaboard[row][col][0]) == value) or (max(metaboard[row][col][1]) == value):
#print(col[type].index(value))
returnList.append([col, row])
maxTracker.append(sum(metaboard[row][col][0]) + sum(metaboard[row][col][1]))
x, y = returnList[maxTracker.index(max(maxTracker))][0], returnList[maxTracker.index(max(maxTracker))][1]
if [*forecasted, [x, y]] not in checked:
return x, y
else:
#For a checked position, set metaboard value to negative
metaboardTemp = deepcopy(metaboard)
metaboardTemp[y][x] = [[-1, -1, -1, -1], [-1, -1, -1, -1]]
style, newValue = stance(metaboardTemp, difficulty)
#When all potential positions have been checked, all potential metaboard values will have been set to negative => depleted
if newValue != value: raise ValueError
return recurse(forecasted, checked, style, newValue, metaboardTemp, difficulty)
#Define function to swap self signature and opponent signature
def swap(selfSignature, opponentSignature):
temp = selfSignature
selfSignature = opponentSignature
opponentSignature = temp
return selfSignature, opponentSignature
#Define function to determine if terminal node has been reached
def reachedTerminal(forecasted):
if len(forecasted) >= 1:
last = forecasted[-1][0]
return isinstance(last, bool) or isinstance(last, float)
return False
#Define function to evaluate value of self node
def evalSelf(selfPlaying: bool, possibilities, iteration):
def countExact(values, countItem):
counted = 0
for value in values:
if value is countItem: counted += 1
return counted
#Define function to collapse all forecasted paths with same iteration count
def collapse(selfPlaying: bool, possibilities, iteration):
def contains(values, comparisonItem):
for value in values:
if value is comparisonItem: return True
return False
#Extract all forecasted paths with same iteration count
#print("All possibilities at this stage are: ", possibilities)
extracted = deepcopy([possibility for possibility in possibilities if possibility[-1][1] == iteration])
#if selfPlaying: print("Node layer ", iteration, " and maximizer is playing.")
#else: print("Node layer ", iteration, " and minimizer is playing.")
#print("Before collapse, all values at node layer ", iteration, " is ", extracted)
tempPossibilities = deepcopy([possibility for possibility in possibilities if possibility not in extracted])
#Heuristics: if only 1 or less forecasted at current node, skip collapse
if len(extracted) == 1:
#print("Taking shortcut to skip collapse because only 1 forecasted detected at layer ", iteration, ": ", extracted[0])
tempPossibilities.append(extracted[0])
return tempPossibilities
elif len(extracted) == 0:
#print("Taking shortcut to skip collapse because no forecasted detected at layer ", iteration)
return tempPossibilities
values = [extraction[-1][0] for extraction in extracted]
#print("Performing collapse on ", values)
tieLimiter = False
for value in values:
if isinstance(value, float): tieLimiter = True
#Prioritize boolean: if True exists, all positive possibilities can be pruned
if contains(values, True) and selfPlaying:
values = [value for value in values if not (isinstance(value, float) and value > 0)]
if contains(values, False) and not selfPlaying:
values = [value for value in values if not (isinstance(value, float) and value < 0)]
#When both True and False exists, eliminate any in-between
if contains(values, True) and contains(values, False):
values = [value for value in values if not isinstance(value, float)]
#print("Preliminary sifting is done. Now performing collapse on ", values)
if selfPlaying:
#Due to Python's max([False, 0.0]) -> False, must remove all False if 0.0 exists in maximizer's turn
if tieLimiter and contains(values, False):
values = [value for value in values if value is not False]
returnValue = max(values)
else:
#Due to Python's min([0.0, False]) -> 0.0, must remove all float if False exists in minimizer's turn
if contains(values, False):
returnValue = False
else:
returnValue = min(values)
#print("Collapse done, ", returnValue)
#Deeper eval performed when multiple returnValue in values; choose longest steps for min; shortest steps for max
#Heuristics: when multiple combinations of moves result in same state, keep only 1
if countExact(values, returnValue) > 1:
#print("Multiple forecasted evaluating to the same value detected. Comparing steps for each.")
extractedShortlisted = [forecasted for forecasted in extracted if forecasted[-1][0] is returnValue]
lenList = [len(forecasted) for forecasted in extractedShortlisted]
if selfPlaying:
fullReturnValue = extractedShortlisted[lenList.index(min(lenList))]
else:
fullReturnValue = extractedShortlisted[lenList.index(max(lenList))]
#print("From ", extractedShortlisted, " choose ", fullReturnValue)
else:
#Reconstruct full format of possibility holding returnValue and add back to possibilities
fullReturnValue = [possibility for possibility in extracted if possibility[-1][0] is returnValue][0]
#print("After collapse, all values at node layer ", iteration, " is ", fullReturnValue)
tempPossibilities.append(fullReturnValue)
return tempPossibilities
#Define function to decrement all forecasted paths (should be 1) with iteration count matching current (bubble-up)
def passUp(possibilities, iteration):
for possibility in possibilities:
if possibility[-1][1] == iteration: possibility[-1][1] -= 1
#Identify if a duplicated iteration count exists in possibilities, then collapse all those forecasted depending on self nature
iterationList = [possibility[-1][1] for possibility in possibilities]
#print(iterationList)
for iterationItem in iterationList:
if countExact(iterationList, iterationItem) > 1:
possibilities = collapse(selfPlaying, possibilities, iteration)
#print(iteration)
if (iteration > 0):
passUp(possibilities, iteration)
return possibilities
#Even iteration = machine plays; odd = human
#maxDepthSearch = layer of nodes forecasted ahead by AI -- CAREFUL! O(n) time complexity = b ** m, with m being maxDepthSearch and b being branching factor = (boardDimensionX * boardDimensionY - claimed tiles)
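    #For example, on an empty 26x26 preset board the branching factor is 676, so even a
    #depth of 3 means roughly 676 * 675 * 674, about 3.1e8, forecasted positions; hence
    #maxDepthSearch is kept at 2 or 3 below for larger boards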
#For 3x3 board, set to 10 for full coverage
if len(board) == len(board[0]) and len(board) == 3:
maxDepthSearch = 10
#If game is in developing phase (i.e, number of placed marks <= 1/2 win condition)
elif max(len(locate(selfSignature, board)), len(locate(opponentSignature, board))) <= winCond/2:
maxDepthSearch = 2
else:
maxDepthSearch = 3
#possibilities = [forecasted1, forecasted2, ...]
#forecasted = [[x1, y1], [x2, y2], [x3, y3]..., [True, iteration]] containing moves of both players until end & boolean of win state(True when self is winner, False otherwise)
#forecasted = [[x1, y1], [x2, y2], [x3, y3]..., [score: float, iteration]] containing moves of both players until maxDepthSearch reached, score is evaluated to assign to board state (0 when tie, +highestTacticalValue when it's self's turn, - otherwise)
#Evaluate value of self node depending on min/max nature, run when all child nodes to maxDepthSearch are explored/ when terminal node is detected
#evalSelf only sifts through forecasteds and collapses those having the same iteration value (vying to value same node)
#When bubble up 1 node, take all forecasteds in possibilities with matching current iteration (if everything is right this should already be collapsed to only 1) and decrement that (to imply this value is passed upwards to parent node and is now parent node's originating value)
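    #Illustrative example (coordinates made up): forecasted = [[3, 4], [2, 2], [3, 5], [True, 3]]
    #records three forecasted moves followed by a terminal marker, where True means the line
    #ends in a computer win, False a loss, 0.0 a tie, and any other float a heuristic score
    #assigned when maxDepthSearch is reached; the second element is the node layer (iteration)
    #at which that value was assigned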
if reachedTerminal(forecasted):
selfPlaying = (iteration % 2 == 0)
forecastedCopy = deepcopy(forecasted)
possibilities.append(forecastedCopy)
possibilities = evalSelf(selfPlaying, possibilities, iteration)
iteration -= 1
#Reset back 1 node higher
forecasted.pop(-1)
forecasted.pop(-1)
return recurse(boardHistory, depleted, checked, iteration, winCond, forecasted, possibilities, board, selfSignature, opponentSignature, difficulty, False)
#Terminal node: winCond is met/maxDepthSearch reached/no possible moves left
if win(board, winCond, selfSignature, opponentSignature) or win(board, winCond, opponentSignature, selfSignature) or len(locate(' ', board)) == 0 or iteration == maxDepthSearch:
if forecasted not in checked:
checked.append(deepcopy(forecasted))
#If self/other is winner, document move
if win(board, winCond, selfSignature, opponentSignature):
#If it's computer's turn, and computer wins
if (iteration % 2 == 0):
forecasted.append([True, iteration])
#print("Forecasted a possible win if moves are as followed: ", forecasted)
#viewBoard(board)
else:
forecasted.append([False, iteration])
#print("Forecasted a possible loss if moves are as followed: ", forecasted)
#viewBoard(board)
elif win(board, winCond, opponentSignature, selfSignature):
#If it's computer's turn, and computer's opponent wins
if (iteration % 2 == 0):
forecasted.append([False, iteration])
#print("Forecasted a possible loss if moves are as followed: ", forecasted)
#viewBoard(board)
else:
forecasted.append([True, iteration])
#print("Forecasted a possible win if moves are as followed: ", forecasted)
#viewBoard(board)
elif iteration == maxDepthSearch:
metaboard = meta(board, opponentSignature, selfSignature, winCond, difficulty)
try:
style, value = stance(metaboard, difficulty)
#If self's turn
if (iteration % 2 == 0):
forecasted.append([float(value), iteration])
#print("Max search depth reached: ", forecasted)
#viewBoard(board)
else:
forecasted.append([float(-value), iteration])
#print("Max search depth reached: ", forecasted)
#viewBoard(board)
#When maxDepthSearch is reached, but game is also tied
except ValueError:
forecasted.append([0.0, iteration])
#print("Forecasted a possible tie at max depth search if moves are as followed: ", forecasted)
#viewBoard(board)
#When tie is reached through tiles depletion, score is set to 0.0
else:
forecasted.append([0.0, iteration])
#print("Forecasted a possible tie if moves are as followed: ", forecasted)
#viewBoard(board)
#Reset back 1 node higher
boardHistory.pop(-1)
board = deepcopy(boardHistory[-1])
#print("Breakpoint 2: Reset board back to ")
#viewBoard(board)
selfSignature, opponentSignature = swap(selfSignature, opponentSignature)
return recurse(boardHistory, depleted, checked, iteration, winCond, forecasted, possibilities, board, selfSignature, opponentSignature, difficulty, False)
#At each node layer, make a decision and "forecast" board and metaboard, then switch position with opponent and do the same
#Normal case: when self node is not terminal, and all children are not depleted yet/maxDepthSearch is not reached yet
#dimension = len(board)
metaboard = meta(board, opponentSignature, selfSignature, winCond, difficulty)
#Heuristics: if there is only one available move left, take that move
if (len(locate(' ', board)) == 1):
x = locate(' ', board)[0][0]
y = locate(' ', board)[0][1]
#For actual move; only apply when not projecting self as opponent
if (len(checked) == 0 and iteration == 0):
alphabet = ascii_uppercase
print(f'Computer has decided to play at column {alphabet[x]} and row {alphabet[y]}.\n\n')
board = boardHistory[0]
board[y][x] = selfSignature
viewBoard(board)
return board
#For a forecasted move
elif [*forecasted, [x, y]] not in checked:
forecasted.append([x, y])
checked.append(deepcopy(forecasted))
board[y][x] = selfSignature
boardHistory.append(deepcopy(board))
iteration += 1
selfSignature, opponentSignature = swap(selfSignature, opponentSignature)
return recurse(boardHistory, depleted, checked, iteration, winCond, forecasted, possibilities, board, selfSignature, opponentSignature, difficulty, False)
style, value = stance(metaboard, difficulty)
try:
#For first move only
if len(locate(selfSignature, board)) == 0 and len(locate(opponentSignature, board)) == 0:
#For symmetrical board or customized board dimension smaller than twice win condition
if len(board) == len(board[0]) or (len(board) < winCond * 2) or (len(board[0]) < winCond * 2):
move = [int(len(board[0])/2), int(len(board)/2)]
#For customized board dimension larger than twice win condition
else:
move = [randint(winCond, len(board[0]) - 1 - winCond), randint(winCond, len(board) - 1 - winCond)]
x = move[0]
y = move[1]
alphabet = ascii_uppercase
print(f'Computer has decided to play at column {alphabet[x]} and row {alphabet[y]}.\n\n')
board = boardHistory[0]
board[y][x] = selfSignature
viewBoard(board)
return board
else:
x, y = decide(forecasted, checked, style, value, metaboard, difficulty)
except ValueError:
depleted = True
#All child nodes had been depleted (i.e, checked has been populated with all possible forecasted combinations)
if depleted:
depleted = False
selfPlaying = (iteration % 2 == 0)
possibilities = evalSelf(selfPlaying, possibilities, iteration)
iteration -= 1
#If base case had been evaluated; root has been given value; iteration is negative => make a move
#All child branches had been depleted
if iteration < 0:
#print(possibilities)
move = possibilities[0][0]
x = move[0]
y = move[1]
alphabet = ascii_uppercase
print(f'Computer has decided to play at column {alphabet[x]} and row {alphabet[y]}.\n\n')
board = boardHistory[0]
board[y][x] = selfSignature
viewBoard(board)
return board
forecasted.pop(-1)
boardHistory.pop(-1)
board = deepcopy(boardHistory[-1])
#print("Breakpoint 1: Reset board back to ")
#viewBoard(board)
selfSignature, opponentSignature = swap(selfSignature, opponentSignature)
return recurse(boardHistory, depleted, checked, iteration, winCond, forecasted, possibilities, board, selfSignature, opponentSignature, difficulty, False)
forecasted.append([x, y])
checked.append(deepcopy(forecasted))
board[y][x] = selfSignature
#print(selfSignature, " took the move ", [x, y])
#viewBoard(board)
boardHistory.append(deepcopy(board))
#print(f'Assessing risk and opportunity, taking {style} move this turn at col {x}, row {y}.')
# valid = False
# while (not valid):
# x = randint(0, dimension - 1)
# y = randint(0, dimension - 1)
# if board[y][x] == ' ': valid = True
iteration += 1
#Swap player each turn
selfSignature, opponentSignature = swap(selfSignature, opponentSignature)
return recurse(boardHistory, depleted, checked, iteration, winCond, forecasted, possibilities, board, selfSignature, opponentSignature, difficulty, False)
#Define winning
def win(board, winCond, signature, opponentSignature):
#Define function to determine box containing played area
def box(board):
#Define function to find first occurence of 'X' or 'O', row-wise; if none is found, return 0
#Value is [signature, opponentSignature]
def locate(value, board):
dimensionY = len(board)
dimensionX = len(board[0])
for row in range(dimensionY):
for col in range(dimensionX):
if (board[row][col] in value):
return row
return 0
#Define function to inverse board vertically
def invertY(board):
invertYBoard = []
dimensionY = len(board)
for row in range(dimensionY):
invertYBoard.append(board[dimensionY - row - 1])
return invertYBoard
#Define function to rotate board 90 degree
def rotate(board):
rotateBoard = []
dimensionY = len(board)
dimensionX = len(board[0])
for col in range(dimensionX):
column = [board[row][col] for row in range(dimensionY)]
rotateBoard.append(column)
return rotateBoard
dimensionY = len(board)
dimensionX = len(board[0])
boundaryN = locate([signature, opponentSignature], board)
boundaryS = dimensionY - locate([signature, opponentSignature], invertY(board)) - 1
boundaryW = locate([signature, opponentSignature], rotate(board))
boundaryE = dimensionX - locate([signature, opponentSignature], invertY(rotate(board))) - 1
box = []
for row in range(boundaryN, boundaryS + 1):
boxRow = [board[row][col] for col in range(boundaryW, boundaryE + 1)]
box.append(boxRow)
return box
#Create as many winCond x winCond grids as needed to cover the entire played area
def grid(box, winCond):
dimensionY = len(box)
dimensionX = len(box[0])
gridY = dimensionY - winCond + 1
if (gridY < 1): gridY = 1
gridX = dimensionX - winCond + 1
if (gridX < 1): gridX = 1
#List of grids
grids = []
for offsetX in range(gridX):
for offsetY in range(gridY):
grid = []
for row in range(offsetY, offsetY + winCond):
rowY = []
for col in range(offsetX, offsetX + winCond):
try:
rowY.append(box[row][col])
except IndexError: pass
grid.append(rowY)
grids.append(grid)
return grids
for board in grid(box(board), winCond):
#Within each grid:
dimensionY = len(board)
dimensionX = len(board[0])
#Count 'O's in a row
for row in range(dimensionY):
if (board[row].count(signature) >= winCond):
return True
#Count 'O's in a column
columns = []
for col in range(dimensionX):
try:
columns.append([row[col] for row in board])
except IndexError: pass
for col in columns:
if (col.count(signature) >= winCond):
return True
#Count 'O's in a diagonal line
dimension = min(dimensionX, dimensionY)
diagonalsNW = []
diagonalsNE = []
for i in range(dimension):
diagonalNW = []
diagonalNE = []
for j in range(dimension):
try:
diagonalNW.append(board[j][j])
except IndexError: pass
try:
diagonalNE.append(board[j][dimension - j - 1])
except IndexError: pass
diagonalsNW.append(diagonalNW)
diagonalsNE.append(diagonalNE)
for diagonalNW in diagonalsNW:
if (diagonalNW.count(signature) >= winCond):
return True
for diagonalNE in diagonalsNE:
if (diagonalNE.count(signature) >= winCond):
return True
#Game loop
print('Welcome to a game of Tic-tac-toe!\nThe rule is simple: block your opponent before they can get a long enough streak in a continuous row, column or diagonal to win.\n')
mode = True
while (mode):
gamemode = input('Before we start, there are two gamemodes: custom and preset. Which one would you prefer?\n(c) for custom, (p) for preset. ')
if (gamemode not in ['c', 'p']):
print('Unrecognized input command. Please read the instructions carefully and try again.\n')
else:
mode = False
print('\n\n')
#Configuration settings for custom gamemode
configure = True
while (configure):
#Set custom dimension
invalid = True
while (invalid and gamemode == 'c'):
try:
dimensionX, dimensionY = input('Input dimension for game initialization:\n(width x length): ').split('x')
dimensionX = int(dimensionX)
dimensionY = int(dimensionY)
invalid = False
except:
print('Invalid input detected. Please try again.\n')
#Preset dimension
if (gamemode == 'p'):
print('Default grid set to 26x26.')
dimensionX = 26
dimensionY = 26
#Set win condition
valid = False
while (not valid and gamemode == 'c'):
try:
winCond = input('Input streak size to count as win: ')
winCond = int(winCond)
if (not isinstance(winCond, int) or 3 > winCond > min(dimensionX, dimensionY)): raise TypeError
valid = True
except:
print('Invalid input detected. Please try again.\n')
#Preset win condition
if (gamemode == 'p'):
print('Default win streak set to 5.')
winCond = 5
#Set difficulty
chose = False
while (not chose and gamemode == 'c'):
try:
difficulty = int(input('Choose difficulty (easiest: 1 - hardest: 3): '))
if (3 < difficulty or difficulty < 1): raise ValueError
chose = True
except:
print('Invalid input detected. Please try again.\n')
#Preset difficulty
if (gamemode == 'p'):
print('Default difficulty set to 3.')
difficulty = 3
#Set player's marker
proper = False
while (not proper and gamemode == 'c'):
marker = input('Choose your prefered marker:\n(o) for \'O\', (x) for \'X\': ')
if (marker not in ['x', 'o']):
print('Invalid input detected. Please try again.\n')
else:
proper = True
if (marker == 'o'):
opponentSignature = 'O'
selfSignature = 'X'
else:
opponentSignature = 'X'
selfSignature = 'O'
#Preset marker
if (gamemode == 'p'):
print('Default player marker set to \'X\'.')
opponentSignature = 'X'
selfSignature = 'O'
#Choose who goes first
ok = False
while (not ok and gamemode == 'c'):
playerGoesFirst = input('Do you want to go first?\n(y) for yes, (n) for no: ')
if (playerGoesFirst not in ['y', 'n']):
print('Invalid input detected. Please try again.\n')
else:
ok = True
playerGoesFirst = (playerGoesFirst == 'y')
#Preset first play
if (gamemode == 'p'):
print('Default: computer goes first.')
playerGoesFirst = False
#Replay loop
replay = True
while (replay):
print('\n\n')
board = mapBoard(int(dimensionX), int(dimensionY), ' ')
viewBoard(board)
while (True):
try:
locate([' '], board)[0]
except IndexError:
print('\nIt\'s a tie!')
break
#Player plays
if (playerGoesFirst):
mark(board, opponentSignature)
if (win(board, winCond, opponentSignature, selfSignature)):
print('Congratulations, you won!')
break
playerGoesFirst = True
try:
locate([' '], board)[0]
except IndexError:
print('\nIt\'s a tie!')
break
print('\n\nComputer is calculating...')
#Computer plays
board = play([deepcopy(board)], False, [], 0, winCond, [], [], board, selfSignature, opponentSignature, difficulty)
if (win(board, winCond, selfSignature, opponentSignature)):
print('Sorry, you lost!')
break
#Replay choice
makingChoice = True
while makingChoice:
choice = input('\n\nDo you want to replay?\n(y) to replay with current configurations, (n) to quit, (p) to play with recommended configurations, or (c) to replay with different configurations.\n')
if (choice == 'y'):
replay = True
configure = False
print('\n\n')
makingChoice = False
elif (choice == 'n'):
replay = False
configure = False
makingChoice = False
elif (choice == 'p'):
replay = False
configure = True
gamemode = 'p'
print('\n\n')
makingChoice = False
elif (choice == 'c'):
replay = False
configure = True
gamemode = 'c'
print('\n\n')
makingChoice = False
else:
print('Invalid input detected. Please try again.\n')
input('\nPress ENTER to quit.') | [
[
[
49,
56
],
[
36390,
36397
],
[
36437,
36444
]
],
[
[
76,
91
],
[
603,
618
],
[
1022,
1037
],
[
35029,
35044
],
[
36552,
36567
],
[
37563,
37578
]
],
[
[
93,
108
],
[
1054,
1069
]
],
[
[
131,
143
],
[
3567,
3579
]
],
[
[
161,
169
],
[
47457,
47465
],
[
30807,
30815
],
[
31548,
31556
],
[
33979,
33987
],
[
35424,
35432
],
[
35518,
35526
],
[
37884,
37892
],
[
38284,
38292
],
[
38437,
38445
],
[
22421,
22429
],
[
24177,
24185
],
[
24568,
24576
]
],
[
[
197,
211
],
[
2208,
2222
],
[
21008,
21022
]
],
[
[
213,
220
],
[
31071,
31078
],
[
34182,
34189
],
[
35667,
35674
],
[
38087,
38094
],
[
38858,
38865
],
[
22781,
22788
]
],
[
[
257,
265
],
[
46655,
46663
]
],
[
[
403,
415
],
[
19027,
19039
]
],
[
[
570,
579
],
[
46711,
46720
],
[
1754,
1763
],
[
35235,
35244
],
[
36758,
36767
],
[
37769,
37778
]
],
[
[
983,
987
],
[
46977,
46981
]
],
[
[
1895,
1901
],
[
46783,
46789
],
[
47233,
47239
],
[
29506,
29512
],
[
29541,
29547
],
[
31426,
31432
],
[
34780,
34786
],
[
34819,
34825
],
[
34856,
34862
],
[
35918,
35924
],
[
35962,
35968
],
[
19090,
19096
],
[
19153,
19159
]
],
[
[
2227,
2231
],
[
47451,
47455
]
],
[
[
39027,
39030
],
[
47028,
47031
],
[
47575,
47578
],
[
31308,
31311
],
[
31365,
31368
],
[
31629,
31632
],
[
32166,
32169
]
],
[
[
43377,
43381
],
[
43396,
43400
]
],
[
[
43407,
43415
],
[
43558,
43566
],
[
43883,
43891
],
[
44261,
44269
],
[
44438,
44446
],
[
44819,
44827
],
[
44967,
44975
],
[
45288,
45296
],
[
45450,
45458
],
[
45928,
45936
],
[
46124,
46132
],
[
46464,
46472
]
],
[
[
43706,
43710
],
[
43396,
43400
]
],
[
[
43778,
43787
],
[
43802,
43811
]
],
[
[
43845,
43852
],
[
43871,
43878
]
],
[
[
43926,
43936
],
[
44069,
44079
]
],
[
[
43938,
43948
],
[
44110,
44120
]
],
[
[
44052,
44062
],
[
44637,
44647
],
[
46668,
46678
]
],
[
[
44093,
44103
],
[
44649,
44659
],
[
46685,
46695
]
],
[
[
44134,
44141
],
[
43871,
43878
]
],
[
[
44331,
44341
],
[
44637,
44647
],
[
46668,
46678
]
],
[
[
44355,
44365
],
[
44649,
44659
],
[
46685,
46695
]
],
[
[
44399,
44404
],
[
44428,
44433
]
],
[
[
44481,
44488
],
[
44562,
44569
]
],
[
[
44548,
44555
],
[
44602,
44609
],
[
44623,
44630
],
[
47039,
47046
],
[
47489,
47496
],
[
47586,
47593
]
],
[
[
44691,
44696
],
[
44428,
44433
]
],
[
[
44891,
44898
],
[
47039,
47046
],
[
47489,
47496
],
[
47586,
47593
]
],
[
[
44928,
44933
],
[
44957,
44962
]
],
[
[
45010,
45020
],
[
45103,
45113
],
[
45117,
45127
],
[
47547,
47557
]
],
[
[
45163,
45168
],
[
44957,
44962
]
],
[
[
45360,
45370
],
[
47547,
47557
]
],
[
[
45409,
45415
],
[
45439,
45445
]
],
[
[
45476,
45482
],
[
45567,
45573
],
[
45715,
45721
]
],
[
[
45685,
45691
],
[
45439,
45445
]
],
[
[
45747,
45764
],
[
46989,
47006
],
[
47048,
47065
],
[
47528,
47545
],
[
47610,
47627
]
],
[
[
45787,
45800
],
[
47067,
47080
],
[
47513,
47526
],
[
47595,
47608
]
],
[
[
45841,
45858
],
[
46989,
47006
],
[
47048,
47065
],
[
47528,
47545
],
[
47610,
47627
]
],
[
[
45881,
45894
],
[
47067,
47080
],
[
47513,
47526
],
[
47595,
47608
]
],
[
[
46007,
46024
],
[
46989,
47006
],
[
47048,
47065
],
[
47528,
47545
],
[
47610,
47627
]
],
[
[
46039,
46052
],
[
47067,
47080
],
[
47513,
47526
],
[
47595,
47608
]
],
[
[
46091,
46093
],
[
46117,
46119
]
],
[
[
46150,
46165
],
[
46241,
46256
],
[
46409,
46424
],
[
46943,
46958
]
],
[
[
46368,
46370
],
[
46117,
46119
]
],
[
[
46390,
46405
],
[
46943,
46958
]
],
[
[
46537,
46552
],
[
46943,
46958
]
],
[
[
46583,
46589
],
[
46608,
46614
]
],
[
[
46647,
46652
],
[
46721,
46726
],
[
46797,
46802
],
[
46982,
46987
],
[
47032,
47037
],
[
47247,
47252
],
[
47466,
47471
],
[
47506,
47511
]
],
[
[
47177,
47192
],
[
46943,
46958
]
],
[
[
47443,
47448
],
[
47579,
47584
],
[
46797,
46802
],
[
46982,
46987
],
[
47032,
47037
],
[
47247,
47252
],
[
47466,
47471
],
[
47506,
47511
]
],
[
[
47735,
47747
],
[
47769,
47781
]
],
[
[
47795,
47801
],
[
48008,
48014
],
[
48173,
48179
],
[
48309,
48315
],
[
48505,
48511
]
],
[
[
48040,
48046
],
[
46608,
46614
]
],
[
[
48070,
48079
],
[
43802,
43811
]
],
[
[
48134,
48146
],
[
47769,
47781
]
],
[
[
48205,
48211
],
[
46608,
46614
]
],
[
[
48236,
48245
],
[
43802,
43811
]
],
[
[
48270,
48282
],
[
47769,
47781
]
],
[
[
48341,
48347
],
[
46608,
46614
]
],
[
[
48372,
48381
],
[
43802,
43811
]
],
[
[
48405,
48413
],
[
43883,
43891
],
[
44261,
44269
],
[
44438,
44446
],
[
44819,
44827
],
[
44967,
44975
],
[
45288,
45296
],
[
45450,
45458
],
[
45928,
45936
],
[
46124,
46132
],
[
46464,
46472
]
],
[
[
48466,
48478
],
[
47769,
47781
]
],
[
[
48537,
48543
],
[
46608,
46614
]
],
[
[
48568,
48577
],
[
43802,
43811
]
],
[
[
48601,
48609
],
[
43883,
43891
],
[
44261,
44269
],
[
44438,
44446
],
[
44819,
44827
],
[
44967,
44975
],
[
45288,
45296
],
[
45450,
45458
],
[
45928,
45936
],
[
46124,
46132
],
[
46464,
46472
]
],
[
[
48662,
48674
],
[
47769,
47781
]
]
] |
import os
import argparse
import json
import pandas as pd
import bilby
from bilby_pipe.create_injections import InjectionCreator
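# Summary of what follows: read a bilby-style injection JSON, draw matching samples
# from the given prior file via InjectionCreator, merge the two tables, and write a
# filled-in copy of the analysis script into <outdir>/<index>/inference.sh per injection.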
def main():
parser = argparse.ArgumentParser(description="Slurm files from nmma injection file")
parser.add_argument(
"--prior-file",
type=str,
required=True,
help="The prior file from which to generate injections",
)
parser.add_argument(
"--injection-file",
type=str,
required=True,
help="The bilby injection json file to be used",
)
parser.add_argument(
"--analysis-file",
type=str,
required=True,
help="The analysis bash script to be replicated",
)
parser.add_argument("-o", "--outdir", type=str, default="outdir")
args = parser.parse_args()
# load the injection json file
if args.injection_file:
if args.injection_file.endswith(".json"):
with open(args.injection_file, "rb") as f:
injection_data = json.load(f)
datadict = injection_data["injections"]["content"]
dataframe_from_inj = pd.DataFrame.from_dict(datadict)
else:
print("Only json supported.")
exit(1)
if len(dataframe_from_inj) > 0:
args.n_injection = len(dataframe_from_inj)
# create the injection dataframe from the prior_file
injection_creator = InjectionCreator(
prior_file=args.prior_file,
prior_dict=None,
n_injection=args.n_injection,
default_prior="PriorDict",
gps_file=None,
trigger_time=0,
generation_seed=0,
)
dataframe_from_prior = injection_creator.get_injection_dataframe()
# combine the dataframes
dataframe = pd.DataFrame.merge(
dataframe_from_inj,
dataframe_from_prior,
how="outer",
left_index=True,
right_index=True,
)
for index, row in dataframe.iterrows():
with open(args.analysis_file, "r") as file:
analysis = file.read()
outdir = os.path.join(args.outdir, str(index))
if not os.path.isdir(outdir):
os.makedirs(outdir)
priors = bilby.gw.prior.PriorDict(args.prior_file)
priors.to_file(outdir, label="injection")
priorfile = os.path.join(outdir, "injection.prior")
injfile = os.path.join(outdir, "lc.csv")
analysis = analysis.replace("PRIOR", priorfile)
analysis = analysis.replace("OUTDIR", outdir)
analysis = analysis.replace("INJOUT", injfile)
analysis = analysis.replace("INJNUM", str(index))
analysis_file = os.path.join(outdir, "inference.sh")
fid = open(analysis_file, "w")
fid.write(analysis)
fid.close()
if __name__ == "__main__":
main()
| [
[
[
7,
9
],
[
2063,
2065
],
[
2116,
2118
],
[
2151,
2153
],
[
2301,
2303
],
[
2359,
2361
],
[
2638,
2640
]
],
[
[
17,
25
],
[
158,
166
]
],
[
[
33,
37
],
[
1012,
1016
]
],
[
[
45,
57
],
[
1129,
1131
],
[
1757,
1759
]
],
[
[
66,
71
],
[
2189,
2194
]
],
[
[
113,
129
],
[
1408,
1424
]
],
[
[
136,
140
],
[
2796,
2800
]
]
] |
from functools import partial
from typing import NamedTuple, Union
from flake8_annotations import Argument, Function
from flake8_annotations.enums import AnnotationType
class FormatTestCase(NamedTuple):
"""Named tuple for representing our test cases."""
test_object: Union[Argument, Function]
str_output: str
repr_output: str
# Define partial functions to simplify object creation
arg = partial(Argument, lineno=0, col_offset=0, annotation_type=AnnotationType.ARGS)
func = partial(Function, name="test_func", lineno=0, col_offset=0, decorator_list=[])
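# Each entry maps a case name to a FormatTestCase holding the object under test and
# its expected str()/repr() output.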
formatting_test_cases = {
"arg": FormatTestCase(
test_object=arg(argname="test_arg"),
str_output="<Argument: test_arg, Annotated: False>",
repr_output=(
"Argument("
"argname='test_arg', "
"lineno=0, "
"col_offset=0, "
"annotation_type=AnnotationType.ARGS, "
"has_type_annotation=False, "
"has_3107_annotation=False, "
"has_type_comment=False"
")"
),
),
"func_no_args": FormatTestCase(
test_object=func(args=[arg(argname="return")]),
str_output="<Function: test_func, Args: [<Argument: return, Annotated: False>]>",
repr_output=(
"Function("
"name='test_func', "
"lineno=0, "
"col_offset=0, "
"function_type=FunctionType.PUBLIC, "
"is_class_method=False, "
"class_decorator_type=None, "
"is_return_annotated=False, "
"has_type_comment=False, "
"has_only_none_returns=True, "
"is_nested=False, "
"decorator_list=[], "
"args=[Argument(argname='return', lineno=0, col_offset=0, annotation_type=AnnotationType.ARGS, " # noqa: E501
"has_type_annotation=False, has_3107_annotation=False, has_type_comment=False)]"
")"
),
),
"func_has_arg": FormatTestCase(
test_object=func(args=[arg(argname="foo"), arg(argname="return")]),
str_output="<Function: test_func, Args: [<Argument: foo, Annotated: False>, <Argument: return, Annotated: False>]>", # noqa: E501
repr_output=(
"Function("
"name='test_func', "
"lineno=0, "
"col_offset=0, "
"function_type=FunctionType.PUBLIC, "
"is_class_method=False, "
"class_decorator_type=None, "
"is_return_annotated=False, "
"has_type_comment=False, "
"has_only_none_returns=True, "
"is_nested=False, "
"decorator_list=[], "
"args=[Argument(argname='foo', lineno=0, col_offset=0, annotation_type=AnnotationType.ARGS, " # noqa: E501
"has_type_annotation=False, has_3107_annotation=False, has_type_comment=False), "
"Argument(argname='return', lineno=0, col_offset=0, annotation_type=AnnotationType.ARGS, " # noqa: E501
"has_type_annotation=False, has_3107_annotation=False, has_type_comment=False)]"
")"
),
),
}
| [
[
[
22,
29
],
[
409,
416
],
[
495,
502
]
],
[
[
49,
59
],
[
193,
203
]
],
[
[
61,
66
],
[
279,
284
]
],
[
[
99,
107
],
[
285,
293
],
[
417,
425
]
],
[
[
109,
117
],
[
295,
303
],
[
503,
511
]
],
[
[
155,
169
],
[
467,
481
]
],
[
[
178,
192
],
[
612,
626
],
[
1096,
1110
],
[
1981,
1995
]
],
[
[
403,
406
],
[
648,
651
],
[
1143,
1146
],
[
2028,
2031
],
[
2048,
2051
]
],
[
[
488,
492
],
[
1132,
1136
],
[
2017,
2021
]
],
[
[
575,
596
]
]
] |
# coding: utf-8
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__),'../..'))
import suzu.matdb.srim_compounddb as compounddb
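# Build two SRIM compound-database entries (dry air and liquid water) and print them
# in suzu's own format via Compound.to_suzu().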
air = compounddb.Compound()
air.desc = 'Air, Dry near sea level (ICRU-104) 0.00120484 O-23.2, N-75.5, Ar-1.3'
air.name = '%Air, Dry (ICRU-104)'
air.density = 0.00120484
air.mass_percentage = True
air.elems = [(6, 0.000124), (8, 0.231781), (7, 0.755267), (18, 0.012827)]
air.bonding = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
air.comment = """corrected by H. Paul, Sept. 2004
"""
air.fulltext = """*Air, Dry near sea level (ICRU-104) 0.00120484 O-23.2, N-75.5, Ar-1.3
"%Air, Dry (ICRU-104)", .00120484, 4, 6, .000124, 8, .231781, 7, .755267, 18, .012827
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
$ corrected by H. Paul, Sept. 2004
$"""
water = compounddb.Compound()
water.desc = 'Water (liquid) 1.00 H-2, O-1'
water.name = 'Water_Liquid (ICRU-276)'
water.density = 1.0
water.mass_percentage = False
water.elems = [(1, 2.0), (8, 1.0)]
water.bonding = [0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
water.comment = b"""Chemical Formula: H \xc4\xc4 O \xc4\xc4 H
There is about an 8% increase in the peak of the stopping power
for ions in water vapour relative to the liquid. (The peak of the
stopping occurs at an energy of about 100 keV/amu times the 2/3
power of the ion's atomic number.) Above the peak the phase
difference begins to disappear. This calculation is for the
LIQUID phase. """.decode('cp437')
print(water.to_suzu())
print(air.to_suzu())
| [
[
[
23,
26
],
[
38,
41
]
],
[
[
34,
36
],
[
57,
59
],
[
70,
72
]
],
[
[
114,
154
],
[
162,
172
],
[
855,
865
]
],
[
[
156,
159
],
[
185,
188
],
[
269,
272
],
[
303,
306
],
[
328,
331
],
[
355,
358
],
[
429,
432
],
[
534,
537
],
[
588,
591
],
[
1635,
1638
]
],
[
[
847,
852
],
[
878,
883
],
[
950,
955
],
[
989,
994
],
[
1009,
1014
],
[
1039,
1044
],
[
1074,
1079
],
[
1181,
1186
],
[
1612,
1617
]
]
] |
import numpy as np
from sklearn import metrics
from PIL import Image
def get_metrics(pred, logits, gt):
if isinstance(logits, list):
logits = logits[-1]
result = {'confusion_matrix': metrics.confusion_matrix(gt.flatten(), pred.flatten(), labels=[1, 0]),
'auc': roc(gt, logits)}
return result
def get_metrics_without_roc(pred, gt):
result = {'confusion_matrix': metrics.confusion_matrix(gt.flatten(), pred.flatten(), labels=[1, 0])}
return result
def show_metrics(metrics):
con_mat = np.zeros((2,2))
auc = 0.0
for m in metrics:
con_mat += m['confusion_matrix']
auc += m['auc']
auc /= len(metrics)
result = {'confusion_matrix': con_mat.tolist(),
'accuracy': accuracy(con_mat),
'kappa': kappa(con_mat),
'precision': precision(con_mat),
'sensitivity': sensitivity(con_mat),
'specificity': specificity(con_mat),
'auc': auc,
}
return result
def show_metrics_without_roc(metrics):
con_mat = np.zeros((2,2))
for m in metrics:
con_mat += m['confusion_matrix']
result = {'confusion_matrix': con_mat,
'accuracy': accuracy(con_mat),
'kappa': kappa(con_mat),
'precision': precision(con_mat),
'sensitivity': sensitivity(con_mat),
'specificity': specificity(con_mat),
}
return result
def show_metrics_from_save_image(data):
pred = data[:,:,0] // 255
gt = data[:,:,1] // 255
metrics = [get_metrics_without_roc(pred, gt)]
return show_metrics_without_roc(metrics)
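# Cohen's kappa from the confusion matrix: (p_o - p_e) / (1 - p_e), where p_o is the
# observed agreement (trace / n) and p_e is the agreement expected by chance from the
# row/column marginals.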
def kappa(matrix):
matrix = np.array(matrix)
n = np.sum(matrix)
sum_po = 0
sum_pe = 0
for i in range(len(matrix[0])):
sum_po += matrix[i][i]
row = np.sum(matrix[i, :])
col = np.sum(matrix[:, i])
sum_pe += row * col
po = sum_po / n
pe = sum_pe / (n * n)
# print(po, pe)
return (po - pe) / (1 - pe)
def sensitivity(matrix):
    # With sklearn's confusion_matrix(..., labels=[1, 0]) the layout is
    # matrix[0][0]=TP, matrix[0][1]=FN, matrix[1][0]=FP, matrix[1][1]=TN.
    return matrix[0][0]/(matrix[0][0]+matrix[0][1])
def specificity(matrix):
    return matrix[1][1]/(matrix[1][1]+matrix[1][0])
def precision(matrix):
    return matrix[0][0]/(matrix[0][0]+matrix[1][0])
def roc(gt, logits):
gtlist = gt.flatten()
predlist = logits.detach().cpu().numpy()[0, 1, ...].flatten()
fpr, tpr, thresholds = metrics.roc_curve(gtlist, predlist, pos_label=1)
    roc_auc = metrics.auc(fpr, tpr)  # AUC: area under the ROC curve
return roc_auc
def accuracy(matrix):
return (matrix[0][0]+matrix[1][1])/(matrix[0][0]+matrix[0][1]+matrix[1][0]+matrix[1][1])
def error_rate(predictions, labels):
"""
Return the error rate based on dense predictions and 1-hot labels.
"""
return 100.0 - (
100.0 *
np.sum(np.argmin(predictions, 3) == np.argmin(labels, 3)) /
(predictions.shape[0] * predictions.shape[1] * predictions.shape[2]))
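# Save a side-by-side visual check: [input | prediction | ground truth | difference]
# concatenated horizontally into one RGB image.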
def save_predict(filename, data, gt, pred):
pred = pred * 255
gt = gt[0, 1, :, :]
gt = np.where(gt > 0.5, 255, 0)
differ = np.stack([np.zeros_like(pred), gt, pred], -1)
pred = np.stack([pred, pred, pred], -1)
gt = np.stack([gt, gt, gt], -1)
data = np.transpose(data, (0, 2, 3, 1))[0,...]
if data.shape[2] == 60:
data = data[:, :, 10:40:10]
elif data.shape[2] == 1:
data = np.concatenate([data, data, data], -1)
elif data.shape[2] == 15:
data = data[:, :, 0:15:5]
data -= np.min(data, axis=(0,1))
data /= (np.max(data, axis=(0,1))/255)
data = data.astype(np.uint8)
img = Image.fromarray(np.concatenate([data, pred, gt, differ], axis=1).astype(np.uint8))
img.save(filename)
def save_logits(filename, pred):
pred = pred * 255
pred = np.stack([pred, pred, pred], -1)
img = Image.fromarray(pred.astype(np.uint8))
img.save(filename)
| [
[
[
7,
18
],
[
532,
534
],
[
1072,
1074
],
[
1688,
1690
],
[
1713,
1715
],
[
1839,
1841
],
[
1874,
1876
],
[
2814,
2816
],
[
2821,
2823
],
[
2850,
2852
],
[
3056,
3058
],
[
3096,
3098
],
[
3106,
3108
],
[
3153,
3155
],
[
3195,
3197
],
[
3233,
3235
],
[
3381,
3383
],
[
3496,
3498
],
[
3534,
3536
],
[
3587,
3589
],
[
3623,
3625
],
[
3679,
3681
],
[
3780,
3782
],
[
3851,
3853
]
],
[
[
39,
46
],
[
200,
207
],
[
401,
408
],
[
2398,
2405
],
[
2461,
2468
]
],
[
[
63,
68
],
[
3607,
3612
],
[
3823,
3828
]
],
[
[
74,
85
]
],
[
[
332,
355
],
[
1575,
1598
]
],
[
[
495,
507
]
],
[
[
1023,
1047
],
[
1621,
1645
]
],
[
[
1466,
1494
]
],
[
[
1660,
1665
],
[
793,
798
],
[
1262,
1267
]
],
[
[
2027,
2038
],
[
885,
896
],
[
1354,
1365
]
],
[
[
2106,
2117
],
[
936,
947
],
[
1405,
1416
]
],
[
[
2185,
2194
],
[
836,
845
],
[
1305,
1314
]
],
[
[
2261,
2264
],
[
292,
295
]
],
[
[
2525,
2533
],
[
751,
759
],
[
1220,
1228
]
],
[
[
2641,
2651
]
],
[
[
2961,
2973
]
],
[
[
3718,
3729
]
]
] |
from pysys.constants import *
from apama.basetest import ApamaBaseTest
from apama.correlator import CorrelatorHelper
from GAPDemoConnected import GAPDemoConnectedHelper
class PySysTest(ApamaBaseTest):
def __init__(self, descriptor, outsubdir, runner):
super(PySysTest, self).__init__(descriptor, outsubdir, runner)
self.helper = GAPDemoConnectedHelper(self, PROJECT)
def execute(self):
# Start application
correlator = self.helper.startApplication()
# Find a phone device
(phoneId, phoneName) = self.helper.getDeviceDetails()
self.log.info(f'Found c8y_SensorPhone device with name "{phoneName}" and id "{phoneId}"')
# Wait for application to subscribe to measurements from the phone
self.helper.waitForSubscription()
# Set baseline acceleration
self.helper.sendAcceleration(phoneId, 0.0, 0.0, 1.23)
# Wait for all events to be processed
self.helper.waitForBaseline()
# Get current active alarm counts
flipUpBefore = self.helper.countActiveAlarms("FlipUp")
self.log.info(f'Found {flipUpBefore} active "FlipUp" alarms before sending measurements')
flipDownBefore = self.helper.countActiveAlarms("FlipDown")
self.log.info(f'Found {flipDownBefore} active "FlipDown" alarms before sending measurements')
# Send acceleration measurements
self.log.info('Sending measurements...')
self.helper.sendAcceleration(phoneId, 0.0, 0.0, -0.9) # Up
self.helper.sendAcceleration(phoneId, 0.0, 0.0, 0.9) # Down
self.helper.sendAcceleration(phoneId, 0.0, 0.0, 0.4)
self.helper.sendAcceleration(phoneId, 0.0, 0.0, 0.0)
self.helper.sendAcceleration(phoneId, 0.0, 0.0, -0.4)
self.helper.sendAcceleration(phoneId, 0.0, 0.0, -0.9) # Up
self.helper.sendAcceleration(phoneId, 0.0, 0.0, 0.8)
self.helper.sendAcceleration(phoneId, 0.0, 0.0, 0.9)
self.helper.sendAcceleration(phoneId, 0.0, 0.0, 0.85) # Down
# wait for all events to be processed
self.helper.waitForMeasurements()
# Get latest active alarm counts and calculate delta
flipUpAfter = self.helper.countActiveAlarms("FlipUp")
self.log.info(f'Found {flipUpAfter} active "FlipUp" alarms after sending measurements')
flipDownAfter = self.helper.countActiveAlarms("FlipDown")
self.log.info(f'Found {flipDownAfter} active "FlipDown" alarms after sending measurements')
self.flipUpDelta = flipUpAfter - flipUpBefore
self.flipDownDelta = flipDownAfter - flipDownBefore
def validate(self):
self.assertEval("self.flipUpDelta=={expected}", expected=2)
self.assertEval("self.flipDownDelta=={expected}", expected=2)
| [
[
[
28,
29
],
[
372,
379
]
],
[
[
58,
71
],
[
191,
204
]
],
[
[
102,
118
]
],
[
[
149,
171
],
[
343,
365
]
],
[
[
181,
190
],
[
269,
278
]
]
] |
"""youtubesearch URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('search.urls')),
]
| [
[
[
664,
669
],
[
744,
749
]
],
[
[
694,
698
],
[
729,
733
],
[
766,
770
]
],
[
[
700,
707
],
[
775,
782
]
],
[
[
709,
720
]
]
] |
from cereal import car
from selfdrive.car import dbc_dict
Ecu = car.CarParams.Ecu
class CarControllerParams:
ACCEL_MAX = 2.0
ACCEL_MIN = -3.7
STEER_MAX = 384 # 409 is the max, 255 is stock
STEER_DELTA_UP = 3
STEER_DELTA_DOWN = 7
STEER_DRIVER_ALLOWANCE = 50
STEER_DRIVER_MULTIPLIER = 2
STEER_DRIVER_FACTOR = 1
class CAR:
# Hyundai
ELANTRA_I30 = "HYUNDAI AVANTE,I30 2017~2020 (AD,PD)"
ELANTRA21 = "HYUNDAI AVANTE 2021 (CN7)"
ELANTRA21_HEV = "HYUNDAI AVANTE HEV 2021 (CN7)"
SONATA = "HYUNDAI SONATA 2020 (DN8)"
SONATA_HEV = "HYUNDAI SONATA HEV 2020 (DN8)"
SONATA_LF = "HYUNDAI SONATA 2016~2019 (LF)"
SONATA_LF_HEV = "HYUNDAI SONATA 2018 HEV (LF)"
KONA = "HYUNDAI KONA 2019 (OS)"
KONA_EV = "HYUNDAI KONA EV 2019 (OS)"
KONA_HEV = "HYUNDAI KONA HEV 2019 (OS)"
IONIQ_EV = "HYUNDAI IONIQ EV 2019~2020 (AE)"
IONIQ_HEV = "HYUNDAI IONIQ HEV 2017 (AE)"
SANTA_FE = "HYUNDAI SANTA FE 2019~2021 (TM)"
SANTA_FE_HEV = "HYUNDAI SANTA FE 2021~2022 (TM)"
PALISADE = "HYUNDAI PALISADE 2020 (LX2)"
VELOSTER = "HYUNDAI VELOSTER 2019 (JS)"
GRANDEUR = "GRANDEUR 2017~2019 (IG)"
GRANDEUR_HEV = "GRANDEUR HEV 2018~2019 (IG)"
GRANDEUR20 = "GRANDEUR 2020 (IG)"
GRANDEUR20_HEV = "GRANDEUR HEV 2020 (IG)"
NEXO = "HYUNDAI NEXO (FE)"
# Kia
FORTE = "KIA K3 2018 (BD)"
K5 = "KIA K5 2016~2020 (JF)"
K5_HEV = "KIA K5 HEV 2016~2020 (JF)"
K5_DL3 = "KIA K5 2021 (DL3)"
K5_DL3_HEV = "KIA K5 HEV 2021 (DL3)"
K7 = "KIA K7 2016-2019 (YG)"
K7_HEV = "KIA K7 HEV 2017-2019 (YG)"
K9 = "KIA K9 2019-2021 (RJ)"
SPORTAGE = "KIA SPORTAGE 2016~2020 (QL)"
SORENTO = "KIA SORENTO 2017~2020 (UM)"
MOHAVE = "KIA MOHAVE 2020 (HM)"
STINGER = "KIA STINGER 2018~2021 (CK)"
NIRO_EV = "KIA NIRO EV 2020 (DE)"
NIRO_HEV = "KIA NIRO HEV 2018 (DE)"
SOUL_EV = "KIA SOUL EV 2019 (SK3)"
SELTOS = "KIA SELTOS 2019 (SP2)"
# Genesis
GENESIS = "GENESIS 2014-2016 (DH)"
GENESIS_G70 = "GENESIS G70 2018~ (IK)"
GENESIS_G80 = "GENESIS G80 2018~ (DH)"
GENESIS_G90 = "GENESIS G90,EQ900 2016~2019 (HI)"
# ---------------------------------------------------------------------------------------
# E-CAN Signal CAR
# hyundai - G80 2020(RG3), GV70 2021(JK1), GV80 2020(JX1), TUSON 2021(NX4), STARIA 2021(UX4), IONIQ5 2021(NE)
# kia - CARNIVAL 2021(KA4), SORENTO 2020(MQ4), K8 2021(GL3)
# ---------------------------------------------------------------------------------------
class Buttons:
NONE = 0
RES_ACCEL = 1
SET_DECEL = 2
GAP_DIST = 3
CANCEL = 4
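# FINGERPRINTS maps each CAR to candidate CAN fingerprints of the form
# {message address: payload length (DLC)} used to identify the vehicle.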
FINGERPRINTS = {
# Hyundai
CAR.ELANTRA_I30: [{
66: 8, 67: 8, 68: 8, 127: 8, 128: 8, 129: 8, 273: 8, 274: 8, 275: 8, 339: 8, 354: 3, 356: 4, 399: 8, 512: 6, 544: 8, 546: 8, 547: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 838: 8, 844: 8, 884: 8, 897: 8, 899: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1087: 8, 1151: 6, 1155: 8, 1164: 8, 1168: 7, 1170: 8, 1191: 2, 1193: 8, 1253: 8, 1254: 8, 1255: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1356: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1470: 8, 1472: 8, 1485: 8, 1486: 8, 1487: 8, 1491: 8, 1530: 8, 1532: 5, 1792: 8, 1872: 8, 1937: 8, 1952: 8, 1953: 8, 1960: 8, 1968: 8, 1988: 8, 1990: 8, 1998: 8, 2000: 8, 2001: 8, 2003: 8, 2004: 8, 2005: 8, 2008: 8, 2009: 8, 2012: 8, 2013: 8, 2015: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8
}],
CAR.ELANTRA21: [{
67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 524: 8, 544: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 8, 865: 8, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1069: 8, 1078: 4, 1102: 8, 1107: 5, 1108: 8, 1136: 8, 1145: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1170: 8, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1193: 8, 1225: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1339: 8, 1342: 8, 1343: 8, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1379: 8, 1384: 8, 1394: 8, 1407: 8, 1419: 8, 1427: 6, 1446: 8, 1456: 4, 1470: 8, 1485: 8, 1988: 8, 1996: 8, 2000: 8, 2004: 8, 2008: 8, 2012: 8
}],
CAR.ELANTRA21_HEV: [{
}],
CAR.SONATA: [{
67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 545: 8, 546: 8, 547: 8, 548: 8, 549: 8, 550: 8, 576: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 8, 865: 8, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 908: 8, 909: 8, 912: 7, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1089: 5, 1096: 8, 1107: 5, 1108: 8, 1114: 8, 1136: 8, 1145: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1170: 8, 1173: 8, 1180: 8, 1183: 8, 1184: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1330: 8, 1339: 8, 1342: 6, 1343: 8, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1379: 8, 1384: 8, 1394: 8, 1407: 8, 1419: 8, 1427: 6, 1446: 8, 1456: 4, 1460: 8, 1470: 8, 1485: 8, 1504: 3, 1988: 8, 1996: 8, 2000: 8, 2004: 8, 2008: 8, 2012: 8, 2015: 8
}],
CAR.SONATA_HEV: [{
127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 548: 8, 576: 8, 593: 8, 688: 6, 757: 2, 832: 8, 865: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1102: 8, 1108: 8, 1114: 8, 1136: 6, 1138: 5, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1173: 8, 1180: 8, 1184: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 8, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1330: 8, 1339: 8, 1342: 6, 1343: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1446: 8, 1448: 8, 1456: 4, 1460: 8, 1470: 8, 1476: 8, 1535: 8
}],
CAR.SONATA_LF: [{
66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 447: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 884: 8, 897: 8, 899: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1253: 8, 1254: 8, 1255: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1397: 8, 1407: 8, 1415: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1470: 8, 1472: 8, 1486: 8, 1487: 8, 1491: 8, 1530: 8, 1532: 5, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2008: 8, 2009: 8, 2012: 8, 2013: 8, 2014: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8
}],
CAR.SONATA_LF_HEV: [{
68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 7, 593: 8, 688: 5, 881: 8, 882: 8, 897: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1151: 6, 1168: 7, 1173: 8, 1186: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1425: 2, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8, 2000: 8, 2004: 8, 2005: 8, 2008: 8, 2012: 8, 2013: 8
}],
CAR.KONA: [{
67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 354: 3, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1193: 8, 1265: 4,1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1378: 8, 1384: 8, 1394: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1988: 8, 1990: 8, 1996: 8, 1998: 8, 2000: 8, 2001: 8, 2004: 8, 2008: 8, 2009: 8, 2012: 8, 2015: 8
}],
CAR.KONA_EV: [{
127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 549: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1193: 8, 1225: 8, 1260: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1307: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 4, 1379: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8, 1988: 8, 1996: 8, 2000: 8, 2004: 8, 2008: 8, 2012: 8, 2015: 8
}],
CAR.KONA_HEV: [{
68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 547: 8, 548: 8, 549: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1138: 4, 1151: 6, 1155: 8, 1157: 4, 1164: 8, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1193: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 8, 1379: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8
}],
CAR.IONIQ_EV: [{
127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 524: 8, 544: 7, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1164: 8, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1379: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8, 1988: 8, 1996: 8, 2000: 8, 2004: 8, 2005: 8, 2008: 8, 2012: 8, 2013: 8, 2015: 8
}],
CAR.IONIQ_HEV: [{
68:8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 524: 8, 544: 8, 576:8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1164: 8, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1379: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1473: 8, 1476: 8, 1507: 8, 1535: 8, 1988: 8, 1996: 8, 2000: 8, 2004: 8, 2005: 8, 2008: 8, 2012: 8, 2013: 8
}],
CAR.SANTA_FE: [{
67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 764: 8, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1183: 8, 1186: 2, 1191: 2, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8, 1990: 8, 1998: 8, 2000: 8, 2004: 8, 2008: 8, 2012: 8, 2015: 8
}],
CAR.SANTA_FE_HEV: [{
}],
CAR.PALISADE: [{
67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 549: 8, 576: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1123: 8, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 2000: 8, 2005: 8, 2008: 8
}],
CAR.VELOSTER: [{
64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 128: 8, 129: 8, 273: 8, 274: 8, 275: 8, 339: 8, 354: 3, 356: 4, 399: 8, 512: 6, 544: 8, 558: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 884: 8, 897: 8, 899: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1170: 8, 1181: 5, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1356: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1378: 4, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1427: 6, 1440: 8, 1456: 4, 1470: 8, 1486: 8, 1487: 8, 1491: 8, 1530: 8, 1532: 5, 1872: 8, 1988: 8, 1996: 8, 2000: 8, 2001: 8, 2004: 8, 2008: 8, 2009: 8, 2012: 8, 2015: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8
}],
CAR.GRANDEUR: [{
67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 547: 8, 549: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1185: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8
}],
CAR.GRANDEUR_HEV: [{
68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 516: 8, 544: 8, 546: 8, 576: 8, 593: 8, 688: 5, 832: 8, 865: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1108: 8, 1136: 6, 1138: 5, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1173: 8, 1180: 8, 1185: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1379: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8
}],
CAR.GRANDEUR20: [{
67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 516: 8, 524: 8, 528: 8, 532: 8, 544: 8, 576: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 8, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1170: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 8, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1379: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8
}],
CAR.GRANDEUR20_HEV: [{
127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 516: 8, 544: 8, 576: 8, 593: 8, 688: 5, 764: 8, 832: 8, 865: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1108: 8, 1136: 6, 1138: 5, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8
}],
CAR.NEXO: [{
127: 8, 145: 8, 146: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 512: 6, 544: 8, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 908: 8, 909: 8, 912: 7, 916: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1173: 8, 1174: 8, 1180: 8, 1183: 8, 1186: 2, 1191: 2, 1192: 8, 1193: 8, 1210: 8, 1219: 8, 1220: 8, 1222: 6, 1223: 8, 1224: 8, 1227: 8, 1230: 6, 1231: 6, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1297: 8, 1298: 8, 1305: 8, 1312: 8, 1315: 8, 1316: 8, 1322: 8, 1324: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1437: 8, 1456: 4, 1460: 8, 1470: 8, 1484: 8, 1507: 8, 1520: 8, 1535: 8
}],
# Kia
CAR.FORTE: [{
67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1078: 4, 1107: 5, 1136: 8, 1156: 8, 1170: 8, 1173: 8, 1191: 2, 1225: 8, 1265: 4, 1280: 4, 1287: 4, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1384: 8, 1394: 8, 1407: 8, 1427: 6, 1456: 4, 1470: 8
}],
CAR.K5: [{
64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 128: 8, 129: 8, 273: 8, 274: 8, 275: 8, 339: 8, 354: 3, 356: 4, 399: 8, 447: 8, 512: 6, 544: 8, 593: 8, 608: 8, 625: 8, 688: 5, 790: 8, 809: 8, 832: 8, 884: 8, 897: 8, 899: 8, 902: 8, 903: 6, 909: 8, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1186: 2, 1191: 2, 1236: 2, 1253: 8, 1254: 8, 1255: 8, 1265: 4, 1268: 8, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1356: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1371: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1470: 8, 1472: 8, 1486: 8, 1487: 8, 1491: 8, 1492: 8, 1530: 8, 1532: 5, 1905: 8, 1913: 8, 1952: 8, 1960: 8, 1988: 8, 1996: 8, 2001: 8, 2004: 8, 2008: 8, 2009: 8, 2012: 8, 2015: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8
}],
CAR.K5_HEV: [{
68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 6, 909: 8, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1151: 6, 1168: 7, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1236: 2, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1407: 8, 1419: 8, 1420: 8, 1425: 2, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8
}],
CAR.K5_DL3: [{
}],
CAR.K5_DL3_HEV: [{
}],
CAR.SPORTAGE: [{
67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 447: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 884: 8, 897: 8, 899: 8, 902: 8, 903: 6, 909: 8, 916: 8, 1040: 8, 1078: 4, 1170: 8, 1191: 2, 1253: 8, 1254: 8, 1255: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1440: 8, 1456: 4, 1470: 8, 1472: 8, 1486: 8, 1487: 8, 1491: 8, 1492: 8, 1530: 8
}],
CAR.SORENTO: [{
67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1370: 8, 1371: 8, 1384: 8, 1407: 8, 1411: 8, 1419: 8, 1425: 2, 1427: 6, 1444: 8, 1456: 4, 1470: 8, 1489: 1
}],
CAR.MOHAVE: [{
67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 8, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1123: 8, 1136: 8, 1145: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1170: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8
}],
CAR.STINGER: [{
67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 358: 6, 359: 8, 544: 8, 576: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1280: 1, 1281: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1379: 8, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8, 2015: 8
}],
CAR.NIRO_EV: [{
127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 516: 8, 544: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1193: 8, 1225: 8, 1260: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8, 1988: 8, 1990: 8, 1998: 8, 1996: 8, 2000: 8, 2004: 8, 2008: 8, 2012: 8, 2015: 8
}],
CAR.NIRO_HEV: [{
127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1535: 8
}],
CAR.SOUL_EV: [{
127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 548: 8, 549: 8, 593: 8, 688: 6, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1173: 8, 1186: 2, 1191: 2, 1193: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 8, 1379: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8
}],
CAR.SELTOS: [{
67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 354: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 8, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 905: 8, 909: 8, 910: 5, 911: 5, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1114: 8, 1136: 8, 1145: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1379: 8, 1384: 8, 1394: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1446: 8, 1456: 4, 1470: 8, 1485: 8, 1911: 8
}],
CAR.K7: [{
67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1444: 8, 1456: 4, 1470: 8
}],
CAR.K7_HEV: [{
68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 549: 8, 576: 8, 593: 8, 688: 5, 832: 8, 865: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1096: 8, 1102: 8, 1108: 8, 1136: 6, 1138: 5, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1210: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1343: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1379: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8
}],
CAR.K9: [{
67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1186: 2, 1191: 2, 1227: 8, 1265: 4, 1280: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1379: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8
}],
# Genesis
CAR.GENESIS: [{
67: 8, 68: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 7, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 5, 897: 8, 902: 8, 903: 6, 912: 7, 916: 8, 1024: 2, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1268: 8, 1280: 1, 1281: 3, 1287: 4, 1292: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1334: 8, 1335: 8, 1342: 6, 1345: 8, 1363: 8, 1369: 8, 1370: 8, 1371: 8, 1378: 4, 1379: 8, 1384: 5, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1434: 2, 1437: 8, 1456: 4
}],
CAR.GENESIS_G70: [{
67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 358: 6, 544: 8, 576: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1379: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1988: 8, 1996: 8, 2000: 8, 2004: 8, 2008: 8, 2012: 8, 2015: 8
}],
CAR.GENESIS_G80: [{
67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 358: 6, 359: 8, 544: 8, 546: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1024: 2, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 8, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1280: 1, 1281: 3, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1370: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1434: 2, 1437: 8, 1456: 4, 1470: 8
}],
CAR.GENESIS_G90: [{
67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 358: 6, 359: 8, 544: 8, 546: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1280: 1, 1281: 3, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1370: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1434: 2, 1456: 4, 1470: 8, 1988: 8, 2000: 8, 2003: 8, 2004: 8, 2005: 8, 2008: 8, 2011: 8, 2012: 8, 2013: 8, 2015: 8
}],
}
ECU_FINGERPRINT = {
Ecu.fwdCamera: [832, 1156, 1191, 1342] #832:lkas11, 1156:hda11_mfc, 1191:mfc_4a7, 1342:lkas12
}
FW_VERSIONS = {
# fwdRadar, fwdCamera, eps, esp, engine, transmission
# hyundai
CAR.ELANTRA_I30: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00PD__ SCC F-CUP 1.00 1.01 99110-G3100 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00PDP LKAS AT AUS RHD 1.00 1.01 99211-G4000 v60',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00PDu MDPS C 1.00 1.01 56310/G3690 4PDUC101',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x00PD ESC \x11 100 \a\x03 58910-G3AC0',
],
(Ecu.engine, 0x7e0, None): [
b'\x01TPD-1A506F000H00',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x816U2VA051\x00\x00\xf1\x006U2V0_C2\x00\x006U2VA051\x00\x00DPD0H16US0\x00\x00\x00\x00',
],
},
CAR.ELANTRA21: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00CN7_ SCC F-CUP 1.00 1.01 99110-AA000 ',
b'\xf1\x00CN7_ SCC FHCUP 1.00 1.01 99110-AA000 ',
b'\xf1\x8799110AA000\xf1\x00CN7_ SCC FHCUP 1.00 1.01 99110-AA000 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
      b'\xf1\x00CN7 MFC AT USA LHD 1.00 1.00 99210-AB000 200819',
b'\xf1\x00CN7 MFC AT USA LHD 1.00 1.03 99210-AA000 200819',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x87\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf1\x00CN7 MDPS C 1.00 1.06 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 4CNDC106',
b'\xf1\x8756310/AA070\xf1\x00CN7 MDPS C 1.00 1.06 56310/AA070 4CNDC106',
b'\xf1\x8756310AA050\x00\xf1\x00CN7 MDPS C 1.00 1.06 56310AA050\x00 4CNDC106',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x00CN ESC \t 101 \x10\x03 58910-AB800',
b'\xf1\x8758910-AA800\xf1\x00CN ESC \t 104 \x08\x03 58910-AA800',
b'\xf1\x8758910-AB800\xf1\x00CN ESC \t 101 \x10\x03 58910-AB800',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x82CNCWD0AMFCXCSFFA',
b'\xf1\x82CNCWD0AMFCXCSFFB',
b'\xf1\x82CNCVD0AMFCXCSFFB',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x00HT6WA280BLHT6VA640A1CCN0N20NS5\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x00HT6WA280BLHT6VA640A1CCN0N20NS5\x00\x00\x00\x00\x00\x00\xe8\xba\xce\xfa',
b'\xf1\x87CXMQFM2135005JB2E\xb9\x89\x98W\xa9y\x97h\xa9\x98\x99wxvwh\x87\177\xffx\xff\xff\xff,,\xf1\x89HT6VA640A1\xf1\x82CCN0N20NS5\x00\x00\x00\x00\x00\x00',
b'\xf1\x87CXMQFM1916035JB2\x88vvgg\x87Wuwgev\xa9\x98\x88\x98h\x99\x9f\xffh\xff\xff\xff\xa5\xee\xf1\x89HT6VA640A1\xf1\x82CCN0N20NS5\x00\x00\x00\x00\x00\x00',
b'\xf1\x87CXLQF40189012JL2f\x88\x86\x88\x88vUex\xb8\x88\x88\x88\x87\x88\x89fh?\xffz\xff\xff\xff\x08z\xf1\x89HT6VA640A1\xf1\x82CCN0N20NS5\x00\x00\x00\x00\x00\x00',
],
},
CAR.ELANTRA21_HEV: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\000CNhe SCC FHCUP 1.00 1.01 99110-BY000 ',
b'\xf1\x8799110BY000\xf1\x00CNhe SCC FHCUP 1.00 1.01 99110-BY000 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\000CN7HMFC AT USA LHD 1.00 1.03 99210-AA000 200819'
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x8756310/BY050\xf1\000CN7 MDPS C 1.00 1.02 56310/BY050 4CNHC102'
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x816H6G5051\000\000\000\000\000\000\000\000'
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\0006U3L0_C2\000\0006U3K3051\000\000HCN0G16NS0\xb9?A\xaa',
b'\xf1\0006U3L0_C2\000\0006U3K3051\000\000HCN0G16NS0\000\000\000\000',
b'\xf1\x816U3K3051\000\000\xf1\0006U3L0_C2\000\0006U3K3051\000\000HCN0G16NS0\xb9?A\xaa',
b'\xf1\x816U3K3051\000\000\xf1\0006U3L0_C2\000\0006U3K3051\000\000HCN0G16NS0\000\000\000\000'
],
},
CAR.SONATA: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00DN8 1.00 99110-L0000 \xaa\xaa\xaa\xaa\xaa\xaa\xaa ',
b'\xf1\x00DN8 1.00 99110-L0000 \xaa\xaa\xaa\xaa\xaa\xaa\xaa\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x00DN8_ SCC F-CU- 1.00 1.00 99110-L0000 ',
b'\xf1\x00DN8_ SCC F-CUP 1.00 1.00 99110-L0000 ',
b'\xf1\x00DN8_ SCC F-CUP 1.00 1.02 99110-L1000 ',
b'\xf1\x00DN8_ SCC FHCUP 1.00 1.00 99110-L0000 ',
b'\xf1\x00DN8_ SCC FHCUP 1.00 1.01 99110-L1000 ',
b'\xf1\x00DN89110-L0000 \xaa\xaa\xaa\xaa\xaa\xaa\xaa ',
b'\xf1\x8799110L0000\xf1\x00DN8_ SCC F-CUP 1.00 1.00 99110-L0000 ',
b'\xf1\x8799110L0000\xf1\x00DN8_ SCC FHCUP 1.00 1.00 99110-L0000 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00DN8 MFC AT KOR LHD 1.00 1.02 99211-L1000 190422',
b'\xf1\x00DN8 MFC AT RUS LHD 1.00 1.03 99211-L1000 190705',
b'\xf1\x00DN8 MFC AT USA LHD 1.00 1.00 99211-L0000 190716',
b'\xf1\x00DN8 MFC AT USA LHD 1.00 1.01 99211-L0000 191016',
b'\xf1\x00DN8 MFC AT USA LHD 1.00 1.03 99211-L0000 210603',
b'\xf1\x00DN8 MFC AT USA LHD 1.00 1.05 99211-L1000 201109',
b'\xf1\x00DN8 MFC AT USA LHD 1.00 1.06 99211-L1000 210325',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00DN8 MDPS C 1.00 1.01 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 4DNAC101',
b'\xf1\x00DN8 MDPS C 1.00 1.01 56310-L0010 4DNAC101',
b'\xf1\x00DN8 MDPS C 1.00 1.01 56310L0010\x00 4DNAC101',
b'\xf1\x00DN8 MDPS R 1.00 1.00 57700-L0000 4DNAP100',
b'\xf1\x87\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf1\x00DN8 MDPS C 1.00 1.01 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 4DNAC101',
b'\xf1\x8756310-L0010\xf1\x00DN8 MDPS C 1.00 1.01 56310-L0010 4DNAC101',
b'\xf1\x8756310-L0210\xf1\x00DN8 MDPS C 1.00 1.01 56310-L0210 4DNAC101',
b'\xf1\x8756310-L1010\xf1\x00DN8 MDPS C 1.00 1.03 56310-L1010 4DNDC103',
b'\xf1\x8756310-L1030\xf1\x00DN8 MDPS C 1.00 1.03 56310-L1030 4DNDC103',
b'\xf1\x8756310L0010\x00\xf1\x00DN8 MDPS C 1.00 1.01 56310L0010\x00 4DNAC101',
b'\xf1\x8756310L0210\x00\xf1\x00DN8 MDPS C 1.00 1.01 56310L0210\x00 4DNAC101',
b'\xf1\x8757700-L0000\xf1\x00DN8 MDPS R 1.00 1.00 57700-L0000 4DNAP100',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x00DN ESC \a 106 \a\x01 58910-L0100',
b'\xf1\x00DN ESC \x01 102\x19\x04\x13 58910-L1300',
b'\xf1\x00DN ESC \x03 100 \x08\x01 58910-L0300',
b'\xf1\x00DN ESC \x06 104\x19\x08\x01 58910-L0100',
b'\xf1\x00DN ESC \x07 104\x19\x08\x01 58910-L0100',
b'\xf1\x00DN ESC \x08 103\x19\x06\x01 58910-L1300',
b'\xf1\x8758910-L0100\xf1\x00DN ESC \a 106 \a\x01 58910-L0100',
b'\xf1\x8758910-L0100\xf1\x00DN ESC \x06 104\x19\x08\x01 58910-L0100',
b'\xf1\x8758910-L0100\xf1\x00DN ESC \x06 106 \x07\x01 58910-L0100',
b'\xf1\x8758910-L0100\xf1\x00DN ESC \x07 104\x19\x08\x01 58910-L0100',
b'\xf1\x8758910-L0300\xf1\x00DN ESC \x03 100 \x08\x01 58910-L0300',
b'\xf1\x00DN ESC \x06 106 \x07\x01 58910-L0100',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x81HM6M1_0a0_F00',
b'\xf1\x82DNBVN5GMCCXXXDCA',
b'\xf1\x82DNBVN5GMCCXXXG2F',
b'\xf1\x82DNBWN5TMDCXXXG2E',
b'\xf1\x82DNCVN5GMCCXXXF0A',
b'\xf1\x82DNCVN5GMCCXXXG2B',
b'\xf1\x870\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf1\x82DNDWN5TMDCXXXJ1A',
b'\xf1\x87391162M003',
b'\xf1\x87391162M013',
b'\xf1\x87391162M023',
b'HM6M1_0a0_F00',
b'HM6M1_0a0_G20',
b'HM6M2_0a0_BD0',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00SDN8T16NB1\xe3\xc10\xa1',
b'\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00SDN8T16NB2\n\xdd^\xbc',
b'\xf1\x00HT6TA260BLHT6TA800A1TDN8C20KS4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x00HT6TA260BLHT6TA810A1TDN8M25GS0\x00\x00\x00\x00\x00\x00\xaa\x8c\xd9p',
b'\xf1\x00HT6WA250BLHT6WA910A1SDN8G25NB1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x00HT6WA250BLHT6WA910A1SDN8G25NB1\x00\x00\x00\x00\x00\x00\x96\xa1\xf1\x92',
b'\xf1\x00HT6WA280BLHT6WAD10A1SDN8G25NB2\x00\x00\x00\x00\x00\x00\x08\xc9O:',
b'\xf1\x00T02601BL T02730A1 VDN8T25XXX730NS5\xf7_\x92\xf5',
b'\xf1\x87954A02N060\x00\x00\x00\x00\x00\xf1\x81T02730A1 \xf1\x00T02601BL T02730A1 VDN8T25XXX730NS5\xf7_\x92\xf5',
b'\xf1\x87SAKFBA2926554GJ2VefVww\x87xwwwww\x88\x87xww\x87wTo\xfb\xffvUo\xff\x8d\x16\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SAKFBA3030524GJ2UVugww\x97yx\x88\x87\x88vw\x87gww\x87wto\xf9\xfffUo\xff\xa2\x0c\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SAKFBA3356084GJ2\x86fvgUUuWgw\x86www\x87wffvf\xb6\xcf\xfc\xffeUO\xff\x12\x19\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SAKFBA3474944GJ2ffvgwwwwg\x88\x86x\x88\x88\x98\x88ffvfeo\xfa\xff\x86fo\xff\t\xae\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SAKFBA3475714GJ2Vfvgvg\x96yx\x88\x97\x88ww\x87ww\x88\x87xs_\xfb\xffvUO\xff\x0f\xff\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALDBA3510954GJ3ww\x87xUUuWx\x88\x87\x88\x87w\x88wvfwfc_\xf9\xff\x98wO\xffl\xe0\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA3573534GJ3\x89\x98\x89\x88EUuWgwvwwwwww\x88\x87xTo\xfa\xff\x86f\x7f\xffo\x0e\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA3601464GJ3\x88\x88\x88\x88ffvggwvwvw\x87gww\x87wvo\xfb\xff\x98\x88\x7f\xffjJ\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA3753044GJ3UUeVff\x86hwwwwvwwgvfgfvo\xf9\xfffU_\xffC\xae\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA3862294GJ3vfvgvefVxw\x87\x87w\x88\x87xwwwwc_\xf9\xff\x87w\x9f\xff\xd5\xdc\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA3873834GJ3fefVwuwWx\x88\x97\x88w\x88\x97xww\x87wU_\xfb\xff\x86f\x8f\xffN\x04\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA4525334GJ3\x89\x99\x99\x99fevWh\x88\x86\x88fwvgw\x88\x87xfo\xfa\xffuDo\xff\xd1>\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA4626804GJ3wwww\x88\x87\x88xx\x88\x87\x88wwgw\x88\x88\x98\x88\x95_\xf9\xffuDo\xff|\xe7\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA4803224GJ3wwwwwvwg\x88\x88\x98\x88wwww\x87\x88\x88xu\x9f\xfc\xff\x87f\x8f\xff\xea\xea\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA6212564GJ3\x87wwwUTuGg\x88\x86xx\x88\x87\x88\x87\x88\x98xu?\xf9\xff\x97f\x7f\xff\xb8\n\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA6347404GJ3wwwwff\x86hx\x88\x97\x88\x88\x88\x88\x88vfgf\x88?\xfc\xff\x86Uo\xff\xec/\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA6901634GJ3UUuWVeVUww\x87wwwwwvUge\x86/\xfb\xff\xbb\x99\x7f\xff]2\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALDBA7077724GJ3\x98\x88\x88\x88ww\x97ygwvwww\x87ww\x88\x87x\x87_\xfd\xff\xba\x99o\xff\x99\x01\xf1\x89HT6WA910A1\xf1\x82SDN8G25NB1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SALFBA3525114GJ2wvwgvfvggw\x86wffvffw\x86g\x85_\xf9\xff\xa8wo\xffv\xcd\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA3624024GJ2\x88\x88\x88\x88wv\x87hx\x88\x97\x88x\x88\x97\x88ww\x87w\x86o\xfa\xffvU\x7f\xff\xd1\xec\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA3960824GJ2wwwwff\x86hffvfffffvfwfg_\xf9\xff\xa9\x88\x8f\xffb\x99\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA4011074GJ2fgvwwv\x87hw\x88\x87xww\x87wwfgvu_\xfa\xffefo\xff\x87\xc0\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA4121304GJ2x\x87xwff\x86hwwwwww\x87wwwww\x84_\xfc\xff\x98\x88\x9f\xffi\xa6\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA4195874GJ2EVugvf\x86hgwvwww\x87wgw\x86wc_\xfb\xff\x98\x88\x8f\xff\xe23\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA4625294GJ2eVefeUeVx\x88\x97\x88wwwwwwww\xa7o\xfb\xffvw\x9f\xff\xee.\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA4728774GJ2vfvg\x87vwgww\x87ww\x88\x97xww\x87w\x86_\xfb\xffeD?\xffk0\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA5129064GJ2vfvgwv\x87hx\x88\x87\x88ww\x87www\x87wd_\xfa\xffvfo\xff\x1d\x00\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA5454914GJ2\x98\x88\x88\x88\x87vwgx\x88\x87\x88xww\x87ffvf\xa7\x7f\xf9\xff\xa8w\x7f\xff\x1b\x90\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA5987784GJ2UVugDDtGx\x88\x87\x88w\x88\x87xwwwwd/\xfb\xff\x97fO\xff\xb0h\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA5987864GJ2fgvwUUuWgwvw\x87wxwwwww\x84/\xfc\xff\x97w\x7f\xff\xdf\x1d\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA6337644GJ2vgvwwv\x87hgffvwwwwwwww\x85O\xfa\xff\xa7w\x7f\xff\xc5\xfc\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA6802004GJ2UUuWUUuWgw\x86www\x87www\x87w\x96?\xf9\xff\xa9\x88\x7f\xff\x9fK\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA6892284GJ233S5\x87w\x87xx\x88\x87\x88vwwgww\x87w\x84?\xfb\xff\x98\x88\x8f\xff*\x9e\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00SDN8T16NB0z{\xd4v',
b'\xf1\x87SALFBA7005534GJ2eUuWfg\x86xxww\x87x\x88\x87\x88\x88w\x88\x87\x87O\xfc\xffuUO\xff\xa3k\xf1\x81U913\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00SDN8T16NB1\xe3\xc10\xa1',
b'\xf1\x87SALFBA7152454GJ2gvwgFf\x86hx\x88\x87\x88vfWfffffd?\xfa\xff\xba\x88o\xff,\xcf\xf1\x81U913\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00SDN8T16NB1\xe3\xc10\xa1',
b'\xf1\x87SALFBA7485034GJ2ww\x87xww\x87xfwvgwwwwvfgf\xa5/\xfc\xff\xa9w_\xff40\xf1\x81U913\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00SDN8T16NB2\n\xdd^\xbc',
b'\xf1\x87SAMDBA7743924GJ3wwwwww\x87xgwvw\x88\x88\x88\x88wwww\x85_\xfa\xff\x86f\x7f\xff0\x9d\xf1\x89HT6WAD10A1\xf1\x82SDN8G25NB2\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SAMDBA7817334GJ3Vgvwvfvgww\x87wwwwwwfgv\x97O\xfd\xff\x88\x88o\xff\x8e\xeb\xf1\x89HT6WAD10A1\xf1\x82SDN8G25NB2\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SAMDBA8054504GJ3gw\x87xffvgffffwwwweUVUf?\xfc\xffvU_\xff\xddl\xf1\x89HT6WAD10A1\xf1\x82SDN8G25NB2\x00\x00\x00\x00\x00\x00',
b'\xf1\x87SAMFB41553621GC7ww\x87xUU\x85Xvwwg\x88\x88\x88\x88wwgw\x86\xaf\xfb\xffuDo\xff\xaa\x8f\xf1\x81U913\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00SDN8T16NB2\n\xdd^\xbc',
b'\xf1\x87SAMFB42555421GC7\x88\x88\x88\x88wvwgx\x88\x87\x88wwgw\x87wxw3\x8f\xfc\xff\x98f\x8f\xffga\xf1\x81U913\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00SDN8T16NB2\n\xdd^\xbc',
b'\xf1\x87SAMFBA7978674GJ2gw\x87xgw\x97ywwwwvUGeUUeU\x87O\xfb\xff\x98w\x8f\xfffF\xf1\x81U913\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00SDN8T16NB2\n\xdd^\xbc',
b'\xf1\x87SAMFBA9283024GJ2wwwwEUuWwwgwwwwwwwww\x87/\xfb\xff\x98w\x8f\xff<\xd3\xf1\x81U913\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00SDN8T16NB2\n\xdd^\xbc',
b'\xf1\x87SAMFBA9708354GJ2wwwwVf\x86h\x88wx\x87xww\x87\x88\x88\x88\x88w/\xfa\xff\x97w\x8f\xff\x86\xa0\xf1\x81U913\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00SDN8T16NB2\n\xdd^\xbc',
],
},
CAR.SONATA_HEV: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\000DNhe SCC FHCUP 1.00 1.02 99110-L5000 ',
b'\xf1\x8799110L5000\xf1\000DNhe SCC FHCUP 1.00 1.02 99110-L5000 ',
b'\xf1\000DNhe SCC F-CUP 1.00 1.02 99110-L5000 ',
b'\xf1\x8799110L5000\xf1\000DNhe SCC F-CUP 1.00 1.02 99110-L5000 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\000DN8HMFC AT USA LHD 1.00 1.04 99211-L1000 191016',
b'\xf1\x00DN8HMFC AT USA LHD 1.00 1.05 99211-L1000 201109',
b'\xf1\000DN8HMFC AT USA LHD 1.00 1.06 99211-L1000 210325',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x8756310-L5500\xf1\000DN8 MDPS C 1.00 1.02 56310-L5500 4DNHC102',
b'\xf1\x8756310-L5450\xf1\x00DN8 MDPS C 1.00 1.02 56310-L5450 4DNHC102',
b'\xf1\x8756310-L5450\xf1\000DN8 MDPS C 1.00 1.03 56310-L5450 4DNHC103',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x8758910-L0100\xf1\x00DN ESC \x06 104\x19\x08\x01 58910-L0100\xf1\xa01.04',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x87391062J002\xf1\xa0000P',
b'\xf1\x87391162J012',
b'\xf1\x87391162J013',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\000PSBG2333 E14\x00\x00\x00\x00\x00\x00\x00TDN2H20SA6N\xc2\xeeW',
b'\xf1\x87959102T250\000\000\000\000\000\xf1\x81E09\000\000\000\000\000\000\000\xf1\000PSBG2323 E09\000\000\000\000\000\000\000TDN2H20SA5\x97R\x88\x9e',
b'\xf1\000PSBG2323 E09\000\000\000\000\000\000\000TDN2H20SA5\x97R\x88\x9e',
b'\xf1\000PSBG2333 E16\000\000\000\000\000\000\000TDN2H20SA7\0323\xf9\xab',
b'\xf1\x87PCU\000\000\000\000\000\000\000\000\000\xf1\x81E16\000\000\000\000\000\000\000\xf1\000PSBG2333 E16\000\000\000\000\000\000\000TDN2H20SA7\0323\xf9\xab',
b'\xf1\x87959102T250\x00\x00\x00\x00\x00\xf1\x81E14\x00\x00\x00\x00\x00\x00\x00\xf1\x00PSBG2333 E14\x00\x00\x00\x00\x00\x00\x00TDN2H20SA6N\xc2\xeeW',
],
},
CAR.SONATA_LF: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00LF__ SCC F-CUP 1.00 1.00 96401-C2200 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00LFF LKAS AT USA LHD 1.00 1.01 95740-C1000 E51',
b'\xf1\x00LFF LKAS AT USA LHD 1.01 1.02 95740-C1000 E52',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x00LF ESC \f 11 \x17\x01\x13 58920-C2610',
b'\xf1\x00LF ESC \t 11 \x17\x01\x13 58920-C2610',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x81606D5051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x81606D5K51\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x81606G1051\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x006T6H0_C2\x00\x006T6B4051\x00\x00TLF0G24NL1\xb0\x9f\xee\xf5',
b'\xf1\x87\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf1\x816T6B4051\x00\x00\xf1\x006T6H0_C2\x00\x006T6B4051\x00\x00TLF0G24NL1\x00\x00\x00\x00',
b'\xf1\x87\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf1\x816T6B4051\x00\x00\xf1\x006T6H0_C2\x00\x006T6B4051\x00\x00TLF0G24NL1\xb0\x9f\xee\xf5',
b'\xf1\x87\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf1\x816T6B4051\x00\x00\xf1\x006T6H0_C2\x00\x006T6B4051\x00\x00TLF0G24SL2n\x8d\xbe\xd8',
b'\xf1\x87LAHSGN012918KF10\x98\x88x\x87\x88\x88x\x87\x88\x88\x98\x88\x87w\x88w\x88\x88\x98\x886o\xf6\xff\x98w\x7f\xff3\x00\xf1\x816W3B1051\x00\x00\xf1\x006W351_C2\x00\x006W3B1051\x00\x00TLF0T20NL2\x00\x00\x00\x00',
b'\xf1\x87LAHSGN012918KF10\x98\x88x\x87\x88\x88x\x87\x88\x88\x98\x88\x87w\x88w\x88\x88\x98\x886o\xf6\xff\x98w\x7f\xff3\x00\xf1\x816W3B1051\x00\x00\xf1\x006W351_C2\x00\x006W3B1051\x00\x00TLF0T20NL2H\r\xbdm',
],
},
CAR.KONA: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00OS__ SCC F-CUP 1.00 1.00 95655-J9200 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00OS9 LKAS AT USA LHD 1.00 1.00 95740-J9300 g21',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00OS MDPS C 1.00 1.05 56310J9030\x00 4OSDC105',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x816V5RAK00018.ELF\xf1\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'"\x01TOS-0NU06F301J02',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x816U2VE051\x00\x00\xf1\x006U2V0_C2\x00\x006U2VE051\x00\x00DOS4T16NS3\x00\x00\x00\x00',
],
},
CAR.KONA_EV: {
(Ecu.fwdRadar, 0x7D0, None): [
b'\xf1\x00DEev SCC F-CUP 1.00 1.00 99110-Q4000 ',
b'\xf1\x00OSev SCC F-CUP 1.00 1.00 99110-K4000 ',
b'\xf1\x00OSev SCC F-CUP 1.00 1.00 99110-K4100 ',
b'\xf1\x00OSev SCC F-CUP 1.00 1.01 99110-K4000 ',
b'\xf1\x00OSev SCC FNCUP 1.00 1.01 99110-K4000 ',
b'\xf1\x00DEev SCC F-CUP 1.00 1.03 96400-Q4100 ',
b'\xf1\x8799110Q4000\xf1\x00DEev SCC F-CUP 1.00 1.00 99110-Q4000 ',
b'\xf1\x8799110Q4100\xf1\x00DEev SCC F-CUP 1.00 1.00 99110-Q4100 ',
b'\xf1\x8799110Q4500\xf1\000DEev SCC F-CUP 1.00 1.00 99110-Q4500 ',
],
(Ecu.fwdCamera, 0x7C4, None): [
b'\xf1\x00DEE MFC AT USA LHD 1.00 1.03 95740-Q4000 180821',
b'\xf1\x00DEE MFC AT EUR LHD 1.00 1.00 99211-Q4000 191211',
b'\xf1\x00DEE MFC AT USA LHD 1.00 1.00 99211-Q4000 191211',
b'\xf1\000DEE MFC AT EUR LHD 1.00 1.00 99211-Q4100 200706',
b'\xf1\x00OSE LKAS AT EUR LHD 1.00 1.00 95740-K4100 W40',
b'\xf1\x00OSE LKAS AT EUR RHD 1.00 1.00 95740-K4100 W40',
b'\xf1\x00OSE LKAS AT KOR LHD 1.00 1.00 95740-K4100 W40',
b'\xf1\x00OE2 LKAS AT EUR LHD 1.00 1.00 95740-K4200 200',
b'\xf1\x00OSE LKAS AT USA LHD 1.00 1.00 95740-K4300 W50',
],
(Ecu.eps, 0x7D4, None): [
b'\xf1\x00OS MDPS C 1.00 1.03 56310/K4550 4OEDC103',
b'\xf1\x00OS MDPS C 1.00 1.04 56310K4000\x00 4OEDC104',
b'\xf1\x00OS MDPS C 1.00 1.04 56310K4050\x00 4OEDC104',
b'\xf1\x00DE MDPS C 1.00 1.05 56310Q4000\x00 4DEEC105',
b'\xf1\x00DE MDPS C 1.00 1.05 56310Q4100\x00 4DEEC105',
],
(Ecu.esp, 0x7D1, None): [
b'\xf1\x00OS IEB \r 105\x18\t\x18 58520-K4000',
b'\xf1\x00OS IEB \x01 212 \x11\x13 58520-K4000',
b'\xf1\x00OS IEB \x02 212 \x11\x13 58520-K4000',
b'\xf1\x00OS IEB \x03 210 \x02\x14 58520-K4000',
b'\xf1\x00OS IEB \x03 212 \x11\x13 58520-K4000',
],
},
CAR.KONA_HEV: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00OShe SCC FNCUP 1.00 1.01 99110-CM000 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00OSH LKAS AT KOR LHD 1.00 1.01 95740-CM000 l31',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00OS MDPS C 1.00 1.00 56310CM030\x00 4OHDC100',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x00OS IEB \x01 104 \x11 58520-CM000',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x816H6F6051\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x816U3J9051\x00\x00\xf1\x006U3H1_C2\x00\x006U3J9051\x00\x00HOS0G16DS1\x16\xc7\xb0\xd9',
],
},
CAR.IONIQ_EV: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00AEev SCC F-CUP 1.00 1.00 96400-G7000 ',
b'\xf1\x00AEev SCC F-CUP 1.00 1.00 96400-G7100 ',
b'\xf1\x00AEev SCC F-CUP 1.00 1.01 99110-G7000 ',
b'\xf1\x00AEev SCC F-CUP 1.00 1.00 99110-G7200 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00AEE MFC AT EUR LHD 1.00 1.00 95740-G7200 160418',
b'\xf1\x00AEE MFC AT USA LHD 1.00 1.00 95740-G2400 180222',
b'\xf1\x00AEE MFC AT EUR LHD 1.00 1.03 95740-G2500 190516',
b'\xf1\x00AEE MFC AT EUR RHD 1.00 1.01 95740-G2600 190819',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00AE MDPS C 1.00 1.02 56310G7300\x00 4AEEC102',
b'\xf1\x00AE MDPS C 1.00 1.04 56310/G7501 4AEEC104',
b'\xf1\x00AE MDPS C 1.00 1.01 56310/G7310 4APEC101',
b'\xf1\x00AE MDPS C 1.00 1.01 56310/G7560 4APEC101',
],
},
CAR.IONIQ_HEV: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\000AEhe SCC F-CUP 1.00 1.02 99110-G2100 ',
b'\xf1\x00AEhe SCC F-CUP 1.00 1.00 99110-G2200 ',
b'\xf1\x00AEhe SCC H-CUP 1.01 1.01 96400-G2000 ',
b'\xf1\x00AEhe SCC F-CUP 1.00 1.00 99110-G2600 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00AEH MFC AT EUR LHD 1.00 1.01 95740-G2600 190819',
b'\xf1\x00AEH MFC AT EUR LHD 1.00 1.00 95740-G2400 180222',
b'\xf1\000AEP MFC AT USA LHD 1.00 1.01 95740-G2600 190819',
b'\xf1\x00AEH MFC AT USA LHD 1.00 1.00 95740-G2700 201027',
],
(Ecu.eps, 0x7D4, None): [
b'\xf1\x00AE MDPS C 1.00 1.07 56310/G2301 4AEHC107',
b'\xf1\x00AE MDPS C 1.00 1.01 56310/G2310 4APHC101',
b'\xf1\000AE MDPS C 1.00 1.01 56310/G2510 4APHC101',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x816H6F6051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x816H6F2051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x816H6F6051\000\000\000\000\000\000\000\000',
b'\xf1\x816H6G5051\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x816U3J8051\x00\x00\xf1\x006U3H1_C2\x00\x006U3J8051\x00\x00HAE0G16UL0Nd\xed:',
b'\xf1\x816U3H1051\x00\x00\xf1\x006U3H0_C2\x00\x006U3H1051\x00\x00HAE0G16US2\x95\xa2^$',
b'\xf1\x816U3J9051\000\000\xf1\0006U3H1_C2\000\0006U3J9051\000\000PAE0G16NL0\x82zT\xd2',
b'\xf1\x816U3J9051\x00\x00\xf1\x006U3H1_C2\x00\x006U3J9051\x00\x00HAE0G16NL2\x00\x00\x00\x00',
],
},
CAR.SANTA_FE: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00TM__ SCC F-CUP 1.00 1.01 99110-S2000 ',
b'\xf1\x00TM__ SCC F-CUP 1.00 1.02 99110-S2000 ',
b'\xf1\x00TM__ SCC F-CUP 1.00 1.03 99110-S2000 ',
b'\xf1\x00TM__ SCC F-CUP 1.00 1.00 99110-S1500 ',
b'\xf1\x8799110S1500\xf1\x00TM__ SCC F-CUP 1.00 1.00 99110-S1500 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00TM MFC AT USA LHD 1.00 1.00 99211-S2000 180409',
b'\xf1\x00TMA MFC AT MEX LHD 1.00 1.01 99211-S2500 210205',
b'\xf1\x00TMA MFC AT USA LHD 1.00 1.00 99211-S2500 200720',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00TM MDPS C 1.00 1.00 56340-S2000 8409',
b'\xf1\x00TM MDPS C 1.00 1.00 56340-S2000 8A12',
b'\xf1\x00TM MDPS C 1.00 1.01 56340-S2000 9129',
b'\xf1\x00TM MDPS C 1.00 1.02 56370-S2AA0 0B19',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x00TM ESC \r 100\x18\x031 58910-S2650',
b'\xf1\x00TM ESC \r 103\x18\x11\x08 58910-S2650',
b'\xf1\x00TM ESC \r 104\x19\a\b 58910-S2650',
b'\xf1\x00TM ESC \x02 100\x18\x030 58910-S2600',
b'\xf1\x00TM ESC \x02 102\x18\x07\x01 58910-S2600',
b'\xf1\x00TM ESC \x02 103\x18\x11\x07 58910-S2600',
b'\xf1\x00TM ESC \x02 104\x19\x07\x07 58910-S2600',
b'\xf1\x00TM ESC \x03 103\x18\x11\x07 58910-S2600',
b'\xf1\x00TM ESC \x0c 103\x18\x11\x08 58910-S2650',
b'\xf1\x00TM ESC \x02 101 \x08\x04 58910-S2GA0',
b'\xf1\x00TM ESC \x03 101 \x08\x02 58910-S2DA0',
b'\xf1\x8758910-S2DA0\xf1\x00TM ESC \x03 101 \x08\x02 58910-S2DA0',
b'\xf1\x8758910-S2GA0\xf1\x00TM ESC \x02 101 \x08\x04 58910-S2GA0',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x81606EA051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x81606G1051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x81606G3051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x82TMBZN5TMD3XXXG2E',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x87LBJSGA7082574HG0\x87www\x98\x88\x88\x88\x99\xaa\xb9\x9afw\x86gx\x99\xa7\x89co\xf8\xffvU_\xffR\xaf\xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM2T20NS1\x00\xa6\xe0\x91',
b'\xf1\x87LBKSGA0458404HG0vfvg\x87www\x89\x99\xa8\x99y\xaa\xa7\x9ax\x88\xa7\x88t_\xf9\xff\x86w\x8f\xff\x15x\xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM2T20NS1\x00\x00\x00\x00',
b'\xf1\x87LDJUEA6010814HG1\x87w\x87x\x86gvw\x88\x88\x98\x88gw\x86wx\x88\x97\x88\x85o\xf8\xff\x86f_\xff\xd37\xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM4T20NS0\xf8\x19\x92g',
b'\xf1\x87LDJUEA6458264HG1ww\x87x\x97x\x87\x88\x88\x99\x98\x89g\x88\x86xw\x88\x97x\x86o\xf7\xffvw\x8f\xff3\x9a\xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM4T20NS0\xf8\x19\x92g',
b'\xf1\x87LDKUEA2045844HG1wwww\x98\x88x\x87\x88\x88\xa8\x88x\x99\x97\x89x\x88\xa7\x88U\x7f\xf8\xffvfO\xffC\x1e\xf1\x816W3E0051\x00\x00\xf1\x006W351_C2\x00\x006W3E0051\x00\x00TTM4T20NS3\x00\x00\x00\x00',
b'\xf1\x87LDKUEA9993304HG1\x87www\x97x\x87\x88\x99\x99\xa9\x99x\x99\xa7\x89w\x88\x97x\x86_\xf7\xffwwO\xffl#\xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM4T20NS1R\x7f\x90\n',
b'\xf1\x87LDLUEA6061564HG1\xa9\x99\x89\x98\x87wwwx\x88\x97\x88x\x99\xa7\x89x\x99\xa7\x89sO\xf9\xffvU_\xff<\xde\xf1\x816W3E1051\x00\x00\xf1\x006W351_C2\x00\x006W3E1051\x00\x00TTM4T20NS50\xcb\xc3\xed',
b'\xf1\x87LDLUEA6159884HG1\x88\x87hv\x99\x99y\x97\x89\xaa\xb8\x9ax\x99\x87\x89y\x99\xb7\x99\xa7?\xf7\xff\x97wo\xff\xf3\x05\xf1\x816W3E1051\x00\x00\xf1\x006W351_C2\x00\x006W3E1051\x00\x00TTM4T20NS5\x00\x00\x00\x00',
b'\xf1\x87LDLUEA6852664HG1\x97wWu\x97www\x89\xaa\xc8\x9ax\x99\x97\x89x\x99\xa7\x89SO\xf7\xff\xa8\x88\x7f\xff\x03z\xf1\x816W3E1051\x00\x00\xf1\x006W351_C2\x00\x006W3E1051\x00\x00TTM4T20NS50\xcb\xc3\xed',
b'\xf1\x87LDLUEA6898374HG1fevW\x87wwwx\x88\x97\x88h\x88\x96\x88x\x88\xa7\x88ao\xf9\xff\x98\x99\x7f\xffD\xe2\xf1\x816W3E1051\x00\x00\xf1\x006W351_C2\x00\x006W3E1051\x00\x00TTM4T20NS5\x00\x00\x00\x00',
b'\xf1\x87LDLUEA6898374HG1fevW\x87wwwx\x88\x97\x88h\x88\x96\x88x\x88\xa7\x88ao\xf9\xff\x98\x99\x7f\xffD\xe2\xf1\x816W3E1051\x00\x00\xf1\x006W351_C2\x00\x006W3E1051\x00\x00TTM4T20NS50\xcb\xc3\xed',
b'\xf1\x87SBJWAA5842214GG0\x88\x87\x88xww\x87x\x89\x99\xa8\x99\x88\x99\x98\x89w\x88\x87xw_\xfa\xfffU_\xff\xd1\x8d\xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM2G24NS1\x98{|\xe3',
b'\xf1\x87SBJWAA5890864GG0\xa9\x99\x89\x98\x98\x87\x98y\x89\x99\xa8\x99w\x88\x87xww\x87wvo\xfb\xffuD_\xff\x9f\xb5\xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM2G24NS1\x98{|\xe3',
b'\xf1\x87SBJWAA6562474GG0ffvgeTeFx\x88\x97\x88ww\x87www\x87w\x84o\xfa\xff\x87fO\xff\xc2 \xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM2G24NS1\x00\x00\x00\x00',
b'\xf1\x87SBJWAA6562474GG0ffvgeTeFx\x88\x97\x88ww\x87www\x87w\x84o\xfa\xff\x87fO\xff\xc2 \xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM2G24NS1\x98{|\xe3',
b'\xf1\x87SBJWAA7780564GG0wvwgUUeVwwwwx\x88\x87\x88wwwwd_\xfc\xff\x86f\x7f\xff\xd7*\xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM2G24NS2F\x84<\xc0',
b'\xf1\x87SBJWAA8278284GG0ffvgUU\x85Xx\x88\x87\x88x\x88w\x88ww\x87w\x96o\xfd\xff\xa7U_\xff\xf2\xa0\xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM2G24NS2F\x84<\xc0',
b'\xf1\x87SBLWAA4363244GG0wvwgwv\x87hgw\x86ww\x88\x87xww\x87wdo\xfb\xff\x86f\x7f\xff3$\xf1\x816W3E1051\x00\x00\xf1\x006W351_C2\x00\x006W3E1051\x00\x00TTM2G24NS6\x00\x00\x00\x00',
b'\xf1\x87SBLWAA4363244GG0wvwgwv\x87hgw\x86ww\x88\x87xww\x87wdo\xfb\xff\x86f\x7f\xff3$\xf1\x816W3E1051\x00\x00\xf1\x006W351_C2\x00\x006W3E1051\x00\x00TTM2G24NS6x0\x17\xfe',
b'\xf1\x87SBLWAA4899564GG0VfvgUU\x85Xx\x88\x87\x88vfgf\x87wxwvO\xfb\xff\x97f\xb1\xffSB\xf1\x816W3E1051\x00\x00\xf1\x006W351_C2\x00\x006W3E1051\x00\x00TTM2G24NS7\x00\x00\x00\x00',
b'\xf1\x87SBLWAA6622844GG0wwwwff\x86hwwwwx\x88\x87\x88\x88\x88\x88\x88\x98?\xfd\xff\xa9\x88\x7f\xffn\xe5\xf1\x816W3E1051\x00\x00\xf1\x006W351_C2\x00\x006W3E1051\x00\x00TTM2G24NS7u\x1e{\x1c',
b'\xf1\x87SDJXAA7656854GG1DEtWUU\x85X\x88\x88\x98\x88w\x88\x87xx\x88\x87\x88\x96o\xfb\xff\x86f\x7f\xff.\xca\xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM4G24NS2\x00\x00\x00\x00',
b'\xf1\x87SDJXAA7656854GG1DEtWUU\x85X\x88\x88\x98\x88w\x88\x87xx\x88\x87\x88\x96o\xfb\xff\x86f\x7f\xff.\xca\xf1\x816W3C2051\x00\x00\xf1\x006W351_C2\x00\x006W3C2051\x00\x00TTM4G24NS2K\xdaV0',
b'\xf1\x87SDKXAA2443414GG1vfvgwv\x87h\x88\x88\x88\x88ww\x87wwwww\x99_\xfc\xffvD?\xffl\xd2\xf1\x816W3E1051\x00\x00\xf1\x006W351_C2\x00\x006W3E1051\x00\x00TTM4G24NS6\x00\x00\x00\x00',
b'\xf1\x00T02601BL T02730A1 VTMPT25XXX730NS2\xa6\x06\x88\xf7',
b'\xf1\x87SDMXCA8653204GN1EVugEUuWwwwwww\x87wwwwwv/\xfb\xff\xa8\x88\x9f\xff\xa5\x9c\xf1\x89HT6WAD00A1\xf1\x82STM4G25NH1\x00\x00\x00\x00\x00\x00',
b'\xf1\x87954A02N250\x00\x00\x00\x00\x00\xf1\x81T02730A1 \xf1\x00T02601BL T02730A1 VTMPT25XXX730NS2\xa6\x06\x88\xf7',
],
},
CAR.SANTA_FE_HEV: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x8799110CL500\xf1\x00TMhe SCC FHCUP 1.00 1.00 99110-CL500 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00TMH MFC AT USA LHD 1.00 1.03 99211-S1500 210224',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00TM MDPS C 1.00 1.02 56310-CLAC0 4TSHC102',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x87391312MTC1',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x87959102T250\x00\x00\x00\x00\x00\xf1\x81E14\x00\x00\x00\x00\x00\x00\x00\xf1\x00PSBG2333 E14\x00\x00\x00\x00\x00\x00\x00TTM2H16SA2\x80\xd7l\xb2',
],
},
CAR.PALISADE: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\000LX2_ SCC F-CUP 1.00 1.05 99110-S8100 ',
b'\xf1\x00LX2 SCC FHCUP 1.00 1.04 99110-S8100 ',
b'\xf1\x00LX2_ SCC FHCU- 1.00 1.05 99110-S8100 ',
b'\xf1\x00LX2_ SCC FHCUP 1.00 1.00 99110-S8110 ',
b'\xf1\x00LX2_ SCC FHCUP 1.00 1.04 99110-S8100 ',
b'\xf1\x00LX2_ SCC FHCUP 1.00 1.05 99110-S8100 ',
b'\xf1\x00ON__ FCA FHCUP 1.00 1.02 99110-S9100 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00LX2 MFC AT USA LHD 1.00 1.03 99211-S8100 190125',
b'\xf1\x00LX2 MFC AT USA LHD 1.00 1.05 99211-S8100 190909',
b'\xf1\x00LX2 MFC AT USA LHD 1.00 1.07 99211-S8100 200422',
b'\xf1\x00LX2 MFC AT USA LHD 1.00 1.08 99211-S8100 200903',
b'\xf1\x00ON MFC AT USA LHD 1.00 1.01 99211-S9100 181105',
b'\xf1\x00ON MFC AT USA LHD 1.00 1.03 99211-S9100 200720',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00LX2 MDPS C 1,00 1,03 56310-S8020 4LXDC103', # modified firmware
b'\xf1\x00LX2 MDPS C 1.00 1.03 56310-S8020 4LXDC103',
b'\xf1\x00LX2 MDPS C 1.00 1.04 56310-S8020 4LXDC104',
b'\xf1\x00ON MDPS C 1.00 1.00 56340-S9000 8B13',
b'\xf1\x00ON MDPS C 1.00 1.01 56340-S9000 9201',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x00LX ESC \x01 103\x19\t\x10 58910-S8360',
b'\xf1\x00LX ESC \x01 103\x31\t\020 58910-S8360',
b'\xf1\x00LX ESC \x0b 101\x19\x03\x17 58910-S8330',
b'\xf1\x00LX ESC \x0b 102\x19\x05\x07 58910-S8330',
b'\xf1\x00LX ESC \x0b 103\x19\t\x07 58910-S8330',
b'\xf1\x00LX ESC \x0b 103\x19\t\x10 58910-S8360',
b'\xf1\x00LX ESC \x0b 104 \x10\x16 58910-S8360',
b'\xf1\x00ON ESC \x0b 100\x18\x12\x18 58910-S9360',
b'\xf1\x00ON ESC \x0b 101\x19\t\x08 58910-S9360',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x81640J0051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x81640K0051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x81640S1051\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x00bcsh8p54 U872\x00\x00\x00\x00\x00\x00TON4G38NB1\x96z28',
b'\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00TON4G38NB2[v\\\xb6',
b'\xf1\x87LBLUFN591307KF25vgvw\x97wwwy\x99\xa7\x99\x99\xaa\xa9\x9af\x88\x96h\x95o\xf7\xff\x99f/\xff\xe4c\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX2G38NB2\xd7\xc1/\xd1',
b'\xf1\x87LBLUFN650868KF36\xa9\x98\x89\x88\xa8\x88\x88\x88h\x99\xa6\x89fw\x86gw\x88\x97x\xaa\x7f\xf6\xff\xbb\xbb\x8f\xff+\x82\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX2G38NB3\xd1\xc3\xf8\xa8',
b'\xf1\x87LBLUFN655162KF36\x98\x88\x88\x88\x98\x88\x88\x88x\x99\xa7\x89x\x99\xa7\x89x\x99\x97\x89g\x7f\xf7\xffwU_\xff\xe9!\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX2G38NB3\xd1\xc3\xf8\xa8',
b'\xf1\x87LBLUFN731381KF36\xb9\x99\x89\x98\x98\x88\x88\x88\x89\x99\xa8\x99\x88\x99\xa8\x89\x88\x88\x98\x88V\177\xf6\xff\x99w\x8f\xff\xad\xd8\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\000bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX2G38NB3\xd1\xc3\xf8\xa8',
b'\xf1\x87LDKVAA0028604HH1\xa8\x88x\x87vgvw\x88\x99\xa8\x89gw\x86ww\x88\x97x\x97o\xf9\xff\x97w\x7f\xffo\x02\xf1\x81U872\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U872\x00\x00\x00\x00\x00\x00TON4G38NB1\x96z28',
b'\xf1\x87LDKVAA3068374HH1wwww\x87xw\x87y\x99\xa7\x99w\x88\x87xw\x88\x97x\x85\xaf\xfa\xffvU/\xffU\xdc\xf1\x81U872\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U872\x00\x00\x00\x00\x00\x00TON4G38NB1\x96z28',
b'\xf1\x87LDKVBN382172KF26\x98\x88\x88\x88\xa8\x88\x88\x88x\x99\xa7\x89\x87\x88\x98x\x98\x99\xa9\x89\xa5_\xf6\xffDDO\xff\xcd\x16\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB2\xafL]\xe7',
b'\xf1\x87LDKVBN424201KF26\xba\xaa\x9a\xa9\x99\x99\x89\x98\x89\x99\xa8\x99\x88\x99\x98\x89\x88\x99\xa8\x89v\x7f\xf7\xffwf_\xffq\xa6\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB2\xafL]\xe7',
b'\xf1\x87LDKVBN540766KF37\x87wgv\x87w\x87xx\x99\x97\x89v\x88\x97h\x88\x88\x88\x88x\x7f\xf6\xffvUo\xff\xd3\x01\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB2\xafL]\xe7',
b'\xf1\x87LDLVAA4225634HH1\x98\x88\x88\x88eUeVx\x88\x87\x88g\x88\x86xx\x88\x87\x88\x86o\xf9\xff\x87w\x7f\xff\xf2\xf7\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00TON4G38NB2[v\\\xb6',
b'\xf1\x87LDLVAA4777834HH1\x98\x88x\x87\x87wwwx\x88\x87\x88x\x99\x97\x89x\x88\x97\x88\x86o\xfa\xff\x86fO\xff\x1d9\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00TON4G38NB2[v\\\xb6',
b'\xf1\x87LDLVAA5194534HH1ffvguUUUx\x88\xa7\x88h\x99\x96\x89x\x88\x97\x88ro\xf9\xff\x98wo\xff\xaaM\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00TON4G38NB2[v\\\xb6',
b'\xf1\x87LDLVAA5949924HH1\xa9\x99y\x97\x87wwwx\x99\x97\x89x\x99\xa7\x89x\x99\xa7\x89\x87_\xfa\xffeD?\xff\xf1\xfd\xf1\x81U903\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U903\x00\x00\x00\x00\x00\x00TON4G38NB2[v\\\xb6',
b'\xf1\x87LDLVBN560098KF26\x86fff\x87vgfg\x88\x96xfw\x86gfw\x86g\x95\xf6\xffeU_\xff\x92c\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB2\xafL]\xe7',
b'\xf1\x87LDLVBN602045KF26\xb9\x99\x89\x98\x97vwgy\xaa\xb7\x9af\x88\x96hw\x99\xa7y\xa9\x7f\xf5\xff\x99w\x7f\xff,\xd3\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN628911KF26\xa9\x99\x89\x98\x98\x88\x88\x88y\x99\xa7\x99fw\x86gw\x88\x87x\x83\x7f\xf6\xff\x98wo\xff2\xda\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN645817KF37\x87www\x98\x87xwx\x99\x97\x89\x99\x99\x99\x99g\x88\x96x\xb6_\xf7\xff\x98fo\xff\xe2\x86\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN662115KF37\x98\x88\x88\x88\xa8\x88\x88\x88x\x99\x97\x89x\x99\xa7\x89\x88\x99\xa8\x89\x88\x7f\xf7\xfffD_\xff\xdc\x84\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN667933KF37\xb9\x99\x89\x98\xb9\x99\x99\x99x\x88\x87\x88w\x88\x87x\x88\x88\x98\x88\xcbo\xf7\xffe3/\xffQ!\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN673087KF37\x97www\x86fvgx\x99\x97\x89\x99\xaa\xa9\x9ag\x88\x86x\xe9_\xf8\xff\x98w\x7f\xff"\xad\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN673841KF37\x98\x88x\x87\x86g\x86xy\x99\xa7\x99\x88\x99\xa8\x89w\x88\x97xdo\xf5\xff\x98\x88\x8f\xffT\xec\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN681363KF37\x98\x88\x88\x88\x97x\x87\x88y\xaa\xa7\x9a\x88\x88\x98\x88\x88\x88\x88\x88vo\xf6\xffvD\x7f\xff%v\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN713782KF37\x99\x99y\x97\x98\x88\x88\x88x\x88\x97\x88\x88\x99\x98\x89\x88\x99\xa8\x89\x87o\xf7\xffeU?\xff7,\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN713890KF26\xb9\x99\x89\x98\xa9\x99\x99\x99x\x99\x97\x89\x88\x99\xa8\x89\x88\x99\xb8\x89Do\xf7\xff\xa9\x88o\xffs\r\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN733215KF37\x99\x98y\x87\x97wwwi\x99\xa6\x99x\x99\xa7\x89V\x88\x95h\x86o\xf7\xffeDO\xff\x12\xe7\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN750044KF37\xca\xa9\x8a\x98\xa7wwwy\xaa\xb7\x9ag\x88\x96x\x88\x99\xa8\x89\xb9\x7f\xf6\xff\xa8w\x7f\xff\xbe\xde\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN752612KF37\xba\xaa\x8a\xa8\x87w\x87xy\xaa\xa7\x9a\x88\x99\x98\x89x\x88\x97\x88\x96o\xf6\xffvU_\xffh\x1b\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN755553KF37\x87xw\x87\x97w\x87xy\x99\xa7\x99\x99\x99\xa9\x99Vw\x95gwo\xf6\xffwUO\xff\xb5T\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX4G38NB3X\xa8\xc08',
b'\xf1\x87LDLVBN757883KF37\x98\x87xw\x98\x87\x88xy\xaa\xb7\x9ag\x88\x96x\x89\x99\xa8\x99e\x7f\xf6\xff\xa9\x88o\xff5\x15\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB4\xd6\xe8\xd7\xa6',
b'\xf1\x87LDMVBN778156KF37\x87vWe\xa9\x99\x99\x99y\x99\xb7\x99\x99\x99\x99\x99x\x99\x97\x89\xa8\x7f\xf8\xffwf\x7f\xff\x82_\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB4\xd6\xe8\xd7\xa6',
b'\xf1\x87LDMVBN780576KF37\x98\x87hv\x97x\x97\x89x\x99\xa7\x89\x88\x99\x98\x89w\x88\x97x\x98\x7f\xf7\xff\xba\x88\x8f\xff\x1e0\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB4\xd6\xe8\xd7\xa6',
b'\xf1\x87LDMVBN783485KF37\x87www\x87vwgy\x99\xa7\x99\x99\x99\xa9\x99Vw\x95g\x89_\xf6\xff\xa9w_\xff\xc5\xd6\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB4\xd6\xe8\xd7\xa6',
b'\xf1\x87LDMVBN811844KF37\x87vwgvfffx\x99\xa7\x89Vw\x95gg\x88\xa6xe\x8f\xf6\xff\x97wO\xff\t\x80\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB4\xd6\xe8\xd7\xa6',
b'\xf1\x87LDMVBN830601KF37\xa7www\xa8\x87xwx\x99\xa7\x89Uw\x85Ww\x88\x97x\x88o\xf6\xff\x8a\xaa\x7f\xff\xe2:\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB4\xd6\xe8\xd7\xa6',
b'\xf1\x87LDMVBN848789KF37\x87w\x87x\x87w\x87xy\x99\xb7\x99\x87\x88\x98x\x88\x99\xa8\x89\x87\x7f\xf6\xfffUo\xff\xe3!\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB5\xb9\x94\xe8\x89',
b'\xf1\x87LDMVBN851595KF37\x97wgvvfffx\x99\xb7\x89\x88\x99\x98\x89\x87\x88\x98x\x99\x7f\xf7\xff\x97w\x7f\xff@\xf3\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB5\xb9\x94\xe8\x89',
b'\xf1\x87LDMVBN873175KF26\xa8\x88\x88\x88vfVex\x99\xb7\x89\x88\x99\x98\x89x\x88\x97\x88f\x7f\xf7\xff\xbb\xaa\x8f\xff,\x04\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB5\xb9\x94\xe8\x89',
b'\xf1\x87LDMVBN879401KF26veVU\xa8\x88\x88\x88g\x88\xa6xVw\x95gx\x88\xa7\x88v\x8f\xf9\xff\xdd\xbb\xbf\xff\xb3\x99\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB5\xb9\x94\xe8\x89',
b'\xf1\x87LDMVBN881314KF37\xa8\x88h\x86\x97www\x89\x99\xa8\x99w\x88\x97xx\x99\xa7\x89\xca\x7f\xf8\xff\xba\x99\x8f\xff\xd8v\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB5\xb9\x94\xe8\x89',
b'\xf1\x87LDMVBN888651KF37\xa9\x99\x89\x98vfff\x88\x99\x98\x89w\x99\xa7y\x88\x88\x98\x88D\x8f\xf9\xff\xcb\x99\x8f\xff\xa5\x1e\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB5\xb9\x94\xe8\x89',
b'\xf1\x87LDMVBN889419KF37\xa9\x99y\x97\x87w\x87xx\x88\x97\x88w\x88\x97x\x88\x99\x98\x89e\x9f\xf9\xffeUo\xff\x901\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB5\xb9\x94\xe8\x89',
b'\xf1\x87LDMVBN895969KF37vefV\x87vgfx\x99\xa7\x89\x99\x99\xb9\x99f\x88\x96he_\xf7\xffxwo\xff\x14\xf9\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB5\xb9\x94\xe8\x89',
b'\xf1\x87LDMVBN899222KF37\xa8\x88x\x87\x97www\x98\x99\x99\x89\x88\x99\x98\x89f\x88\x96hdo\xf7\xff\xbb\xaa\x9f\xff\xe2U\xf1\x81U922\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U922\x00\x00\x00\x00\x00\x00SLX4G38NB5\xb9\x94\xe8\x89',
b"\xf1\x87LBLUFN622950KF36\xa8\x88\x88\x88\x87w\x87xh\x99\x96\x89\x88\x99\x98\x89\x88\x99\x98\x89\x87o\xf6\xff\x98\x88o\xffx'\xf1\x81U891\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U891\x00\x00\x00\x00\x00\x00SLX2G38NB3\xd1\xc3\xf8\xa8",
],
},
CAR.VELOSTER: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00JS__ SCC H-CUP 1.00 1.02 95650-J3200 ',
b'\xf1\x00JS__ SCC HNCUP 1.00 1.02 95650-J3100 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00JS LKAS AT USA LHD 1.00 1.02 95740-J3000 K32',
b'\xf1\x00JS LKAS AT KOR LHD 1.00 1.03 95740-J3000 K33',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00JSL MDPS C 1.00 1.03 56340-J3000 8308',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\x01TJS-JNU06F200H0A',
b'\x01TJS-JDK06F200H0A',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x816U2V8051\x00\x00\xf1\x006U2V0_C2\x00\x006U2V8051\x00\x00DJS0T16NS1\xba\x02\xb8\x80',
b'\xf1\x816U2V8051\x00\x00\xf1\x006U2V0_C2\x00\x006U2V8051\x00\x00DJS0T16NS1\x00\x00\x00\x00',
b'\xf1\x816U2V8051\x00\x00\xf1\x006U2V0_C2\x00\x006U2V8051\x00\x00DJS0T16KS2\016\xba\036\xa2',
],
},
  # Kia
CAR.FORTE: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00BD__ SCC H-CUP 1.00 1.02 99110-M6000 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00BD LKAS AT USA LHD 1.00 1.04 95740-M6000 J33',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00BD MDPS C 1.00 1.02 56310-XX000 4BD2C102',
b'\xf1\x00BD MDPS C 1.00 1.08 56310/M6300 4BDDC108',
b'\xf1\x00BD MDPS C 1.00 1.08 56310M6300\x00 4BDDC108',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x816VGRAH00018.ELF\xf1\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\x01TBDM1NU06F200H01',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x816U2VC051\x00\x00\xf1\x006U2V0_C2\x00\x006U2VC051\x00\x00DBD0T16SS0\x00\x00\x00\x00',
b"\xf1\x816U2VC051\x00\x00\xf1\x006U2V0_C2\x00\x006U2VC051\x00\x00DBD0T16SS0\xcf\x1e'\xc3", ],
},
CAR.K5: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00JF__ SCC F-CUP 1.00 1.00 96400-D4110 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00JFA LKAS AT USA LHD 1.00 1.02 95895-D5000 h31',
b'\xf1\x00JFA LKAS AT USA LHD 1.00 1.00 95895-D5001 h32',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00TM MDPS C 1.00 1.00 56340-S2000 8409',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x00JF ESC \v 11 \x18\x030 58920-D5180',
],
(Ecu.engine, 0x7e0, None): [
b'\x01TJFAJNU06F201H03',
b'\xf1\x89F1JF600AISEIU702\xf1\x82F1JF600AISEIU702',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x816U2V8051\x00\x00\xf1\x006U2V0_C2\x00\x006U2V8051\x00\x00DJF0T16NL0\t\xd2GW', ],
},
CAR.K5_HEV: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00DEhe SCC H-CUP 1.01 1.02 96400-G5100 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00DEP MFC AT USA LHD 1.00 1.01 95740-G5010 170424',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00DE MDPS C 1.00 1.09 56310G5301\x00 4DEHC109',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x816H6F4051\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.transmission, 0x7e1, None): [
b"\xf1\x816U3J2051\x00\x00\xf1\x006U3H0_C2\x00\x006U3J2051\x00\x00PDE0G16NS2\xf4'\\\x91", ],
},
CAR.K5_DL3: {
(Ecu.fwdRadar, 0x7D0, None): [
b'\xf1\000DL3_ SCC FHCUP 1.00 1.03 99110-L2000 ',
b'\xf1\x8799110L2000\xf1\000DL3_ SCC FHCUP 1.00 1.03 99110-L2000 ',
b'\xf1\x8799110L2100\xf1\x00DL3_ SCC F-CUP 1.00 1.03 99110-L2100 ',
b'\xf1\x8799110L2100\xf1\x00DL3_ SCC FHCUP 1.00 1.03 99110-L2100 ',
],
(Ecu.fwdCamera, 0x7C4, None): [
b'\xf1\000DL3 MFC AT USA LHD 1.00 1.03 99210-L3000 200915',
b'\xf1\x00DL3 MFC AT USA LHD 1.00 1.04 99210-L3000 210208',
],
(Ecu.eps, 0x7D4, None): [
b'\xf1\x8756310-L3110\xf1\000DL3 MDPS C 1.00 1.01 56310-L3110 4DLAC101',
b'\xf1\x8756310-L3220\xf1\x00DL3 MDPS C 1.00 1.01 56310-L3220 4DLAC101',
b'\xf1\x8757700-L3000\xf1\x00DL3 MDPS R 1.00 1.02 57700-L3000 4DLAP102',
],
(Ecu.esp, 0x7D1, None): [
b'\xf1\000DL ESC \006 101 \004\002 58910-L3200',
b'\xf1\x8758910-L3200\xf1\000DL ESC \006 101 \004\002 58910-L3200',
b'\xf1\x8758910-L3800\xf1\x00DL ESC \t 101 \x07\x02 58910-L3800',
b'\xf1\x8758910-L3600\xf1\x00DL ESC \x03 100 \x08\x02 58910-L3600',
],
(Ecu.engine, 0x7E0, None): [
b'\xf1\x87391212MKT0',
b'\xf1\x87391212MKV0',
b'\xf1\x870\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf1\x82DLDWN5TMDCXXXJ1B',
],
(Ecu.transmission, 0x7E1, None): [
b'\xf1\000bcsh8p54 U913\000\000\000\000\000\000TDL2T16NB1ia\v\xb8',
b'\xf1\x87SALFEA5652514GK2UUeV\x88\x87\x88xxwg\x87ww\x87wwfwvd/\xfb\xffvU_\xff\x93\xd3\xf1\x81U913\000\000\000\000\000\000\xf1\000bcsh8p54 U913\000\000\000\000\000\000TDL2T16NB1ia\v\xb8',
b'\xf1\x87SALFEA6046104GK2wvwgeTeFg\x88\x96xwwwwffvfe?\xfd\xff\x86fo\xff\x97A\xf1\x81U913\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00TDL2T16NB1ia\x0b\xb8',
b'\xf1\x87SCMSAA8572454GK1\x87x\x87\x88Vf\x86hgwvwvwwgvwwgT?\xfb\xff\x97fo\xffH\xb8\xf1\x81U913\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 U913\x00\x00\x00\x00\x00\x00TDL4T16NB05\x94t\x18',
b'\xf1\x87954A02N300\x00\x00\x00\x00\x00\xf1\x81T02730A1 \xf1\x00T02601BL T02730A1 WDL3T25XXX730NS2b\x1f\xb8%',
],
},
CAR.STINGER: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00CK__ SCC F_CUP 1.00 1.01 96400-J5100 ',
b'\xf1\x00CK__ SCC F_CUP 1.00 1.03 96400-J5100 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00CK MFC AT USA LHD 1.00 1.03 95740-J5000 170822',
b'\xf1\x00CK MFC AT USA LHD 1.00 1.04 95740-J5000 180504',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00CK MDPS R 1.00 1.04 57700-J5200 4C2CL104',
b'\xf1\x00CK MDPS R 1.00 1.04 57700-J5220 4C2VL104',
b'\xf1\x00CK MDPS R 1.00 1.04 57700-J5420 4C4VL104',
b'\xf1\x00CK MDPS R 1.00 1.06 57700-J5420 4C4VL106',
b'\xf1\x00CK MDPS R 1.00 1.07 57700-J5420 4C4VL107',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x81606DE051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x81640E0051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x81640L0051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x82CKJN3TMSDE0B\x00\x00\x00\x00',
b'\xf1\x82CKKN3TMD_H0A\x00\x00\x00\x00',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x87VCJLE17622572DK0vd6D\x99\x98y\x97vwVffUfvfC%CuT&Dx\x87o\xff{\x1c\xf1\x81E21\x00\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 E21\x00\x00\x00\x00\x00\x00\x00SCK0T33NB0\x88\xa2\xe6\xf0',
b'\xf1\x87VDHLG17000192DK2xdFffT\xa5VUD$DwT\x86wveVeeD&T\x99\xba\x8f\xff\xcc\x99\xf1\x81E21\x00\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 E21\x00\x00\x00\x00\x00\x00\x00SCK0T33NB0\x88\xa2\xe6\xf0',
b'\xf1\x87VDHLG17000192DK2xdFffT\xa5VUD$DwT\x86wveVeeD&T\x99\xba\x8f\xff\xcc\x99\xf1\x89E21\x00\x00\x00\x00\x00\x00\x00\xf1\x82SCK0T33NB0',
b'\xf1\x87VDHLG17034412DK2vD6DfVvVTD$D\x99w\x88\x98EDEDeT6DgfO\xff\xc3=\xf1\x81E21\x00\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 E21\x00\x00\x00\x00\x00\x00\x00SCK0T33NB0\x88\xa2\xe6\xf0',
b'\xf1\x87VDHLG17118862DK2\x8awWwgu\x96wVfUVwv\x97xWvfvUTGTx\x87o\xff\xc9\xed\xf1\x81E21\x00\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 E21\x00\x00\x00\x00\x00\x00\x00SCK0T33NB0\x88\xa2\xe6\xf0',
b'\xf1\x87VDJLG18425192DK2xeGewfgf\x86eFeweWv\x88eVeuTGT\x89vo\xff\tJ\xf1\x81E24\x00\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 E24\x00\x00\x00\x00\x00\x00\x00SCK0T33NB1\x8a\xdcM\x90',
b'\xf1\x87VDKLJ18675252DK6\x89vhgwwwwveVU\x88w\x87w\x99vgf\x97vXfgw_\xff\xc2\xfb\xf1\x89E25\x00\x00\x00\x00\x00\x00\x00\xf1\x82TCK0T33NB2',
b'\xf1\x87WAJTE17552812CH4vfFffvfVeT5DwvvVVdFeegeg\x88\x88o\xff\x1a]\xf1\x81E21\x00\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 E21\x00\x00\x00\x00\x00\x00\x00TCK2T20NB1\x19\xd2\x00\x94',
],
},
CAR.NIRO_EV: {
(Ecu.fwdRadar, 0x7D0, None): [
b'\xf1\x00DEev SCC F-CUP 1.00 1.00 99110-Q4000 ',
b'\xf1\x00DEev SCC F-CUP 1.00 1.02 96400-Q4100 ',
b'\xf1\x00DEev SCC F-CUP 1.00 1.03 96400-Q4100 ',
b'\xf1\x00OSev SCC F-CUP 1.00 1.01 99110-K4000 ',
b'\xf1\x8799110Q4000\xf1\x00DEev SCC F-CUP 1.00 1.00 99110-Q4000 ',
b'\xf1\x8799110Q4100\xf1\x00DEev SCC F-CUP 1.00 1.00 99110-Q4100 ',
b'\xf1\x8799110Q4500\xf1\x00DEev SCC F-CUP 1.00 1.00 99110-Q4500 ',
b'\xf1\x8799110Q4600\xf1\x00DEev SCC FNCUP 1.00 1.00 99110-Q4600 ',
b'\xf1\x8799110Q4600\xf1\x00DEev SCC FHCUP 1.00 1.00 99110-Q4600 ',
],
(Ecu.fwdCamera, 0x7C4, None): [
b'\xf1\x00DEE MFC AT USA LHD 1.00 1.03 95740-Q4000 180821',
b'\xf1\x00DEE MFC AT EUR LHD 1.00 1.00 99211-Q4000 191211',
b'\xf1\x00DEE MFC AT USA LHD 1.00 1.00 99211-Q4000 191211',
b'\xf1\000DEE MFC AT EUR LHD 1.00 1.00 99211-Q4100 200706',
b'\xf1\x00OSE LKAS AT EUR LHD 1.00 1.00 95740-K4100 W40',
],
(Ecu.eps, 0x7D4, None): [
b'\xf1\x00OS MDPS C 1.00 1.04 56310K4050\x00 4OEDC104',
b'\xf1\x00DE MDPS C 1.00 1.05 56310Q4000\x00 4DEEC105',
b'\xf1\x00DE MDPS C 1.00 1.05 56310Q4100\x00 4DEEC105',
],
(Ecu.esp, 0x7D1, None): [
b'\xf1\x00OS IEB \r 212 \x11\x13 58520-K4000',
],
},
CAR.NIRO_HEV: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00DEhe SCC H-CUP 1.01 1.02 96400-G5100 ',
b'\xf1\x00DEhe SCC FHCUP 1.00 1.00 99110-G5600 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00DEP MFC AT USA LHD 1.00 1.01 95740-G5010 170424',
b'\xf1\x00DEH MFC AT USA LHD 1.00 1.07 99211-G5000 201221',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\000DE MDPS C 1.00 1.09 56310G5301\000 4DEHC109',
b'\xf1\x00DE MDPS C 1.00 1.01 56310G5520\x00 4DEPC101',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x816H6F4051\000\000\000\000\000\000\000\000',
b'\xf1\x816H6G5051\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.transmission, 0x7e1, None): [
b"\xf1\x816U3J2051\000\000\xf1\0006U3H0_C2\000\0006U3J2051\000\000PDE0G16NS2\xf4\'\\\x91",
b'\xf1\x816U3J2051\000\000\xf1\0006U3H0_C2\000\0006U3J2051\000\000PDE0G16NS2\000\000\000\000',
b'\xf1\x816U3J9051\x00\x00\xf1\x006U3H1_C2\x00\x006U3J9051\x00\x00HDE0G16NL3\x00\x00\x00\x00',
b'\xf1\x816U3J9051\x00\x00\xf1\x006U3H1_C2\x00\x006U3J9051\x00\x00HDE0G16NL3\xb9\xd3\xfaW',
],
},
CAR.SELTOS: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x8799110Q5100\xf1\000SP2_ SCC FHCUP 1.01 1.05 99110-Q5100 ',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\000SP2 MFC AT USA LHD 1.00 1.04 99210-Q5000 191114',
b'\xf1\000SP2 MFC AT USA LHD 1.00 1.05 99210-Q5000 201012',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\000SP2 MDPS C 1.00 1.04 56300Q5200 ',
b'\xf1\000SP2 MDPS C 1.01 1.05 56300Q5200 ',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x8758910-Q5450\xf1\000SP ESC \a 101\031\t\005 58910-Q5450',
b'\xf1\x8758910-Q5450\xf1\000SP ESC \t 101\031\t\005 58910-Q5450',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x81616D2051\000\000\000\000\000\000\000\000',
b'\xf1\x81616D5051\000\000\000\000\000\000\000\000',
b'\001TSP2KNL06F100J0K',
b'\001TSP2KNL06F200J0K',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x87CZLUB49370612JF7h\xa8y\x87\x99\xa7hv\x99\x97fv\x88\x87x\x89x\x96O\xff\x88\xff\xff\xff.@\xf1\x816V2C2051\000\000\xf1\0006V2B0_C2\000\0006V2C2051\000\000CSP4N20NS3\000\000\000\000',
b'\xf1\x87954A22D200\xf1\x81T01950A1 \xf1\000T0190XBL T01950A1 DSP2T16X4X950NS6\xd30\xa5\xb9',
b'\xf1\x87954A22D200\xf1\x81T01950A1 \xf1\000T0190XBL T01950A1 DSP2T16X4X950NS8\r\xfe\x9c\x8b',
],
},
CAR.K7: {
(Ecu.eps, 0x7d4, None): [b'\xf1\000YG MDPS C 1.00 1.01 56310F6350\000 4YG7C101',],
},
# Genesis
CAR.GENESIS_G70: {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00IK__ SCC F-CUP 1.00 1.02 96400-G9100 ',
b'\xf1\x00IK__ SCC F-CUP 1.00 1.02 96400-G9100 \xf1\xa01.02',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00IK MFC AT USA LHD 1.00 1.01 95740-G9000 170920',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00IK MDPS R 1.00 1.06 57700-G9420 4I4VL106',
b'\xf1\x00IK MDPS R 1.00 1.07 57700-G9220 4I2VL107',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x81640F0051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x81640J0051\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x87VDJLT17895112DN4\x88fVf\x99\x88\x88\x88\x87fVe\x88vhwwUFU\x97eFex\x99\xff\xb7\x82\xf1\x81E25\x00\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 E25\x00\x00\x00\x00\x00\x00\x00SIK0T33NB2\x11\x1am\xda',
b'\xf1\x87VCJLP18407832DN3\x88vXfvUVT\x97eFU\x87d7v\x88eVeveFU\x89\x98\x7f\xff\xb2\xb0\xf1\x81E25\x00\x00\x00'
b'\x00\x00\x00\x00\xf1\x00bcsh8p54 E25\x00\x00\x00\x00\x00\x00\x00SIK0T33NB4\xecE\xefL',
],
},
}
CHECKSUM = {
"crc8": [CAR.SONATA, CAR.SANTA_FE, CAR.PALISADE, CAR.SELTOS, CAR.ELANTRA21, CAR.K5_DL3,
CAR.SONATA_HEV, CAR.SANTA_FE_HEV, CAR.SOUL_EV, CAR.ELANTRA21_HEV, CAR.K5_DL3_HEV],
"6B": [CAR.SORENTO, CAR.GENESIS],
}
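
# A minimal sketch (not part of the original file) of how the CHECKSUM table above is
# typically consulted when packing CAN messages; the concrete CRC-8 / "6B" / default
# routines live elsewhere (e.g. hyundaican) and are only assumed here.
def checksum_family(candidate):
  if candidate in CHECKSUM["crc8"]:
    return "crc8"     # CRC-8 style checksum
  if candidate in CHECKSUM["6B"]:
    return "6B"       # "6B" style checksum
  return "default"    # remaining cars fall back to the default sum checksum (assumption)
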
FEATURES = {
"use_cluster_gears": # Use Cluster for Gear Selection, rather than Transmission [ CLU15 ]
{CAR.ELANTRA_I30, CAR.KONA, CAR.GRANDEUR, CAR.MOHAVE, CAR.NIRO_HEV, CAR.K7},
"use_tcu_gears": # Use TCU Message for Gear Selection [ TCU12 ]
{CAR.SONATA_LF, CAR.VELOSTER, CAR.K5},
"use_elect_gears": # Use Elect GEAR Message for Gear Selection [ ELECT_GEAR ]
{CAR.KONA_EV, CAR.IONIQ_EV, CAR.NEXO, CAR.NIRO_EV, CAR.SOUL_EV, CAR.KONA_HEV, CAR.IONIQ_HEV, CAR.NIRO_HEV,
CAR.SONATA_HEV, CAR.SONATA_LF_HEV, CAR.GRANDEUR_HEV, CAR.GRANDEUR20_HEV,
CAR.K5_HEV, CAR.K5_DL3_HEV, CAR.K7_HEV},
  # Cars not listed in any set above read gear position from [ LVR12 ]
  # Cars in "use_fca" use the [ FCA11 ] message for the AEB and FCW signals; all others use [ SCC12 ]
  # "use_fca": {},  # now chosen at runtime via the carstate aeb_fcw flag / Qt UI "aebselect" toggle
  # "has_scc13": {},
  # "has_scc14": {},
  # new LFA cars: carcontroller/hyundaican "lfamfc" handling is enabled via the Qt UI "mfcselect" toggle
}
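
# A minimal sketch (not part of the original file) of how the FEATURES sets above are
# typically used to pick the CAN message carrying gear position; the message names come
# from the comments above, the helper itself is an assumption, not the repo's parser.
def gear_msg_for(candidate):
  if candidate in FEATURES["use_cluster_gears"]:
    return "CLU15"       # gear reported by the instrument cluster
  if candidate in FEATURES["use_tcu_gears"]:
    return "TCU12"       # gear reported by the transmission control unit
  if candidate in FEATURES["use_elect_gears"]:
    return "ELECT_GEAR"  # gear message used by EV/hybrid drivetrains
  return "LVR12"         # default gear lever message
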
EV_CAR = {CAR.KONA_EV, CAR.IONIQ_EV, CAR.NIRO_EV, CAR.SOUL_EV, CAR.NEXO}
HYBRID_CAR = {CAR.KONA_HEV, CAR.IONIQ_HEV, CAR.NIRO_HEV, CAR.SANTA_FE_HEV,
CAR.ELANTRA21_HEV, CAR.SONATA_HEV, CAR.SONATA_LF_HEV, CAR.GRANDEUR_HEV, CAR.GRANDEUR20_HEV,
CAR.K5_HEV, CAR.K5_DL3_HEV, CAR.K7_HEV}
EV_HYBRID_CAR = EV_CAR | HYBRID_CAR
DBC = {
# Hyundai
CAR.ELANTRA_I30: dbc_dict('hyundai_kia_generic', None),
CAR.ELANTRA21: dbc_dict('hyundai_kia_generic', None),
CAR.ELANTRA21_HEV: dbc_dict('hyundai_kia_generic', None),
CAR.SONATA: dbc_dict('hyundai_kia_generic', 'hyundai_kia_mando_front_radar'),
CAR.SONATA_HEV: dbc_dict('hyundai_kia_generic', 'hyundai_kia_mando_front_radar'),
CAR.SONATA_LF: dbc_dict('hyundai_kia_generic', None),
CAR.SONATA_LF_HEV: dbc_dict('hyundai_kia_generic', None),
CAR.KONA: dbc_dict('hyundai_kia_generic', None),
CAR.KONA_EV: dbc_dict('hyundai_kia_generic', None),
CAR.KONA_HEV: dbc_dict('hyundai_kia_generic', None),
CAR.IONIQ_EV: dbc_dict('hyundai_kia_generic', 'hyundai_kia_mando_front_radar'),
CAR.IONIQ_HEV: dbc_dict('hyundai_kia_generic', None),
CAR.SANTA_FE: dbc_dict('hyundai_kia_generic', 'hyundai_kia_mando_front_radar'),
CAR.SANTA_FE_HEV: dbc_dict('hyundai_kia_generic', None),
CAR.PALISADE: dbc_dict('hyundai_kia_generic', 'hyundai_kia_mando_front_radar'),
CAR.VELOSTER: dbc_dict('hyundai_kia_generic', None),
CAR.GRANDEUR: dbc_dict('hyundai_kia_generic', None),
CAR.GRANDEUR_HEV: dbc_dict('hyundai_kia_generic', None),
CAR.GRANDEUR20: dbc_dict('hyundai_kia_generic', None),
CAR.GRANDEUR20_HEV: dbc_dict('hyundai_kia_generic', None),
CAR.NEXO: dbc_dict('hyundai_kia_generic_nexo', None),
# Kia
CAR.FORTE: dbc_dict('hyundai_kia_generic', None),
CAR.K5: dbc_dict('hyundai_kia_generic', None),
CAR.K5_HEV: dbc_dict('hyundai_kia_generic', None),
CAR.K5_DL3: dbc_dict('hyundai_kia_generic', None),
CAR.K5_DL3_HEV: dbc_dict('hyundai_kia_generic', None),
CAR.SPORTAGE: dbc_dict('hyundai_kia_generic', None),
CAR.SORENTO: dbc_dict('hyundai_kia_generic', None),
CAR.MOHAVE: dbc_dict('hyundai_kia_generic', None),
CAR.STINGER: dbc_dict('hyundai_kia_generic', None),
CAR.NIRO_EV: dbc_dict('hyundai_kia_generic', 'hyundai_kia_mando_front_radar'),
CAR.NIRO_HEV: dbc_dict('hyundai_kia_generic', 'hyundai_kia_mando_front_radar'),
CAR.SOUL_EV: dbc_dict('hyundai_kia_generic', None),
CAR.SELTOS: dbc_dict('hyundai_kia_generic', None),
CAR.K7: dbc_dict('hyundai_kia_generic', None),
CAR.K7_HEV: dbc_dict('hyundai_kia_generic', None),
CAR.K9: dbc_dict('hyundai_kia_generic', None),
# Genesis
CAR.GENESIS: dbc_dict('hyundai_kia_generic', None),
CAR.GENESIS_G70: dbc_dict('hyundai_kia_generic', 'hyundai_kia_mando_front_radar'),
CAR.GENESIS_G80: dbc_dict('hyundai_kia_generic', None),
CAR.GENESIS_G90: dbc_dict('hyundai_kia_generic', None),
}
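
# A minimal usage sketch (assumption: dbc_dict, defined elsewhere in this codebase,
# returns a mapping with 'pt' and 'radar' entries, as in openpilot's car helpers).
def dbc_files_for(candidate):
  files = DBC[candidate]
  return files['pt'], files['radar']  # powertrain DBC name, radar DBC name (may be None)
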
STEER_THRESHOLD = 150
def main():
for member, value in vars(CAR).items():
if not member.startswith("_"):
print(value)
if __name__ == "__main__":
main()
| [
[
[
19,
22
],
[
64,
67
]
],
[
[
49,
57
],
[
88050,
88058
],
[
88106,
88114
],
[
88166,
88174
],
[
88219,
88227
],
[
88303,
88311
],
[
88386,
88394
],
[
88446,
88454
],
[
88497,
88505
],
[
88551,
88559
],
[
88606,
88614
],
[
88661,
88669
],
[
88744,
88752
],
[
88799,
88807
],
[
88885,
88893
],
[
88940,
88948
],
[
89022,
89030
],
[
89077,
89085
],
[
89136,
89144
],
[
89193,
89201
],
[
89254,
89262
],
[
89305,
89313
],
[
89371,
89379
],
[
89420,
89428
],
[
89473,
89481
],
[
89526,
89534
],
[
89583,
89591
],
[
89638,
89646
],
[
89692,
89700
],
[
89745,
89753
],
[
89799,
89807
],
[
89853,
89861
],
[
89935,
89943
],
[
90016,
90024
],
[
90069,
90077
],
[
90118,
90126
],
[
90171,
90179
],
[
90220,
90228
],
[
90287,
90295
],
[
90345,
90353
],
[
90430,
90438
],
[
90488,
90496
]
],
[
[
58,
61
],
[
26900,
26903
],
[
27107,
27110
],
[
27218,
27221
],
[
27325,
27328
],
[
27422,
27425
],
[
27512,
27515
],
[
27583,
27586
],
[
27754,
27757
],
[
28021,
28024
],
[
28197,
28200
],
[
28543,
28546
],
[
28777,
28780
],
[
28922,
28925
],
[
29669,
29672
],
[
29867,
29870
],
[
29976,
29979
],
[
30091,
30094
],
[
30189,
30192
],
[
30599,
30602
],
[
31447,
31450
],
[
31959,
31962
],
[
32982,
32985
],
[
33784,
33787
],
[
34274,
34277
],
[
43305,
43308
],
[
43659,
43662
],
[
43903,
43906
],
[
44177,
44180
],
[
44303,
44306
],
[
44442,
44445
],
[
45244,
45247
],
[
45355,
45358
],
[
45526,
45529
],
[
45675,
45678
],
[
45892,
45895
],
[
47296,
47299
],
[
47407,
47410
],
[
47514,
47517
],
[
47614,
47617
],
[
47723,
47726
],
[
47795,
47798
],
[
47964,
47967
],
[
48681,
48684
],
[
49312,
49315
],
[
49661,
49664
],
[
49995,
49998
],
[
50106,
50109
],
[
50213,
50216
],
[
50313,
50316
],
[
50402,
50405
],
[
50501,
50504
],
[
50671,
50674
],
[
50989,
50992
],
[
51300,
51303
],
[
51604,
51607
],
[
51922,
51925
],
[
52233,
52236
],
[
52450,
52453
],
[
52726,
52729
],
[
53178,
53181
],
[
53583,
53586
],
[
53827,
53830
],
[
54088,
54091
],
[
54889,
54892
],
[
55141,
55144
],
[
60326,
60329
],
[
60455,
60458
],
[
60565,
60568
],
[
60662,
60665
],
[
60731,
60734
],
[
60960,
60963
],
[
61484,
61487
],
[
61929,
61932
],
[
62278,
62281
],
[
62824,
62827
],
[
63041,
63044
],
[
73345,
73348
],
[
73525,
73528
],
[
73696,
73699
],
[
73789,
73792
],
[
73869,
73872
],
[
73971,
73974
],
[
74349,
74352
],
[
74460,
74463
],
[
74567,
74570
],
[
74787,
74790
],
[
74890,
74893
],
[
74961,
74964
],
[
75222,
75225
],
[
75333,
75336
],
[
75504,
75507
],
[
75597,
75600
],
[
75687,
75690
],
[
75817,
75820
],
[
75973,
75976
],
[
76084,
76087
],
[
76194,
76197
],
[
76294,
76297
],
[
76393,
76396
],
[
76552,
76555
],
[
76924,
76927
],
[
77101,
77104
],
[
77375,
77378
],
[
77687,
77690
],
[
77869,
77872
],
[
78710,
78713
],
[
78890,
78893
],
[
79067,
79070
],
[
79404,
79407
],
[
79715,
79718
],
[
81249,
81252
],
[
82002,
82005
],
[
82377,
82380
],
[
82603,
82606
],
[
82716,
82719
],
[
82896,
82899
],
[
83073,
83076
],
[
83236,
83239
],
[
83394,
83397
],
[
83858,
83861
],
[
83987,
83990
],
[
84164,
84167
],
[
84321,
84324
],
[
84505,
84508
],
[
84726,
84729
],
[
85195,
85198
],
[
85322,
85325
],
[
85514,
85517
],
[
85624,
85627
],
[
85781,
85784
],
[
85861,
85864
],
[
86019,
86022
]
],
[
[
89,
108
]
],
[
[
337,
340
],
[
2523,
2526
],
[
3526,
3529
],
[
4276,
4279
],
[
4306,
4309
],
[
5214,
5217
],
[
5967,
5970
],
[
6742,
6745
],
[
7289,
7292
],
[
7918,
7921
],
[
8565,
8568
],
[
9189,
9192
],
[
9854,
9857
],
[
10534,
10537
],
[
11213,
11216
],
[
11242,
11245
],
[
11910,
11913
],
[
12695,
12698
],
[
13229,
13232
],
[
13938,
13941
],
[
14603,
14606
],
[
15280,
15283
],
[
16081,
16084
],
[
16525,
16528
],
[
17438,
17441
],
[
18019,
18022
],
[
18042,
18045
],
[
18069,
18072
],
[
18637,
18640
],
[
19190,
19193
],
[
19814,
19817
],
[
20366,
20369
],
[
21013,
21016
],
[
21438,
21441
],
[
22064,
22067
],
[
22703,
22706
],
[
23214,
23217
],
[
23900,
23903
],
[
24452,
24455
],
[
25014,
25017
],
[
25607,
25610
],
[
26215,
26218
],
[
27083,
27086
],
[
27732,
27735
],
[
29643,
29646
],
[
30580,
30583
],
[
43282,
43285
],
[
45222,
45225
],
[
47279,
47282
],
[
47944,
47947
],
[
49974,
49977
],
[
50650,
50653
],
[
51582,
51585
],
[
53157,
53160
],
[
60301,
60304
],
[
60939,
60942
],
[
73324,
73327
],
[
74331,
74334
],
[
75207,
75210
],
[
75954,
75957
],
[
76533,
76536
],
[
78690,
78693
],
[
81229,
81232
],
[
82695,
82698
],
[
83839,
83842
],
[
85180,
85183
],
[
85298,
85301
],
[
86517,
86520
],
[
86529,
86532
],
[
86543,
86546
],
[
86557,
86560
],
[
86569,
86572
],
[
86584,
86587
],
[
86607,
86610
],
[
86623,
86626
],
[
86641,
86644
],
[
86654,
86657
],
[
86673,
86676
],
[
86699,
86702
],
[
86712,
86715
],
[
86839,
86842
],
[
86856,
86859
],
[
86866,
86869
],
[
86880,
86883
],
[
86892,
86895
],
[
86906,
86909
],
[
86986,
86989
],
[
87001,
87004
],
[
87015,
87018
],
[
87109,
87112
],
[
87122,
87125
],
[
87136,
87139
],
[
87146,
87149
],
[
87159,
87162
],
[
87172,
87175
],
[
87186,
87189
],
[
87201,
87204
],
[
87220,
87223
],
[
87236,
87239
],
[
87255,
87258
],
[
87273,
87276
],
[
87298,
87301
],
[
87310,
87313
],
[
87326,
87329
],
[
87676,
87679
],
[
87689,
87692
],
[
87703,
87706
],
[
87716,
87719
],
[
87729,
87732
],
[
87753,
87756
],
[
87767,
87770
],
[
87782,
87785
],
[
87796,
87799
],
[
87828,
87831
],
[
87847,
87850
],
[
87863,
87866
],
[
87882,
87885
],
[
87900,
87903
],
[
87934,
87937
],
[
87946,
87949
],
[
87962,
87965
],
[
88033,
88036
],
[
88091,
88094
],
[
88147,
88150
],
[
88207,
88210
],
[
88287,
88290
],
[
88371,
88374
],
[
88427,
88430
],
[
88487,
88490
],
[
88538,
88541
],
[
88592,
88595
],
[
88647,
88650
],
[
88729,
88732
],
[
88785,
88788
],
[
88867,
88870
],
[
88926,
88929
],
[
89008,
89011
],
[
89063,
89066
],
[
89118,
89121
],
[
89177,
89180
],
[
89234,
89237
],
[
89295,
89298
],
[
89360,
89363
],
[
89412,
89415
],
[
89461,
89464
],
[
89514,
89517
],
[
89567,
89570
],
[
89624,
89627
],
[
89679,
89682
],
[
89733,
89736
],
[
89786,
89789
],
[
89840,
89843
],
[
89921,
89924
],
[
90003,
90006
],
[
90057,
90060
],
[
90110,
90113
],
[
90159,
90162
],
[
90212,
90215
],
[
90274,
90277
],
[
90328,
90331
],
[
90413,
90416
],
[
90471,
90474
],
[
90594,
90597
]
],
[
[
2411,
2418
]
],
[
[
2492,
2504
]
],
[
[
26878,
26893
]
],
[
[
26997,
27008
]
],
[
[
86493,
86501
]
],
[
[
86729,
86737
]
],
[
[
87666,
87672
],
[
87990,
87996
]
],
[
[
87739,
87749
],
[
87999,
88009
]
],
[
[
87974,
87987
]
],
[
[
88011,
88014
]
],
[
[
90530,
90545
]
],
[
[
90558,
90562
],
[
90693,
90697
]
]
] |
# -*- coding: utf-8 -*-
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le contexte éditeur EdtBoiteEnvoi"""
from primaires.interpreteur.editeur import Editeur
from primaires.interpreteur.editeur.env_objet import EnveloppeObjet
from primaires.communication.editeurs.medit import EdtMedit
from primaires.communication.mudmail import ENVOYE
from primaires.format.fonctions import couper_phrase
class EdtBoiteEnvoi(Editeur):
"""Classe définissant le contexte-éditeur 'boîte d'envoi'.
Ce contexte liste les messages envoyés et propose des options d'édition.
"""
def __init__(self, pere, objet=None, attribut=None):
"""Constructeur de l'éditeur"""
Editeur.__init__(self, pere, objet, attribut)
self.ajouter_option("l", self.opt_lire)
self.ajouter_option("c", self.opt_copier)
self.ajouter_option("s", self.opt_supprimer)
def accueil(self):
"""Méthode d'accueil"""
joueur = self.pere.joueur
mails = type(self).importeur.communication.mails.get_mails_pour(
joueur, ENVOYE)
msg = "||tit| " + "Messages envoyés".ljust(76) + "|ff||\n"
msg += self.opts.separateur + "\n"
msg += self.aide_courte + "\n\n"
if not mails:
msg += "|att|Vous n'avez envoyé aucun message.|ff|"
else:
taille = 0
for mail in mails:
t_sujet = len(couper_phrase(mail.sujet, 33))
if t_sujet > taille:
taille = t_sujet
taille = (taille < 5 and 5) or taille
msg += "+" + "-".ljust(taille + 41, "-") + "+\n"
msg += "| |tit|N°|ff| | |tit|" + "Sujet".ljust(taille)
msg += "|ff| | |tit|Destinataire|ff| | |tit|" + "Date".ljust(16)
msg += "|ff| |\n"
i = 1
for mail in mails:
msg += "| |rg|" + str(i).rjust(2) + "|ff| | "
msg += "|vr|" + couper_phrase(mail.sujet, 33).ljust( \
taille) + "|ff| | |blc|"
msg += couper_phrase(mail.aff_dest,12).ljust(12) + "|ff| | "
msg += "|jn|" + mail.date.isoformat(" ")[:16] + "|ff| |\n"
i += 1
msg += "+" + "-".ljust(taille + 41, "-") + "+"
return msg
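    # Illustrative only (hypothetical mail data; colour tags such as |tit| / |ff|
    # omitted): with one sent mail, accueil() renders a listing roughly of the form
    #   | N° | Sujet             | Destinataire | Date             |
    #   |  1 | Rapport d'enquête | Kredh        | 2010-06-14 18:02 |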
def opt_lire(self, arguments):
"""Option lire"""
if not arguments or arguments.isspace():
self.pere.joueur << "|err|Vous devez préciser le numéro d'un " \
"message.|ff|"
return
mails = type(self).importeur.communication.mails.get_mails_pour(
self.pere.joueur, ENVOYE)
try:
num = int(arguments.split(" ")[0])
except ValueError:
self.pere.joueur << "|err|Vous devez spécifier un nombre entier " \
"valide.|ff|"
else:
i = 1
l_mail = None
for mail in mails:
if num == i:
l_mail = mail
break
i += 1
if l_mail is None:
self.pere.joueur << "|err|Le numéro spécifié ne correspond à " \
"aucun message.|ff|"
return
self.pere.joueur << l_mail.afficher()
def opt_copier(self, arguments):
"""Option copier"""
if not arguments or arguments.isspace():
self.pere.joueur << "|err|Vous devez préciser le numéro d'un " \
"message.|ff|"
return
mails = type(self).importeur.communication.mails.get_mails_pour(
self.pere.joueur, ENVOYE)
try:
num = int(arguments.split(" ")[0])
except ValueError:
self.pere.joueur << "|err|Vous devez spécifier un nombre entier " \
"valide.|ff|"
else:
i = 1
c_mail = None
for mail in mails:
if num == i:
c_mail = mail
break
i += 1
if c_mail is None:
self.pere.joueur << "|err|Le numéro spécifié ne correspond à " \
"aucun message.|ff|"
return
mail = type(self).importeur.communication.mails.creer_mail(
self.pere.joueur)
mail.sujet = "CC:" + c_mail.sujet
mail.liste_dest = c_mail.liste_dest
mail.contenu.ajouter_paragraphe(str(c_mail.contenu))
enveloppe = EnveloppeObjet(EdtMedit, mail, None)
enveloppe.parent = self
contexte = enveloppe.construire(self.pere.joueur)
self.pere.joueur.contextes.ajouter(contexte)
contexte.actualiser()
def opt_supprimer(self, arguments):
"""Option supprimer"""
if not arguments or arguments.isspace():
self.pere.joueur << "|err|Vous devez préciser le numéro d'un " \
"message.|ff|"
return
mails = type(self).importeur.communication.mails.get_mails_pour(
self.pere.joueur, ENVOYE)
try:
num = int(arguments.split(" ")[0])
except ValueError:
self.pere.joueur << "|err|Vous devez spécifier un nombre entier " \
"valide.|ff|"
else:
i = 1
s_mail = None
for mail in mails:
if num == i:
s_mail = mail
break
i += 1
if s_mail is None:
self.pere.joueur << "|err|Le numéro spécifié ne correspond à " \
"aucun message.|ff|"
return
del type(self).importeur.communication.mails[s_mail.id]
self.pere.joueur << "|att|Ce message a bien été supprimé.|ff|"
| [
[
[
1667,
1674
],
[
1928,
1935
],
[
2206,
2213
]
],
[
[
1728,
1742
],
[
6062,
6076
]
],
[
[
1794,
1802
],
[
6077,
6085
]
],
[
[
1847,
1853
],
[
2594,
2600
],
[
4182,
4188
],
[
5179,
5185
],
[
6651,
6657
]
],
[
[
1893,
1906
],
[
2946,
2959
],
[
3479,
3492
],
[
3590,
3603
]
],
[
[
1914,
1927
]
]
] |
from __future__ import absolute_import, division, print_function
import numbers
import warnings
import torch
from torch.autograd import Variable
import pyro
import pyro.poutine as poutine
from pyro.distributions.util import is_identically_zero
from pyro.infer.elbo import ELBO
from pyro.infer.enum import iter_discrete_traces
from pyro.infer.util import torch_backward, torch_data_sum, torch_sum
from pyro.poutine.util import prune_subsample_sites
from pyro.util import check_model_guide_match, is_nan
def check_enum_discrete_can_run(model_trace, guide_trace):
"""
Checks whether `enum_discrete` is supported for the given (model, guide) pair.
:param Trace model: A model trace.
:param Trace guide: A guide trace.
:raises: NotImplementedError
"""
# Check that all batch_log_pdf shapes are the same,
# since we currently do not correctly handle broadcasting.
model_trace.compute_batch_log_pdf()
guide_trace.compute_batch_log_pdf()
shapes = {}
for source, trace in [("model", model_trace), ("guide", guide_trace)]:
for name, site in trace.nodes.items():
if site["type"] == "sample":
shapes[site["batch_log_pdf"].size()] = (source, name)
if len(shapes) > 1:
raise NotImplementedError(
"enum_discrete does not support mixture of batched and un-batched variables. "
"Try rewriting your model to avoid batching or running with enum_discrete=False. "
"Found the following variables of different batch shapes:\n{}".format(
"\n".join(["{} {}: shape = {}".format(source, name, tuple(shape))
for shape, (source, name) in sorted(shapes.items())])))
class Trace_ELBO(ELBO):
"""
A trace implementation of ELBO-based SVI
"""
def _get_traces(self, model, guide, *args, **kwargs):
"""
runs the guide and runs the model against the guide with
the result packaged as a trace generator
"""
for i in range(self.num_particles):
if self.enum_discrete:
# This iterates over a bag of traces, for each particle.
for scale, guide_trace in iter_discrete_traces("flat", guide, *args, **kwargs):
model_trace = poutine.trace(poutine.replay(model, guide_trace),
graph_type="flat").get_trace(*args, **kwargs)
check_model_guide_match(model_trace, guide_trace)
guide_trace = prune_subsample_sites(guide_trace)
model_trace = prune_subsample_sites(model_trace)
check_enum_discrete_can_run(model_trace, guide_trace)
guide_trace.compute_score_parts()
log_r = model_trace.batch_log_pdf() - guide_trace.batch_log_pdf()
weight = scale / self.num_particles
yield weight, model_trace, guide_trace, log_r
continue
guide_trace = poutine.trace(guide).get_trace(*args, **kwargs)
model_trace = poutine.trace(poutine.replay(model, guide_trace)).get_trace(*args, **kwargs)
check_model_guide_match(model_trace, guide_trace)
guide_trace = prune_subsample_sites(guide_trace)
model_trace = prune_subsample_sites(model_trace)
guide_trace.compute_score_parts()
log_r = model_trace.log_pdf() - guide_trace.log_pdf()
weight = 1.0 / self.num_particles
yield weight, model_trace, guide_trace, log_r
def _is_batched(self, weight):
return self.enum_discrete and \
isinstance(weight, Variable) and \
weight.dim() > 0 and \
weight.size(0) > 1
def loss(self, model, guide, *args, **kwargs):
"""
:returns: returns an estimate of the ELBO
:rtype: float
Evaluates the ELBO with an estimator that uses num_particles many samples/particles.
"""
elbo = 0.0
for weight, model_trace, guide_trace, log_r in self._get_traces(model, guide, *args, **kwargs):
elbo_particle = weight * 0
if self._is_batched(weight):
log_pdf = "batch_log_pdf"
else:
log_pdf = "log_pdf"
for name in model_trace.nodes.keys():
if model_trace.nodes[name]["type"] == "sample":
if model_trace.nodes[name]["is_observed"]:
elbo_particle += model_trace.nodes[name][log_pdf]
else:
elbo_particle += model_trace.nodes[name][log_pdf]
elbo_particle -= guide_trace.nodes[name][log_pdf]
# drop terms of weight zero to avoid nans
if isinstance(weight, numbers.Number):
if weight == 0.0:
elbo_particle = torch.zeros_like(elbo_particle)
else:
elbo_particle[weight == 0] = 0.0
elbo += torch_data_sum(weight * elbo_particle)
loss = -elbo
if is_nan(loss):
warnings.warn('Encountered NAN loss')
return loss
def loss_and_grads(self, model, guide, *args, **kwargs):
"""
:returns: returns an estimate of the ELBO
:rtype: float
Computes the ELBO as well as the surrogate ELBO that is used to form the gradient estimator.
Performs backward on the latter. Num_particle many samples are used to form the estimators.
"""
elbo = 0.0
# grab a trace from the generator
for weight, model_trace, guide_trace, log_r in self._get_traces(model, guide, *args, **kwargs):
elbo_particle = weight * 0
surrogate_elbo_particle = weight * 0
batched = self._is_batched(weight)
# compute elbo and surrogate elbo
if batched:
log_pdf = "batch_log_pdf"
else:
log_pdf = "log_pdf"
for name, model_site in model_trace.nodes.items():
if model_site["type"] == "sample":
model_log_pdf = model_site[log_pdf]
if model_site["is_observed"]:
elbo_particle += model_log_pdf
surrogate_elbo_particle += model_log_pdf
else:
guide_site = guide_trace.nodes[name]
guide_log_pdf, score_function_term, entropy_term = guide_site["score_parts"]
if not batched:
guide_log_pdf = guide_log_pdf.sum()
elbo_particle += model_log_pdf - guide_log_pdf
surrogate_elbo_particle += model_log_pdf
if not is_identically_zero(entropy_term):
if not batched:
entropy_term = entropy_term.sum()
surrogate_elbo_particle -= entropy_term
if not is_identically_zero(score_function_term):
if not batched:
score_function_term = score_function_term.sum()
surrogate_elbo_particle += log_r.detach() * score_function_term
# drop terms of weight zero to avoid nans
if isinstance(weight, numbers.Number):
if weight == 0.0:
elbo_particle = torch.zeros_like(elbo_particle)
surrogate_elbo_particle = torch.zeros_like(surrogate_elbo_particle)
else:
weight_eq_zero = (weight == 0)
elbo_particle[weight_eq_zero] = 0.0
surrogate_elbo_particle[weight_eq_zero] = 0.0
elbo += torch_data_sum(weight * elbo_particle)
surrogate_elbo_particle = torch_sum(weight * surrogate_elbo_particle)
# collect parameters to train from model and guide
trainable_params = set(site["value"]
for trace in (model_trace, guide_trace)
for site in trace.nodes.values()
if site["type"] == "param")
if trainable_params:
surrogate_loss_particle = -surrogate_elbo_particle
torch_backward(surrogate_loss_particle)
pyro.get_param_store().mark_params_active(trainable_params)
loss = -elbo
if is_nan(loss):
warnings.warn('Encountered NAN loss')
return loss
| [
[
[
23,
38
]
],
[
[
40,
48
]
],
[
[
50,
64
]
],
[
[
73,
80
],
[
4854,
4861
],
[
7436,
7443
]
],
[
[
88,
96
],
[
5159,
5167
],
[
8575,
8583
]
],
[
[
105,
110
],
[
4941,
4946
],
[
7523,
7528
],
[
7601,
7606
]
],
[
[
138,
146
],
[
3710,
3718
]
],
[
[
155,
159
],
[
8456,
8460
]
],
[
[
167,
190
],
[
2305,
2312
],
[
2319,
2326
],
[
3047,
3054
],
[
3121,
3128
],
[
3135,
3142
]
],
[
[
227,
246
],
[
6844,
6863
],
[
7089,
7108
]
],
[
[
275,
279
],
[
1757,
1761
]
],
[
[
308,
328
],
[
2217,
2237
]
],
[
[
357,
371
],
[
8400,
8414
]
],
[
[
373,
387
],
[
5061,
5075
],
[
7843,
7857
]
],
[
[
389,
398
],
[
7920,
7929
]
],
[
[
429,
450
],
[
2554,
2575
],
[
2623,
2644
],
[
3287,
3308
],
[
3348,
3369
]
],
[
[
473,
496
],
[
2470,
2493
],
[
3211,
3234
]
],
[
[
498,
504
],
[
5133,
5139
],
[
8549,
8555
]
],
[
[
511,
538
],
[
2678,
2705
]
],
[
[
1746,
1756
]
]
] |
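The Trace_ELBO code in the previous record averages per-particle log-ratios with weight 1/num_particles. A minimal sketch of that particle-averaged ELBO estimate in plain PyTorch (not Pyro's trace machinery; the Gaussian guide and the stand-in model density are assumptions for illustration):

# Plain-PyTorch sketch of the particle-averaged ELBO estimate computed by
# Trace_ELBO.loss above; the guide and model densities here are toy stand-ins.
import torch
from torch.distributions import Normal

def elbo_estimate(model_log_prob, guide, num_particles=10):
    elbo = 0.0
    for _ in range(num_particles):
        z = guide.sample()                             # draw a latent from the guide
        log_r = model_log_prob(z) - guide.log_prob(z)  # log p(x, z) - log q(z)
        elbo = elbo + log_r / num_particles            # weight = 1 / num_particles
    return elbo                                        # SVI minimises -elbo

guide = Normal(torch.tensor(0.0), torch.tensor(1.0))
model_log_prob = lambda z: Normal(1.0, 1.0).log_prob(z)
print(elbo_estimate(model_log_prob, guide))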
# -*- coding: utf-8 -*-
"""HydroMT workflows"""
from .basin_mask import *
from .forcing import *
from .rivers import *
| [
[
[
73,
74
]
],
[
[
96,
97
]
],
[
[
118,
119
]
]
] |
"""
To trace the falcon web framework, install the trace middleware::
import falcon
from ddtrace import tracer
from ddtrace.contrib.falcon import TraceMiddleware
mw = TraceMiddleware(tracer, 'my-falcon-app')
falcon.API(middleware=[mw])
You can also use the autopatching functionality::
import falcon
from ddtrace import tracer, patch
patch(falcon=True)
app = falcon.API()
To disable distributed tracing when using autopatching, set the
``DATADOG_FALCON_DISTRIBUTED_TRACING`` environment variable to ``False``.
**Supported span hooks**
The following is a list of available tracer hooks that can be used to intercept
and modify spans created by this integration.
- ``request``
- Called before the response has been finished
- ``def on_falcon_request(span, request, response)``
Example::
import falcon
from ddtrace import config, patch_all
patch_all()
app = falcon.API()
@config.falcon.hooks.on('request')
def on_falcon_request(span, request, response):
span.set_tag('my.custom', 'tag')
:ref:`Headers tracing <http-headers-tracing>` is supported for this integration.
"""
from ...utils.importlib import require_modules
required_modules = ["falcon"]
with require_modules(required_modules) as missing_modules:
if not missing_modules:
from .middleware import TraceMiddleware
from .patch import patch
__all__ = ["TraceMiddleware", "patch"]
| [
[
[
1192,
1207
],
[
1245,
1260
]
],
[
[
1209,
1225
],
[
1261,
1277
]
],
[
[
1282,
1297
],
[
1310,
1325
]
],
[
[
1359,
1374
]
],
[
[
1402,
1407
]
],
[
[
1417,
1424
]
]
] |
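The module docstring in the previous record shows the config.falcon.hooks.on('request') span hook. As a small variation, assuming the same hook signature, the hook can copy request attributes onto the span, for example the URL path exposed by falcon.Request.path:

# Sketch of a 'request' span hook that tags the request path, assuming the
# config.falcon.hooks API shown in the docstring above.
import falcon
from ddtrace import config, patch_all

patch_all()
app = falcon.API()

@config.falcon.hooks.on('request')
def tag_request_path(span, request, response):
    # falcon exposes the URL path of the incoming request as `request.path`
    span.set_tag('http.request.path', request.path)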
# coding: utf-8
# @author octopoulo <[email protected]>
# @version 2020-05-01
"""
Sync
"""
import gzip
from logging import getLogger
import os
import re
import shutil
from subprocess import run
from time import time
from typing import Any
from PIL import Image, ImageFile
from common import makedirs_safe, read_text_safe, write_text_safe
from css_minify import css_minify
# folders, might want to edit these
BASE = os.path.dirname(os.path.dirname(__file__))
COMPILER = os.path.join(BASE, 'script/closure-compiler-v20200406.jar')
CSS_FOLDER = os.path.join(BASE, 'css')
JAVA = 'java'
JS_FOLDER = os.path.join(BASE, 'js')
LOCAL = BASE
# edit these files
CSS_FILES = [
'light',
]
JS_FILES = {
'4d': [
'libs/three',
'libs/stats',
'libs/GLTFLoader',
'libs/DRACOLoader',
'libs/camera-controls',
],
'all': [
'libs/socket.io',
':common',
'libs/chess-quick',
':engine',
':global',
':3d',
':xboard',
':graph',
':game',
':temp',
':network',
':startup',
':config',
'script',
],
'chart': [
'libs/chart-quick',
],
}
NEED_GZIPS = {
'4d_.js',
'ammo.wasm.js',
'ammo.wasm.wasm',
'chart_.js',
'chart.min.js',
'dark.css',
'dark-archive.css',
'draco_decoder.js',
'draco_decoder.wasm',
'draco_wasm_wrapper.js',
'fra.json',
'index.html',
'jpn.json',
'light-archive.css',
'manifest.json',
'pieces-draco.glb',
'rus.json',
'sea.css',
'sea-archive.css',
'ukr.json',
}
# don't gzip inside those folders
SKIP_GZIPS = {
'archive',
'doc',
'image',
'model',
'node_modules',
'script',
'sound',
'test',
'theme',
}
class Sync:
"""Sync
"""
#
def __init__(self, **kwargs):
self.kwargs = kwargs
self.clean = kwargs.get('clean') # type: bool
self.host = kwargs.get('host') # type: str
self.no_compress = kwargs.get('no_compress') # type: bool
self.no_debug = kwargs.get('no_debug') # type: bool
self.no_process = kwargs.get('no_process') # type: bool
self.zip = kwargs.get('zip') # type: bool
self.logger = getLogger(self.__class__.__name__)
def combine_pieces(self, folder: str):
"""Combine chess pieces png files into 1 file
"""
if 'metro' in folder:
height = 160
width = 160
else:
height = 80
width = 80
combined = Image.new('RGBA', (width * 12, height), (0, 255, 0, 0))
output = f'{folder}.png'
i = 0
pieces = 'bknpqr'
for color in 'bw':
for piece in pieces:
name = f'{color}{piece}'
image = Image.open(os.path.join(folder, f'{name}.png'))
offset = (i * width, 0)
combined.paste(image, offset)
i += 1
combined.save(output, format='png')
print('a', end='')
def combine_themes(self, folder: str):
"""Combine all pieces of each theme
"""
sources = os.listdir(folder)
for source in sources:
filename = os.path.join(folder, source)
if os.path.isdir(filename):
self.combine_pieces(filename)
def compress_3d(self, data: str) -> str:
"""Compress THREE javascript
"""
data = re.sub(r'\bTHREE\b', 'T', data)
data = re.sub(r'console\.(error|warn)\(.+?\);', '', data, flags=re.S)
return data
def compress_gzip(self, filename: str):
"""Gzip compress a file
"""
output = f'{filename}.gz'
with open(filename, 'rb') as f_in:
with gzip.open(output, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
# synchronise the date/time
if os.path.isfile(output):
info = os.stat(output)
os.utime(filename, (info.st_atime, info.st_mtime))
print('g', end='')
def compress_js(self, filename: str) -> str:
"""Compress javascript
"""
base, ext = os.path.splitext(filename)
output = f'{base}_{ext}'
if self.no_compress:
shutil.copy(filename, output)
return output
args = [
JAVA,
'-jar', COMPILER,
'--js', filename,
'--js_output_file', output,
'--language_in', 'ECMASCRIPT_2018',
'--language_out', 'ECMASCRIPT_2018',
]
if self.kwargs.get('advanced'):
args.extend(['--compilation_level', 'ADVANCED'])
run(args)
return output
def gzip_files(self, folder: str, depth: int, delete: bool):
"""Gzip all wanted files, recursively
"""
queues = []
sources = os.listdir(folder)
for source in sources:
if source.startswith(('.', '_')):
continue
filename = os.path.join(folder, source)
if os.path.isdir(filename):
if source not in SKIP_GZIPS:
queues.append(filename)
continue
# file
if not os.path.isfile(filename):
continue
if source not in NEED_GZIPS:
continue
output = f'{filename}.gz'
source_time = os.path.getmtime(filename)
if os.path.isfile(output):
destin_time = os.path.getmtime(output)
if delete:
os.unlink(output)
print('d', end='')
else:
destin_time = 0
if not delete and source_time != destin_time:
self.compress_gzip(filename)
print(f"{' ' * depth}{filename}")
for queue in queues:
self.gzip_files(queue, depth + 1, delete)
@staticmethod
def import_file(match: Any) -> str:
"""@import {common.js}
"""
source = match.group(1)
filename = os.path.join(JS_FOLDER, source)
data = read_text_safe(filename) or ''
if source.endswith('.js'):
data = re.sub(r'["\']use strict["\'];?', '', data)
return data
def normalise_folders(self):
"""Add the missing / (slash) at the end of the folder
"""
global CSS_FOLDER, JS_FOLDER, LOCAL
if CSS_FOLDER[-1] != '/':
CSS_FOLDER += '/'
if JS_FOLDER[-1] != '/':
JS_FOLDER += '/'
if LOCAL[-1] != '/':
LOCAL += '/'
def create_index(self):
"""Create the new index.html
"""
base = os.path.join(LOCAL, 'index_base.html')
base_time = os.path.getmtime(base)
index = os.path.join(LOCAL, 'index.html')
index_time = os.path.getmtime(index) if os.path.isfile(index) else 0
change = 0
if base_time >= index_time:
change += 1
# 1) minimise JS
for js_output, js_files in JS_FILES.items():
all_js = os.path.join(JS_FOLDER, f'{js_output}.js')
all_min_js = os.path.join(JS_FOLDER, f'{js_output}_.js')
# common/engine changed => need to update, even though we're not using those files
js_dates = [os.path.abspath(f"{JS_FOLDER}{js_file.strip(':')}.js") for js_file in js_files]
js_names = [os.path.abspath(f'{JS_FOLDER}{js_file}.js') for js_file in js_files if js_file[0] != ':']
if js_output == 'all':
# script_js = os.path.join(JS_FOLDER, 'script.js')
extras = []
else:
extras = []
# skip?
update = True
if os.path.isfile(all_min_js) and os.path.isfile(all_js):
all_time = os.path.getmtime(all_min_js)
update = False
for js_date in js_dates + extras:
update |= os.path.isfile(js_date) and os.path.getmtime(js_date) >= all_time
if not update:
print('J', end='')
continue
datas = []
for js_name in js_names:
print(js_name)
script_data = read_text_safe(js_name)
if not script_data:
continue
# process the script.js
if js_name.endswith('script.js'):
                    script_data = re.sub('@import {(.*?)}', self.import_file, script_data)
script_data = re.sub('// BEGIN.*?// END', '', script_data, flags=re.S)
if self.no_debug:
script_data = re.sub('// <<.*?// >>', '', script_data, flags=re.S)
# use HOST
print(f'host={self.host}')
if self.host != '/':
script_data = script_data.replace("HOST = '/',", f"HOST = '{self.host}',")
datas.append(script_data)
data = '\n'.join(datas)
if '4d' in js_output:
data = self.compress_3d(data)
write_text_safe(all_js, data)
self.compress_js(all_js)
print('j', end='')
change += 1
# 2) minimise CSS
all_css = os.path.join(CSS_FOLDER, 'all.css')
all_min_css = os.path.join(CSS_FOLDER, 'all_.css')
css_names = [os.path.abspath(f'{CSS_FOLDER}{css_file}.css') for css_file in CSS_FILES]
update = True
if os.path.isfile(all_min_css) and os.path.isfile(all_css):
all_time = os.path.getmtime(all_min_css)
update = False
for css_name in css_names:
update |= os.path.isfile(css_name) and os.path.getmtime(css_name) >= all_time
if update:
datas = []
for css_name in css_names:
datas.append(read_text_safe(css_name) or '')
data = '\n'.join(datas)
write_text_safe(all_css, data)
css_data = css_minify(data)
write_text_safe(all_min_css, css_data)
print('c', end='')
change += 1
else:
css_data = read_text_safe(all_min_css) or ''
print('C', end='')
if not change:
print('X', end='')
return
# 3) remove BEGIN ... END
html = read_text_safe(base)
html = re.sub('<!-- BEGIN -->.*?<!-- END -->', '', html, flags=re.S)
html = re.sub('// BEGIN.*?// END', '', html, flags=re.S)
# use the HOST
if self.host != '/':
replaces = {
'href="/': f'href="{self.host}',
'src="/': f'src="{self.host}',
}
for key, value in replaces.items():
html = html.replace(key, value)
# 4) create the new index.html
if not self.no_process:
all_min_js = os.path.join(JS_FOLDER, 'all_.js')
js_data = read_text_safe(all_min_js) or ''
replaces = {
'<!-- {SCRIPT} -->': f'<script>{js_data}</script>',
'<!-- {STYLE} -->': f'<style>{css_data}</style>',
}
for key, value in replaces.items():
html = html.replace(key, value)
html = re.sub('<!-- .*? -->', '', html, flags=re.S)
html = re.sub(r'\n\s+', '\n', html)
filename = os.path.join(LOCAL, 'index.html')
write_text_safe(filename, html)
def synchronise(self) -> bool:
"""Synchronise the files
"""
self.normalise_folders()
self.create_index()
if self.clean:
self.gzip_files(LOCAL, 0, True)
elif self.zip:
self.gzip_files(LOCAL, 0, False)
return True
if __name__ == '__main__':
start = time()
sync = Sync()
if 0:
sync.combine_themes(os.path.join(BASE, 'theme'))
else:
sync.synchronise()
end = time()
print(f'\nELAPSED: {end-start:.3f} seconds')
| [
[
[
101,
105
],
[
3949,
3953
]
],
[
[
126,
135
],
[
2432,
2441
]
],
[
[
143,
145
],
[
422,
424
],
[
438,
440
],
[
476,
478
],
[
549,
551
],
[
601,
603
],
[
12084,
12086
],
[
3002,
3004
],
[
3338,
3340
],
[
3411,
3413
],
[
3455,
3457
],
[
4079,
4081
],
[
4122,
4124
],
[
4150,
4152
],
[
4345,
4347
],
[
5049,
5051
],
[
5195,
5197
],
[
5239,
5241
],
[
5417,
5419
],
[
5599,
5601
],
[
5641,
5643
],
[
5695,
5697
],
[
5767,
5769
],
[
6262,
6264
],
[
6883,
6885
],
[
6942,
6944
],
[
6981,
6983
],
[
7063,
7065
],
[
7036,
7038
],
[
7271,
7273
],
[
7339,
7341
],
[
7502,
7504
],
[
7606,
7608
],
[
7935,
7937
],
[
7966,
7968
],
[
8017,
8019
],
[
8157,
8159
],
[
8185,
8187
],
[
9486,
9488
],
[
9544,
9546
],
[
9602,
9604
],
[
9710,
9712
],
[
9742,
9744
],
[
9790,
9792
],
[
9912,
9914
],
[
9941,
9943
],
[
11120,
11122
],
[
11608,
11610
]
],
[
[
153,
155
],
[
3636,
3638
],
[
3683,
3685
],
[
3740,
3742
],
[
6394,
6396
],
[
8647,
8649
],
[
8739,
8741
],
[
8790,
8792
],
[
8873,
8875
],
[
8920,
8922
],
[
10612,
10614
],
[
10668,
10670
],
[
10689,
10691
],
[
10733,
10735
],
[
11499,
11501
],
[
11538,
11540
],
[
11560,
11562
]
],
[
[
163,
169
],
[
3999,
4005
],
[
4447,
4453
]
],
[
[
193,
196
],
[
4855,
4858
]
],
[
[
214,
218
],
[
12020,
12024
],
[
12161,
12165
]
],
[
[
238,
241
],
[
6155,
6158
]
],
[
[
259,
264
],
[
2736,
2741
],
[
2991,
2996
]
],
[
[
266,
275
]
],
[
[
296,
309
]
],
[
[
311,
325
],
[
6309,
6323
],
[
8433,
8447
],
[
10091,
10105
],
[
10387,
10401
],
[
10576,
10590
],
[
11177,
11191
]
],
[
[
327,
342
],
[
9319,
9334
],
[
10172,
10187
],
[
10255,
10270
],
[
11650,
11665
]
],
[
[
366,
376
],
[
10226,
10236
]
],
[
[
415,
419
],
[
489,
493
],
[
562,
566
],
[
614,
618
],
[
634,
638
],
[
12097,
12101
]
],
[
[
465,
473
],
[
4559,
4567
]
],
[
[
536,
546
],
[
6621,
6631
],
[
6656,
6666
],
[
9499,
9509
],
[
9557,
9567
],
[
9621,
9631
]
],
[
[
575,
579
],
[
4533,
4537
]
],
[
[
589,
598
],
[
6275,
6284
],
[
6685,
6694
],
[
6719,
6728
],
[
7284,
7293
],
[
7352,
7361
],
[
7521,
7530
],
[
7625,
7634
],
[
11133,
11142
]
],
[
[
626,
631
],
[
6747,
6752
],
[
6777,
6782
],
[
6896,
6901
],
[
6994,
6999
],
[
11621,
11626
],
[
11875,
11880
],
[
11942,
11947
]
],
[
[
660,
669
],
[
9665,
9674
]
],
[
[
690,
698
],
[
7232,
7240
]
],
[
[
1200,
1210
],
[
5497,
5507
]
],
[
[
1654,
1664
],
[
5297,
5307
]
],
[
[
1803,
1807
],
[
12038,
12042
]
],
[
[
12012,
12017
],
[
12196,
12201
]
],
[
[
12031,
12035
],
[
12064,
12068
],
[
12131,
12135
]
],
[
[
12155,
12158
],
[
12192,
12195
]
]
] |
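Sync.compress_gzip in the previous record gzips a file and then copies the freshly written archive's timestamps back onto the source, so gzip_files can compare mtimes on later runs. A standalone sketch of that pattern (the path handling is illustrative):

# Standalone sketch of the compress_gzip pattern used by Sync above: write
# <file>.gz and give the source file the same timestamps as the new archive.
import gzip
import os
import shutil

def gzip_with_synced_time(filename):
    output = f'{filename}.gz'
    with open(filename, 'rb') as f_in, gzip.open(output, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)
    info = os.stat(output)
    os.utime(filename, (info.st_atime, info.st_mtime))   # sync source to .gz
    return output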
"""Tests for the main module."""
import unittest
from unittest.mock import Mock, patch
from yala.main import LinterRunner
class TestLinterRunner(unittest.TestCase):
"""Test the LinterRunner class."""
@patch('yala.main.Config')
def test_chosen_not_found(self, mock_config):
"""Should print an error when chosen linter is not found."""
# Linter chosen by the user
name = 'my linter'
mock_config.user_linters = [name]
_, stderr = self._path_and_run(mock_config, name)
self.assertIn('Did you install', stderr[0])
@patch('yala.main.Config')
def test_not_chosen_not_found(self, mock_config):
"""Should not print an error when chosen linter is not found."""
# No linters chosen by the user
mock_config.user_linters = []
stdout, stderr = self._path_and_run(mock_config)
self.assertEqual(0, len(stdout))
self.assertEqual(0, len(stderr))
def _path_and_run(self, mock_config, name='my linter'):
cls = self._mock_linter_class(name)
mock_config.get_linter_classes.return_value = [cls]
with patch('yala.main.subprocess.run', side_effect=FileNotFoundError):
linter_cfg_tgts = cls, mock_config, []
return LinterRunner.run(linter_cfg_tgts)
@staticmethod
def _mock_linter_class(name):
linter_class = Mock()
linter = linter_class.return_value
linter.command_with_options = linter.name = name
return linter_class
| [
[
[
40,
48
],
[
148,
156
]
],
[
[
75,
79
],
[
1373,
1377
]
],
[
[
81,
86
],
[
213,
218
],
[
579,
584
],
[
1127,
1132
]
],
[
[
110,
122
],
[
1263,
1275
]
],
[
[
131,
147
]
]
] |
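The yala test in the previous record simulates a missing linter binary by patching subprocess.run with a FileNotFoundError side effect. A minimal standalone sketch of the same trick (the run_linter helper is hypothetical):

# Sketch of failing a subprocess call from a test via mock.patch, mirroring
# the side_effect used in TestLinterRunner above; run_linter is hypothetical.
import subprocess
import unittest
from unittest.mock import patch

def run_linter(cmd):
    try:
        subprocess.run(cmd)
        return 'ok'
    except FileNotFoundError:
        return 'linter not installed'

class TestRunLinter(unittest.TestCase):
    def test_missing_binary(self):
        with patch('subprocess.run', side_effect=FileNotFoundError):
            self.assertEqual('linter not installed', run_linter(['my-linter']))

if __name__ == '__main__':
    unittest.main()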