repo (string, 7-59 chars) | instance_id (string, 11-63 chars) | base_commit (string, 40 chars) | patch (string, 167-798k chars) | test_patch (string, 1 distinct value) | problem_statement (string, 20-65.2k chars) | hints_text (string, 0-142k chars) | created_at (timestamp[ns], 2015-08-30 10:31:05 to 2024-12-13 16:08:19) | environment_setup_commit (string, 1 distinct value) | version (string, 1 distinct value) | FAIL_TO_PASS (sequence, length 0) | PASS_TO_PASS (sequence, length 0)
---|---|---|---|---|---|---|---|---|---|---|---|
spacetelescope/jwst_coronagraph_visibility | spacetelescope__jwst_coronagraph_visibility-48 | 01e48a1ec25585591a34415938a31bcc1a412e3b | diff --git a/jwst_coronagraph_visibility/gui.py b/jwst_coronagraph_visibility/gui.py
index 9881e10..4906732 100755
--- a/jwst_coronagraph_visibility/gui.py
+++ b/jwst_coronagraph_visibility/gui.py
@@ -56,19 +56,25 @@
DEFAULT_NPOINTS = 360
DEFAULT_NROLLS = 20
-# Outlining the 'bad' areas of the NIRCam Module A coronagraphs requires some
-# coordinate conversion gymnastics as there is an optical wedge in the pupil
-# wheel that changes the angular to pixel transformation
-
def compute_v2v3_offset(aperture_a, aperture_b):
- '''
- For the same pixel coordinates, different V2, V3 coordinates are used
- depending on whether the coronagraph pupil wheel wedge is in the beam.
- The offset is computed by transforming the same pixel (Det) coordinates
- to V2, V3 in two different apertures and computing the difference in
- the resulting Tel frame coordinates
- '''
+ """Compute the V2, V3 offset between two apertures by transforming the same pixel (Det) coordinates in each aperture
+ to V2, V3 and computing the difference in the resulting telescope (Tel) frame coordinates.
+
+ This function is designed for the NIRCam Module A coronagraphs, where different V2, V3 coordinates are used
+ for the same pixel coordinates depending on whether the coronagraph pupil wheel wedge is in the beam.
+
+ Parameters
+ ----------
+ aperture_a : pysiaf.Aperture
+ SIAF aperture object.
+ aperture_b : pysiaf.Aperture
+ SIAF aperture object.
+
+ Returns
+ -------
+ V2, V3 offset between apertures.
+ """
x_a, y_a = aperture_a.det_to_tel(aperture_b.XDetRef, aperture_b.YDetRef)
x_b, y_b = aperture_b.det_to_tel(aperture_b.XDetRef, aperture_b.YDetRef)
return x_a - x_b, y_a - y_b
@@ -77,6 +83,9 @@ def compute_v2v3_offset(aperture_a, aperture_b):
_NIRCAM_SIAF = Siaf('NIRCam')
_MIRI_SIAF = Siaf('MIRI')
+# Outlining the 'bad' areas of the NIRCam Module A coronagraphs requires some
+# coordinate conversion gymnastics as there is an optical wedge in the pupil
+# wheel that changes the angular to pixel transformation
_NIRCAM_CORON_OFFSET_TEL = compute_v2v3_offset(
_NIRCAM_SIAF['NRCA5_MASKLWB'],
_NIRCAM_SIAF['NRCA5_FULL']
@@ -173,6 +182,17 @@ def compute_v2v3_offset(aperture_a, aperture_b):
def query_simbad(query_string):
+ """Resolve target's celestial coordinates using SIMBAD.
+
+ Parameters
+ ----------
+ query_string: string
+ Object identifier (astronomical designation).
+
+ Returns
+ -------
+ Tuple containing target's celestial coordinates and ID.
+ """
# response = requests.get('http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame/-oI?' + quote(query_string), timeout=QUERY_TIMEOUT_SEC)
try:
response = requests.get('http://cdsweb.u-strasbg.fr/cgi-bin/nph-sesame/-oI?' + quote(query_string), timeout=QUERY_TIMEOUT_SEC)
@@ -198,6 +218,19 @@ def query_simbad(query_string):
def get_aperture(instrname, apername):
+ """Get pysiaf Aperture object from instrument SIAF.
+
+ Parameters
+ ----------
+ instrname : str
+ Name of instrument; one of 'NIRCam', 'NIRSpec', 'NIRISS', 'MIRI', 'FGS' (case-insensitive).
+ apername : str
+ Name of instrument aperture.
+
+ Returns
+ -------
+ pysiaf.Aperture object.
+ """
# siaf_path = os.path.join(bundle_dir, 'data', '{}_SIAF.xml'.format(instrname))
# assert os.path.exists(siaf_path), 'no SIAF for {} at {}'.format(instrname, siaf_path)
siaf = Siaf(instrument=instrname)
@@ -206,6 +239,13 @@ def get_aperture(instrname, apername):
@contextmanager
def _busy_cursor(root):
+ """Display a busy mouse cursor during long operations.
+
+ Parameters
+ ----------
+ root: tk.Tk object
+ Root window of application.
+ """
try:
root.config(cursor='wait')
except TclError:
@@ -218,6 +258,25 @@ def _busy_cursor(root):
class VisibilityCalculation(object):
def __init__(self, ra, dec, companions, aperture, start_date, npoints, nrolls):
+ """Class for the target visibility calculation.
+
+ Parameters
+ ----------
+ ra : float
+ Right ascension of target in decimal degrees (0-360).
+ dec : float
+ Declination of target in decimal degrees (-90, 90).
+ companions : list
+ List of companions, each specified by a position angle (degrees east of north) and a separation (arcseconds).
+ aperture : pysiaf.Aperture object
+ Aperture as loaded from the instrument SIAF.
+ start_date : datetime
+ Start date of visibility calculation.
+ npoints : int
+ Number of points to sample in the year-long interval to find observable dates (default: 360).
+ nrolls : int
+ Number of roll angles in the allowed roll angle range to sample at each date (default: 15).
+ """
self.ra = ra
self.dec = dec
self.companions = companions
@@ -243,6 +302,7 @@ def __init__(self, ra, dec, companions, aperture, start_date, npoints, nrolls):
self.e_y = None
def calculate(self):
+ """Calculate target visibility windows."""
(
self.days,
self.observable,
@@ -285,6 +345,7 @@ def calculate(self):
class VisibilityCalculator(object):
+ """Class for the Target Visibility Calculator."""
NIRCAM_A = 'NIRCam Channel A'
NIRCAM_B = 'NIRCam Channel B'
MIRI = 'MIRI'
@@ -332,12 +393,22 @@ def close_app():
self._build()
def start(self):
+ """Start the application."""
self.root.lift()
self.root.call('wm', 'attributes', '.', '-topmost', True)
self.root.after_idle(self.root.call, 'wm', 'attributes', '.', '-topmost', False)
self.root.mainloop()
def error_modal(self, message, title="Error"):
+ """Generate a modal dialog box.
+
+ Parameters
+ ----------
+ message: str
+ Label for the dialog box.
+ title: str
+ Title for the dialog box; default is "Error".
+ """
modal = Toplevel()
modal.geometry('+400+400')
modal.title(title)
@@ -353,6 +424,7 @@ def error_modal(self, message, title="Error"):
self.root.wait_window(modal)
def show_about(self):
+ """Generate an 'About' modal box."""
self.error_modal(
"The JWST Coronagraph Visibility tool provides approximate\n"
"pointing restriction information for planning coronagraphic observations.\n\n"
@@ -361,6 +433,7 @@ def show_about(self):
)
def _build(self):
+ """Build the GUI."""
# improve visual feedback for entries in 'disabled' state
self.style = ttk.Style()
self.style.map(
@@ -407,6 +480,14 @@ def _build(self):
self.main.rowconfigure(0, weight=1)
def _build_controls(self, frame):
+ """Build the controls panel, containing a SIMBAD Target Resolver frame; input boxes for RA and
+ declination coordinates; a Companions frame; an Instrument/Mask Selector frame; and an Update Plot button.
+
+ Parameters
+ ----------
+ frame : ttk.Frame
+ Frame to contain the controls panel.
+ """
# SIMBAD + RA/Dec
simbad_frame = ttk.LabelFrame(frame, text="Target Location")
self._build_simbad_lookup(simbad_frame)
@@ -462,12 +543,22 @@ def _build_controls(self, frame):
frame.columnconfigure(0, weight=1)
def _build_examples_menu(self, menu):
+ """Build an examples menu containing three example calculation options. The examples menu is included to
+ provide testing and demonstration capabilities.
+
+ Parameters
+ ----------
+ menu : tkinter.Menu
+ Menu widget in which the examples menu is to be built.
+ """
menu.add_command(label="Single companion, NIRCam 210R spot",
command=self._ex_single_companion)
menu.add_command(label="Three companions, MIRI 4QPM", command=self._ex_three_companions)
menu.add_command(label="North Ecliptic Pole, NIRCam long wavelength bar", command=self._ex_north_ecliptic)
def _ex_single_companion(self):
+ """Calculate the target visibility for an example use-case involving NIRCam coronagraphic observations of a target
+ with a single companion."""
ra = 344.41269
dec = -29.62224
pa1 = 325
@@ -499,6 +590,8 @@ def _ex_single_companion(self):
self.update_plot()
def _ex_three_companions(self):
+ """Calculate the target visibility for an example use-case involving MIRI coronagraphic observations of a
+ science target with three companions."""
ra = 346.86965
dec = 21.13425
pa1 = 45
@@ -531,6 +624,8 @@ def _ex_three_companions(self):
self.update_plot()
def _ex_north_ecliptic(self):
+ """Calculate target visibility for an example use-case involving NIRCam coronagraphic observations of a target
+ at the North Ecliptic pole."""
ra = 270.0
dec = 66.5
pa1 = 0
@@ -563,6 +658,13 @@ def _ex_north_ecliptic(self):
self.update_plot()
def _build_date_controls(self, frame):
+ """Build the date controls.
+
+ Parameters
+ ----------
+ frame : ttk.Frame
+ Frame to contain the date controls.
+ """
ttk.Label(frame, text="Timesteps per year:").grid(column=0, row=0, sticky=(N, W))
self.npoints_value = StringVar()
self.npoints_value.set(DEFAULT_NPOINTS)
@@ -575,13 +677,22 @@ def _build_date_controls(self, frame):
# Clear the SIMBAD ID when user edits RA or Dec
def _clear_simbad_id(self, *_):
+ """Clear SIMBAD ID if user supplies RA or Dec coordinates."""
self.simbad_id.set(self.USER_SUPPLIED_COORDS_MSG)
def _clear_simbad_entry(self, *_):
+ """Clear SIMBAD entry"""
self.simbad_query.set('')
self._clear_simbad_id()
def _build_simbad_lookup(self, frame):
+ """Build SIMBAD Target Resolver tool.
+
+ Parameters
+ ----------
+ frame: ttk.Frame
+ Frame to contain the SIMBAD Target Resolver fields.
+ """
# SIMBAD lookup
simbad_label = ttk.Label(frame, text="SIMBAD Target Resolver")
simbad_label.grid(column=0, row=0, sticky=(N, W), columnspan=4)
@@ -622,6 +733,7 @@ def _build_simbad_lookup(self, frame):
ecliptic_display.grid(column=0, row=6, sticky=(N, W, E), columnspan=4)
def _update_ecliptic(*_):
+ """Update the target's ecliptic coordinates."""
try:
ra, dec = float(self.ra_value.get()), float(self.dec_value.get())
except ValueError:
@@ -645,6 +757,13 @@ def _update_ecliptic(*_):
frame.columnconfigure(1, weight=1)
def _build_companion_controls(self, frame):
+ """Build the controls for the Companions frame.
+
+ Parameters
+ ----------
+ frame : ttk.Frame
+ Frame to contain the Companion controls.
+ """
# (show?) PA deg Sep arcsec
ttk.Label(frame, text="PA (º)").grid(column=1, row=0)
ttk.Label(frame, text="Sep (\")").grid(column=2, row=0)
@@ -652,10 +771,12 @@ def _build_companion_controls(self, frame):
for i in range(1, 4):
# variables
visible = BooleanVar(value=False)
- # ensure widgets are updated when `visible` changes:
+ # ensure widgets are updated when `visible` changes:
def _update_companions(*args):
+ """Update widget when companion set as 'visible'."""
self.update_companions()
+
visible.trace('w', _update_companions)
pa = StringVar(value="0.00")
sep = StringVar(value="0.00")
@@ -686,6 +807,13 @@ def _update_companions(*args):
frame.columnconfigure(2, weight=1)
def _build_instrument_mask_controls(self, frame):
+ """Build the controls for the Instrument and Mask frame.
+
+ Parameters
+ ----------
+ frame : ttk.Frame
+ Frame to contain the Instrument and Mask controls.
+ """
ttk.Label(frame, text="Instrument", anchor=E).grid(column=0, row=0)
self.instrument_value = StringVar(value=self.NIRCAM_A)
instrument_combo = ttk.Combobox(
@@ -713,18 +841,19 @@ def _build_instrument_mask_controls(self, frame):
row=3
)
- # Hacks to prevent wonky looking text selection within readonly
- # combo boxes
+ # Hacks to prevent wonky looking text selection within read-only combo boxes
def _clear_selection_instr(evt):
+ """Clear instrument selection."""
instrument_combo.selection_clear()
instrument_combo.bind('<<ComboboxSelected>>', _clear_selection_instr)
def _clear_selection_aper(evt):
+ """Clear aperture selection."""
apername_combo.selection_clear()
apername_combo.bind('<<ComboboxSelected>>', _clear_selection_aper)
- # Update apernames based on instrument
def _update_apernames(*args):
+ """Update aperture names based on instrument choice."""
# throw away args, no useful info there
values = self.INSTRUMENT_TO_APERNAMES[self.instrument_value.get()]
apername_combo['values'] = values
@@ -732,6 +861,13 @@ def _update_apernames(*args):
self.instrument_value.trace('w', _update_apernames)
def _build_plots(self, frame):
+ """Build plots.
+
+ Parameters
+ ----------
+ frame : ttk.Frame
+ Frame to contain the plots.
+ """
self.figure = Figure(figsize=(8, 8), dpi=72)
# initialized when the plot is updated:
@@ -781,11 +917,19 @@ def _build_plots(self, frame):
self._canvas._tkcanvas.pack(side=TOP, fill=BOTH, expand=1)
def on_key_event(event):
+ """Handle a key press event.
+
+ Parameters
+ ----------
+ event : `KeyEvent`
+ A key press/release event.
+ """
key_press_handler(event, self._canvas, self._toolbar)
self._canvas.mpl_connect('key_press_event', on_key_event)
def do_simbad_lookup(self):
+ """Query SIMBAD using user entered target ID field."""
search_string = self.simbad_query.get()
if not len(search_string.strip()) > 0:
self.error_modal("Search query for SIMBAD must not be empty")
@@ -807,7 +951,7 @@ def do_simbad_lookup(self):
self.simbad_id.set(result.id)
def update_companions(self):
- # handle disabling / enabling entries
+ """Handle companion disabling/ enabling entries."""
for comp, widg in zip(self.companions, self.companion_widgets):
visible, pa, sep = comp
check, pa_entry, sep_entry = widg
@@ -819,6 +963,7 @@ def update_companions(self):
sep_entry.config(state="disabled")
def update_plot(self):
+ """Update plots. Raises an exception if unsupported instrument is selected in the Instrument frame."""
try:
ra = float(self.ra_value.get())
dec = float(self.dec_value.get())
@@ -901,6 +1046,7 @@ def update_plot(self):
self.update_button.config(state='normal')
def zoom_to_fit(self):
+ """Zoom to fit science target and companions on the science detector plot."""
if self.result is None:
return
max_separation = max([c['separation'] for c in self.result.companions])
@@ -910,6 +1056,7 @@ def zoom_to_fit(self):
self._canvas.draw()
def _update_observability(self):
+ """Update the observability plot."""
days = self.result.days
elongation_rad = self.result.elongation_rad
roll_rad = self.result.roll_rad
@@ -976,12 +1123,39 @@ def _update_observability(self):
ax.set_ylabel('Degrees')
def work_backwards(self, x_array, y_array, xdata, ydata):
+ """Get the coordinates of the picked data point.
+
+ Parameters
+ ----------
+ x_array : array
+ Array of X data values.
+ y_array : array
+ Array of Y data values.
+ xdata : float
+ X coordinate of the mouse event, in data coordinates.
+ ydata : float
+ Y coordinate of the mouse event, in data coordinates.
+
+ Returns
+ -------
+ y : int
+ Index of the picked data point along the first axis.
+ x : int
+ Index of the picked data point along the second axis.
+ """
dist = (x_array - xdata)**2 + (y_array - ydata)**2
dist[self.result.observable == 0] = np.nan
y, x = np.unravel_index(np.nanargmin(dist), dist.shape)
return y, x
def _on_pick(self, event):
+ """Handle a pick event.
+
+ Parameters
+ ----------
+ event : `PickEvent`
+ The pick event to dispatch to the appropriate plot handler.
+ """
self._clear_plot_overlay()
if event.artist.axes == self.detector_ax:
self._on_detector_pick(event)
@@ -989,10 +1163,24 @@ def _on_pick(self, event):
self._on_observability_pick(event)
def _on_observability_pick(self, event):
+ """Handle pick event on the observability plot.
+
+ Parameters
+ ----------
+ event : `PickEvent`
+ The pick event on the observability plot.
+ """
yidx, xidx = self.work_backwards(self._days_for_all_rolls, self._theta, event.mouseevent.xdata, event.mouseevent.ydata)
self._add_plot_overlay(yidx, xidx)
def _on_detector_pick(self, event):
+ """Handle pick event on the detector plot.
+
+ Parameters
+ ----------
+ event : `PickEvent`
+ The pick event on the detector plot.
+ """
companions = (
(self.c1_plot_group, (self.result.c1_x, self.result.c1_y)),
(self.c2_plot_group, (self.result.c2_x, self.result.c2_y)),
@@ -1007,6 +1195,7 @@ def _on_detector_pick(self, event):
return
def _clear_plot_overlay(self):
+ """Clear plot overlay."""
while len(self._plot_overlay_elements):
elem = self._plot_overlay_elements.pop()
elem.remove()
@@ -1014,6 +1203,15 @@ def _clear_plot_overlay(self):
text.set_text('')
def _add_plot_overlay(self, yidx, xidx):
+ """Add an overlay to plot.
+
+ Parameters
+ ----------
+ yidx : int
+ Index of the selected point along the first axis of the plotted arrays.
+ xidx : int
+ Index of the selected point along the second axis of the plotted arrays.
+ """
obs_highlight = self.observability_ax.scatter(self._days_for_all_rolls[yidx, xidx], self._theta[yidx, xidx], color='white', edgecolor='black', s=100)
self._plot_overlay_elements.append(obs_highlight)
obs_vline = self.observability_ax.axvline(self._days_for_all_rolls[yidx, xidx], color=BLUE_GGPLOT)
@@ -1071,6 +1269,7 @@ def _add_plot_overlay(self, yidx, xidx):
self._canvas.draw()
def _update_detector(self):
+ """Update detector plot."""
ax = self.detector_ax
ax.clear()
aperture = self.result.aperture
@@ -1101,6 +1300,7 @@ def _update_detector(self):
ax.set_ylabel('y (arcsec, ideal frame)')
def _overlay_mask(self):
+ """Plot an overlay of the coronagraphic mask on the detector plot."""
while self._mask_artists:
artist = self._mask_artists.pop()
artist.remove()
@@ -1112,7 +1312,10 @@ def _overlay_mask(self):
mask_artists = []
def _overlay_miri_ta_positions():
- '''MIRI has eight target acq locations for each coronagraph which could result in persistence'''
+ """Plot the positions of the MIRI target acquisition locations.
+
+ MIRI has eight target acq locations for each coronagraph which could result in persistence.
+ """
ta_loc_spot_radius = 0.2 # arcsec
mask_name = re.match(r'MIRIM_CORON(\d+|LYOT)', aperture.AperName).groups()[0]
ta_apers = [
@@ -1252,5 +1455,6 @@ def _overlay_miri_ta_positions():
def run():
+ """Run the Target Visibility Calculator."""
app = VisibilityCalculator()
app.start()
diff --git a/jwst_coronagraph_visibility/skyvec2ins.py b/jwst_coronagraph_visibility/skyvec2ins.py
index d272550..60a16d7 100755
--- a/jwst_coronagraph_visibility/skyvec2ins.py
+++ b/jwst_coronagraph_visibility/skyvec2ins.py
@@ -4,7 +4,8 @@
skyvec2ins JWST Coronagraph Visibility Calculator
Developed by Chris Stark ([email protected]), translated to Python
-from IDL by Joseph Long ([email protected])
+from IDL by Joseph Long ([email protected]), Brendan Hagan,
+Bryony Nickson ([email protected]) and Mees Fix ([email protected])
The allowed pointing of JWST leads to target visibility that depends on
ecliptic latitude, and the range of roll angles allowed depends on
@@ -34,13 +35,33 @@
def _wrap_to_2pi(scalar_or_arr):
- """Offsets angles outside 0 <= x <= 2 * pi to lie within the interval"""
+ """Offsets angles outside 0 <= x <= 2 * pi to lie within the interval
+
+ Parameters
+ ----------
+ scalar_or_arr : int or array
+ Angle or array of angles, in radians.
+
+ Returns
+ -------
+ Array of wrapped angles with values in the range [0, 2*pi].
+ """
return np.asarray(scalar_or_arr) % (2 * np.pi)
def sun_ecliptic_longitude(start_date):
- """Compute ecliptic longitude of sun on start_date
- using equations from http://aa.usno.navy.mil/faq/docs/SunApprox.php
+ """Compute ecliptic longitude of sun on a given start date using equations from
+ http://aa.usno.navy.mil/faq/docs/SunApprox.php [broken].
+
+ Parameters
+ ----------
+ start_date : datetime
+ Start date of the year-long interval evaluated by skyvec2ins.
+
+ Returns
+ -------
+ lambda_sun : float
+ Ecliptic longitude of the sun on the start date.
"""
n_days = (start_date - datetime.datetime(2000, 1, 1, 12, 00)).days
mean_longitude = 280.459 + 0.98564736 * n_days
@@ -54,11 +75,22 @@ def sun_ecliptic_longitude(start_date):
def ad2lb(alpha_rad, delta_rad):
- """
- Converts celestial coordinates (ra, dec), i.e. (alpha, delta)
- to ecliptic coordinates (lambda, beta). All angles in radians.
+ """Convert equatorial coordinates (RA, Dec, i.e. alpha, delta) to ecliptic coordinates (lambda, beta)
+ according to Eq 3 in Leinert et al. 1998.
+
+ Parameters
+ ----------
+ alpha_rad : numpy.ndarray
+ Right ascension in radians.
+ delta_rad : numpy.ndarray
+ Declination in radians.
- See Eq 3 in Leinert et al. 1998
+ Returns
+ -------
+ lambda_rad : numpy.ndarray
+ Ecliptic longitude in radians.
+ beta_rad : numpy.ndarray
+ Ecliptic latitude in radians.
"""
obliq = _tenv(23, 26, 21.45) # J2000 obliquity of Earth in degrees
obliq *= np.pi / 180.0
@@ -73,12 +105,24 @@ def ad2lb(alpha_rad, delta_rad):
def lb2ei(lmlsun, beta):
- """Convert ecliptic coordinates (lambda-lambda_sun, beta) to
- alternative ecliptic coordinates (epsilon, i). All angles in radians.
+ """Convert ecliptic coordinates (lambda-lambda_sun, beta) to alternative ecliptic coordinates (epsilon, i)
+ according to Eq 11 in Leinert et al. 1998.
+
+ Parameters
+ ----------
+ lmlsun : numpy.ndarray
+ Differential helioecliptic longitude (lambda - lambda_sun) in radians.
+ beta : numpy.ndarray
+ Ecliptic latitude in radians.
- See Eq 11 in Leinert et al. 1998
+ Returns
+ -------
+ elong : numpy.ndarray
+ Elongation (angular distance from the sun to the field-of-view) in radians.
+ inc : numpy.ndarray
+ Inclination (position angle counted from the ecliptic counterclockwise) in radians.
"""
- # convert to an elongation in radians (see Eq 11 in Leinert et al. 1998)
+ # convert to an elongation in radians
elong = np.arccos(np.cos(lmlsun) * np.cos(beta))
cosinc = np.cos(beta) * np.sin(lmlsun) / np.sin(elong)
sininc = np.sin(beta) / np.sin(elong)
@@ -95,10 +139,22 @@ def lb2ei(lmlsun, beta):
def ei2lb(elong, inc):
- """Convert alternative ecliptic coordinates (epsilon, i) to
- ecliptic coordinates (lambda-lambda_sun, beta). All angles in radians.
+ """Convert alternative ecliptic coordinates (epsilon, i) to ecliptic coordinates (lambda-lambda_sun, beta)
+ according to Eq 12 in Leinert et al. 1998.
+
+ Parameters
+ ----------
+ elong : numpy.ndarray
+ Elongation (angular distance from the sun) in radians.
+ inc : numpy.ndarray
+ Inclination (position angle counted from the ecliptic counterclockwise) in radians.
- See Eq 12 in Leinert et al. 1998
+ Returns
+ -------
+ lmlsun : numpy.ndarray
+ Differential helioecliptic longitude in radians.
+ beta : numpy.ndarray
+ Ecliptic latitude in radians.
"""
beta = np.arcsin(np.sin(inc) * np.sin(elong))
coslmlsun = np.cos(elong) / np.cos(beta)
@@ -108,13 +164,23 @@ def ei2lb(elong, inc):
def lb2ad(lambda_rad, beta_rad):
- """Converts ecliptic coordinates (lambda, beta) to
- celestial coordinates (ra, dec), i.e. (alpha, delta).
- All angles in radians.
+ """Convert ecliptic coordinates (lambda, beta) to equatorial coordinates (RA, Dec, i.e. alpha, delta)
+ according to Eq 4 in Leinert et al. 1998.
- See Eq 4 in Leinert et al. 1998
- """
+ Parameters
+ ----------
+ lambda_rad : ndarray
+ Ecliptic longitude in radians.
+ beta_rad : numpy.ndarray
+ Ecliptic latitude in radians.
+ Returns
+ -------
+ alpha : ndarray
+ Equatorial Right Ascension in radians.
+ delta : ndarray
+ Equatorial Declination in radians.
+ """
obliq = _tenv(23, 26, 21.45) # J2000 obliquity of Earth in degrees
obliq *= np.pi / 180.
@@ -129,6 +195,21 @@ def lb2ad(lambda_rad, beta_rad):
def _tenv(dd, mm, ss):
+ """Convert angle from sexagesimal (DMS) to decimal degree notation.
+
+ Parameters
+ ----------
+ dd : int
+ Degrees of sexagesimal measure.
+ mm : int
+ Minutes of sexagesimal measure.
+ ss : int
+ Seconds of sexagesimal measure.
+
+ Returns
+ -------
+ Angle in decimal degrees.
+ """
sgn, dd_mag = dd / dd, np.abs(dd)
return sgn * (dd_mag + np.abs(mm) / 60.0 + np.abs(ss) / 3600.0)
@@ -138,29 +219,30 @@ def skyvec2ins(ra, dec,
separation_as1, separation_as2, separation_as3,
aper, start_date,
npoints=360, nrolls=15, maxvroll=7.0):
- """
+ """JWST coronagraphic target visibility calculator.
+
Parameters
----------
ra : float
- right ascension of target in decimal degrees (0-360)
+ Right ascension of science target in decimal degrees (0-360)
dec : float
- declination of target in decimal degrees (-90, 90)
+ Declination of science target in decimal degrees (-90, 90)
pa1, pa2, pa3 : float
- position angles of companions in degrees east of north
+ Position angles of target companions in degrees east of north
separation_as1, separation_as2, separation_as3 : float
- separations of companions in arcseconds
+ Separations of target companions in arcseconds.
aper : jwxml.Aperture object
Aperture as loaded from the instrument SIAF
start_date : datetime.datetime
Start date of the year-long interval evaluated by skyvec2ins
npoints : int
- number of points to sample in the year-long interval
- to find observable dates (default: 360)
+ Number of points to sample in the year-long interval to find
+ observable dates (default: 360)
nrolls : int
- number of roll angles in the allowed roll angle range to
+ Number of roll angles in the allowed roll angle range to
sample at each date (default: 15)
maxvroll : float
- maximum number of degrees positive or negative roll around
+ Maximum number of degrees positive or negative roll around
the boresight to allow (as designed: 7.0)
.. note::
@@ -172,24 +254,24 @@ def skyvec2ins(ra, dec,
Returns
-------
x : numpy.ndarray
- float array of length `npoints` containing days from starting
+ Float array of length `npoints` containing days from starting
date
observable : numpy.ndarray
uint8 array of shape (`nrolls`, `npoints`) that is 1 where
the target is observable and 0 otherwise
elongation_rad : numpy.ndarray
- float array of length `npoints` containing elongation of the
+ Float array of length `npoints` containing elongation of the
observatory in radians
roll_rad : numpy.ndarray
- float array of shape (`nrolls`, `npoints`) containing V3 PA
+ Float array of shape (`nrolls`, `npoints`) containing V3 PA
in radians
c1_x, c1_y, c2_x, c2_y, c3_x, c3_y : numpy.ndarray
- float array of shape (`nrolls`, `npoints`) containing the
+ Float array of shape (`nrolls`, `npoints`) containing the
location of the companions in "Idl" (ideal) frame coordinates
n_x, n_y, e_x, e_y : numpy.ndarray
- float array of shape (`nrolls`, `npoints`) containing the location
- of a reference "north" vector and "east" vector from the
- center in "Idl" (ideal) frame coordinates
+ Float array of shape (`nrolls`, `npoints`) containing the
+ location of a reference "north" vector and "east" vector from
+ the center in "Idl" (ideal) frame coordinates
"""
# Per Chris Stark:
# > lambda_rad0 is commented as the longitude of quadrature at day 0 of the code.
@@ -215,7 +297,7 @@ def skyvec2ins(ra, dec,
# on the pointing vector, etc...to solve the problem correctly we'd need
# a root finder. To speed things up, we simply assume the PA of the
# telescope when pointing at the star is the same as when the
- # coronagraphmask is on the star--this is an approximation! The
+ # coronagraph mask is on the star--this is an approximation! The
# approximation is valid if the coronagraphs are close to
# the optical axis, which isn't too bad of an assumption.
pointing_rad = [ra_rad, dec_rad] # this is our approximation
@@ -241,7 +323,6 @@ def skyvec2ins(ra, dec,
# Calculate celestial coordinates of V2 & V3 axis
# First, calculate solar elongation & inclination
-
v3_elongation_rad = elongation_rad + (np.pi / 2)
v3_inc_rad = inc_rad.copy() # explicit copy -- does mutating this affect things later??
j = np.where(v3_elongation_rad > np.pi) # Make sure the solar elongation is between 0 - 180 degrees
@@ -455,6 +536,30 @@ def skyvec2ins(ra, dec,
def detector_transform(nrolls, npoints, roll_rad, pa, separation_as, aper):
+ """Transform to detector coordinates.
+
+ Parameters
+ ----------
+ nrolls : int
+ Number of roll angles in the allowed roll angle range to sample at each date (default: 15).
+ npoints : int
+ Number of points to sample in the year-long interval to find observable dates (default: 360).
+ roll_rad : numpy.ndarray
+ Float array of shape (`nrolls`, `npoints`) containing the V3 PA in radians.
+ pa : float
+ Position angle of companion in degrees.
+ separation_as : float
+ Separation of companion in arcseconds.
+ aper : jwxml.Aperture object
+ Aperture as loaded from the instrument SIAF.
+
+ Returns
+ -------
+ c1_x : numpy.ndarray
+ Detector X coordinate of star and companion.
+ c1_y : numpy.ndarray
+ Detector Y coordinate of star and companion.
+ """
pa_rad = np.deg2rad(pa) # companion 1
# Calculate the (V2,V3) coordinates of the coronagraph center
# That's where we want to stick the target
@@ -504,17 +609,31 @@ def detector_transform(nrolls, npoints, roll_rad, pa, separation_as, aper):
def _Tel2Idl(aper, V2, V3):
- """ Convert Tel to Idl
- input in arcsec, output in arcsec
- This transformation involves going from global V2,V3 to local angles with respect to some
- reference point, and possibly rotating the axes and/or flipping the parity of the X axis.
+ """Convert from JWST telescope coordinate system to to ideal frame coordinates; input in arcsec, output in arcsec.
+ This transformation involves going from global V2,V3 to local angles with respect to some reference point, and
+ possibly rotating the axes and/or flipping the parity of the X axis.
+
WARNING
--------
- This is an implementation of the planar approximation, which is adequate for most
- purposes but may not be for all. Error is about 1.7 mas at 10 arcminutes from the tangent
- point. See JWST-STScI-1550 for more details.
- """
+ This is an implementation of the planar approximation, which is adequate for most purposes but may not be for all.
+ Error is about 1.7 mas at 10 arcminutes from the tangent point. See JWST-STScI-1550 for more details.
+
+ Parameters
+ ----------
+ aper : jwxml.Aperture object
+ Aperture as loaded from the instrument SIAF.
+ V2 : float
+ V2 coordinate, in arcsec.
+ V3 : float
+ V3 coordinate, in arcsec.
+ Returns
+ -------
+ XIdl : numpy.ndarray
+ X coordinate in ideal frame.
+ YIdl : numpy.ndarray
+ Y coordinate in ideal frame.
+ """
dV2 = np.asarray(V2, dtype=float) - aper.V2Ref
dV3 = np.asarray(V3, dtype=float) - aper.V3Ref
ang = np.deg2rad(aper.V3IdlYAngle)
diff --git a/setup.py b/setup.py
index 97b8bb8..316fcb3 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
- version='0.4.4',
+ version='0.4.5',
description='JWST coronagraphic target observability calculator',
long_description=long_description,
| Replace vendored jwxml with a dependency
The `jwxml` package has been factored out into a standalone package on PyPI (and eventually Astroconda).
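
As an illustration only, the change amounts to declaring `jwxml` in `setup.py` rather than shipping a copy of it; the distribution name shown and the choice to leave the version unpinned are assumptions, not part of the issue:

```py
# Hypothetical setup.py sketch: pull jwxml from PyPI instead of vendoring it.
from setuptools import setup

setup(
    name='jwst-coronagraph-visibility',  # assumed distribution name
    install_requires=[
        'jwxml',  # now a standalone PyPI package; pin a version if needed
        # ...existing dependencies unchanged
    ],
)
```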
| 2021-10-17T05:33:51 | 0.0 | [] | [] |
|||
arangoml/dgl-adapter | arangoml__dgl-adapter-24 | fc758debbbc43399f044573047766ab4b995660b | diff --git a/.github/workflows/analyze.yml b/.github/workflows/analyze.yml
index 905d1d8..25ddf32 100644
--- a/.github/workflows/analyze.yml
+++ b/.github/workflows/analyze.yml
@@ -41,7 +41,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
- uses: github/codeql-action/init@v1
+ uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -52,7 +52,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
- uses: github/codeql-action/autobuild@v1
+ uses: github/codeql-action/autobuild@v2
# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -66,4 +66,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v1
+ uses: github/codeql-action/analyze@v2
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 744b9d0..ea64af4 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python: ["3.6", "3.7", "3.8", "3.9"]
+ python: ["3.7", "3.8", "3.9"]
name: Python ${{ matrix.python }}
steps:
- uses: actions/checkout@v2
@@ -38,7 +38,7 @@ jobs:
- name: Run mypy
run: mypy ${{env.PACKAGE_DIR}} ${{env.TESTS_DIR}}
- name: Run pytest
- run: py.test --cov=${{env.PACKAGE_DIR}} --cov-report xml --cov-report term-missing -v --color=yes --no-cov-on-fail --code-highlight=yes
+ run: py.test -s --cov=${{env.PACKAGE_DIR}} --cov-report xml --cov-report term-missing -v --color=yes --no-cov-on-fail --code-highlight=yes
- name: Publish to coveralls.io
if: matrix.python == '3.8'
env:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 103e497..38fe029 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python: ["3.6", "3.7", "3.8", "3.9"]
+ python: ["3.7", "3.8", "3.9"]
name: Python ${{ matrix.python }}
steps:
- uses: actions/checkout@v2
diff --git a/README.md b/README.md
index b3ac161..582e1cb 100644
--- a/README.md
+++ b/README.md
@@ -16,7 +16,7 @@
<a href="https://www.arangodb.com/" rel="arangodb.com"></a>
<a href="https://www.dgl.ai/" rel="dgl.ai"><img src="https://raw.githubusercontent.com/arangoml/dgl-adapter/master/examples/assets/dgl_logo.png" width=40% /></a>
-The ArangoDB-DGL Adapter exports Graphs from ArangoDB, a multi-model Graph Database, into Deep Graph Library (DGL), a python package for graph neural networks, and vice-versa.
+The ArangoDB-DGL Adapter exports Graphs from ArangoDB, the multi-model database for graph & beyond, into Deep Graph Library (DGL), a python package for graph neural networks, and vice-versa.
## About DGL
@@ -45,33 +45,25 @@ pip install git+https://github.com/arangoml/dgl-adapter.git
Also available as an ArangoDB Lunch & Learn session: [Graph & Beyond Course #2.8](https://www.arangodb.com/resources/lunch-sessions/graph-beyond-lunch-break-2-8-dgl-adapter/)
```py
-# Import the ArangoDB-DGL Adapter
-from adbdgl_adapter import ADBDGL_Adapter
+from arango import ArangoClient # Python-Arango driver
+from dgl.data import KarateClubDataset # Sample graph from DGL
-# Import the Python-Arango driver
-from arango import ArangoClient
+# Let's assume that the ArangoDB "fraud detection" dataset is imported to this endpoint
+db = ArangoClient(hosts="http://localhost:8529").db("_system", username="root", password="")
-# Import a sample graph from DGL
-from dgl.data import KarateClubDataset
-
-# Instantiate driver client based on user preference
-# Let's assume that the ArangoDB "fraud detection" dataset is imported to this endpoint for example purposes
-db = ArangoClient(hosts="http://localhost:8529").db("_system", username="root", password="openSesame")
-
-# Instantiate the ADBDGL Adapter with driver client
adbdgl_adapter = ADBDGL_Adapter(db)
-# Convert ArangoDB to DGL via Graph Name
+# Use Case 1.1: ArangoDB to DGL via Graph name
dgl_fraud_graph = adbdgl_adapter.arangodb_graph_to_dgl("fraud-detection")
-# Convert ArangoDB to DGL via Collection Names
+# Use Case 1.2: ArangoDB to DGL via Collection names
dgl_fraud_graph_2 = adbdgl_adapter.arangodb_collections_to_dgl(
"fraud-detection",
- {"account", "Class", "customer"}, # Specify vertex collections
- {"accountHolder", "Relationship", "transaction"}, # Specify edge collections
+ {"account", "Class", "customer"}, # Vertex collections
+ {"accountHolder", "Relationship", "transaction"}, # Edge collections
)
-# Convert ArangoDB to DGL via a Metagraph
+# Use Case 1.3: ArangoDB to DGL via Metagraph
metagraph = {
"vertexCollections": {
"account": {"Balance", "account_type", "customer_id", "rank"},
@@ -84,7 +76,7 @@ metagraph = {
}
dgl_fraud_graph_3 = adbdgl_adapter.arangodb_to_dgl("fraud-detection", metagraph)
-# Convert DGL to ArangoDB
+# Use Case 2: DGL to ArangoDB
dgl_karate_graph = KarateClubDataset()[0]
adb_karate_graph = adbdgl_adapter.dgl_to_arangodb("Karate", dgl_karate_graph)
```
@@ -107,4 +99,4 @@ def pytest_addoption(parser):
parser.addoption("--dbName", action="store", default="_system")
parser.addoption("--username", action="store", default="root")
parser.addoption("--password", action="store", default="")
-```
\ No newline at end of file
+```
diff --git a/adbdgl_adapter/abc.py b/adbdgl_adapter/abc.py
index 2842822..4635c55 100644
--- a/adbdgl_adapter/abc.py
+++ b/adbdgl_adapter/abc.py
@@ -30,7 +30,11 @@ def arangodb_graph_to_dgl(self, name: str, **query_options: Any) -> DGLHeteroGra
raise NotImplementedError # pragma: no cover
def dgl_to_arangodb(
- self, name: str, dgl_g: Union[DGLGraph, DGLHeteroGraph], batch_size: int
+ self,
+ name: str,
+ dgl_g: Union[DGLGraph, DGLHeteroGraph],
+ overwrite_graph: bool = False,
+ **import_options: Any,
) -> ArangoDBGraph:
raise NotImplementedError # pragma: no cover
@@ -48,23 +52,13 @@ def __insert_dgl_features(self) -> None:
def __prepare_adb_attributes(self) -> None:
raise NotImplementedError # pragma: no cover
- def __insert_adb_docs(self) -> None:
- raise NotImplementedError # pragma: no cover
-
def __fetch_adb_docs(self) -> None:
raise NotImplementedError # pragma: no cover
- def __validate_attributes(self) -> None:
- raise NotImplementedError # pragma: no cover
-
@property
def DEFAULT_CANONICAL_ETYPE(self) -> List[DGLCanonicalEType]:
return [("_N", "_E", "_N")]
- @property
- def METAGRAPH_ATRIBS(self) -> Set[str]:
- return {"vertexCollections", "edgeCollections"}
-
class Abstract_ADBDGL_Controller(ABC):
def _adb_attribute_to_dgl_feature(self, key: str, col: str, val: Any) -> Any:
diff --git a/adbdgl_adapter/adapter.py b/adbdgl_adapter/adapter.py
index 5ad7013..70f4b9f 100644
--- a/adbdgl_adapter/adapter.py
+++ b/adbdgl_adapter/adapter.py
@@ -78,7 +78,8 @@ def arangodb_to_dgl(
to DGL, along with their associated attributes to keep.
:type metagraph: adbdgl_adapter.typings.ArangoMetagraph
:param query_options: Keyword arguments to specify AQL query options when
- fetching documents from the ArangoDB instance.
+ fetching documents from the ArangoDB instance. Full parameter list:
+ https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute
:type query_options: Any
:return: A DGL Heterograph
:rtype: dgl.heterograph.DGLHeteroGraph
@@ -102,7 +103,6 @@ def arangodb_to_dgl(
}
"""
logger.debug(f"Starting arangodb_to_dgl({name}, ...):")
- self.__validate_attributes("graph", set(metagraph), self.METAGRAPH_ATRIBS)
# Maps ArangoDB vertex IDs to DGL node IDs
adb_map: Dict[str, Dict[str, Any]] = dict()
@@ -139,7 +139,7 @@ def arangodb_to_dgl(
from_col.add(from_node["col"])
to_col.add(to_node["col"])
if len(from_col | to_col) > 2:
- raise ValueError(
+ raise ValueError( # pragma: no cover
f"""Can't convert to DGL:
too many '_from' & '_to' collections in {e_col}
"""
@@ -182,8 +182,9 @@ def arangodb_collections_to_dgl(
:type v_cols: Set[str]
:param e_cols: A set of ArangoDB edge collections to import to DGL.
:type e_cols: Set[str]
- :param query_options: Keyword arguments to specify AQL query options
- when fetching documents from the ArangoDB instance.
+ :param query_options: Keyword arguments to specify AQL query options when
+ fetching documents from the ArangoDB instance. Full parameter list:
+ https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute
:type query_options: Any
:return: A DGL Heterograph
:rtype: dgl.heterograph.DGLHeteroGraph
@@ -200,8 +201,9 @@ def arangodb_graph_to_dgl(self, name: str, **query_options: Any) -> DGLHeteroGra
:param name: The ArangoDB graph name.
:type name: str
- :param query_options: Keyword arguments to specify AQL query options
- when fetching documents from the ArangoDB instance.
+ :param query_options: Keyword arguments to specify AQL query options when
+ fetching documents from the ArangoDB instance. Full parameter list:
+ https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute
:type query_options: Any
:return: A DGL Heterograph
:rtype: dgl.heterograph.DGLHeteroGraph
@@ -213,7 +215,11 @@ def arangodb_graph_to_dgl(self, name: str, **query_options: Any) -> DGLHeteroGra
return self.arangodb_collections_to_dgl(name, v_cols, e_cols, **query_options)
def dgl_to_arangodb(
- self, name: str, dgl_g: Union[DGLGraph, DGLHeteroGraph], batch_size: int = 1000
+ self,
+ name: str,
+ dgl_g: Union[DGLGraph, DGLHeteroGraph],
+ overwrite_graph: bool = False,
+ **import_options: Any,
) -> ADBGraph:
"""Create an ArangoDB graph from a DGL graph.
@@ -221,8 +227,13 @@ def dgl_to_arangodb(
:type name: str
:param dgl_g: The existing DGL graph.
:type dgl_g: Union[dgl.DGLGraph, dgl.heterograph.DGLHeteroGraph]
- :param batch_size: The maximum number of documents to insert at once
- :type batch_size: int
+ :param overwrite_graph: Overwrites the graph if it already exists.
+ Does not drop associated collections.
+ :type overwrite_graph: bool
+ :param import_options: Keyword arguments to specify additional
+ parameters for ArangoDB document insertion. Full parameter list:
+ https://docs.python-arango.com/en/main/specs.html#arango.collection.Collection.import_bulk
+ :type import_options: Any
:return: The ArangoDB Graph API wrapper.
:rtype: arango.graph.Graph
"""
@@ -237,24 +248,31 @@ def dgl_to_arangodb(
else dgl_g.canonical_etypes
)
- self.__db.delete_graph(name, ignore_missing=True)
- adb_graph: ADBGraph = self.__db.create_graph(name, edge_definitions)
+ if overwrite_graph:
+ logger.debug("Overwrite graph flag is True. Deleting old graph.")
+ self.__db.delete_graph(name, ignore_missing=True)
+
+ if self.__db.has_graph(name):
+ adb_graph = self.__db.graph(name)
+ else:
+ adb_graph = self.__db.create_graph(name, edge_definitions)
adb_v_cols = adb_graph.vertex_collections()
- adb_e_cols = [e_d["edge_collection"] for e_d in edge_definitions]
+ adb_e_cols = [e_d["edge_collection"] for e_d in adb_graph.edge_definitions()]
has_one_vcol = len(adb_v_cols) == 1
has_one_ecol = len(adb_e_cols) == 1
logger.debug(f"Is graph '{name}' homogenous? {has_one_vcol and has_one_ecol}")
adb_documents: DefaultDict[str, List[Json]] = defaultdict(list)
+
for v_col in adb_v_cols:
- ntype = None if is_default else v_col
v_col_docs = adb_documents[v_col]
+ ntype = None if is_default else v_col
features = dgl_g.node_attr_schemes(ntype).keys()
- logger.debug(f"Preparing {len(dgl_g.nodes(ntype))} '{v_col}' DGL nodes")
node: Tensor
+ logger.debug(f"Preparing {dgl_g.number_of_nodes(ntype)} '{v_col}' nodes")
for node in dgl_g.nodes(ntype):
dgl_node_id = node.item()
adb_vertex = {"_key": str(dgl_node_id)}
@@ -267,15 +285,15 @@ def dgl_to_arangodb(
has_one_vcol,
)
- self.__insert_adb_docs(v_col, v_col_docs, adb_vertex, batch_size)
+ v_col_docs.append(adb_vertex)
from_col: str
to_col: str
- from_nodes: Tensor
- to_nodes: Tensor
+ from_n: Tensor
+ to_n: Tensor
for e_col in adb_e_cols:
- etype = None if is_default else e_col
e_col_docs = adb_documents[e_col]
+ etype = None if is_default else e_col
features = dgl_g.edge_attr_schemes(etype).keys()
canonical_etype = None
@@ -285,32 +303,29 @@ def dgl_to_arangodb(
canonical_etype = dgl_g.to_canonical_etype(e_col)
from_col, _, to_col = canonical_etype
- from_nodes, to_nodes = dgl_g.edges(etype=etype)
- logger.debug(f"Preparing {len(from_nodes)} '{e_col}' DGL edges")
- for dgl_edge_id, (from_node, to_node) in enumerate(
- zip(from_nodes, to_nodes)
- ):
+ logger.debug(f"Preparing {dgl_g.number_of_edges(etype)} '{e_col}' edges")
+ for index, (from_n, to_n) in enumerate(zip(*dgl_g.edges(etype=etype))):
adb_edge = {
- "_key": str(dgl_edge_id),
- "_from": f"{from_col}/{str(from_node.item())}",
- "_to": f"{to_col}/{str(to_node.item())}",
+ "_key": str(index),
+ "_from": f"{from_col}/{str(from_n.item())}",
+ "_to": f"{to_col}/{str(to_n.item())}",
}
self.__prepare_adb_attributes(
dgl_g.edata,
features,
- dgl_edge_id,
+ index,
adb_edge,
e_col,
has_one_ecol,
canonical_etype,
)
- self.__insert_adb_docs(e_col, e_col_docs, adb_edge, batch_size)
+ e_col_docs.append(adb_edge)
- for col, doc_list in adb_documents.items(): # insert remaining documents
- if doc_list:
- logger.debug(f"Inserting last {len(doc_list)} documents into '{col}'")
- self.__db.collection(col).import_bulk(doc_list, on_duplicate="replace")
+ for col, doc_list in adb_documents.items(): # import documents into ArangoDB
+ logger.debug(f"Inserting {len(doc_list)} documents into '{col}'")
+ result = self.__db.collection(col).import_bulk(doc_list, **import_options)
+ logger.debug(result)
logger.info(f"Created ArangoDB '{name}' Graph")
return adb_graph
@@ -395,9 +410,7 @@ def __insert_dgl_features(
for key, col_dict in features_data.items():
for col, array in col_dict.items():
logger.debug(f"Inserting {len(array)} '{key}' features into '{col}'")
- data[key] = (
- tensor(array) if has_one_type else {**data[key], col: tensor(array)}
- )
+ data[key] = tensor(array) if has_one_type else {col: tensor(array)}
def __prepare_adb_attributes(
self,
@@ -433,32 +446,6 @@ def __prepare_adb_attributes(
tensor = data[key] if has_one_col else data[key][canonical_etype or col]
doc[key] = self.__cntrl._dgl_feature_to_adb_attribute(key, col, tensor[id])
- def __insert_adb_docs(
- self,
- col: str,
- col_docs: List[Json],
- doc: Json,
- batch_size: int,
- ) -> None:
- """Insert an ArangoDB document into a list. If the list exceeds
- batch_size documents, insert into the ArangoDB collection.
-
- :param col: The collection name
- :type col: str
- :param col_docs: The existing documents data belonging to the collection.
- :type col_docs: List[adbdgl_adapter.typings.Json]
- :param doc: The current document to insert.
- :type doc: adbdgl_adapter.typings.Json
- :param batch_size: The maximum number of documents to insert at once
- :type batch_size: int
- """
- col_docs.append(doc)
-
- if len(col_docs) >= batch_size:
- logger.debug(f"Inserting next {batch_size} batch documents into '{col}'")
- self.__db.collection(col).import_bulk(col_docs, on_duplicate="replace")
- col_docs.clear()
-
def __fetch_adb_docs(
self, col: str, attributes: Set[str], query_options: Any
) -> Result[Cursor]:
@@ -484,22 +471,3 @@ def __fetch_adb_docs(
"""
return self.__db.aql.execute(aql, **query_options)
-
- def __validate_attributes(
- self, type: str, attributes: Set[str], valid_attributes: Set[str]
- ) -> None:
- """Validates that a set of attributes includes the required valid
- attributes.
-
- :param type: The context of the attribute validation
- (e.g connection attributes, graph attributes, etc).
- :type type: str
- :param attributes: The provided attributes, possibly invalid.
- :type attributes: Set[str]
- :param valid_attributes: The valid attributes.
- :type valid_attributes: Set[str]
- :raise ValueError: If **valid_attributes** is not a subset of **attributes**
- """
- if valid_attributes.issubset(attributes) is False:
- missing_attributes = valid_attributes - attributes
- raise ValueError(f"Missing {type} attributes: {missing_attributes}")
diff --git a/examples/ArangoDB_DGL_Adapter.ipynb b/examples/ArangoDB_DGL_Adapter.ipynb
index bbd9e9b..16cb908 100644
--- a/examples/ArangoDB_DGL_Adapter.ipynb
+++ b/examples/ArangoDB_DGL_Adapter.ipynb
@@ -15,7 +15,7 @@
"id": "U1d45V4OeG89"
},
"source": [
- "<a href=\"https://colab.research.google.com/github/arangoml/dgl-adapter/blob/2.0.1/examples/ArangoDB_DGL_Adapter.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
+ "<a href=\"https://colab.research.google.com/github/arangoml/dgl-adapter/blob/2.1.0/examples/ArangoDB_DGL_Adapter.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
@@ -34,7 +34,7 @@
"id": "bpvZS-1aeG89"
},
"source": [
- "Version: 2.0.0\n",
+ "Version: 2.1.0\n",
"\n",
"Objective: Export Graphs from [ArangoDB](https://www.arangodb.com/), a multi-model Graph Database, to [Deep Graph Library](https://www.dgl.ai/) (DGL), a python package for graph neural networks, and vice-versa."
]
@@ -57,9 +57,9 @@
"outputs": [],
"source": [
"%%capture\n",
- "!pip install adbdgl-adapter==2.0.1\n",
+ "!pip install adbdgl-adapter==2.1.0\n",
"!pip install adb-cloud-connector\n",
- "!git clone -b 2.0.1 --single-branch https://github.com/arangoml/dgl-adapter.git\n",
+ "!git clone -b 2.1.0 --single-branch https://github.com/arangoml/dgl-adapter.git\n",
"\n",
"## For drawing purposes \n",
"!pip install matplotlib\n",
@@ -466,7 +466,7 @@
"\n",
"# You can also provide valid Python-Arango AQL query options to the command above, like such:\n",
"# dgl_g = aadbdgl_adapter.arangodb_graph_to_dgl(graph_name, ttl=1000, stream=True)\n",
- "# See more here: https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute\n",
+ "# See the full parameter list at https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute\n",
"\n",
"# Show graph data\n",
"print('\\n--------------------')\n",
@@ -522,7 +522,7 @@
"\n",
"# You can also provide valid Python-Arango AQL query options to the command above, like such:\n",
"# dgl_g = adbdgl_adapter.arangodb_collections_to_dgl(\"fraud-detection\", vertex_collections, edge_collections, ttl=1000, stream=True)\n",
- "# See more here: https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute\n",
+ "# See the full parameter list at https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute\n",
"\n",
"# Show graph data\n",
"print('\\n--------------------')\n",
@@ -588,7 +588,7 @@
"\n",
"# You can also provide valid Python-Arango AQL query options to the command above, like such:\n",
"# dgl_g = adbdgl_adapter.arangodb_to_dgl(graph_name = 'FraudDetection', fraud_detection_metagraph, ttl=1000, stream=True)\n",
- "# See more here: https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute\n",
+ "# See the full parameter list at https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute\n",
"\n",
"# Show graph data\n",
"print('\\n--------------')\n",
@@ -728,10 +728,6 @@
"# Create DGL Graph from attributes\n",
"dgl_g = fraud_adbdgl_adapter.arangodb_to_dgl('FraudDetection', fraud_detection_metagraph)\n",
"\n",
- "# You can also provide valid Python-Arango AQL query options to the command above, like such:\n",
- "# dgl_g = fraud_adbdgl_adapter.arangodb_to_dgl(graph_name = 'FraudDetection', fraud_detection_metagraph, ttl=1000, stream=True)\n",
- "# See more here: https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute\n",
- "\n",
"# Show graph data\n",
"print('\\n--------------')\n",
"print(dgl_g)\n",
@@ -800,6 +796,10 @@
"# Create the ArangoDB graph\n",
"adb_karate_graph = adbdgl_adapter.dgl_to_arangodb(name, dgl_karate_graph)\n",
"\n",
+ "# You can also provide valid Python-Arango Import Bulk options to the command above, like such:\n",
+ "# adb_karate_graph = adbdgl_adapter.dgl_to_arangodb(name, dgl_karate_graph, batch_size=5, on_duplicate=\"replace\")\n",
+ "# See the full parameter list at https://docs.python-arango.com/en/main/specs.html#arango.collection.Collection.import_bulk\n",
+ "\n",
"print('\\n--------------------')\n",
"print(\"URL: \" + con[\"url\"])\n",
"print(\"Username: \" + con[\"username\"])\n",
diff --git a/pyproject.toml b/pyproject.toml
index b9911d5..2068152 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,9 +1,5 @@
[build-system]
-requires = [
- "setuptools>=42",
- "setuptools_scm[toml]>=3.4",
- "wheel",
-]
+requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2", "wheel"]
build-backend = "setuptools.build_meta"
[tool.coverage.run]
diff --git a/setup.py b/setup.py
index 688ec69..4c781da 100644
--- a/setup.py
+++ b/setup.py
@@ -14,17 +14,14 @@
keywords=["arangodb", "dgl", "adapter"],
packages=["adbdgl_adapter"],
include_package_data=True,
- use_scm_version=True,
- setup_requires=["setuptools_scm"],
python_requires=">=3.6",
license="Apache Software License",
install_requires=[
"requests>=2.27.1",
"dgl>=0.6.1",
"torch>=1.10.2",
- "python-arango>=7.3.1",
- "setuptools>=42",
- "setuptools_scm[toml]>=3.4",
+ "python-arango>=7.4.1",
+ "setuptools>=45",
],
extras_require={
"dev": [
@@ -44,7 +41,6 @@
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
| adjust DGL to ArangoDB interface for increased accessibility
Mirror of https://github.com/arangoml/networkx-adapter/issues/80
| 2022-06-12T01:10:01 | 0.0 | [] | [] |
|||
arangoml/dgl-adapter | arangoml__dgl-adapter-21 | 940503bf55bf745470ac43f51059772da6166d92 | diff --git a/adbdgl_adapter/adapter.py b/adbdgl_adapter/adapter.py
index 41c47e6..5ad7013 100644
--- a/adbdgl_adapter/adapter.py
+++ b/adbdgl_adapter/adapter.py
@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
import logging
from collections import defaultdict
-from typing import Any, DefaultDict, Dict, List, Set, Union
+from typing import Any, DefaultDict, Dict, List, Optional, Set, Union
from arango.cursor import Cursor
from arango.database import Database
@@ -251,6 +251,7 @@ def dgl_to_arangodb(
for v_col in adb_v_cols:
ntype = None if is_default else v_col
v_col_docs = adb_documents[v_col]
+ features = dgl_g.node_attr_schemes(ntype).keys()
logger.debug(f"Preparing {len(dgl_g.nodes(ntype))} '{v_col}' DGL nodes")
node: Tensor
@@ -259,7 +260,7 @@ def dgl_to_arangodb(
adb_vertex = {"_key": str(dgl_node_id)}
self.__prepare_adb_attributes(
dgl_g.ndata,
- dgl_g.node_attr_schemes(ntype).keys(),
+ features,
dgl_node_id,
adb_vertex,
v_col,
@@ -275,11 +276,14 @@ def dgl_to_arangodb(
for e_col in adb_e_cols:
etype = None if is_default else e_col
e_col_docs = adb_documents[e_col]
+ features = dgl_g.edge_attr_schemes(etype).keys()
+ canonical_etype = None
if is_default:
from_col = to_col = adb_v_cols[0]
else:
- from_col, _, to_col = dgl_g.to_canonical_etype(e_col)
+ canonical_etype = dgl_g.to_canonical_etype(e_col)
+ from_col, _, to_col = canonical_etype
from_nodes, to_nodes = dgl_g.edges(etype=etype)
logger.debug(f"Preparing {len(from_nodes)} '{e_col}' DGL edges")
@@ -293,11 +297,12 @@ def dgl_to_arangodb(
}
self.__prepare_adb_attributes(
dgl_g.edata,
- dgl_g.edge_attr_schemes(etype).keys(),
+ features,
dgl_edge_id,
adb_edge,
e_col,
has_one_ecol,
+ canonical_etype,
)
self.__insert_adb_docs(e_col, e_col_docs, adb_edge, batch_size)
@@ -402,6 +407,7 @@ def __prepare_adb_attributes(
doc: Json,
col: str,
has_one_col: bool,
+ canonical_etype: Optional[DGLCanonicalEType] = None,
) -> None:
"""Convert DGL features into a set of ArangoDB attributes for a given document
@@ -419,9 +425,12 @@ def __prepare_adb_attributes(
:param has_one_col: Set to True if the ArangoDB graph has one
vertex collection or one edge collection only.
:type has_one_col: bool
+ :param canonical_etype: The DGL canonical edge type belonging to the current
+ **col**, provided that **col** is an edge collection (ignored otherwise).
+ :type canonical_etype: adbdgl_adapter.typings.DGLCanonicalEType
"""
for key in features:
- tensor = data[key] if has_one_col else data[key][col]
+ tensor = data[key] if has_one_col else data[key][canonical_etype or col]
doc[key] = self.__cntrl._dgl_feature_to_adb_attribute(key, col, tensor[id])
def __insert_adb_docs(
diff --git a/examples/ArangoDB_DGL_Adapter.ipynb b/examples/ArangoDB_DGL_Adapter.ipynb
index 1b3c7a3..bbd9e9b 100644
--- a/examples/ArangoDB_DGL_Adapter.ipynb
+++ b/examples/ArangoDB_DGL_Adapter.ipynb
@@ -15,7 +15,7 @@
"id": "U1d45V4OeG89"
},
"source": [
- "<a href=\"https://colab.research.google.com/github/arangoml/dgl-adapter/blob/2.0.0/examples/ArangoDB_DGL_Adapter.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
+ "<a href=\"https://colab.research.google.com/github/arangoml/dgl-adapter/blob/2.0.1/examples/ArangoDB_DGL_Adapter.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
@@ -57,9 +57,9 @@
"outputs": [],
"source": [
"%%capture\n",
- "!pip install adbdgl-adapter==2.0.0\n",
+ "!pip install adbdgl-adapter==2.0.1\n",
"!pip install adb-cloud-connector\n",
- "!git clone -b 2.0.0 --single-branch https://github.com/arangoml/dgl-adapter.git\n",
+ "!git clone -b 2.0.1 --single-branch https://github.com/arangoml/dgl-adapter.git\n",
"\n",
"## For drawing purposes \n",
"!pip install matplotlib\n",
@@ -987,7 +987,7 @@
"\n",
" if key == \"clique_ndata\":\n",
" try:\n",
- " return [\"Eins\", \"Zwei\", \"Drei\", \"Vier\", \"Fünf\", \"Sechs\"][key-1]\n",
+ " return [\"Eins\", \"Zwei\", \"Drei\", \"Vier\", \"Fünf\", \"Sechs\"][val-1]\n",
" except:\n",
" return -1\n",
"\n",
| can't convert DGL heterograph with edge attributes into ArangoDB
### How to reproduce
```py
import dgl
import torch
# Create Heterograph
dgl_g = dgl.heterograph({
('user', 'follows', 'user'): (torch.tensor([0, 1]), torch.tensor([1, 2])),
('user', 'plays', 'game'): (torch.tensor([1, 3]), torch.tensor([1, 2]))
})
dgl_g.edges['plays'].data['hours_played'] = torch.tensor([3, 5]) # Set edge attribute
# adbdgl_adapter is an instantiated ADBDGL_Adapter
adb_g = adbdgl_adapter.dgl_to_arangodb('social-graph', dgl_g)
```
Observe the following error:
```
__prepare_adb_attributes(self, data, features, id, doc, col, has_one_col)
422 """
423 for key in features:
--> 424 tensor = data[key] if has_one_col else data[key][col]
425 doc[key] = self.__cntrl._dgl_feature_to_adb_attribute(key, col, tensor[id])
426
KeyError: 'plays'
```
### Reason
DGL edge attributes must be referenced by key, followed by the **canonical edge type** of the edge:
* Bad: `print(dgl_g.edata['hours_played']['plays'])`
* Good: `print(dgl_g.edata['hours_played'][('user', 'plays', 'game')])`

This can be fixed using [`to_canonical_etype()`](https://docs.dgl.ai/en/0.6.x/generated/dgl.DGLGraph.to_canonical_etype.html), as the sketch below illustrates.
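A minimal sketch of the corrected lookup, assuming DGL 0.6.x and reusing the heterograph from the reproduction above:
```py
import dgl
import torch

dgl_g = dgl.heterograph({
    ('user', 'follows', 'user'): (torch.tensor([0, 1]), torch.tensor([1, 2])),
    ('user', 'plays', 'game'): (torch.tensor([1, 3]), torch.tensor([1, 2]))
})
dgl_g.edges['plays'].data['hours_played'] = torch.tensor([3, 5])

# Resolve the short edge type to its (src_type, etype, dst_type) triple
canonical_etype = dgl_g.to_canonical_etype('plays')  # ('user', 'plays', 'game')

# Indexing edata by the canonical triple succeeds where the short name raises KeyError
print(dgl_g.edata['hours_played'][canonical_etype])  # tensor([3, 5])
```
This mirrors the patch above, which resolves `canonical_etype` once per edge collection and passes it down to `__prepare_adb_attributes`.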
| 2022-05-30T22:35:50 | 0.0 | [] | [] |
|||
arangoml/dgl-adapter | arangoml__dgl-adapter-17 | 8b034d155d1805eccd4664b57feb9b8c14a1d99f | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index da41db7..103e497 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -103,9 +103,6 @@ jobs:
- name: Install release packages
run: pip install wheel gitchangelog pystache
- - name: Install dependencies
- run: pip install .[dev]
-
- name: Set variables
run: echo "VERSION=$(curl ${GITHUB_API_URL}/repos/${GITHUB_REPOSITORY}/releases/latest | python -c "import sys; import json; print(json.load(sys.stdin)['tag_name'])")" >> $GITHUB_ENV
@@ -131,4 +128,4 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Alert developer of open PR
- run: echo "Hi dev! Please go see $PR_URL"
+ run: echo "Changelog $PR_URL is ready to be merged by developer."
\ No newline at end of file
diff --git a/README.md b/README.md
index 0d871b2..a4e2097 100644
--- a/README.md
+++ b/README.md
@@ -44,7 +44,7 @@ For a more detailed walk-through, access the official notebook on Colab: <a href
```py
# Import the ArangoDB-DGL Adapter
-from adbdgl_adapter.adapter import ADBDGL_Adapter
+from adbdgl_adapter import ADBDGL_Adapter
# Import the Python-Arango driver
from arango import ArangoClient
diff --git a/adbdgl_adapter/__init__.py b/adbdgl_adapter/__init__.py
index e69de29..3849ef7 100644
--- a/adbdgl_adapter/__init__.py
+++ b/adbdgl_adapter/__init__.py
@@ -0,0 +1,2 @@
+from adbdgl_adapter.adapter import ADBDGL_Adapter # noqa: F401
+from adbdgl_adapter.controller import ADBDGL_Controller # noqa: F401
diff --git a/adbdgl_adapter/adapter.py b/adbdgl_adapter/adapter.py
index f841e91..6e48913 100644
--- a/adbdgl_adapter/adapter.py
+++ b/adbdgl_adapter/adapter.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-
+import logging
from collections import defaultdict
from typing import Any, DefaultDict, Dict, List, Set, Union
@@ -17,6 +17,7 @@
from .abc import Abstract_ADBDGL_Adapter
from .controller import ADBDGL_Controller
from .typings import ArangoMetagraph, DGLCanonicalEType, DGLDataDict, Json
+from .utils import logger
class ADBDGL_Adapter(Abstract_ADBDGL_Adapter):
@@ -29,14 +30,20 @@ class ADBDGL_Adapter(Abstract_ADBDGL_Adapter):
Optionally re-defined by the user if needed (otherwise defaults to
ADBDGL_Controller).
:type controller: adbdgl_adapter.controller.ADBDGL_Controller
- :raise ValueError: If missing required keys in conn
+ :param logging_lvl: Defaults to logging.INFO. Other useful options are
+ logging.DEBUG (more verbose), and logging.WARNING (less verbose).
+ :type logging_lvl: str | int
+ :raise ValueError: If invalid parameters
"""
def __init__(
self,
db: Database,
controller: ADBDGL_Controller = ADBDGL_Controller(),
+ logging_lvl: Union[str, int] = logging.INFO,
):
+ self.set_logging(logging_lvl)
+
if issubclass(type(db), Database) is False:
msg = "**db** parameter must inherit from arango.database.Database"
raise TypeError(msg)
@@ -48,10 +55,15 @@ def __init__(
self.__db = db
self.__cntrl: ADBDGL_Controller = controller
+ logger.info(f"Instantiated ADBDGL_Adapter with database '{db.name}'")
+
@property
def db(self) -> Database:
return self.__db
+ def set_logging(self, level: Union[int, str]) -> None:
+ logger.setLevel(level)
+
def arangodb_to_dgl(
self, name: str, metagraph: ArangoMetagraph, **query_options: Any
) -> DGLHeteroGraph:
@@ -86,6 +98,7 @@ def arangodb_to_dgl(
},
}
"""
+ logger.debug(f"Starting arangodb_to_dgl({name}, ...):")
self.__validate_attributes("graph", set(metagraph), self.METAGRAPH_ATRIBS)
# Maps ArangoDB vertex IDs to DGL node IDs
@@ -98,6 +111,7 @@ def arangodb_to_dgl(
adb_v: Json
for v_col, atribs in metagraph["vertexCollections"].items():
+ logger.debug(f"Preparing '{v_col}' vertices")
for i, adb_v in enumerate(
self.__fetch_adb_docs(v_col, atribs, query_options)
):
@@ -112,6 +126,7 @@ def arangodb_to_dgl(
from_col: Set[str] = set()
to_col: Set[str] = set()
for e_col, atribs in metagraph["edgeCollections"].items():
+ logger.debug(f"Preparing '{e_col}' edges")
from_nodes: List[int] = []
to_nodes: List[int] = []
for adb_e in self.__fetch_adb_docs(e_col, atribs, query_options):
@@ -140,11 +155,12 @@ def arangodb_to_dgl(
dgl_g: DGLHeteroGraph = heterograph(data_dict)
has_one_ntype = len(dgl_g.ntypes) == 1
has_one_etype = len(dgl_g.etypes) == 1
+ logger.debug(f"Is graph '{name}' homogenous? {has_one_ntype and has_one_etype}")
self.__insert_dgl_features(ndata, dgl_g.ndata, has_one_ntype)
self.__insert_dgl_features(edata, dgl_g.edata, has_one_etype)
- print(f"DGL: {name} created")
+ logger.info(f"Created DGL '{name}' Graph")
return dgl_g
def arangodb_collections_to_dgl(
@@ -207,7 +223,9 @@ def dgl_to_arangodb(
:return: The ArangoDB Graph API wrapper.
:rtype: arango.graph.Graph
"""
+ logger.debug(f"Starting dgl_to_arangodb({name}, ...):")
is_default = dgl_g.canonical_etypes == self.DEFAULT_CANONICAL_ETYPE
+ logger.debug(f"Is graph '{name}' using default canonical_etypes? {is_default}")
adb_v_cols: List[str] = [name + dgl_g.ntypes[0]] if is_default else dgl_g.ntypes
adb_e_cols: List[str] = [name + dgl_g.etypes[0]] if is_default else dgl_g.etypes
e_definitions = self.etypes_to_edefinitions(
@@ -224,6 +242,7 @@ def dgl_to_arangodb(
has_one_ntype = len(dgl_g.ntypes) == 1
has_one_etype = len(dgl_g.etypes) == 1
+ logger.debug(f"Is graph '{name}' homogenous? {has_one_ntype and has_one_etype}")
adb_documents: DefaultDict[str, List[Json]] = defaultdict(list)
for v_col in adb_v_cols:
@@ -231,8 +250,10 @@ def dgl_to_arangodb(
v_col_docs = adb_documents[v_col]
if self.__db.has_collection(v_col) is False:
+ logger.debug(f"Creating {v_col} vertex collection")
self.__db.create_collection(v_col)
+ logger.debug(f"Preparing {len(dgl_g.nodes(ntype))} '{v_col}' DGL nodes")
node: Tensor
for node in dgl_g.nodes(ntype):
dgl_node_id = node.item()
@@ -257,6 +278,7 @@ def dgl_to_arangodb(
e_col_docs = adb_documents[e_col]
if self.__db.has_collection(e_col) is False:
+ logger.debug(f"Creating {e_col} edge collection")
self.__db.create_collection(e_col, edge=True)
if is_default:
@@ -265,6 +287,7 @@ def dgl_to_arangodb(
from_col, _, to_col = dgl_g.to_canonical_etype(e_col)
from_nodes, to_nodes = dgl_g.edges(etype=etype)
+ logger.debug(f"Preparing {len(from_nodes)} '{e_col}' DGL edges")
for dgl_edge_id, (from_node, to_node) in enumerate(
zip(from_nodes, to_nodes)
):
@@ -288,9 +311,10 @@ def dgl_to_arangodb(
adb_graph: ArangoDBGraph = self.__db.create_graph(name, e_definitions)
for col, doc_list in adb_documents.items(): # insert remaining documents
+ logger.debug(f"Inserting last {len(doc_list)} documents into '{col}'")
self.__db.collection(col).import_bulk(doc_list, on_duplicate="replace")
- print(f"ArangoDB: {name} created")
+ logger.info(f"Created ArangoDB '{name}' Graph")
return adb_graph
def etypes_to_edefinitions(
@@ -372,6 +396,7 @@ def __insert_dgl_features(
col_dict: Dict[str, List[Any]]
for key, col_dict in features_data.items():
for col, array in col_dict.items():
+ logger.debug(f"Inserting {len(array)} '{key}' features into '{col}'")
data[key] = (
tensor(array) if has_one_type else {**data[key], col: tensor(array)}
)
@@ -428,6 +453,7 @@ def __insert_adb_docs(
col_docs.append(doc)
if len(col_docs) >= batch_size:
+ logger.debug(f"Inserting next {batch_size} batch documents into '{col}'")
self.__db.collection(col).import_bulk(col_docs, on_duplicate="replace")
col_docs.clear()
diff --git a/adbdgl_adapter/utils.py b/adbdgl_adapter/utils.py
new file mode 100644
index 0000000..3f3f894
--- /dev/null
+++ b/adbdgl_adapter/utils.py
@@ -0,0 +1,11 @@
+import logging
+import os
+
+logger = logging.getLogger(__package__)
+handler = logging.StreamHandler()
+formatter = logging.Formatter(
+ f"[%(asctime)s] [{os.getpid()}] [%(levelname)s] - %(name)s: %(message)s",
+ "%Y/%m/%d %H:%M:%S %z",
+)
+handler.setFormatter(formatter)
+logger.addHandler(handler)
| Consider a 'Verbose' option for adapter functionality
Mirror of https://github.com/arangoml/networkx-adapter/issues/72
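Based on the patch above, a sketch of how the new verbosity control would be used; `db` is assumed to be an existing python-arango database handle:
```py
import logging

from adbdgl_adapter import ADBDGL_Adapter

# Instantiate with DEBUG verbosity; the default is logging.INFO
adapter = ADBDGL_Adapter(db, logging_lvl=logging.DEBUG)

# The level can also be adjusted after instantiation
adapter.set_logging(logging.WARNING)
```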
| 2022-05-13T19:07:56 | 0.0 | [] | [] |
|||
arangoml/dgl-adapter | arangoml__dgl-adapter-15 | 0ac6af8f09da785a33b8a039701072295dc974c5 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index dd3b051..0d6cda5 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -36,7 +36,7 @@ jobs:
with:
python-version: ${{ matrix.python }}
- name: Set up ArangoDB Instance via Docker
- run: docker create --name adb -p 8529:8529 -e ARANGO_ROOT_PASSWORD=openSesame arangodb/arangodb:3.9.1
+ run: docker create --name adb -p 8529:8529 -e ARANGO_ROOT_PASSWORD= arangodb/arangodb:3.9.1
- name: Start ArangoDB Instance
run: docker start adb
- name: Setup pip
@@ -52,7 +52,7 @@ jobs:
- name: Run mypy
run: mypy ${{env.PACKAGE_DIR}} ${{env.TESTS_DIR}}
- name: Run pytest
- run: py.test --cov=${{env.PACKAGE_DIR}} --cov-report xml -v --color=yes --no-cov-on-fail --code-highlight=yes
+ run: py.test --cov=${{env.PACKAGE_DIR}} --cov-report xml --cov-report term-missing -v --color=yes --no-cov-on-fail --code-highlight=yes
- name: Publish to coveralls.io
if: matrix.python == '3.8'
env:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 58239c2..da41db7 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -19,6 +19,10 @@ jobs:
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}
+ - name: Set up ArangoDB Instance via Docker
+ run: docker create --name adb -p 8529:8529 -e ARANGO_ROOT_PASSWORD= arangodb/arangodb:3.9.1
+ - name: Start ArangoDB Instance
+ run: docker start adb
- name: Setup pip
run: python -m pip install --upgrade pip setuptools wheel
- name: Install packages
@@ -32,7 +36,7 @@ jobs:
- name: Run mypy
run: mypy ${{env.PACKAGE_DIR}} ${{env.TESTS_DIR}}
- name: Run pytest
- run: py.test --cov=${{env.PACKAGE_DIR}} --cov-report xml -v --color=yes --no-cov-on-fail --code-highlight=yes
+ run: py.test --cov=${{env.PACKAGE_DIR}} --cov-report xml --cov-report term-missing -v --color=yes --no-cov-on-fail --code-highlight=yes
- name: Publish to coveralls.io
if: matrix.python == '3.8'
env:
diff --git a/README.md b/README.md
index cd5de5f..0d871b2 100644
--- a/README.md
+++ b/README.md
@@ -29,44 +29,44 @@ The Deep Graph Library (DGL) is an easy-to-use, high performance and scalable Py
## Installation
+#### Latest Release
```
pip install adbdgl-adapter
```
+#### Current State
+```
+pip install git+https://github.com/arangoml/dgl-adapter.git
+```
## Quickstart
For a more detailed walk-through, access the official notebook on Colab: <a href="https://colab.research.google.com/github/arangoml/dgl-adapter/blob/master/examples/ArangoDB_DGL_Adapter.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>
-
```py
# Import the ArangoDB-DGL Adapter
from adbdgl_adapter.adapter import ADBDGL_Adapter
+# Import the Python-Arango driver
+from arango import ArangoClient
+
# Import a sample graph from DGL
from dgl.data import KarateClubDataset
-# Store ArangoDB endpoint connection info
-# Assumption: the ArangoDB "fraud detection" dataset is imported to this endpoint for example purposes
-con = {
- "protocol": "http",
- "hostname": "localhost",
- "port": 8529,
- "username": "root",
- "password": "openSesame",
- "dbName": "_system",
-}
+# Instantiate driver client based on user preference
+# Let's assume that the ArangoDB "fraud detection" dataset is imported to this endpoint for example purposes
+db = ArangoClient(hosts="http://localhost:8529").db("_system", username="root", password="openSesame")
-# Instantiate the ADBDGL Adapter with connection credentials
-adbdgl_adapter = ADBDGL_Adapter(con)
+# Instantiate the ADBDGL Adapter with driver client
+adbdgl_adapter = ADBDGL_Adapter(db)
# Convert ArangoDB to DGL via Graph Name
dgl_fraud_graph = adbdgl_adapter.arangodb_graph_to_dgl("fraud-detection")
# Convert ArangoDB to DGL via Collection Names
dgl_fraud_graph_2 = adbdgl_adapter.arangodb_collections_to_dgl(
- "fraud-detection",
- {"account", "Class", "customer"}, # Specify vertex collections
- {"accountHolder", "Relationship", "transaction"}, # Specify edge collections
+ "fraud-detection",
+ {"account", "Class", "customer"}, # Specify vertex collections
+ {"accountHolder", "Relationship", "transaction"}, # Specify edge collections
)
# Convert ArangoDB to DGL via a Metagraph
@@ -94,17 +94,15 @@ Prerequisite: `arangorestore`
1. `git clone https://github.com/arangoml/dgl-adapter.git`
2. `cd dgl-adapter`
3. (create virtual environment of choice)
-4. `pip install -e . pytest`
+4. `pip install -e .[dev]`
5. (create an ArangoDB instance with method of choice)
-6. `pytest --protocol <> --host <> --port <> --dbName <> --username <> --password <>`
+6. `pytest --url <> --dbName <> --username <> --password <>`
**Note**: A `pytest` parameter can be omitted if the endpoint is using its default value:
```python
def pytest_addoption(parser):
- parser.addoption("--protocol", action="store", default="http")
- parser.addoption("--host", action="store", default="localhost")
- parser.addoption("--port", action="store", default="8529")
+ parser.addoption("--url", action="store", default="http://localhost:8529")
parser.addoption("--dbName", action="store", default="_system")
parser.addoption("--username", action="store", default="root")
- parser.addoption("--password", action="store", default="openSesame")
+ parser.addoption("--password", action="store", default="")
```
\ No newline at end of file
diff --git a/adbdgl_adapter/abc.py b/adbdgl_adapter/abc.py
index 3219d71..2842822 100644
--- a/adbdgl_adapter/abc.py
+++ b/adbdgl_adapter/abc.py
@@ -61,18 +61,10 @@ def __validate_attributes(self) -> None:
def DEFAULT_CANONICAL_ETYPE(self) -> List[DGLCanonicalEType]:
return [("_N", "_E", "_N")]
- @property
- def CONNECTION_ATRIBS(self) -> Set[str]:
- return {"hostname", "username", "password", "dbName"}
-
@property
def METAGRAPH_ATRIBS(self) -> Set[str]:
return {"vertexCollections", "edgeCollections"}
- @property
- def EDGE_DEFINITION_ATRIBS(self) -> Set[str]:
- return {"edge_collection", "from_vertex_collections", "to_vertex_collections"}
-
class Abstract_ADBDGL_Controller(ABC):
def _adb_attribute_to_dgl_feature(self, key: str, col: str, val: Any) -> Any:
diff --git a/adbdgl_adapter/adapter.py b/adbdgl_adapter/adapter.py
index 6d003d9..f841e91 100644
--- a/adbdgl_adapter/adapter.py
+++ b/adbdgl_adapter/adapter.py
@@ -4,9 +4,8 @@
from collections import defaultdict
from typing import Any, DefaultDict, Dict, List, Set, Union
-from arango import ArangoClient
from arango.cursor import Cursor
-from arango.database import StandardDatabase
+from arango.database import Database
from arango.graph import Graph as ArangoDBGraph
from arango.result import Result
from dgl import DGLGraph, heterograph
@@ -23,8 +22,8 @@
class ADBDGL_Adapter(Abstract_ADBDGL_Adapter):
"""ArangoDB-DGL adapter.
- :param conn: Connection details to an ArangoDB instance.
- :type conn: adbdgl_adapter.typings.Json
+ :param db: A python-arango database instance
+ :type db: arango.database.Database
:param controller: The ArangoDB-DGL controller, for controlling how
ArangoDB attributes are converted into DGL features, and vice-versa.
Optionally re-defined by the user if needed (otherwise defaults to
@@ -35,28 +34,22 @@ class ADBDGL_Adapter(Abstract_ADBDGL_Adapter):
def __init__(
self,
- conn: Json,
+ db: Database,
controller: ADBDGL_Controller = ADBDGL_Controller(),
):
- self.__validate_attributes("connection", set(conn), self.CONNECTION_ATRIBS)
- if issubclass(type(controller), ADBDGL_Controller) is False:
- msg = "controller must inherit from ADBDGL_Controller"
+ if issubclass(type(db), Database) is False:
+ msg = "**db** parameter must inherit from arango.database.Database"
raise TypeError(msg)
- username: str = conn["username"]
- password: str = conn["password"]
- db_name: str = conn["dbName"]
- host: str = conn["hostname"]
- protocol: str = conn.get("protocol", "https")
- port = str(conn.get("port", 8529))
-
- url = protocol + "://" + host + ":" + port
+ if issubclass(type(controller), ADBDGL_Controller) is False:
+ msg = "**controller** parameter must inherit from ADBDGL_Controller"
+ raise TypeError(msg)
- print(f"Connecting to {url}")
- self.__db = ArangoClient(hosts=url).db(db_name, username, password, verify=True)
+ self.__db = db
self.__cntrl: ADBDGL_Controller = controller
- def db(self) -> StandardDatabase:
+ @property
+ def db(self) -> Database:
return self.__db
def arangodb_to_dgl(
diff --git a/setup.py b/setup.py
index af18c1d..9dd74b4 100644
--- a/setup.py
+++ b/setup.py
@@ -19,6 +19,7 @@
python_requires=">=3.6",
license="Apache Software License",
install_requires=[
+ "requests>=2.27.1",
"dgl==0.6.1",
"torch>=1.10.2",
"python-arango>=7.3.1",
| Expose ArangoClient & StandardDatabase from adapter to provide driver client accessibility
Mirror of https://github.com/arangoml/networkx-adapter/issues/70
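A sketch of the driver-client pattern this patch introduces (endpoint and credentials are placeholders). Passing anything that does not inherit from `arango.database.Database` is now rejected:
```py
from arango import ArangoClient

from adbdgl_adapter.adapter import ADBDGL_Adapter

# New: hand the adapter a python-arango database instance directly
db = ArangoClient(hosts="http://localhost:8529").db(
    "_system", username="root", password=""
)
adapter = ADBDGL_Adapter(db)

# Old-style connection dicts are rejected
try:
    ADBDGL_Adapter({"hostname": "localhost", "dbName": "_system"})
except TypeError as e:
    print(e)  # **db** parameter must inherit from arango.database.Database
```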
| 2022-05-11T00:04:41 | 0.0 | [] | [] |
|||
arangoml/dgl-adapter | arangoml__dgl-adapter-3 | c172daf1a7be4dc738866e09012c2736f9eaa85c | diff --git a/.github/workflows/analyze.yml b/.github/workflows/analyze.yml
index e0001fb..dc84535 100644
--- a/.github/workflows/analyze.yml
+++ b/.github/workflows/analyze.yml
@@ -11,23 +11,30 @@
#
name: analyze
on:
- workflow_dispatch:
+ push:
+ branches: [ master ]
+ paths:
+ - 'adbdgl_adapter/**'
+ - 'tests/**'
+ - 'setup.py'
+ - 'setup.cfg'
+ - 'pyproject.toml'
+ - '.github/workflows/analyze.yml'
pull_request:
- # The branches below must be a subset of the branches above
- branches: [master]
+ branches: [ master ]
paths:
- - "adbdgl_adapter/**"
+ - 'adbdgl_adapter/**'
+ - 'tests/**'
+ - 'setup.py'
+ - 'setup.cfg'
+ - 'pyproject.toml'
+ - '.github/workflows/analyze.yml'
schedule:
- cron: "00 9 * * 1"
-env:
- SOURCE_DIR: adbdgl_adapter
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
- defaults:
- run:
- working-directory: ${{env.SOURCE_DIR}}
permissions:
actions: read
contents: read
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 6f6da1c..afbde43 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -1,40 +1,56 @@
name: build
on:
workflow_dispatch:
+ push:
+ branches: [ master ]
+ paths:
+ - 'adbdgl_adapter/**'
+ - 'tests/**'
+ - 'setup.py'
+ - 'setup.cfg'
+ - 'pyproject.toml'
+ - '.github/workflows/build.yml'
pull_request:
+ branches: [ master ]
paths:
- - "adbdgl_adapter/adbdgl_adapter/**"
- - "adbdgl_adapter/tests/**"
+ - 'adbdgl_adapter/**'
+ - 'tests/**'
+ - 'setup.py'
+ - 'setup.cfg'
+ - 'pyproject.toml'
+ - '.github/workflows/build.yml'
env:
- SOURCE_DIR: adbdgl_adapter
PACKAGE_DIR: adbdgl_adapter
+ TESTS_DIR: tests
jobs:
build:
runs-on: ubuntu-latest
- defaults:
- run:
- working-directory: ${{env.SOURCE_DIR}}
strategy:
matrix:
python: ["3.6", "3.7", "3.8", "3.9"]
name: Python ${{ matrix.python }}
- env:
- COVERALLS_REPO_TOKEN: ${{secrets.COVERALLS_REPO_TOKEN}}
- GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
steps:
- uses: actions/checkout@v2
- - name: Setup python
+ - name: Setup Python ${{ matrix.python }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}
- - name: Lint with Black
- uses: psf/black@stable
- with:
- options: "--check --verbose --diff --color"
- src: ${{env.PACKAGE_DIR}}
- - name: Install dependencies
- run: pip install -e . pytest pytest-cov coveralls
+ - name: Setup pip
+ run: python -m pip install --upgrade pip setuptools wheel
+ - name: Install packages
+ run: pip install .[dev]
+ - name: Run black
+ run: black --check --verbose --diff --color ${{env.PACKAGE_DIR}} ${{env.TESTS_DIR}}
+ - name: Run flake8
+ run: flake8 ${{env.PACKAGE_DIR}} ${{env.TESTS_DIR}}
+ - name: Run isort
+ run: isort --check --profile=black ${{env.PACKAGE_DIR}} ${{env.TESTS_DIR}}
+ - name: Run mypy
+ run: mypy ${{env.PACKAGE_DIR}} ${{env.TESTS_DIR}}
- name: Run pytest
- run: |
- pytest --cov=${{env.PACKAGE_DIR}} --cov-report term-missing -v --color=yes --no-cov-on-fail --code-highlight=yes
- coveralls
+ run: py.test --cov=${{env.PACKAGE_DIR}} --cov-report xml -v --color=yes --no-cov-on-fail --code-highlight=yes
+ - name: Publish to coveralls.io
+ if: matrix.python == '3.8'
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: coveralls --service=github
\ No newline at end of file
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 9b2a48e..50cea35 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,192 +1,136 @@
-# name: release
-# on:
-# workflow_dispatch:
-# push:
-# branches:
-# - master
-# paths:
-# - "adbdgl_adapter/adbdgl_adapter/**"
-# env:
-# SOURCE_DIR: adbdgl_adapter
-# PACKAGE_DIR: adbdgl_adapter
-# jobs:
-# version:
-# runs-on: ubuntu-latest
-# name: Verify version increase
-# steps:
-# - uses: actions/checkout@v2
-# - uses: actions/setup-python@v2
-# with:
-# python-version: "3.9"
-# - name: Install dependencies
-# run: pip install requests packaging
-# - name: Set variables
-# run: |
-# echo "OLD_VERSION=$(python scripts/extract_version.py)" >> $GITHUB_ENV
-# echo "NEW_VERSION=$(cat VERSION)" >> $GITHUB_ENV
-# - name: Assert version increase
-# id: verify
-# run: echo "::set-output name=has_increased::$(python scripts/assert_version.py ${{env.OLD_VERSION}} ${{env.NEW_VERSION}})"
-# - name: Fail on no version increase
-# if: ${{ steps.verify.outputs.has_increased != 'true' }}
-# uses: actions/github-script@v3
-# with:
-# script: core.setFailed("Cannot build & release - VERSION has not been manually incremented")
-# build:
-# needs: version
-# runs-on: ubuntu-latest
-# defaults:
-# run:
-# working-directory: ${{env.SOURCE_DIR}}
-# strategy:
-# matrix:
-# python: ["3.6", "3.7", "3.8", "3.9"]
-# name: Python ${{ matrix.python }}
-# env:
-# COVERALLS_REPO_TOKEN: ${{secrets.COVERALLS_REPO_TOKEN}}
-# GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
-# steps:
-# - uses: actions/checkout@v2
-# - uses: actions/setup-python@v2
-# with:
-# python-version: ${{ matrix.python }}
-# - name: Lint with Black
-# uses: psf/black@stable
-# with:
-# options: "--check --verbose --diff --color"
-# src: ${{env.PACKAGE_DIR}}
-# - name: Install dependencies
-# run: pip install -e . pytest pytest-cov coveralls
-# - name: Run pytest
-# run: |
-# pytest --cov=${{env.PACKAGE_DIR}} --cov-report term-missing -v --color=yes --no-cov-on-fail --code-highlight=yes
-# coveralls
-
-# release:
-# needs: build
-# runs-on: ubuntu-latest
-# name: Release package
-# env:
-# TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
-# steps:
-# - uses: actions/checkout@v2
-# with:
-# fetch-depth: 0
-
-# - name: Setup python
-# uses: actions/setup-python@v2
-# with:
-# python-version: "3.8"
-
-# - name: Copy static repo files
-# run: cp {CHANGELOG.md,LICENSE,README.md,VERSION} ${{env.SOURCE_DIR}}
-
-# - name: Install release packages
-# run: pip install wheel gitchangelog pystache twine
-
-# - name: Install dependencies
-# run: pip install -e .
-# working-directory: ${{env.SOURCE_DIR}}
-
-# - name: Set variables
-# run: |
-# echo "OLD_VERSION=$(python scripts/extract_version.py)" >> $GITHUB_ENV
-# echo "NEW_VERSION=$(cat VERSION)" >> $GITHUB_ENV
-
-# - name: Ensure clean dist/ and build/ folders
-# run: rm -rf dist build
-# working-directory: ${{env.SOURCE_DIR}}
-
-# - name: Build package
-# run: python setup.py sdist bdist_wheel
-# working-directory: ${{env.SOURCE_DIR}}
-
-# - name: Extract wheel artifact name
-# run: echo "wheel_name=$(echo ${{env.SOURCE_DIR}}/dist/*.whl)" >> $GITHUB_ENV
-
-# - name: Extract tar.gz artifact name
-# run: echo "tar_name=$(echo ${{env.SOURCE_DIR}}/dist/*.tar.gz)" >> $GITHUB_ENV
-
-# - name: Pull tags from the repo
-# run: git pull --tags
-
-# - name: Create version_changelog.md
-# run: gitchangelog ${{env.OLD_VERSION}}..HEAD | sed "s/## (unreleased)/${{env.NEW_VERSION}} ($(date +"%Y-%m-%d"))/" > version_changelog.md
-
-# - name: Read version_changelog.md
-# run: cat version_changelog.md
-
-# - name: TestPypi release
-# run: twine upload --repository testpypi dist/* -p ${{ secrets.TWINE_PASSWORD_TEST }} #--skip-existing
-# working-directory: ${{env.SOURCE_DIR}}
-
-# - name: Pypi release
-# run: twine upload dist/* -p ${{ secrets.TWINE_PASSWORD }} #--skip-existing
-# working-directory: ${{env.SOURCE_DIR}}
-
-# - name: Github release
-# env:
-# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-# run: hub release create -a $wheel_name -a $tar_name -F version_changelog.md ${{env.NEW_VERSION}}
-
-# changelog:
-# needs: release
-# runs-on: ubuntu-latest
-# name: Update Changelog
-# steps:
-# - uses: actions/checkout@v2
-# with:
-# fetch-depth: 0
-
-# - name: Create new branch
-# run: git checkout -b actions/changelog
-
-# - name: Set branch upstream
-# run: git push -u origin actions/changelog
-# env:
-# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-# - name: Setup python
-# uses: actions/setup-python@v2
-# with:
-# python-version: "3.8"
-
-# - name: Install release packages
-# run: pip install wheel gitchangelog pystache
-
-# - name: Install dependencies
-# run: pip install -e .
-# working-directory: ${{env.SOURCE_DIR}}
-
-# - name: Set variables
-# run: echo "NEW_VERSION=$(cat VERSION)" >> $GITHUB_ENV
-
-# - name: Generate newest changelog
-# run: gitchangelog ${{env.NEW_VERSION}} > CHANGELOG.md
-
-# - name: Make commit for auto-generated changelog
-# uses: EndBug/add-and-commit@v7
-# env:
-# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-# with:
-# add: "CHANGELOG.md"
-# branch: actions/changelog
-# message: "!gitchangelog"
-
-# - name: Create pull request for the auto generated changelog
-# run: |
-# echo "PR_URL=$(gh pr create \
-# --title "changelog: release ${{env.NEW_VERSION}}" \
-# --body "beep boop, i am a robot" \
-# --label documentation)" >> $GITHUB_ENV
-# env:
-# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-# - name: Set pull request to auto-merge as rebase
-# run: |
-# gh pr merge $PR_URL \
-# --auto \
-# --delete-branch \
-# --rebase
-# env:
-# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+name: release
+on:
+ workflow_dispatch:
+ release:
+ types: [published]
+env:
+ PACKAGE_DIR: adbdgl_adapter
+ TESTS_DIR: tests
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python: ["3.6", "3.7", "3.8", "3.9"]
+ name: Python ${{ matrix.python }}
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup Python ${{ matrix.python }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python }}
+ - name: Setup pip
+ run: python -m pip install --upgrade pip setuptools wheel
+ - name: Install packages
+ run: pip install .[dev]
+ - name: Run black
+ run: black --check --verbose --diff --color ${{env.PACKAGE_DIR}} ${{env.TESTS_DIR}}
+ - name: Run flake8
+ run: flake8 ${{env.PACKAGE_DIR}} ${{env.TESTS_DIR}}
+ - name: Run isort
+ run: isort --check --profile=black ${{env.PACKAGE_DIR}} ${{env.TESTS_DIR}}
+ - name: Run mypy
+ run: mypy ${{env.PACKAGE_DIR}} ${{env.TESTS_DIR}}
+ - name: Run pytest
+ run: py.test --cov=${{env.PACKAGE_DIR}} --cov-report xml -v --color=yes --no-cov-on-fail --code-highlight=yes
+ - name: Publish to coveralls.io
+ if: matrix.python == '3.8'
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: coveralls --service=github
+
+ release:
+ needs: build
+ runs-on: ubuntu-latest
+ name: Release package
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Fetch complete history for all tags and branches
+ run: git fetch --prune --unshallow
+
+ - name: Setup python
+ uses: actions/setup-python@v2
+ with:
+ python-version: "3.8"
+
+ - name: Install release packages
+ run: pip install setuptools wheel twine setuptools-scm[toml]
+
+ - name: Install dependencies
+ run: pip install .[dev]
+
+ - name: Build distribution
+ run: python setup.py sdist bdist_wheel
+
+ - name: Publish to PyPI Test
+ env:
+ TWINE_USERNAME: __token__
+ TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD_TEST }}
+ run: twine upload --repository testpypi dist/* #--skip-existing
+ - name: Publish to PyPI
+ env:
+ TWINE_USERNAME: __token__
+ TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
+ run: twine upload --repository pypi dist/* #--skip-existing
+
+ changelog:
+ needs: release
+ runs-on: ubuntu-latest
+ name: Update Changelog
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+
+ - name: Create new branch
+ run: git checkout -b actions/changelog
+
+ - name: Set branch upstream
+ run: git push -u origin actions/changelog
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Setup python
+ uses: actions/setup-python@v2
+ with:
+ python-version: "3.8"
+
+ - name: Install release packages
+ run: pip install wheel gitchangelog pystache
+
+ - name: Install dependencies
+ run: pip install .[dev]
+
+ - name: Set variables
+ run: echo "VERSION=$(curl ${GITHUB_API_URL}/repos/${GITHUB_REPOSITORY}/releases/latest | python -c "import sys; import json; print(json.load(sys.stdin)['tag_name'])")" >> $GITHUB_ENV
+
+ - name: Generate newest changelog
+ run: gitchangelog ${{env.VERSION}} > CHANGELOG.md
+
+ - name: Make commit for auto-generated changelog
+ uses: EndBug/add-and-commit@v7
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ add: "CHANGELOG.md"
+ branch: actions/changelog
+ message: "!gitchangelog"
+
+ - name: Create pull request for the auto generated changelog
+ run: |
+ echo "PR_URL=$(gh pr create \
+ --title "changelog: release ${{env.VERSION}}" \
+ --body "beep boop, i am a robot" \
+ --label documentation)" >> $GITHUB_ENV
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Set pull request to auto-merge as rebase
+ run: |
+ gh pr merge $PR_URL \
+ --admin \
+ --delete-branch \
+ --rebase
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
index a2571cb..efb3b0c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,13 +1,118 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
.ipynb_checkpoints
-.tox
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+
+# MacOS
.DS_Store
-**/*.pyc
-# log files
-**/*.log
-# Setuptools distribution folder.
-adbdgl_adapter/dist/
-# Remove the build directory from repo
-adbdgl_adapter/build/
-adbdgl_adapter/*.egg-info
-.vscode
-.venv
\ No newline at end of file
+
+# PyCharm
+.idea/
+
+# ArangoDB Starter
+localdata/
+
+# setuptools_scm
+adbdgl_adapter/version.py
+
+.vscode
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..3d73851
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,3 @@
+include README.md LICENSE
+prune tests
+prune examples
\ No newline at end of file
diff --git a/README.md b/README.md
index 255cada..2c260a7 100644
--- a/README.md
+++ b/README.md
@@ -29,12 +29,12 @@ The Deep Graph Library (DGL) is an easy-to-use, high performance and scalable Py
## Quickstart
-Get Started on Colab: <a href="https://colab.research.google.com/github/arangoml/dgl-adapter/blob/master/examples/ArangoDB_DGL_Adapter.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>
+Get Started on Colab: <a href="https://colab.research.google.com/github/arangoml/dgl-adapter/blob/master/examples/ADBDGL_Adapter.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>
```py
# Import the ArangoDB-DGL Adapter
-from adbdgl_adapter.adbdgl_adapter import ArangoDB_DGL_Adapter
+from adbdgl_adapter.adapter import ADBDGL_Adapter
# Import a sample graph from DGL
from dgl.data import KarateClubDataset
@@ -51,7 +51,7 @@ con = {
}
# This instantiates your ADBDGL Adapter with your connection credentials
-adbdgl_adapter = ArangoDB_DGL_Adapter(con)
+adbdgl_adapter = ADBDGL_Adapter(con)
# ArangoDB to DGL via Graph
dgl_fraud_graph = adbdgl_adapter.arangodb_graph_to_dgl("fraud-detection")
@@ -89,6 +89,5 @@ Prerequisite: `arangorestore` must be installed
2. `cd dgl-adapter`
3. `python -m venv .venv`
4. `source .venv/bin/activate` (MacOS) or `.venv/scripts/activate` (Windows)
-5. `cd adbdgl_adapter`
-6. `pip install -e . pytest`
-7. `pytest`
\ No newline at end of file
+5. `pip install -e . pytest`
+6. `pytest`
\ No newline at end of file
diff --git a/VERSION b/VERSION
deleted file mode 100644
index bd52db8..0000000
--- a/VERSION
+++ /dev/null
@@ -1,1 +0,0 @@
-0.0.0
\ No newline at end of file
diff --git a/adbdgl_adapter/adbdgl_adapter/__init__.py b/adbdgl_adapter/__init__.py
similarity index 100%
rename from adbdgl_adapter/adbdgl_adapter/__init__.py
rename to adbdgl_adapter/__init__.py
diff --git a/adbdgl_adapter/abc.py b/adbdgl_adapter/abc.py
new file mode 100644
index 0000000..3219d71
--- /dev/null
+++ b/adbdgl_adapter/abc.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+from abc import ABC
+from typing import Any, List, Set, Union
+
+from arango.graph import Graph as ArangoDBGraph
+from dgl import DGLGraph
+from dgl.heterograph import DGLHeteroGraph
+from torch.functional import Tensor
+
+from .typings import ArangoMetagraph, DGLCanonicalEType, Json
+
+
+class Abstract_ADBDGL_Adapter(ABC):
+ def __init__(self) -> None:
+ raise NotImplementedError # pragma: no cover
+
+ def arangodb_to_dgl(
+ self, name: str, metagraph: ArangoMetagraph, **query_options: Any
+ ) -> DGLHeteroGraph:
+ raise NotImplementedError # pragma: no cover
+
+ def arangodb_collections_to_dgl(
+ self, name: str, v_cols: Set[str], e_cols: Set[str], **query_options: Any
+ ) -> DGLHeteroGraph:
+ raise NotImplementedError # pragma: no cover
+
+ def arangodb_graph_to_dgl(self, name: str, **query_options: Any) -> DGLHeteroGraph:
+ raise NotImplementedError # pragma: no cover
+
+ def dgl_to_arangodb(
+ self, name: str, dgl_g: Union[DGLGraph, DGLHeteroGraph], batch_size: int
+ ) -> ArangoDBGraph:
+ raise NotImplementedError # pragma: no cover
+
+ def etypes_to_edefinitions(
+ self, canonical_etypes: List[DGLCanonicalEType]
+ ) -> List[Json]:
+ raise NotImplementedError # pragma: no cover
+
+ def __prepare_dgl_features(self) -> None:
+ raise NotImplementedError # pragma: no cover
+
+ def __insert_dgl_features(self) -> None:
+ raise NotImplementedError # pragma: no cover
+
+ def __prepare_adb_attributes(self) -> None:
+ raise NotImplementedError # pragma: no cover
+
+ def __insert_adb_docs(self) -> None:
+ raise NotImplementedError # pragma: no cover
+
+ def __fetch_adb_docs(self) -> None:
+ raise NotImplementedError # pragma: no cover
+
+ def __validate_attributes(self) -> None:
+ raise NotImplementedError # pragma: no cover
+
+ @property
+ def DEFAULT_CANONICAL_ETYPE(self) -> List[DGLCanonicalEType]:
+ return [("_N", "_E", "_N")]
+
+ @property
+ def CONNECTION_ATRIBS(self) -> Set[str]:
+ return {"hostname", "username", "password", "dbName"}
+
+ @property
+ def METAGRAPH_ATRIBS(self) -> Set[str]:
+ return {"vertexCollections", "edgeCollections"}
+
+ @property
+ def EDGE_DEFINITION_ATRIBS(self) -> Set[str]:
+ return {"edge_collection", "from_vertex_collections", "to_vertex_collections"}
+
+
+class Abstract_ADBDGL_Controller(ABC):
+ def _adb_attribute_to_dgl_feature(self, key: str, col: str, val: Any) -> Any:
+ raise NotImplementedError # pragma: no cover
+
+ def _dgl_feature_to_adb_attribute(self, key: str, col: str, val: Tensor) -> Any:
+ raise NotImplementedError # pragma: no cover
diff --git a/adbdgl_adapter/adbdgl_adapter/adbdgl_adapter.py b/adbdgl_adapter/adapter.py
similarity index 61%
rename from adbdgl_adapter/adbdgl_adapter/adbdgl_adapter.py
rename to adbdgl_adapter/adapter.py
index 50d13fe..b9d3075 100644
--- a/adbdgl_adapter/adbdgl_adapter/adbdgl_adapter.py
+++ b/adbdgl_adapter/adapter.py
@@ -1,75 +1,73 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-"""
-@author: Anthony Mahanna
-"""
-from .abc import ADBDGL_Adapter
-from .adbdgl_controller import Base_ADBDGL_Controller
+from collections import defaultdict
+from typing import Any, DefaultDict, Dict, List, Set, Union
from arango import ArangoClient
+from arango.cursor import Cursor
from arango.graph import Graph as ArangoDBGraph
-
-import dgl
-from dgl import DGLGraph
+from arango.result import Result
+from dgl import DGLGraph, heterograph
from dgl.heterograph import DGLHeteroGraph
from dgl.view import HeteroEdgeDataView, HeteroNodeDataView
-
-import torch
+from torch import tensor # type: ignore
from torch.functional import Tensor
-from typing import Union
-from collections import defaultdict
+from .abc import Abstract_ADBDGL_Adapter
+from .controller import ADBDGL_Controller
+from .typings import ArangoMetagraph, DGLCanonicalEType, DGLDataDict, Json
-class ArangoDB_DGL_Adapter(ADBDGL_Adapter):
+class ADBDGL_Adapter(Abstract_ADBDGL_Adapter):
"""ArangoDB-DGL adapter.
:param conn: Connection details to an ArangoDB instance.
- :type conn: dict
- :param controller_class: The ArangoDB-DGL controller, for controlling how ArangoDB attributes are converted into DGL features, and vice-versa. Optionally re-defined by the user if needed (otherwise defaults to Base_ADBDGL_Controller).
- :type controller_class: Base_ADBDGL_Controller
+ :type conn: adbdgl_adapter.typings.Json
+ :param controller: The ArangoDB-DGL controller, for controlling how
+ ArangoDB attributes are converted into DGL features, and vice-versa.
+ Optionally re-defined by the user if needed (otherwise defaults to
+ ADBDGL_Controller).
+ :type controller: adbdgl_adapter.controller.ADBDGL_Controller
:raise ValueError: If missing required keys in conn
"""
def __init__(
self,
- conn: dict,
- controller_class: Base_ADBDGL_Controller = Base_ADBDGL_Controller,
+ conn: Json,
+ controller: ADBDGL_Controller = ADBDGL_Controller(),
):
self.__validate_attributes("connection", set(conn), self.CONNECTION_ATRIBS)
- if issubclass(controller_class, Base_ADBDGL_Controller) is False:
- msg = "controller_class must inherit from Base_ADBDGL_Controller"
+ if issubclass(type(controller), ADBDGL_Controller) is False:
+ msg = "controller must inherit from ADBDGL_Controller"
raise TypeError(msg)
- username = conn["username"]
- password = conn["password"]
- db_name = conn["dbName"]
-
- protocol = conn.get("protocol", "https")
- host = conn["hostname"]
+ username: str = conn["username"]
+ password: str = conn["password"]
+ db_name: str = conn["dbName"]
+ host: str = conn["hostname"]
+ protocol: str = conn.get("protocol", "https")
port = str(conn.get("port", 8529))
url = protocol + "://" + host + ":" + port
print(f"Connecting to {url}")
self.__db = ArangoClient(hosts=url).db(db_name, username, password, verify=True)
- self.__cntrl: Base_ADBDGL_Controller = controller_class()
+ self.__cntrl: ADBDGL_Controller = controller
def arangodb_to_dgl(
- self,
- name: str,
- metagraph: dict,
- **query_options,
- ):
- """Create a DGL graph from user-defined metagraph.
+ self, name: str, metagraph: ArangoMetagraph, **query_options: Any
+ ) -> DGLHeteroGraph:
+ """Create a DGLHeteroGraph from the user-defined metagraph.
:param name: The DGL graph name.
:type name: str
- :param metagraph: An object defining vertex & edge collections to import to DGL, along with their associated attributes to keep.
- :type metagraph: dict
- :param query_options: Keyword arguments to specify AQL query options when fetching documents from the ArangoDB instance.
- :type query_options: **kwargs
+ :param metagraph: An object defining vertex & edge collections to import
+ to DGL, along with their associated attributes to keep.
+ :type metagraph: adbdgl_adapter.typings.ArangoMetagraph
+ :param query_options: Keyword arguments to specify AQL query options when
+ fetching documents from the ArangoDB instance.
+ :type query_options: Any
:return: A DGL Heterograph
:rtype: dgl.heterograph.DGLHeteroGraph
:raise ValueError: If missing required keys in metagraph
@@ -85,55 +83,64 @@ def arangodb_to_dgl(
},
"edgeCollections": {
"accountHolder": {},
- "transaction": {},
+ "transaction": {
+ "transaction_amt", "receiver_bank_id", "sender_bank_id"
+ },
},
}
"""
self.__validate_attributes("graph", set(metagraph), self.METAGRAPH_ATRIBS)
- adb_map = dict() # Maps ArangoDB vertex IDs to DGL node IDs
+ # Maps ArangoDB vertex IDs to DGL node IDs
+ adb_map: Dict[str, Dict[str, Any]] = dict()
# Dictionaries for constructing a heterogeneous graph.
- data_dict = dict()
- ndata = defaultdict(lambda: defaultdict(list))
- edata = defaultdict(lambda: defaultdict(list))
+ data_dict: DGLDataDict = dict()
+ ndata: DefaultDict[Any, Any] = defaultdict(lambda: defaultdict(list))
+ edata: DefaultDict[Any, Any] = defaultdict(lambda: defaultdict(list))
+ adb_v: Json
for v_col, atribs in metagraph["vertexCollections"].items():
- for i, v in enumerate(self.__fetch_adb_docs(v_col, atribs, query_options)):
- adb_map[v["_id"]] = {
+ for i, adb_v in enumerate(
+ self.__fetch_adb_docs(v_col, atribs, query_options)
+ ):
+ adb_map[adb_v["_id"]] = {
"id": i,
"col": v_col,
}
- self.__prepare_dgl_features(ndata, atribs, v, v_col)
+ self.__prepare_dgl_features(ndata, atribs, adb_v, v_col)
- from_col = set()
- to_col = set()
+ adb_e: Json
+ from_col: Set[str] = set()
+ to_col: Set[str] = set()
for e_col, atribs in metagraph["edgeCollections"].items():
- from_nodes = []
- to_nodes = []
- for e in self.__fetch_adb_docs(e_col, atribs, query_options):
- from_node = adb_map[e["_from"]]
- to_node = adb_map[e["_to"]]
+ from_nodes: List[int] = []
+ to_nodes: List[int] = []
+ for adb_e in self.__fetch_adb_docs(e_col, atribs, query_options):
+ from_node = adb_map[adb_e["_from"]]
+ to_node = adb_map[adb_e["_to"]]
from_col.add(from_node["col"])
to_col.add(to_node["col"])
if len(from_col | to_col) > 2:
raise ValueError(
- f"Can't convert to DGL: too many '_from' & '_to' collections in {e_col}"
+ f"""Can't convert to DGL:
+ too many '_from' & '_to' collections in {e_col}
+ """
)
from_nodes.append(from_node["id"])
to_nodes.append(to_node["id"])
- self.__prepare_dgl_features(edata, atribs, e, e_col)
+ self.__prepare_dgl_features(edata, atribs, adb_e, e_col)
data_dict[(from_col.pop(), e_col, to_col.pop())] = (
- torch.tensor(from_nodes),
- torch.tensor(to_nodes),
+ tensor(from_nodes),
+ tensor(to_nodes),
)
- dgl_g: DGLHeteroGraph = dgl.heterograph(data_dict)
+ dgl_g: DGLHeteroGraph = heterograph(data_dict)
has_one_ntype = len(dgl_g.ntypes) == 1
has_one_etype = len(dgl_g.etypes) == 1
@@ -146,37 +153,40 @@ def arangodb_to_dgl(
def arangodb_collections_to_dgl(
self,
name: str,
- vertex_collections: set,
- edge_collections: set,
- **query_options,
- ):
+ v_cols: Set[str],
+ e_cols: Set[str],
+ **query_options: Any,
+ ) -> DGLHeteroGraph:
"""Create a DGL graph from ArangoDB collections.
:param name: The DGL graph name.
:type name: str
- :param vertex_collections: A set of ArangoDB vertex collections to import to DGL.
- :type vertex_collections: set
- :param edge_collections: A set of ArangoDB edge collections to import to DGL.
- :type edge_collections: set
- :param query_options: Keyword arguments to specify AQL query options when fetching documents from the ArangoDB instance.
- :type query_options: **kwargs
+ :param v_cols: A set of ArangoDB vertex collections to
+ import to DGL.
+ :type v_cols: Set[str]
+ :param e_cols: A set of ArangoDB edge collections to import to DGL.
+ :type e_cols: Set[str]
+ :param query_options: Keyword arguments to specify AQL query options
+ when fetching documents from the ArangoDB instance.
+ :type query_options: Any
:return: A DGL Heterograph
:rtype: dgl.heterograph.DGLHeteroGraph
"""
- metagraph = {
- "vertexCollections": {col: {} for col in vertex_collections},
- "edgeCollections": {col: {} for col in edge_collections},
+ metagraph: ArangoMetagraph = {
+ "vertexCollections": {col: set() for col in v_cols},
+ "edgeCollections": {col: set() for col in e_cols},
}
return self.arangodb_to_dgl(name, metagraph, **query_options)
- def arangodb_graph_to_dgl(self, name: str, **query_options):
+ def arangodb_graph_to_dgl(self, name: str, **query_options: Any) -> DGLHeteroGraph:
"""Create a DGL graph from an ArangoDB graph.
:param name: The ArangoDB graph name.
:type name: str
- :param query_options: Keyword arguments to specify AQL query options when fetching documents from the ArangoDB instance.
- :type query_options: **kwargs
+ :param query_options: Keyword arguments to specify AQL query options
+ when fetching documents from the ArangoDB instance.
+ :type query_options: Any
:return: A DGL Heterograph
:rtype: dgl.heterograph.DGLHeteroGraph
"""
@@ -188,7 +198,7 @@ def arangodb_graph_to_dgl(self, name: str, **query_options):
def dgl_to_arangodb(
self, name: str, dgl_g: Union[DGLGraph, DGLHeteroGraph], batch_size: int = 1000
- ):
+ ) -> ArangoDBGraph:
"""Create an ArangoDB graph from a DGL graph.
:param name: The ArangoDB graph name.
@@ -200,9 +210,9 @@ def dgl_to_arangodb(
:return: The ArangoDB Graph API wrapper.
:rtype: arango.graph.Graph
"""
- is_default_type = dgl_g.canonical_etypes == self.DEFAULT_CANONICAL_ETYPE
- adb_v_cols = [name + dgl_g.ntypes[0]] if is_default_type else dgl_g.ntypes
- adb_e_cols = [name + dgl_g.etypes[0]] if is_default_type else dgl_g.etypes
+ is_default = dgl_g.canonical_etypes == self.DEFAULT_CANONICAL_ETYPE
+ adb_v_cols: List[str] = [name + dgl_g.ntypes[0]] if is_default else dgl_g.ntypes
+ adb_e_cols: List[str] = [name + dgl_g.etypes[0]] if is_default else dgl_g.etypes
e_definitions = self.etypes_to_edefinitions(
[
(
@@ -211,16 +221,16 @@ def dgl_to_arangodb(
adb_v_cols[0],
)
]
- if is_default_type
+ if is_default
else dgl_g.canonical_etypes
)
has_one_ntype = len(dgl_g.ntypes) == 1
has_one_etype = len(dgl_g.etypes) == 1
- adb_documents = defaultdict(list)
+ adb_documents: DefaultDict[str, List[Json]] = defaultdict(list)
for v_col in adb_v_cols:
- ntype = None if is_default_type else v_col
+ ntype = None if is_default else v_col
v_col_docs = adb_documents[v_col]
if self.__db.has_collection(v_col) is False:
@@ -228,7 +238,7 @@ def dgl_to_arangodb(
node: Tensor
for node in dgl_g.nodes(ntype):
- dgl_node_id: int = node.item()
+ dgl_node_id = node.item()
adb_vertex = {"_key": str(dgl_node_id)}
self.__prepare_adb_attributes(
dgl_g.ndata,
@@ -246,13 +256,13 @@ def dgl_to_arangodb(
from_nodes: Tensor
to_nodes: Tensor
for e_col in adb_e_cols:
- etype = None if is_default_type else e_col
+ etype = None if is_default else e_col
e_col_docs = adb_documents[e_col]
if self.__db.has_collection(e_col) is False:
self.__db.create_collection(e_col, edge=True)
- if is_default_type:
+ if is_default:
from_col = to_col = adb_v_cols[0]
else:
from_col, _, to_col = dgl_g.to_canonical_etype(e_col)
@@ -286,26 +296,29 @@ def dgl_to_arangodb(
print(f"ArangoDB: {name} created")
return adb_graph
- def etypes_to_edefinitions(self, canonical_etypes: list) -> list:
+ def etypes_to_edefinitions(
+ self, canonical_etypes: List[DGLCanonicalEType]
+ ) -> List[Json]:
"""Converts a DGL graph's canonical_etypes property to ArangoDB graph edge definitions
- :param canonical_etypes: A list of string triplets (str, str, str) for source node type, edge type and destination node type.
- :type canonical_etypes: list[tuple]
+ :param canonical_etypes: A list of string triplets (str, str, str) for
+ source node type, edge type and destination node type.
+ :type canonical_etypes: List[adbdgl_adapter.typings.DGLCanonicalEType]
:return: ArangoDB Edge Definitions
- :rtype: list[dict[str, Union[str, list[str]]]]
+ :rtype: List[adbdgl_adapter.typings.Json]
Here is an example of **edge_definitions**:
.. code-block:: python
[
{
- "edge_collection": "teach",
- "from_vertex_collections": ["teachers"],
- "to_vertex_collections": ["lectures"]
+ "edge_collection": "teaches",
+ "from_vertex_collections": ["Teacher"],
+ "to_vertex_collections": ["Lecture"]
}
]
"""
- edge_definitions = []
+ edge_definitions: List[Json] = []
for dgl_from, dgl_e, dgl_to in canonical_etypes:
edge_definitions.append(
{
@@ -319,91 +332,99 @@ def etypes_to_edefinitions(self, canonical_etypes: list) -> list:
def __prepare_dgl_features(
self,
- features_data: defaultdict,
- attributes: set,
- doc: dict,
+ features_data: DefaultDict[Any, Any],
+ attributes: Set[str],
+ doc: Json,
col: str,
- ):
+ ) -> None:
"""Convert a set of ArangoDB attributes into valid DGL features
:param features_data: A dictionary storing the DGL features formatted as lists.
- :type features_data: defaultdict[Any, defaultdict[Any, list]]
- :param col: The collection the current document belongs to
- :type col: str
+ :type features_data: Defaultdict[Any, Any]
:param attributes: A set of ArangoDB attribute keys to convert into DGL features
- :type attributes: set
+ :type attributes: Set[str]
:param doc: The current ArangoDB document
- :type doc: dict
-
+ :type doc: adbdgl_adapter.typings.Json
+ :param col: The collection the current document belongs to
+ :type col: str
"""
key: str
for key in attributes:
- arr: list = features_data[key][col]
+ arr: List[Any] = features_data[key][col]
arr.append(
- self.__cntrl._adb_attribute_to_dgl_feature(key, col, doc.get(key, -1))
+ self.__cntrl._adb_attribute_to_dgl_feature(key, col, doc.get(key, None))
)
def __insert_dgl_features(
self,
- features_data: defaultdict,
+ features_data: DefaultDict[Any, Any],
data: Union[HeteroNodeDataView, HeteroEdgeDataView],
has_one_type: bool,
- ):
+ ) -> None:
"""Insert valid DGL features into a DGL graph.
:param features_data: A dictionary storing the DGL features formatted as lists.
- :type features_data: defaultdict[Any, defaultdict[Any, list]]
- :param data: The (empty) ndata or edata instance attribute of a dgl graph, which is about to receive the **features_data**.
- :type data: Union[HeteroNodeDataView, HeteroEdgeDataView]
- :param has_one_type: Set to True if the DGL graph only has one ntype, or one etype.
+ :type features_data: Defaultdict[Any, Any]
+ :param data: The (empty) ndata or edata instance attribute of a dgl graph,
+ which is about to receive **features_data**.
+ :type data: Union[dgl.view.HeteroNodeDataView, dgl.view.HeteroEdgeDataView]
+ :param has_one_type: Set to True if the DGL graph only has one ntype,
+ or one etype.
:type has_one_type: bool
"""
- col_dict: dict
+ col_dict: Dict[str, List[Any]]
for key, col_dict in features_data.items():
for col, array in col_dict.items():
data[key] = (
- torch.tensor(array)
- if has_one_type
- else {**data[key], col: torch.tensor(array)}
+ tensor(array) if has_one_type else {**data[key], col: tensor(array)}
)
def __prepare_adb_attributes(
self,
data: Union[HeteroNodeDataView, HeteroEdgeDataView],
- features: set,
- id: int,
- doc: dict,
+ features: Set[Any],
+ id: Union[int, float, bool],
+ doc: Json,
col: str,
has_one_type: bool,
- ):
+ ) -> None:
"""Convert DGL features into a set of ArangoDB attributes for a given document
- :param data: The ndata or edata instance attribute of a dgl graph, filled with node or edge feature data.
- :type data: Union[HeteroNodeDataView, HeteroEdgeDataView]
+ :param data: The ndata or edata instance attribute of a dgl graph, filled with
+ node or edge feature data.
+ :type data: Union[dgl.view.HeteroNodeDataView, dgl.view.HeteroEdgeDataView]
:param features: A set of DGL feature keys to convert into ArangoDB attributes
- :type features: set
+ :type features: Set[Any]
:param id: The ID of the current DGL node / edge
- :type id: int
+ :type id: Union[int, float, bool]
:param doc: The current ArangoDB document
- :type doc: dict
+ :type doc: adbdgl_adapter.typings.Json
:param col: The collection the current document belongs to
:type col: str
- :param has_one_type: Set to True if the DGL graph only has one ntype, or one etype.
+ :param has_one_type: Set to True if the DGL graph only has one ntype,
+ or one etype.
:type has_one_type: bool
"""
for key in features:
tensor = data[key] if has_one_type else data[key][col]
doc[key] = self.__cntrl._dgl_feature_to_adb_attribute(key, col, tensor[id])
- def __insert_adb_docs(self, col: str, col_docs: list, doc: dict, batch_size: int):
- """Insert an ArangoDB document into a list. If the list exceeds batch_size documents, insert into the ArangoDB collection.
+ def __insert_adb_docs(
+ self,
+ col: str,
+ col_docs: List[Json],
+ doc: Json,
+ batch_size: int,
+ ) -> None:
+ """Insert an ArangoDB document into a list. If the list exceeds
+ batch_size documents, insert into the ArangoDB collection.
:param col: The collection name
:type col: str
:param col_docs: The existing documents data belonging to the collection.
- :type col_docs: list
+ :type col_docs: List[adbdgl_adapter.typings.Json]
:param doc: The current document to insert.
- :type doc: dict
+ :type doc: adbdgl_adapter.typings.Json
:param batch_size: The maximum number of documents to insert at once
:type batch_size: int
"""
@@ -413,38 +434,45 @@ def __insert_adb_docs(self, col: str, col_docs: list, doc: dict, batch_size: int
self.__db.collection(col).import_bulk(col_docs, on_duplicate="replace")
col_docs.clear()
- def __fetch_adb_docs(self, col: str, attributes: set, query_options: dict):
+ def __fetch_adb_docs(
+ self, col: str, attributes: Set[str], query_options: Any
+ ) -> Result[Cursor]:
"""Fetches ArangoDB documents within a collection.
:param col: The ArangoDB collection.
:type col: str
:param attributes: The set of document attributes.
- :type attributes: set
- :param query_options: Keyword arguments to specify AQL query options when fetching documents from the ArangoDB instance.
- :type query_options: **kwargs
+ :type attributes: Set[str]
+ :param query_options: Keyword arguments to specify AQL query options
+ when fetching documents from the ArangoDB instance.
+ :type query_options: Any
:return: Result cursor.
:rtype: arango.cursor.Cursor
"""
aql = f"""
FOR doc IN {col}
RETURN MERGE(
- KEEP(doc, {list(attributes)}),
- {{"_id": doc._id}},
+ KEEP(doc, {list(attributes)}),
+ {{"_id": doc._id}},
doc._from ? {{"_from": doc._from, "_to": doc._to}}: {{}}
)
"""
return self.__db.aql.execute(aql, **query_options)
- def __validate_attributes(self, type: str, attributes: set, valid_attributes: set):
- """Validates that a set of attributes includes the required valid attributes.
+ def __validate_attributes(
+ self, type: str, attributes: Set[str], valid_attributes: Set[str]
+ ) -> None:
+ """Validates that a set of attributes includes the required valid
+ attributes.
- :param type: The context of the attribute validation (e.g connection attributes, graph attributes, etc).
+ :param type: The context of the attribute validation
+            (e.g. connection attributes, graph attributes, etc.).
:type type: str
:param attributes: The provided attributes, possibly invalid.
- :type attributes: set
+ :type attributes: Set[str]
:param valid_attributes: The valid attributes.
- :type valid_attributes: set
+ :type valid_attributes: Set[str]
:raise ValueError: If **valid_attributes** is not a subset of **attributes**
"""
if valid_attributes.issubset(attributes) is False:
diff --git a/adbdgl_adapter/adbdgl_adapter/abc.py b/adbdgl_adapter/adbdgl_adapter/abc.py
deleted file mode 100644
index 9caab5d..0000000
--- a/adbdgl_adapter/adbdgl_adapter/abc.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-"""
-@author: Anthony Mahanna
-"""
-
-from abc import ABC
-
-
-class ADBDGL_Adapter(ABC):
- def __init__(self):
- raise NotImplementedError() # pragma: no cover
-
- def arangodb_to_dgl(self):
- raise NotImplementedError() # pragma: no cover
-
- def arangodb_collections_to_dgl(self):
- raise NotImplementedError() # pragma: no cover
-
- def arangodb_graph_to_dgl(self):
- raise NotImplementedError() # pragma: no cover
-
- def dgl_to_arangodb(self):
- raise NotImplementedError() # pragma: no cover
-
- def etypes_to_edefinitions(self):
- raise NotImplementedError() # pragma: no cover
-
- def __prepare_dgl_features(self):
- raise NotImplementedError() # pragma: no cover
-
- def __insert_dgl_features(self):
- raise NotImplementedError() # pragma: no cover
-
- def __prepare_adb_attributes(self):
- raise NotImplementedError() # pragma: no cover
-
- def __insert_adb_docs(self):
- raise NotImplementedError() # pragma: no cover
-
- def __fetch_adb_docs(self):
- raise NotImplementedError() # pragma: no cover
-
- def __validate_attributes(self):
- raise NotImplementedError() # pragma: no cover
-
- @property
- def DEFAULT_CANONICAL_ETYPE(self):
- return [("_N", "_E", "_N")]
-
- @property
- def CONNECTION_ATRIBS(self):
- return {"hostname", "username", "password", "dbName"}
-
- @property
- def METAGRAPH_ATRIBS(self):
- return {"vertexCollections", "edgeCollections"}
-
- @property
- def EDGE_DEFINITION_ATRIBS(self):
- return {"edge_collection", "from_vertex_collections", "to_vertex_collections"}
-
-
-class ADBDGL_Controller(ABC):
- def _adb_attribute_to_dgl_feature(self):
- raise NotImplementedError() # pragma: no cover
-
- def _dgl_feature_to_adb_attribute(self):
- raise NotImplementedError() # pragma: no cover
diff --git a/adbdgl_adapter/adbdgl_adapter/adbdgl_controller.py b/adbdgl_adapter/adbdgl_adapter/adbdgl_controller.py
deleted file mode 100644
index ca0497b..0000000
--- a/adbdgl_adapter/adbdgl_adapter/adbdgl_controller.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-from .abc import ADBDGL_Controller
-from collections import defaultdict
-from torch.functional import Tensor
-
-"""
-
-@author: Anthony Mahanna
-"""
-
-
-class Base_ADBDGL_Controller(ADBDGL_Controller):
- """ArangoDB-DGL controller.
-
- Responsible for controlling how ArangoDB attributes
- are converted into DGL features, and vice-versa.
-
- You can derive your own custom ADBDGL_Controller if you want to maintain
- consistency between your ArangoDB attributes & your DGL features.
- """
-
- def _adb_attribute_to_dgl_feature(self, key: str, col: str, val):
- """
- Given an ArangoDB attribute key, its assigned value (for an arbitrary document),
- and the collection it belongs to, convert it to a valid
- DGL feature: https://docs.dgl.ai/en/0.6.x/guide/graph-feature.html.
-
- NOTE: You must override this function if you want to transfer non-numerical ArangoDB
- attributes to DGL (DGL only accepts 'attributes' (a.k.a features) of numerical types).
- Read more about DGL features here: https://docs.dgl.ai/en/0.6.x/new-tutorial/2_dglgraph.html#assigning-node-and-edge-features-to-graph.
- """
- try:
- return float(val)
- except:
- return 0
-
- def _dgl_feature_to_adb_attribute(self, key: str, col: str, val: Tensor):
- """
- Given a DGL feature key, its assigned value (for an arbitrary node or edge),
- and the collection it belongs to, convert it to a valid ArangoDB attribute (e.g string, list, number, ...).
-
- NOTE: No action is needed here if you want to keep the numerical-based values of your DGL features.
- """
- try:
- return val.item()
- except ValueError:
- print("HERERERERE")
- return val.tolist()
diff --git a/adbdgl_adapter/controller.py b/adbdgl_adapter/controller.py
new file mode 100644
index 0000000..bd7e8d8
--- /dev/null
+++ b/adbdgl_adapter/controller.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+from typing import Any
+
+from torch.functional import Tensor
+
+from .abc import Abstract_ADBDGL_Controller
+
+
+class ADBDGL_Controller(Abstract_ADBDGL_Controller):
+ """ArangoDB-DGL controller.
+
+ Responsible for controlling how ArangoDB attributes
+ are converted into DGL features, and vice-versa.
+
+ You can derive your own custom ADBDGL_Controller if you want to maintain
+ consistency between your ArangoDB attributes & your DGL features.
+ """
+
+ def _adb_attribute_to_dgl_feature(self, key: str, col: str, val: Any) -> Any:
+ """
+ Given an ArangoDB attribute key, its assigned value (for an arbitrary document),
+ and the collection it belongs to, convert it to a valid
+ DGL feature: https://docs.dgl.ai/en/0.6.x/guide/graph-feature.html.
+
+ NOTE: You must override this function if you want to transfer non-numerical
+ ArangoDB attributes to DGL (DGL only accepts 'attributes' (a.k.a features)
+ of numerical types). Read more about DGL features here:
+ https://docs.dgl.ai/en/0.6.x/new-tutorial/2_dglgraph.html#assigning-node-and-edge-features-to-graph.
+
+ :param key: The ArangoDB attribute key name
+ :type key: str
+ :param col: The ArangoDB collection of the ArangoDB document.
+ :type col: str
+ :param val: The assigned attribute value of the ArangoDB document.
+ :type val: Any
+ :return: The attribute's representation as a DGL Feature
+ :rtype: Any
+ """
+ if type(val) in [int, float, bool]:
+ return val
+
+ try:
+ return float(val)
+ except (ValueError, TypeError, SyntaxError):
+ return 0
+
+ def _dgl_feature_to_adb_attribute(self, key: str, col: str, val: Tensor) -> Any:
+ """
+ Given a DGL feature key, its assigned value (for an arbitrary node or edge),
+ and the collection it belongs to, convert it to a valid ArangoDB attribute
+ (e.g string, list, number, ...).
+
+ NOTE: No action is needed here if you want to keep the numerical-based values
+ of your DGL features.
+
+ :param key: The DGL attribute key name
+ :type key: str
+ :param col: The ArangoDB collection of the (soon-to-be) ArangoDB document.
+ :type col: str
+ :param val: The assigned attribute value of the DGL node.
+ :type val: Tensor
+ :return: The feature's representation as an ArangoDB Attribute
+ :rtype: Any
+ """
+ try:
+ return val.item()
+ except ValueError:
+ return val.tolist()
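A quick sanity check of the `.item()`/`.tolist()` fallback in `_dgl_feature_to_adb_attribute` above (a standalone sketch, not part of the patch):

```python
import torch

t0 = torch.tensor(3.5)        # zero-dim tensor: .item() yields a plain float
print(t0.item())              # -> 3.5

t1 = torch.tensor([1, 2, 3])  # multi-element tensor: .item() raises ValueError
try:
    t1.item()
except ValueError:
    print(t1.tolist())        # -> [1, 2, 3], i.e. the fallback branch above
```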
diff --git a/adbdgl_adapter/setup.cfg b/adbdgl_adapter/setup.cfg
deleted file mode 100644
index 5df7f3d..0000000
--- a/adbdgl_adapter/setup.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-[metadata]
-description_file = README.md
-
-[tool:pytest]
-markers =
- unit: Marks a unit test
-testpaths = tests
\ No newline at end of file
diff --git a/adbdgl_adapter/typings.py b/adbdgl_adapter/typings.py
new file mode 100644
index 0000000..c3a7015
--- /dev/null
+++ b/adbdgl_adapter/typings.py
@@ -0,0 +1,12 @@
+__all__ = ["Json", "ArangoMetagraph", "DGLCanonicalEType"]
+
+from typing import Any, Dict, Set, Tuple
+
+from torch.functional import Tensor
+
+Json = Dict[str, Any]
+ArangoMetagraph = Dict[str, Dict[str, Set[str]]]
+
+
+DGLCanonicalEType = Tuple[str, str, str]
+DGLDataDict = Dict[DGLCanonicalEType, Tuple[Tensor, Tensor]]
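For orientation, a value satisfying the new `ArangoMetagraph` alias looks like this (collection and attribute names are hypothetical, modeled on the fraud-detection metagraph that appears later in the notebook diff):

```python
from typing import Dict, Set

metagraph: Dict[str, Dict[str, Set[str]]] = {  # i.e. an ArangoMetagraph
    "vertexCollections": {"account": {"Balance", "rank"}, "customer": {"rank"}},
    "edgeCollections": {"transaction": {"transaction_amt"}},
}
```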
diff --git a/examples/ArangoDB_DGL_Adapter.ipynb b/examples/ArangoDB_DGL_Adapter.ipynb
index 3dd6ab4..57d2ae3 100644
--- a/examples/ArangoDB_DGL_Adapter.ipynb
+++ b/examples/ArangoDB_DGL_Adapter.ipynb
@@ -36,7 +36,7 @@
"source": [
"Version: 1.0.0\n",
"\n",
- "Objective: Export Graphs from [ArangoDB](https://www.arangodb.com/), a multi-model Graph Database, into [Deep Graph Library](https://www.dgl.ai/) (DGL), a python package for graph neural networks, and vice-versa."
+ "Objective: Export Graphs from [ArangoDB](https://www.arangodb.com/), a multi-model Graph Database, to [Deep Graph Library](https://www.dgl.ai/) (DGL), a python package for graph neural networks, and vice-versa."
]
},
{
@@ -58,10 +58,10 @@
"source": [
"%%capture\n",
"!git clone -b oasis_connector --single-branch https://github.com/arangodb/interactive_tutorials.git\n",
- "!git clone https://github.com/arangoml/dgl-adapter.git # !git clone -b 1.0.0 --single-branch https://github.com/arangoml/dgl-adapter.git\n",
+ "!git clone -b 1.0.0 --single-branch https://github.com/arangoml/dgl-adapter.git\n",
"!rsync -av dgl-adapter/examples/ ./ --exclude=.git\n",
"!rsync -av interactive_tutorials/ ./ --exclude=.git\n",
- "!pip3 install \"git+https://github.com/arangoml/dgl-adapter.git#egg=adbdgl_adapter&subdirectory=adbdgl_adapter\" # pip3 install adbdgl_adapter==1.0.0\n",
+ "!pip3 install adbdgl_adapter==1.0.0\n",
"!pip3 install matplotlib\n",
"!pip3 install pyArango\n",
"!pip3 install networkx ## For drawing purposes "
@@ -71,7 +71,11 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
- "id": "RpqvL4COeG8-"
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "RpqvL4COeG8-",
+ "outputId": "2df55e4e-03fa-47ed-c2c9-baf9f597e1d8"
},
"outputs": [],
"source": [
@@ -87,8 +91,9 @@
"from dgl.data import KarateClubDataset\n",
"from dgl.data import MiniGCDataset\n",
"\n",
- "from adbdgl_adapter.adbdgl_adapter import ArangoDB_DGL_Adapter\n",
- "from adbdgl_adapter.adbdgl_controller import Base_ADBDGL_Controller"
+ "from adbdgl_adapter.adapter import ADBDGL_Adapter\n",
+ "from adbdgl_adapter.controller import ADBDGL_Controller\n",
+ "from adbdgl_adapter.typings import Json, ArangoMetagraph, DGLCanonicalEType, DGLDataDict"
]
},
{
@@ -124,7 +129,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "vf0350qvj8up",
- "outputId": "9c2e9905-7272-44f6-8e59-e5f568a57758"
+ "outputId": "a65f00d2-cd6e-4583-94d8-2c9884e2e2e2"
},
"outputs": [],
"source": [
@@ -157,7 +162,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "oOS3AVAnkQEV",
- "outputId": "9589b7b3-0867-4ff2-9c9f-8d0f38633490"
+ "outputId": "4609cdef-25ce-4f00-94b5-482c76274f88"
},
"outputs": [],
"source": [
@@ -193,7 +198,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "meLon-KgkU4h",
- "outputId": "9f2f8081-393f-4a1b-9ff7-3f6c13289a62"
+ "outputId": "976680a4-eadd-43f2-da17-e6a574fad8a7"
},
"outputs": [],
"source": [
@@ -231,7 +236,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "zTebQ0LOlsGA",
- "outputId": "0f3d26db-1b50-4d65-8385-8d0ad7147ec5"
+ "outputId": "9c84cb84-f7ce-42b3-9174-01f38295c5dd"
},
"outputs": [],
"source": [
@@ -274,7 +279,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "KsxNujb0mSqZ",
- "outputId": "0b10fb67-5193-49ef-8aee-d691f53fe5bf"
+ "outputId": "3f3fd2b1-e1d3-4b03-c6c4-43566672cbb5"
},
"outputs": [],
"source": [
@@ -317,7 +322,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "2ekGwnJDeG8-",
- "outputId": "fb348f69-8321-40b3-9bf5-0085ea218492"
+ "outputId": "92e9d288-0259-45cc-e73d-a8e9f629063a"
},
"outputs": [],
"source": [
@@ -377,16 +382,13 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
- "colab": {
- "base_uri": "https://localhost:8080/"
- },
- "id": "7bgGJ3QkeG8_",
- "outputId": "93451001-15f8-463d-8341-5c65fe6bc178"
+ "id": "7bgGJ3QkeG8_"
},
"outputs": [],
"source": [
+ "%%capture\n",
"!chmod -R 755 ./tools\n",
- "!./tools/arangorestore -c none --server.endpoint http+ssl://{con[\"hostname\"]}:{con[\"port\"]} --server.username {con[\"username\"]} --server.database {con[\"dbName\"]} --server.password {con[\"password\"]} --default-replication-factor 3 --input-directory \"data/fraud_dump\""
+ "!./tools/arangorestore -c none --server.endpoint http+ssl://{con[\"hostname\"]}:{con[\"port\"]} --server.username {con[\"username\"]} --server.database {con[\"dbName\"]} --server.password {con[\"password\"]} --replication-factor 3 --input-directory \"data/fraud_dump\""
]
},
{
@@ -424,7 +426,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "PybHP7jpeG8_",
- "outputId": "724c9f23-c63b-4d34-f2f8-d9b7579cc985"
+ "outputId": "ba3bfc7c-ef56-47e7-8e98-3763d4f34afe"
},
"outputs": [],
"source": [
@@ -484,11 +486,11 @@
"base_uri": "https://localhost:8080/"
},
"id": "oG496kBeeG9A",
- "outputId": "164ea448-1117-4bde-e8e3-220558a1c0e3"
+ "outputId": "50ecbdf5-c82f-4540-d345-14eb4a488f2c"
},
"outputs": [],
"source": [
- "adbdgl_adapter = ArangoDB_DGL_Adapter(con)"
+ "adbdgl_adapter = ADBDGL_Adapter(con)"
]
},
{
@@ -518,7 +520,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "zZ-Hu3lLVHgd",
- "outputId": "945d4971-cc05-4f97-eda3-b9e02cc05df8"
+ "outputId": "39f32c51-0753-45a8-a361-dcf46d4e6148"
},
"outputs": [],
"source": [
@@ -555,7 +557,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "i4XOpdRLUNlJ",
- "outputId": "e445ea58-35ef-41ed-d4ac-717ac6f68e9c"
+ "outputId": "b58e75d1-e935-4abd-9bdb-bc8935d9cdc8"
},
"outputs": [],
"source": [
@@ -579,7 +581,7 @@
{
"cell_type": "markdown",
"metadata": {
- "id": "umy25EsUU6Lg"
+ "id": "qEH6OdSB23Ya"
},
"source": [
"## Via ArangoDB Metagraph"
@@ -592,8 +594,59 @@
"colab": {
"base_uri": "https://localhost:8080/"
},
- "id": "UWX9-MsKeG9A",
- "outputId": "f1ad45d9-d29a-4d7f-a853-b808ac898dfe"
+ "id": "7Kz8lXXq23Yk",
+ "outputId": "1458aef6-14e5-48c0-98bf-77f21431bc73"
+ },
+ "outputs": [],
+ "source": [
+ "# Define Metagraph\n",
+ "fraud_detection_metagraph = {\n",
+ " \"vertexCollections\": {\n",
+ " \"account\": {\"rank\", \"Balance\", \"customer_id\"},\n",
+ " \"Class\": {\"concrete\"},\n",
+ " \"customer\": {\"rank\"},\n",
+ " },\n",
+ " \"edgeCollections\": {\n",
+ " \"accountHolder\": {},\n",
+ " \"Relationship\": {},\n",
+ " \"transaction\": {\"receiver_bank_id\", \"sender_bank_id\", \"transaction_amt\"},\n",
+ " },\n",
+ "}\n",
+ "\n",
+ "# Create DGL Graph from attributes\n",
+ "dgl_g = adbdgl_adapter.arangodb_to_dgl('FraudDetection', fraud_detection_metagraph)\n",
+ "\n",
+ "# You can also provide valid Python-Arango AQL query options to the command above, like such:\n",
+ "# dgl_g = adbdgl_adapter.arangodb_to_dgl(graph_name = 'FraudDetection', fraud_detection_metagraph, ttl=1000, stream=True)\n",
+ "# See more here: https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute\n",
+ "\n",
+ "# Show graph data\n",
+ "print('\\n--------------')\n",
+ "print(dgl_g)\n",
+ "print('\\n--------------')\n",
+ "print(dgl_g.ndata)\n",
+ "print('--------------\\n')\n",
+ "print(dgl_g.edata)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "DqIKT1lO4ASw"
+ },
+ "source": [
+ "## Via ArangoDB Metagraph with a custom controller"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "U4_vSdU_4AS4",
+ "outputId": "b719b0d4-c0a4-43a0-915f-8ee765e1ec86"
},
"outputs": [],
"source": [
@@ -611,9 +664,9 @@
" },\n",
"}\n",
"\n",
- "# When converting to DGL via an ArangoDB metagraph, a user-defined Controller class\n",
- "# is required, to specify how ArangoDB attributes should be converted into DGL features.\n",
- "class FraudDetection_ADBDGL_Controller(Base_ADBDGL_Controller):\n",
+ "# When converting to DGL via an ArangoDB Metagraph that contains non-numerical values, a user-defined \n",
+ "# Controller class is required to specify how ArangoDB attributes should be converted to DGL features.\n",
+ "class FraudDetection_ADBDGL_Controller(ADBDGL_Controller):\n",
" \"\"\"ArangoDB-DGL controller.\n",
"\n",
" Responsible for controlling how ArangoDB attributes\n",
@@ -629,49 +682,59 @@
" and the collection it belongs to, convert it to a valid\n",
" DGL feature: https://docs.dgl.ai/en/0.6.x/guide/graph-feature.html.\n",
"\n",
- " NOTE: You must override this function if you want to transfer non-numerical ArangoDB\n",
- " attributes to DGL (DGL only accepts 'attributes' (a.k.a features) of numerical types).\n",
- " Read more about DGL features here: https://docs.dgl.ai/en/0.6.x/new-tutorial/2_dglgraph.html#assigning-node-and-edge-features-to-graph.\n",
+ " NOTE: You must override this function if you want to transfer non-numerical\n",
+ " ArangoDB attributes to DGL (DGL only accepts 'attributes' (a.k.a features)\n",
+ " of numerical types). Read more about DGL features here:\n",
+ " https://docs.dgl.ai/en/0.6.x/new-tutorial/2_dglgraph.html#assigning-node-and-edge-features-to-graph.\n",
+ "\n",
+ " :param key: The ArangoDB attribute key name\n",
+ " :type key: str\n",
+ " :param col: The ArangoDB collection of the ArangoDB document.\n",
+ " :type col: str\n",
+ " :param val: The assigned attribute value of the ArangoDB document.\n",
+ " :type val: Any\n",
+ " :return: The attribute's representation as a DGL Feature\n",
+ " :rtype: Any\n",
" \"\"\"\n",
- " if type(val) in [int, float, bool]:\n",
- " return val\n",
- "\n",
- " if col == \"transaction\":\n",
- " if key == \"transaction_date\":\n",
- " return int(str(val).replace(\"-\", \"\"))\n",
- " \n",
- " if key == \"trans_time\":\n",
- " return int(str(val).replace(\":\", \"\"))\n",
- " \n",
- " if col == \"customer\":\n",
- " if key == \"Sex\":\n",
- " return 0 if val == \"M\" else 1\n",
- "\n",
- " if key == \"Ssn\":\n",
- " return int(str(val).replace(\"-\", \"\"))\n",
- "\n",
- " if col == \"Class\":\n",
- " if key == \"name\":\n",
- " if val == \"Bank\":\n",
- " return 0\n",
- " elif val == \"Branch\":\n",
- " return 1\n",
- " elif val == \"Account\":\n",
- " return 2\n",
- " elif val == \"Customer\":\n",
- " return 3\n",
- " else:\n",
- " return -1\n",
+ " try:\n",
+ " if col == \"transaction\":\n",
+ " if key == \"transaction_date\":\n",
+ " return int(str(val).replace(\"-\", \"\"))\n",
+ " \n",
+ " if key == \"trans_time\":\n",
+ " return int(str(val).replace(\":\", \"\"))\n",
+ " \n",
+ " if col == \"customer\":\n",
+ " if key == \"Sex\":\n",
+ " return 0 if val == \"M\" else 1\n",
+ "\n",
+ " if key == \"Ssn\":\n",
+ " return int(str(val).replace(\"-\", \"\"))\n",
+ "\n",
+ " if col == \"Class\":\n",
+ " if key == \"name\":\n",
+ " if val == \"Bank\":\n",
+ " return 0\n",
+ " elif val == \"Branch\":\n",
+ " return 1\n",
+ " elif val == \"Account\":\n",
+ " return 2\n",
+ " elif val == \"Customer\":\n",
+ " return 3\n",
+ " else:\n",
+ " return -1\n",
+ " except (ValueError, TypeError, SyntaxError):\n",
+ " return 0\n",
"\n",
" return super()._adb_attribute_to_dgl_feature(key, col, val)\n",
"\n",
- "fraud_adbgl_adapter = ArangoDB_DGL_Adapter(con, FraudDetection_ADBDGL_Controller)\n",
+ "fraud_adbdgl_adapter = ADBDGL_Adapter(con, FraudDetection_ADBDGL_Controller())\n",
"\n",
"# Create DGL Graph from attributes\n",
- "dgl_g = fraud_adbgl_adapter.arangodb_to_dgl('FraudDetection', fraud_detection_metagraph)\n",
+ "dgl_g = fraud_adbdgl_adapter.arangodb_to_dgl('FraudDetection', fraud_detection_metagraph)\n",
"\n",
"# You can also provide valid Python-Arango AQL query options to the command above, like such:\n",
- "# dgl_g = adbdgl_adapter.arangodb_to_dgl(graph_name = 'FraudDetection', fraud_detection_metagraph, ttl=1000, stream=True)\n",
+ "# dgl_g = fraud_adbdgl_adapter.arangodb_to_dgl(graph_name = 'FraudDetection', fraud_detection_metagraph, ttl=1000, stream=True)\n",
"# See more here: https://docs.python-arango.com/en/main/specs.html#arango.aql.AQL.execute\n",
"\n",
"# Show graph data\n",
@@ -707,10 +770,10 @@
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
- "height": 0
+ "height": 577
},
"id": "eRVbiBy4ZdE4",
- "outputId": "bcbfce84-8bc0-4605-82d7-78fbaab53527"
+ "outputId": "d44eb9d9-e046-443b-8ded-79654f004e02"
},
"outputs": [],
"source": [
@@ -723,14 +786,13 @@
"python_arango_db_driver.delete_graph(name, drop_collections=True, ignore_missing=True)\n",
"adb_karate_graph = adbdgl_adapter.dgl_to_arangodb(name, dgl_karate_graph)\n",
"\n",
- "\n",
- "print(f\"\\nInspect the graph here: https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{name}\\n\")\n",
- "\n",
+ "print('\\n--------------------')\n",
"print(\"https://{}:{}\".format(con[\"hostname\"], con[\"port\"]))\n",
"print(\"Username: \" + con[\"username\"])\n",
"print(\"Password: \" + con[\"password\"])\n",
"print(\"Database: \" + con[\"dbName\"])\n",
- "\n",
+ "print('--------------------\\n')\n",
+ "print(f\"\\nInspect the graph here: https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{name}\\n\")\n",
"print(f\"\\nView the original graph below:\")"
]
},
@@ -750,10 +812,10 @@
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
- "height": 0
+ "height": 1000
},
"id": "dADiexlAioGH",
- "outputId": "286375c7-f2c9-4843-fb30-d086994985fc"
+ "outputId": "273988c8-1749-4fe0-85fe-51b0e1ab2058"
},
"outputs": [],
"source": [
@@ -783,16 +845,16 @@
"adb_hypercube_graph = adbdgl_adapter.dgl_to_arangodb(hypercube, dgl_hypercube_graph)\n",
"adb_clique_graph = adbdgl_adapter.dgl_to_arangodb(clique, dgl_clique_graph)\n",
"\n",
- "print(\"\\nInspect the graphs here:\\n\")\n",
- "print(f\"1) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{lollipop}\")\n",
- "print(f\"2) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{hypercube}\")\n",
- "print(f\"3) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{clique}\\n\")\n",
- "\n",
+ "print('\\n--------------------')\n",
"print(\"https://{}:{}\".format(con[\"hostname\"], con[\"port\"]))\n",
"print(\"Username: \" + con[\"username\"])\n",
"print(\"Password: \" + con[\"password\"])\n",
"print(\"Database: \" + con[\"dbName\"])\n",
- "\n",
+ "print('--------------------\\n')\n",
+ "print(\"\\nInspect the graphs here:\\n\")\n",
+ "print(f\"1) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{lollipop}\")\n",
+ "print(f\"2) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{hypercube}\")\n",
+ "print(f\"3) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{clique}\\n\")\n",
"print(f\"\\nView the original graphs below:\")"
]
},
@@ -803,7 +865,7 @@
},
"source": [
"\n",
- "## Example 3: DGL MiniGCDataset Graphs (with attribute transfer)"
+ "## Example 3: DGL MiniGCDataset Graphs with a custom controller"
]
},
{
@@ -814,30 +876,39 @@
"base_uri": "https://localhost:8080/"
},
"id": "jbJsvMMaoJoT",
- "outputId": "f426cdaf-a53c-4ade-d3b6-cbb11dd39c67"
+ "outputId": "2ddca41f-9c8b-4db4-c0aa-c1b2cc124fa5"
},
"outputs": [],
"source": [
"from torch.functional import Tensor\n",
"\n",
- "# Load the dgl graphs & populate node data\n",
+ "# Load the dgl graphs\n",
"dgl_lollipop_graph = remove_self_loop(MiniGCDataset(8, 7, 8)[3][0])\n",
- "dgl_lollipop_graph.ndata['lollipop_ndata'] = torch.ones(7)\n",
- "\n",
"dgl_hypercube_graph = remove_self_loop(MiniGCDataset(8, 8, 9)[4][0])\n",
- "dgl_hypercube_graph.ndata['hypercube_ndata'] = torch.zeros(8)\n",
- "\n",
"dgl_clique_graph = remove_self_loop(MiniGCDataset(8, 6, 7)[6][0])\n",
+ "\n",
+ " # Add DGL Node & Edge Features to each graph\n",
+ "dgl_lollipop_graph.ndata[\"random_ndata\"] = torch.tensor(\n",
+ " [[i, i, i] for i in range(0, dgl_lollipop_graph.num_nodes())]\n",
+ ")\n",
+ "dgl_lollipop_graph.edata[\"random_edata\"] = torch.rand(dgl_lollipop_graph.num_edges())\n",
+ "\n",
+ "dgl_hypercube_graph.ndata[\"random_ndata\"] = torch.rand(dgl_hypercube_graph.num_nodes())\n",
+ "dgl_hypercube_graph.edata[\"random_edata\"] = torch.tensor(\n",
+ " [[[i], [i], [i]] for i in range(0, dgl_hypercube_graph.num_edges())]\n",
+ ")\n",
+ "\n",
"dgl_clique_graph.ndata['clique_ndata'] = torch.tensor([1,2,3,4,5,6])\n",
+ "dgl_clique_graph.edata['clique_edata'] = torch.tensor(\n",
+ " [1 if i % 2 == 0 else 0 for i in range(0, dgl_clique_graph.num_edges())]\n",
+ ")\n",
"\n",
"\n",
"# When converting to ArangoDB from DGL, a user-defined Controller class\n",
"# is required to specify how DGL features (aka attributes) should be converted \n",
- "# into ArangoDB attributes.\n",
- "\n",
- "# NOTE: A custom Controller is NOT needed you want to keep the \n",
- "# numerical-based values of your DGL features (which is the case for dgl_lollipop_graph and dgl_hypercube_graph)\n",
- "class Clique_ADBDGL_Controller(Base_ADBDGL_Controller):\n",
+ "# into ArangoDB attributes. NOTE: A custom Controller is NOT needed you want to\n",
+ "# keep the numerical-based values of your DGL features.\n",
+ "class Clique_ADBDGL_Controller(ADBDGL_Controller):\n",
" \"\"\"ArangoDB-DGL controller.\n",
"\n",
" Responsible for controlling how ArangoDB attributes\n",
@@ -850,26 +921,40 @@
" def _dgl_feature_to_adb_attribute(self, key: str, col: str, val: Tensor):\n",
" \"\"\"\n",
" Given a DGL feature key, its assigned value (for an arbitrary node or edge),\n",
- " and the collection it belongs to, convert it to a valid ArangoDB attribute (e.g string, list, number, ...).\n",
- "\n",
- " NOTE: No action is needed here if you want to keep the numerical-based values of your DGL features.\n",
+ " and the collection it belongs to, convert it to a valid ArangoDB attribute\n",
+ " (e.g string, list, number, ...).\n",
+ "\n",
+ " NOTE: No action is needed here if you want to keep the numerical-based values\n",
+ " of your DGL features.\n",
+ "\n",
+ " :param key: The DGL attribute key name\n",
+ " :type key: str\n",
+ " :param col: The ArangoDB collection of the (soon-to-be) ArangoDB document.\n",
+ " :type col: str\n",
+ " :param val: The assigned attribute value of the DGL node.\n",
+ " :type val: Tensor\n",
+ " :return: The feature's representation as an ArangoDB Attribute\n",
+ " :rtype: Any\n",
" \"\"\"\n",
" if key == \"clique_ndata\":\n",
" if val == 1:\n",
" return \"one is fun\"\n",
" elif val == 2:\n",
- " return \"but two is blue\"\n",
+ " return \"two is blue\"\n",
" elif val == 3:\n",
- " return \"yet three is free\"\n",
+ " return \"three is free\"\n",
" elif val == 4:\n",
- " return \"and four is more\"\n",
- " else:\n",
+ " return \"four is more\"\n",
+ " else: # No special string for values 5 & 6\n",
" return f\"ERROR! Unrecognized value, got {val}\"\n",
"\n",
+ " if key == \"clique_edata\":\n",
+ " return bool(val)\n",
+ "\n",
" return super()._dgl_feature_to_adb_attribute(key, col, val)\n",
"\n",
"# Re-instantiate a new adapter specifically for the Clique Graph Conversion\n",
- "clique_adbgl_adapter = ArangoDB_DGL_Adapter(con, Clique_ADBDGL_Controller)\n",
+ "clique_adbgl_adapter = ADBDGL_Adapter(con, Clique_ADBDGL_Controller())\n",
"\n",
"# Create the ArangoDB graphs\n",
"lollipop = \"Lollipop_With_Attributes\"\n",
@@ -884,15 +969,16 @@
"adb_hypercube_graph = adbdgl_adapter.dgl_to_arangodb(hypercube, dgl_hypercube_graph)\n",
"adb_clique_graph = clique_adbgl_adapter.dgl_to_arangodb(clique, dgl_clique_graph) # Notice the new adapter here!\n",
"\n",
- "print(\"\\nInspect the graphs here:\\n\")\n",
- "print(f\"1) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{lollipop}\")\n",
- "print(f\"2) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{hypercube}\")\n",
- "print(f\"3) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{clique}\\n\")\n",
- "\n",
+ "print('\\n--------------------')\n",
"print(\"https://{}:{}\".format(con[\"hostname\"], con[\"port\"]))\n",
"print(\"Username: \" + con[\"username\"])\n",
"print(\"Password: \" + con[\"password\"])\n",
- "print(\"Database: \" + con[\"dbName\"])"
+ "print(\"Database: \" + con[\"dbName\"])\n",
+ "print('--------------------\\n')\n",
+ "print(\"\\nInspect the graphs here:\\n\")\n",
+ "print(f\"1) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{lollipop}\")\n",
+ "print(f\"2) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{hypercube}\")\n",
+ "print(f\"3) https://tutorials.arangodb.cloud:8529/_db/{con['dbName']}/_admin/aardvark/index.html#graph/{clique}\\n\")"
]
}
],
@@ -900,16 +986,15 @@
"colab": {
"collapsed_sections": [
"ot1oJqn7m78n",
- "Oc__NAd1eG8-",
"7y81WHO8eG8_",
"227hLXnPeG8_",
"QfE_tKxneG9A",
- "umy25EsUU6Lg",
+ "ZrEDmtqCVD0W",
+ "qEH6OdSB23Ya",
"UafSB_3JZNwK",
- "gshTlSX_ZZsS",
- "CNj1xKhwoJoL"
+ "gshTlSX_ZZsS"
],
- "name": "Copy of ArangoDB_DGLAdapter.ipynb",
+ "name": "ArangoDB_DGL_Adapter_v1.0.0.ipynb",
"provenance": []
},
"kernelspec": {
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..b9911d5
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,23 @@
+[build-system]
+requires = [
+ "setuptools>=42",
+ "setuptools_scm[toml]>=3.4",
+ "wheel",
+]
+build-backend = "setuptools.build_meta"
+
+[tool.coverage.run]
+omit = [
+ "adbdgl_adapter/version.py",
+ "setup.py",
+]
+
+[tool.isort]
+profile = "black"
+
+[tool.pytest.ini_options]
+minversion = "6.0"
+testpaths = ["tests"]
+
+[tool.setuptools_scm]
+write_to = "adbdgl_adapter/version.py"
diff --git a/scripts/assert_version.py b/scripts/assert_version.py
deleted file mode 100644
index 6621ea1..0000000
--- a/scripts/assert_version.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# -*- coding: utf-8 -*-
-import sys
-from packaging.version import Version
-
-if __name__ == "__main__":
- old = Version(sys.argv[1])
- current = Version(sys.argv[2])
- if current > old:
- print("true")
- sys.exit(0)
diff --git a/scripts/extract_version.py b/scripts/extract_version.py
deleted file mode 100644
index 8901dbd..0000000
--- a/scripts/extract_version.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# -*- coding: utf-8 -*-
-import requests
-
-if __name__ == "__main__":
- response = requests.get(
- "https://api.github.com/repos/arangoml/dgl-adapter/releases/latest"
- )
- response.raise_for_status()
- print(response.json().get("tag_name", "0.0.0"))
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..2e8bd1d
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,31 @@
+[metadata]
+name = adbdgl_adapter
+author = Anthony Mahanna
+author_email = [email protected]
+description = Convert ArangoDB graphs to DGL & vice-versa.
+long_description = file: README.md
+long_description_content_type = text/markdown
+url = https://github.com/arangoml/dgl-adapter
+classifiers =
+ Intended Audience :: Developers
+ License :: OSI Approved :: Apache Software License
+ Operating System :: OS Independent
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.6
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: 3.9
+ Topic :: Utilities
+ Typing :: Typed
+
+[options]
+python_requires = >=3.6
+
+[flake8]
+max-line-length = 88
+extend-ignore = E203, E741, W503
+exclude =.git .idea .*_cache dist venv
+
+[mypy]
+ignore_missing_imports = True
+strict = True
diff --git a/adbdgl_adapter/setup.py b/setup.py
similarity index 58%
rename from adbdgl_adapter/setup.py
rename to setup.py
index 5a62704..882e4f8 100644
--- a/adbdgl_adapter/setup.py
+++ b/setup.py
@@ -1,30 +1,43 @@
from setuptools import setup
-with open("../VERSION") as f:
- version = f.read().strip()
-
-with open("../README.md", "r") as f:
- long_description = f.read()
+with open("./README.md") as fp:
+ long_description = fp.read()
setup(
name="adbdgl_adapter",
- author="ArangoDB",
- author_email="[email protected]",
- version=version,
+ author="Anthony Mahanna",
+ author_email="[email protected]",
description="Convert ArangoDB graphs to DGL & vice-versa.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/arangoml/dgl-adapter",
+ keywords=["arangodb", "dgl", "adapter"],
packages=["adbdgl_adapter"],
include_package_data=True,
+ use_scm_version=True,
+ setup_requires=["setuptools_scm"],
python_requires=">=3.6",
license="Apache Software License",
install_requires=[
- "python-arango==7.2.0",
+ "python-arango==7.3.0",
"torch==1.10.0",
"dgl==0.6.1",
+ "setuptools>=42",
+ "setuptools_scm[toml]>=3.4",
],
- tests_require=["pytest", "pytest-cov"],
+ extras_require={
+ "dev": [
+ "black",
+ "flake8>=3.8.0",
+ "isort>=5.0.0",
+ "mypy>=0.790",
+ "pytest>=6.0.0",
+ "pytest-cov>=2.0.0",
+ "coveralls>=3.3.1",
+ "types-setuptools",
+ "types-requests",
+ ],
+ },
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
| Mirror recent networkx-adapter changes
In order to maintain adapter consistency, we need to bring over the following issues/changes recently made to the ArangoDB-NetworkX Adapter:
1. https://github.com/arangoml/networkx-adapter/issues/46
2. https://github.com/arangoml/networkx-adapter/issues/48
3. https://github.com/arangoml/networkx-adapter/issues/47
4. https://github.com/arangoml/networkx-adapter/issues/53
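The most user-visible effect of these changes is the module and class rename; a before/after sketch based on the notebook updates in this PR:

```python
# before (pre-1.0.0 layout):
from adbdgl_adapter.adbdgl_adapter import ArangoDB_DGL_Adapter
from adbdgl_adapter.adbdgl_controller import Base_ADBDGL_Controller

# after (1.0.0 layout):
from adbdgl_adapter.adapter import ADBDGL_Adapter
from adbdgl_adapter.controller import ADBDGL_Controller

# Controllers are now passed as instances rather than classes
# (`con` is a hypothetical connection dict):
# adapter = ADBDGL_Adapter(con, ADBDGL_Controller())
```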
| 2021-12-24T01:35:37 | 0.0 | [] | [] |
|||
0xTCG/aldy | 0xTCG__aldy-54 | 5a9a129eec2e4fc93ba09a28a288ad201978b2a1 | diff --git a/README.rst b/README.rst
index 558f7c5..65b08f6 100644
--- a/README.rst
+++ b/README.rst
@@ -9,8 +9,9 @@
<a href="https://aldy.readthedocs.io/en/latest/?badge=latest"><img src="https://readthedocs.org/projects/aldy/badge/?version=latest" alt="ReadTheDocs"/></a>
<a href="https://codecov.io/github/0xTCG/aldy"><img src="https://codecov.io/github/0xTCG/aldy/coverage.svg?branch=master" alt="Code Coverage"/></a>
<a href="https://github.com/psf/black"><img src="https://img.shields.io/badge/code%20style-black-000000.svg" alt="Black"/></a>
- <a href="https://lgtm.com/projects/g/0xTCG/aldy/context:python"><img src="https://img.shields.io/lgtm/grade/python/g/0xTCG/aldy.svg?logo=lgtm&logoWidth=18" alt="Language grade: Python"/></a>
+ <br/>
<a href="https://www.nature.com/articles/s41467-018-03273-1"><img src="https://img.shields.io/badge/Published%20in-Nature%20Communications-red.svg" alt="Published in Nature Communications" /></a>
+ <a href="https://genome.cshlp.org/content/33/1/61.full"><img src="https://img.shields.io/badge/Published%20in-Genome%20Research-purple.svg" alt="Published in Genome Research" /></a>
<br/>
<b><i>A quick and nifty tool for genotyping and phasing popular pharmacogenes.</i></b>
</p>
@@ -34,7 +35,7 @@ Experimental data is available `here <paper>`_.
If you are using Aldy, please cite our papers in the
`Nature Communications <https://www.nature.com/articles/s41467-018-03273-1>`_
-and `bioRxiv <https://www.biorxiv.org/content/10.1101/2022.08.11.503701v1>`_.
+and `Genome Research <https://genome.cshlp.org/content/33/1/61.full>`_.
⚠️ Warning
==========
diff --git a/aldy/coverage.py b/aldy/coverage.py
index 49c8658..18d7732 100644
--- a/aldy/coverage.py
+++ b/aldy/coverage.py
@@ -193,7 +193,7 @@ def _normalize_coverage(self) -> None:
if sam_ref == 0:
raise AldyException(
f"CN-neutral region {self.profile.cn_region} has no reads. "
- + "Double check your input file for CYP2D8 (are you using hg19?), "
+ + "Double check your input file for CYP2D8 region, "
+ "or pass an alternative CN-neutral region via -n parameter."
)
ratio = self.profile.neutral_value / sam_ref
diff --git a/aldy/gene.py b/aldy/gene.py
index c7565c9..5aae519 100644
--- a/aldy/gene.py
+++ b/aldy/gene.py
@@ -236,6 +236,17 @@ def region_at(self, pos: int) -> Optional[Tuple[int, str]]:
""":returns: Gene ID and a region that covers the position."""
return self._region_at.get(pos, None)
+ def _reverse_op(self, op: str) -> str:
+ if ">" in op:
+ l, r = op.split(">")
+ return f"{rev_comp(l)}>{rev_comp(r)}"
+ elif op[:3] == "ins":
+ return f"ins{rev_comp(op[3:])}"
+ elif op[:3] == "del":
+ assert "ins" not in op, "del+ins not yet supported"
+ return f"del{rev_comp(op[3:])}"
+ return op
+
def get_functional(self, mut, infer=True) -> Optional[str]:
"""
:returns: String describing the mutation effect if a mutation is functional;
@@ -245,26 +256,20 @@ def get_functional(self, mut, infer=True) -> Optional[str]:
if (pos, op) in self.mutations:
return self.mutations[pos, op][0]
- def reverse_op(op: str) -> str:
- if ">" in op:
- l, r = op.split(">")
- return f"{rev_comp(l)}>{rev_comp(r)}"
- elif op[:3] == "ins":
- return f"ins{rev_comp(op[3:])}"
- elif op[:3] == "del":
- assert "ins" not in op, "del+ins not yet supported"
- return f"del{rev_comp(op[3:])}"
- return op
-
# Calculate based on aminoacid change
+ if pos not in self.chr_to_ref:
+ return None
pos = self.chr_to_ref[pos]
if infer and any(s <= pos < e for s, e in self.exons):
if ">" not in op:
return "indel"
if self.strand < 0:
- op = reverse_op(op)
+ op = self._reverse_op(op)
+ # pos -= 1
if op[2] == "N":
return None
+ if self.seq[pos] != op[0]:
+ log.warn(f"Bad mutation: {op[0]} != {self.seq[pos]}")
seq = "".join(
self.seq[s:pos] + op[2] + self.seq[pos + 1 : e]
if s <= pos < e
@@ -294,6 +299,8 @@ def get_rsid(self, *args, default=True) -> str:
return res if res != "-" or not default else f"{pos + 1}.{op}"
def get_allele(self, name):
+ if name in self.removed:
+ name = self.removed[name]
for an, a in self.alleles.items():
if name in a.minors:
return (a, a.minors[name])
@@ -789,7 +796,7 @@ def filter_f(m):
del self.alleles[f]
self.alleles.update({a.name: a for a in add.values()})
- self.removed = set()
+ self.removed = {}
for an, a in self.alleles.items():
# Clean up minor alleles (as many might be identical after a fusion).
# Put a reference to the cleaned-up alleles in `alt_name` field.
@@ -801,7 +808,7 @@ def filter_f(m):
if len(sa) > 1:
for s in sa:
if s != min(sa) and "#" not in s:
- self.removed.add(s)
+ self.removed[s] = min(sa)
log.debug(
f"Removing {self.name}*{s} as it is the same as"
f" {min(sa)}"
diff --git a/aldy/genotype.py b/aldy/genotype.py
index 81819ae..2c37138 100644
--- a/aldy/genotype.py
+++ b/aldy/genotype.py
@@ -205,7 +205,7 @@ def genotype(
)
if kind != "vcf":
avg_cov = sample.coverage.average_coverage()
- if profile.cn_region and avg_cov < 2:
+ if profile.cn_region and avg_cov < profile.min_avg_coverage:
if is_simple:
print(file=output_file)
raise AldyException(
diff --git a/aldy/lpinterface.py b/aldy/lpinterface.py
index 01117a9..fdb28b9 100644
--- a/aldy/lpinterface.py
+++ b/aldy/lpinterface.py
@@ -8,7 +8,7 @@
import importlib
import collections
-from .common import log, sorted_tuple, SOLUTION_PRECISION
+from .common import log, sorted_tuple, SOLUTION_PRECISION, AldyException
SOLVER_PRECISON = 1e-5
@@ -356,6 +356,8 @@ def test_gurobi(name): # pragma: no cover
log.trace("[lp] solver= gurobi")
except ImportError:
model = None
+ if model and not model.model:
+ model = None
return model
def test_cbc(name):
@@ -372,7 +374,7 @@ def test_cbc(name):
if model is None:
model = test_gurobi(name)
if model is None:
- raise Exception(
+ raise AldyException(
"No ILP solver found. Aldy cannot operate without an ILP solver. "
+ "Please install Gurobi or Google OR Tools."
)
@@ -380,6 +382,11 @@ def test_cbc(name):
else:
fname = "test_" + solver
if fname in locals():
- return locals()[fname](name)
+ m = locals()[fname](name)
+ if not m:
+ raise AldyException(
+ "ILP solver {} cannot be initialized".format(solver)
+ )
+ return m
else:
- raise Exception("ILP solver {} is not supported".format(solver))
+ raise AldyException("ILP solver {} is not supported".format(solver))
diff --git a/aldy/major.py b/aldy/major.py
index fd9b7e6..ecf3665 100644
--- a/aldy/major.py
+++ b/aldy/major.py
@@ -252,6 +252,28 @@ def filter_fns(cov, mut):
)
return cond
+ if coverage.profile.debug_probe:
+ # Check for probe mutations and show their pileup
+ probes = coverage.profile.debug_probe.split(";")
+
+ d = collections.defaultdict(set)
+ for (pos, op), (fn, rs, _, _, _) in gene.mutations.items():
+ d[fn].add((pos, op))
+ d[rs].add((pos, op))
+ d[str(pos + 1)].add((pos, op))
+
+ def pileup(pos):
+ return "".join(
+ (a if a == "_" else a[2:]) * len(c)
+ for a, c in coverage._coverage[pos].items()
+ )
+
+ for p in probes:
+ for pos, op in d.get(p, set()):
+ log.warn("{} -> {}: {}", p, pos + 1, op)
+ for i in range(pos - 5, pos + 5):
+ log.info(" {} {} {}", i + 1, "->" if i == pos else " ", pileup(i))
+
cov = coverage.filtered(Coverage.quality_filter)
cov = cov.filtered(filter_fns)
@@ -318,3 +340,33 @@ def print_mut(m):
for m in sorted(muts):
a = (f"*{a}" for a, b in gene.alleles.items() if m in b.func_muts)
log.debug(" {}, alleles={})", print_mut(m)[:-1], ", ".join(a))
+
+ if coverage.profile.debug_novel:
+ log.debug("[major] completely novel mutations=")
+ for pos, muts in coverage._coverage.items():
+ for op in muts:
+ if op == "_":
+ continue
+ if (pos, op) in gene.mutations:
+ continue
+ e = gene.get_functional((pos, op), infer=True)
+ if not e:
+ continue
+
+ m = Mutation(pos, op)
+ copies = (
+ coverage[m] / (coverage.total(m) / cn_solution.position_cn(m.pos))
+ if cn_solution.position_cn(m.pos) and coverage.total(m)
+ else 0
+ )
+ g = gene.region_at(m.pos)
+
+ op = gene._reverse_op(op) if gene.strand < 0 else op
+ log.warn(
+ f"[novel] {gene.name}: {str(m):15} "
+ + f"{gene.chr_to_ref[pos]+1}:{op} "
+ + f"(cov={coverage[m]:4}, cn= {copies:3.1f}; "
+ + f"region={g[1] if g else '?'}; "
+ + f"impact={e}; "
+ + ")"
+ )
diff --git a/aldy/profile.py b/aldy/profile.py
index 065ed12..4d4576a 100644
--- a/aldy/profile.py
+++ b/aldy/profile.py
@@ -191,6 +191,28 @@ def __init__(self, name, cn_region=None, data=None, **kwargs):
Default: False
"""
+ self.debug_probe = ""
+ """
+ (Debug) Show raw data for a given mutation (e.g., I223M)
+ """
+
+ self.debug_novel = False
+ """
+ (Debug) Show potential novel functional mutations that are not in the database.
+ """
+
+ self.min_avg_coverage = 2.0
+ """
+ Minimum average gene coverage needed for Aldy.
+ Default: 2
+ """
+
+ self.vcf_sample_idx = 0
+ """
+ VCF sample index.
+ Default: 0
+ """
+
self.update(kwargs)
def update(self, kwargs):
@@ -251,15 +273,20 @@ def load(gene, profile, cn_region=None, **params):
raise AldyException(f"Could not load profile from {profile}")
if is_yml and profile != "illumina" and cn_region:
raise AldyException("-n is only valid with illumina or BAM profile")
+
if is_yml and profile == "illumina" and cn_region:
prof["neutral"]["value"] = cn_region.end - cn_region.start
if "neutral" not in prof or "value" not in prof["neutral"]:
raise AldyException("Profile missing neutral region")
if gene.name not in prof:
raise AldyException(f"Profile missing {gene.name}")
+ if gene.genome not in prof["neutral"]:
+ raise AldyException(f"Profile {profile} not compatible with {gene.genome}")
+ if cn_region is None:
+ cn_region = GRange(*prof["neutral"][gene.genome])
return Profile(
profile,
- GRange(*prof["neutral"][gene.genome]),
+ cn_region,
prof,
neutral_value=prof["neutral"].get("value"),
**dict(prof.get("options", {}), **params),
@@ -375,6 +402,6 @@ def get_sam_profile_data(
d["neutral"][genome] = [*gene_regions["neutral", "value", 0]]
if params:
d["options"] = {}
- for k, v in Profile("")._parse_params(params).items():
+ for k, v in Profile("").update(params).items():
d["options"][k] = v
return d
diff --git a/aldy/resources/genes/pharmacoscan/cyp2e1.yml b/aldy/resources/genes/pharmacoscan/cyp2e1.yml
index f69425e..c284d2e 100644
--- a/aldy/resources/genes/pharmacoscan/cyp2e1.yml
+++ b/aldy/resources/genes/pharmacoscan/cyp2e1.yml
@@ -78,25 +78,21 @@ alleles:
- [15271, G>C, rs2070676, Intron]
CYP2E1*6:
mutations:
- - [ignored, 12678, T>A, rs6413432, Intron]
- ignored: true
+ - [12678, T>A, rs6413432, Intron]
CYP2E1*6_1B:
mutations:
- [15271, G>C, rs2070676, Intron]
- - [ignored, 12678, T>A, rs6413432, Intron]
- ignored: true
+ - [12678, T>A, rs6413432, Intron]
CYP2E1*6_7A:
mutations:
- [4701, T>A, rs2070673, 5'UTR]
- - [ignored, 12678, T>A, rs6413432, Intron]
- ignored: true
+ - [12678, T>A, rs6413432, Intron]
CYP2E1*6_7C_1B:
mutations:
- [4682, A>G, rs2070672, 5'UTR]
- [4701, T>A, rs2070673, 5'UTR]
- [15271, G>C, rs2070676, Intron]
- - [ignored, 12678, T>A, rs6413432, Intron]
- ignored: true
+ - [12678, T>A, rs6413432, Intron]
CYP2E1*7A:
mutations:
- [4701, T>A, rs2070673, 5'UTR]
diff --git a/aldy/sam.py b/aldy/sam.py
index 8294409..5e21fd3 100644
--- a/aldy/sam.py
+++ b/aldy/sam.py
@@ -98,7 +98,9 @@ def __init__(
if self.kind == "vcf":
try:
- norm, muts = self._load_vcf(path)
+ norm, muts = self._load_vcf(
+ path, profile.vcf_sample_idx if profile else 0
+ )
except ValueError:
raise AldyException(f"VCF {path} is not indexed")
elif self.kind == "dump":
@@ -205,7 +207,7 @@ def _load_sam(self, sam_path: str, reference=None, debug=None):
self._dump_reads.append(r)
return norm, muts
- def _load_vcf(self, vcf_path: str):
+ def _load_vcf(self, vcf_path: str, sample_idx: int = 0):
"""Load the read, mutation and coverage data from a VCF file."""
log.debug("[vcf] path= {}", os.path.abspath(vcf_path))
@@ -239,9 +241,13 @@ def get_mut(pos, ref, alt):
self._prefix = chr_prefix(self.gene.chr, list(vcf.header.contigs))
samples = list(vcf.header.samples)
- self.name = sample = samples[0]
- if len(samples) > 1:
- log.warn("WARNING: Multiple VCF samples found; using the first one.")
+ if sample_idx >= len(samples):
+ raise AldyException(
+ f"Cannot fetch sample no. {sample_idx}; "
+ f"input VCF has {len(vcf.header.samples)} samples"
+ )
+ self.name = sample = samples[sample_idx]
+
log.info("Using VCF sample {}", sample)
for read in vcf.fetch(
region=self.gene.get_wide_region().samtools(prefix=self._prefix)
@@ -312,6 +318,10 @@ def _load_dump(self, dump_path: str):
) = pickle.load(
fd
) # type: ignore
+ self.profile.display_format = False
+ self.profile.debug_probe = ""
+ self.profile.debug_novel = False
+ self.profile.min_avg_coverage = 2.0
self.phases = {f"r{i}": v for i, v in enumerate(phases)}
norm = {p: [q for q, n in c.items() for _ in range(n)] for p, c in norm.items()}
muts = {p: [q for q, n in c.items() for _ in range(n)] for p, c in muts.items()}
diff --git a/aldy/version.py b/aldy/version.py
index 326aa82..bc5a3ce 100644
--- a/aldy/version.py
+++ b/aldy/version.py
@@ -4,4 +4,4 @@
# file 'LICENSE', which is part of this source code package.
-__version__ = "4.4"
+__version__ = "4.5"
| FEATURE: Allow processing of files with multiple samples
Allowing Aldy to process files that contain multiple samples would remove the manual work of splitting a large multi-sample file into a separate file for each sample.
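A minimal sketch of what this enables, built on the same pysam calls the patch uses internally (the file name and sample index here are hypothetical):

```python
import pysam

vcf = pysam.VariantFile("cohort.vcf.gz")   # bgzipped, indexed, multi-sample VCF
samples = list(vcf.header.samples)
print(samples)                             # e.g. ['NA12878', 'NA12891', 'NA12892']

sample_idx = 1                             # index the user would pass to Aldy
if sample_idx >= len(samples):
    raise IndexError(f"input VCF has only {len(samples)} samples")
print("Genotyping sample:", samples[sample_idx])
```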
| 2023-05-04T17:32:35 | 0.0 | [] | [] |
|||
cgq-qgc/pyhelp | cgq-qgc__pyhelp-73 | 4a0c4e1a861471842e32121a938a31596571c490 | diff --git a/pyhelp/managers.py b/pyhelp/managers.py
index 3443f84..b7f16d7 100644
--- a/pyhelp/managers.py
+++ b/pyhelp/managers.py
@@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
-# =============================================================================
-# Copyright © PyHelp Project Contributors
+# -----------------------------------------------------------------------------
+# Copyright © PyHELP Project Contributors
# https://github.com/cgq-qgc/pyhelp
#
-# This file is part of PyHelp.
+# This file is part of PyHELP.
# Licensed under the terms of the MIT License.
-# =============================================================================
+# -----------------------------------------------------------------------------
# ---- Standard Library Imports
import json
diff --git a/pyhelp/output.py b/pyhelp/output.py
index be3cdb6..65fb4c1 100644
--- a/pyhelp/output.py
+++ b/pyhelp/output.py
@@ -1,18 +1,18 @@
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
-# Copyright © PyHelp Project Contributors
+# Copyright © PyHELP Project Contributors
# https://github.com/cgq-qgc/pyhelp
#
# This file is part of PyHELP.
# Licensed under the terms of the MIT License.
# -----------------------------------------------------------------------------
+from __future__ import annotations
# ---- Standard Library imports
import os.path as osp
from collections.abc import Mapping
-
# ---- Third party imports
import matplotlib.pyplot as plt
import pandas as pd
@@ -27,7 +27,7 @@ class HelpOutput(Mapping):
with the :class:`~pyhelp.HelpManager` class.
"""
- def __init__(self, path_or_dict):
+ def __init__(self, path_or_dict: str | dict):
super(HelpOutput, self).__init__()
if isinstance(path_or_dict, dict):
self.data = path_or_dict['data']
@@ -47,7 +47,7 @@ def __iter__(self):
def __len__(self):
return len(self.data['cid'])
- def load_from_hdf5(self, path_to_hdf5):
+ def load_from_hdf5(self, path_to_hdf5: str):
"""Read data and grid from an HDF5 file at the specified location."""
print(f"Loading data and grid from {path_to_hdf5}")
hdf5 = h5py.File(path_to_hdf5, mode='r+')
@@ -63,21 +63,15 @@ def load_from_hdf5(self, path_to_hdf5):
# Load the grid.
self.grid = pd.DataFrame(
data=[],
- columns=hdf5['grid'].attrs['columns'],
- index=hdf5['grid'].attrs['index'])
- for key in list(hdf5['grid'].keys()):
- values = np.array(hdf5['grid'][key])
- if key == 'cid':
- values = values.astype(str)
- self.grid.loc[:, key] = values
- except Exception as e:
- print(e)
- self.data = None
- self.grid = None
+ columns=np.array(hdf5['grid']['__columns__']).astype(str),
+ index=np.array(hdf5['grid']['__index__']).astype(str))
+ for column in self.grid.columns:
+            self.grid.loc[:, column] = np.array(hdf5['grid'][column])
finally:
hdf5.close()
+ print("Data and grid loaded successfully.")
- def save_to_hdf5(self, path_to_hdf5):
+ def save_to_hdf5(self, path_to_hdf5: str):
"""Save the data and grid to an HDF5 file at the specified location."""
print("Saving data to {}...".format(osp.basename(path_to_hdf5)))
hdf5file = h5py.File(path_to_hdf5, mode='w')
@@ -97,10 +91,21 @@ def save_to_hdf5(self, path_to_hdf5):
group.create_dataset(key, data=self.data[key])
# Save the grid.
+
+ # See http://docs.h5py.org/en/latest/strings.html as to
+ # why this is necessary to do this in order to save a list
+ # of strings in a dataset with h5py.
+
group = hdf5file.create_group('grid')
- group.attrs['columns'] = list(self.grid.columns)
- group.attrs['index'] = list(self.grid.index)
- for column in list(self.grid.columns):
+ group.create_dataset(
+ '__columns__',
+ data=self.grid.columns.values,
+ dtype=h5py.string_dtype())
+ group.create_dataset(
+ '__index__',
+ data=self.grid.index.values,
+ dtype=h5py.string_dtype())
+ for column in self.grid.columns:
if column == 'cid':
# See http://docs.h5py.org/en/latest/strings.html as to
# why this is necessary to do this in order to save a list
@@ -109,9 +114,12 @@ def save_to_hdf5(self, path_to_hdf5):
column,
data=self.grid[column].values,
dtype=h5py.string_dtype())
- else:
- group.create_dataset(
- column, data=self.grid[column].values)
+ if column == 'cid':
+ # The 'cid' is already stored in the index, we don't
+ # want to store the same information in a column also.
+ continue
+ group.create_dataset(
+ column, data=self.grid[column].values)
finally:
hdf5file.close()
print("Data saved successfully.")
@@ -130,7 +138,6 @@ def save_to_csv(self, path_to_csv):
print("Data saved successfully.")
# ---- Calcul
-
def calc_area_monthly_avg(self):
"""
Calcul the monthly values of the water budget in mm/month for the
Bug when saving the outputs
```
HELP simulation in progress: 100.0% (0.0 min remaining)
Task completed in 145.64 sec
Post-processing cell 22202 of 22202... done
Saving data to help_example.out...
Traceback (most recent call last):
Input In [54] in <cell line: 1>
output = helpm.calc_help_cells(help_output_hdf5, tfsoil=-3)
File ~\.conda\envs\PyHELP\lib\site-packages\pyhelp\managers.py:316 in calc_help_cells
help_output.save_to_hdf5(path_to_hdf5)
File ~\.conda\envs\PyHELP\lib\site-packages\pyhelp\output.py:102 in save_to_hdf5
group.attrs['index'] = list(self.grid.index)
File h5py\_objects.pyx:54 in h5py._objects.with_phil.wrapper
File h5py\_objects.pyx:55 in h5py._objects.with_phil.wrapper
File ~\.conda\envs\PyHELP\lib\site-packages\h5py\_hl\attrs.py:103 in __setitem__
self.create(name, data=value)
File ~\.conda\envs\PyHELP\lib\site-packages\h5py\_hl\attrs.py:196 in create
attr = h5a.create(self._id, self._e(tempname), htype, space)
File h5py\_objects.pyx:54 in h5py._objects.with_phil.wrapper
File h5py\_objects.pyx:55 in h5py._objects.with_phil.wrapper
File h5py\h5a.pyx:50 in h5py.h5a.create
RuntimeError: Unable to create attribute (object header message is too large)
```
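The underlying cause is HDF5's object-header limit: attributes are stored in the group's header (roughly 64 KiB with the default file format), so assigning a ~22,000-element list of cell ids to `group.attrs['index']` overflows it. A minimal sketch of the failure and of the dataset-based workaround the fix adopts (file name hypothetical; h5py >= 2.10 assumed for `string_dtype`):

```python
import h5py

index = [f"cell-{i}" for i in range(22202)]  # same order of magnitude as the grid

with h5py.File("demo.h5", "w") as f:
    grid = f.create_group("grid")
    try:
        grid.attrs["index"] = index          # lives in the object header -> too big
    except RuntimeError as e:
        print(e)                             # "object header message is too large"
    # Workaround: store the values as a variable-length string dataset instead.
    grid.create_dataset("__index__", data=index, dtype=h5py.string_dtype())
```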
| See https://github.com/h5py/h5py/issues/1053#issuecomment-525363860 | 2022-03-16T14:05:34 | 0.0 | [] | [] |
||
bytinbit/nobubo | bytinbit__nobubo-32 | b586c345cf8e9b3e9833996b890563d0915b621b | diff --git a/nobubo/__main__.py b/nobubo/__main__.py
index 9bd7e22..972264b 100644
--- a/nobubo/__main__.py
+++ b/nobubo/__main__.py
@@ -5,6 +5,7 @@
https://www.python.org/dev/peps/pep-0338/
"""
-from nobubo import nobubo
+from nobubo import cli
if __name__ == '__main__':
- nobubo.main(prog_name="python -m nobubo") # https://github.com/pallets/click/issues/1399
+ # https://github.com/pallets/click/issues/1399
+ cli.main(prog_name="python -m nobubo")
diff --git a/nobubo/assembly.py b/nobubo/assembly.py
index 856c350..ed9fda2 100644
--- a/nobubo/assembly.py
+++ b/nobubo/assembly.py
@@ -20,24 +20,27 @@
"""
import pathlib
import subprocess
+from typing import List
from nobubo import core, calc, errors
def assemble_collage(input_properties: core.InputProperties,
- temp_output_dir: pathlib.Path) -> [pathlib.Path]:
+ temp_output_dir: pathlib.Path) -> List[pathlib.Path]:
"""
- Takes a pattern pdf where one page equals a part of the pattern and assembles it to one huge collage.
+ Takes a pattern pdf where one page equals a part of the pattern and
+ assembles it to one huge collage.
The default assembles it from top left to the bottom right.
:param input_properties: Properties of the input pdf.
:param temp_output_dir: The temporary path where all calculations should happen.
- :return A list of all the path to the collages, each with all pattern pages assembled on one single page.
+    :return A list of all the paths to the collages, each with all pattern pages
+ assembled on one single page.
"""
- all_collages_paths: [pathlib.Path] = []
+ all_collages_paths: List[pathlib.Path] = []
for counter, layout in enumerate(input_properties.layout):
print(f"Assembling overview {counter + 1} of {len(input_properties.layout)}\n")
- print(f"Creating collage... Please be patient, this may take some time.")
+ print("Creating collage... Please be patient, this may take some time.")
all_collages_paths.append(_assemble(input_properties, temp_output_dir, layout))
return all_collages_paths
@@ -45,28 +48,32 @@ def assemble_collage(input_properties: core.InputProperties,
def _assemble(input_properties: core.InputProperties,
temp_output_dir: pathlib.Path,
current_layout: core.Layout) -> pathlib.Path:
-
collage_width = input_properties.pagesize.width * current_layout.columns
collage_height = input_properties.pagesize.height * current_layout.rows
if input_properties.reverse_assembly:
start, end, step = calc.pagerange_reverse(current_layout)
- l = list(reversed([(x, x+current_layout.columns-1) for x in range(start, end, step)]))
- tuples = ["-".join(map(str, i)) for i in l]
+ page_range_for_pdflatex = list(reversed([(x, x + current_layout.columns - 1)
+ for x in range(start, end, step)]))
+ tuples = ["-".join(map(str, i)) for i in page_range_for_pdflatex]
page_range = ",".join(tuples)
else:
begin = current_layout.first_page
- end = current_layout.first_page + (current_layout.columns * current_layout.rows) - 1
+ end_of_section = current_layout.columns * current_layout.rows
+ end = current_layout.first_page + end_of_section - 1
page_range = f"{begin}-{end}"
file_content = [
"\\batchmode\n",
"\\documentclass[a4paper,]{article}\n",
- f"\\usepackage[papersize={{{collage_width}pt,{collage_height}pt}}]{{geometry}}\n",
+ f"\\usepackage[papersize={{{collage_width}pt,"
+ f"{collage_height}pt}}]{{geometry}}\n",
"\\usepackage[utf8]{inputenc}\n",
"\\usepackage{pdfpages}\n",
"\\begin{document}\n",
- f"\\includepdfmerge[nup={current_layout.columns}x{current_layout.rows}, noautoscale=true, scale=1.0]{{{str(input_properties.input_filepath)},{page_range} }}\n",
+ f"\\includepdfmerge[nup={current_layout.columns}x{current_layout.rows}, "
+ f"noautoscale=true, scale=1.0]"
+ f"{{{str(input_properties.input_filepath)},{page_range} }}\n",
"\\end{document}\n",
]
@@ -80,17 +87,14 @@ def _assemble(input_properties: core.InputProperties,
"-interaction=nonstopmode",
f"-jobname={output_filename}",
f"-output-directory={temp_output_dir}",
- input_filepath]
+ str(input_filepath)]
try:
subprocess.check_output(command, stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError:
- raise errors.UsageError(f"Error: pdflatex encountered a problem while assembling the collage and had to abort.")
+ except subprocess.CalledProcessError as e:
+ raise errors.UsageError("Error: pdflatex encountered a problem while "
+ f"assembling the collage and had to abort:\n{e}")
+ except FileNotFoundError as e:
+ raise errors.UsageError(f"pdflatex or the output file was not found:\n{e}")
return temp_output_dir / pathlib.Path(output_filename).with_suffix(".pdf")
-
-
-
-
-
-
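
Aside: the hunk above converts the input path to str so the argument list passed to subprocess is uniformly strings, chains the underlying exception text into the UsageError, and adds a FileNotFoundError handler for a missing pdflatex binary. A minimal, self-contained sketch of that error-handling pattern (the wrapper name and paths are hypothetical, not part of the patch):

import subprocess

def run_pdflatex(latex_file: str, output_dir: str) -> None:
    # Hypothetical helper illustrating the pattern used in _assemble above.
    command = ["pdflatex",
               "-interaction=nonstopmode",
               f"-output-directory={output_dir}",
               latex_file]
    try:
        # check_output raises CalledProcessError on a non-zero exit status.
        subprocess.check_output(command, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        # e.output carries pdflatex's combined stdout/stderr for diagnostics.
        raise RuntimeError("pdflatex aborted:\n"
                           f"{e.output.decode(errors='replace')}")
    except FileNotFoundError as e:
        # Raised when the pdflatex executable cannot be found on PATH.
        raise RuntimeError(f"pdflatex was not found:\n{e}")
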
diff --git a/nobubo/calc.py b/nobubo/calc.py
index d826fb9..d681605 100644
--- a/nobubo/calc.py
+++ b/nobubo/calc.py
@@ -22,7 +22,7 @@
import random
import re
import string
-from typing import List
+from typing import List, Tuple
import click
import pikepdf
@@ -31,9 +31,12 @@
from nobubo.core import Factor
-def parse_cli_input(input_layout: (int, int, int), output_layout_cli: str, print_margin: int,
- reverse_assembly: bool, input_path: str, output_path: str
- ) -> (core.InputProperties, core.OutputProperties):
+def parse_cli_input(input_layout: List[Tuple[int, int, int]],
+ output_layout_cli: str,
+ print_margin: int,
+ reverse_assembly: bool,
+ input_path: str, output_path: str
+ ) -> Tuple[core.InputProperties, core.OutputProperties]:
try:
with pikepdf.open(pathlib.Path(input_path)) as inputfile:
# first page (getPage(0)) may contain overview, so get second one
@@ -47,20 +50,22 @@ def parse_cli_input(input_layout: (int, int, int), output_layout_cli: str, print
reverse_assembly=reverse_assembly)
output_properties = core.OutputProperties(
output_path=pathlib.Path(output_path),
- output_layout=parse_output_layout(output_layout_cli, print_margin))
+ output_layout=parse_output_layout(output_layout_cli, print_margin)
+ if output_layout_cli else None
+ )
except OSError as e:
- raise errors.UsageError(f"While reading the input pdf file, this error occurred:\n{e}")
+ raise errors.UsageError(f"While reading the input pdf file, "
+ f"this error occurred:\n{e}")
return input_properties, output_properties
-def parse_input_layouts(input_layout: (int, int, int)) ->[core.Layout]:
- return [core.Layout(first_page=data[0], columns=data[1], rows=data[2]) for data in input_layout]
+def parse_input_layouts(input_layout: List[Tuple[int, int, int]]) -> List[core.Layout]:
+ return [core.Layout(first_page=data[0], columns=data[1], rows=data[2])
+ for data in input_layout]
-def parse_output_layout(output_layout_cli: str, print_margin: int = None) -> [int]:
+def parse_output_layout(output_layout_cli: str, print_margin: int = None) -> List[int]:
print_size: List[int] = []
- if output_layout_cli is None:
- return None
if output_layout_cli == "a0":
print_size = to_mm("841x1189")
if output_layout_cli == "us": # Arch E /Arch 6 size of 36 × 48 inches
@@ -81,30 +86,38 @@ def validate_output_layout(ctx, param, value):
return value
except AssertionError:
raise click.BadParameter(f"Output layout {value} does not exist. "
- f"Have you chosen a0, us or a custom layout, such as 222x444?")
+ "Have you chosen a0, us or a custom layout, "
+ "such as 222x444?")
def pages_needed(layout: core.Layout, n_up_factor: Factor) -> int:
- return math.ceil(layout.columns/n_up_factor.x) * math.ceil(layout.rows/n_up_factor.y)
+ x = layout.columns / n_up_factor.x
+ y = layout.rows / n_up_factor.y
+ return math.ceil(x) * math.ceil(y)
-def page_dimensions(page: pikepdf.Page) -> (float, float):
+def page_dimensions(page: pikepdf.Page) -> Tuple[float, float]:
"""
- Calculates the x, y value for the offset in default user space units as defined in the pdf standard.
+    Calculates the x (width) and y (height) dimensions of the page
+    in default user space units as defined in the pdf standard.
:param page: A PDF page.
:return: list with x, y value.
"""
if not hasattr(page, "CropBox"):
- box = page.MediaBox
+ # page is of type Object, and either MediaBox, CropBox or TrimBox
+ # are all of type pikepdf.objects.Object
+ # they exist (or not) depending on the pdf itself
+ box = page.MediaBox # type: ignore
else:
- box = page.CropBox
- return round(float(box[2])-float(box[0]), 2), round(float(box[3])-float(box[1]), 2)
+ box = page.CropBox # type: ignore
+    return (round(float(box[2]) - float(box[0]), 2),
+            round(float(box[3]) - float(box[1]), 2))
-def to_userspaceunits(width_height: [int, int]) -> core.PageSize:
+def to_userspaceunits(width_height: List[int]) -> core.PageSize:
"""
- Converts a page's physical width and height from millimeters to default user space unit,
- which are defined in the pdf standard as 1/72 inch.
+ Converts a page's physical width and height from millimeters to
+ default user space unit, which is defined in the pdf standard as 1/72 inch.
:param width_height: Width and height of the physical page in millimeters (mm),
on which the pattern will be printed.
@@ -118,23 +131,25 @@ def to_userspaceunits(width_height: [int, int]) -> core.PageSize:
height=(round(width_height[1] * conversion_factor, 3)))
-def nup_factors(pagesize: core.PageSize, output_layout: [int]) -> Factor:
+def nup_factors(pagesize: core.PageSize, output_layout: List[int]) -> Factor:
output_papersize = to_userspaceunits(output_layout)
x_factor = int(output_papersize.width // pagesize.width)
y_factor = int(output_papersize.height // pagesize.height)
return Factor(x=x_factor, y=y_factor)
-def to_mm(output_layout: str) -> [int, int]:
+def to_mm(output_layout: str) -> List[int]:
ol_in_mm = re.compile(r"\d+[x]\d+").findall(output_layout)[0].split("x")
return [int(x) for x in ol_in_mm]
-def pagerange_reverse(layout: core.Layout) -> (int, int, int):
- return layout.first_page, layout.first_page + (layout.columns * layout.rows) - 1, layout.columns
+def pagerange_reverse(layout: core.Layout) -> Tuple[int, int, int]:
+    return (layout.first_page,
+            layout.first_page + (layout.columns * layout.rows) - 1,
+            layout.columns)
-def new_outputpath(output_path: pathlib.Path, page_count: int):
+def new_outputpath(output_path: pathlib.Path, page_count: int) -> pathlib.Path:
new_filename = f"{output_path.stem}_{page_count + 1}{output_path.suffix}"
return output_path.parent / new_filename
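
Aside: most hunks in calc.py replace annotations such as (int, int, int) or [pathlib.Path] with typing.Tuple[...] and typing.List[...]. The old spellings are ordinary tuple and list expressions, not types, so mypy (enabled elsewhere in this patch) rejects them; and with python = "^3.8" in pyproject.toml the typing module forms are required, since the PEP 585 built-in generics only arrive in 3.9. A short illustrative sketch with hypothetical names:

from typing import List, Tuple

# Invalid as types: these annotations evaluate to a plain tuple/list at
# import time, and mypy reports them as invalid annotations:
#   def bad(layout: (int, int, int)) -> [int]: ...

# The typing module spells the same intent as real, checkable types:
def pagerange(first_page: int, columns: int, rows: int) -> Tuple[int, int, int]:
    return first_page, first_page + columns * rows - 1, columns

def parse_sizes(raw: str) -> List[int]:
    return [int(part) for part in raw.split("x")]
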
diff --git a/nobubo/nobubo.py b/nobubo/cli.py
similarity index 69%
rename from nobubo/nobubo.py
rename to nobubo/cli.py
index 27ccfe3..f81c871 100644
--- a/nobubo/nobubo.py
+++ b/nobubo/cli.py
@@ -17,6 +17,7 @@
import pathlib
import sys
import tempfile
+from typing import List
import click
@@ -24,35 +25,43 @@
@click.command()
[email protected]("--il", "input_layout_cli", nargs=3, type=click.INT, multiple=True, required=True,
[email protected]("--il", "input_layout_cli", nargs=3, type=click.INT, multiple=True,
+ required=True,
help="Input layout of the pdf. Can be used multiple times.",
metavar="FIRSTPAGE COLUMNS ROWS")
@click.option("--ol", "output_layout_cli", nargs=1, type=click.STRING,
callback=calc.validate_output_layout,
- help="Output layout. Supported formats: a0, us, custom. No output layout provided creates a huge collage.",
+ help="Output layout. Supported formats: a0, us, custom. No output "
+ "layout provided creates a huge collage.",
metavar="a0 | us | mmxmm")
@click.option("--margin", "print_margin", nargs=1, type=click.INT,
help="Define an optional print margin in mm.",
metavar="mm")
@click.option("--reverse", "reverse_assembly", is_flag="True",
- help="With reverse flag: collage is assembled from bottom left to top right."
+ help="With reverse flag: collage is assembled from bottom left to top "
+ "right. "
"No flag: collage is assembled from top left to bottom right. ")
@click.argument("input_path", type=click.STRING)
@click.argument("output_path", type=click.STRING)
-def main(input_layout_cli, output_layout_cli, print_margin, reverse_assembly, input_path, output_path):
+def main(input_layout_cli, output_layout_cli, print_margin, reverse_assembly,
+ input_path, output_path):
"""
- Creates a collage from digital pattern pages and then chops it up into a desired output layout.
+ Creates a collage from digital pattern pages
+ and then chops it up into a desired output layout.
The collage is assembled according to one or several overview sheets.
- These overviews are usually provided along with the pattern pages in the same pdf or in the instructions pdf.
+ These overviews are usually provided along with the pattern pages
+ in the same pdf or in the instructions pdf.
Note: In order to use nobubo, you need the original pdf pattern.
Create a backup of the original if you are afraid to have it damaged in any way.
- The author takes no responsibility if you face any fit issues or other problems now or later on.
+ The author takes no responsibility if you face any fit issues
+ or other problems now or later on.
- Example: A digital pattern contains 2 overview sheets at page 1 and 34 with different layouts each.
+    Example: A digital pattern contains 2 overview sheets
+    on pages 1 and 34, each with a different layout.
The output is to be printed on A0 paper:
- $ nobubo --il 2 8 4 -il 35 7 3 --ol a0 "path/to/pattern/mypattern.pdf" "test_collage.pdf"
+    $ nobubo --il 2 8 4 --il 35 7 3 --ol a0 "myfolder/mypattern.pdf" "test_collage.pdf"
See the readme for further information: https://github.com/bytinbit/nobubo
@@ -66,13 +75,21 @@ def main(input_layout_cli, output_layout_cli, print_margin, reverse_assembly, in
try:
with tempfile.TemporaryDirectory() as td:
temp_output_dir = pathlib.Path(td)
- input_properties, output_properties = calc.parse_cli_input(input_layout_cli, output_layout_cli, print_margin,
- reverse_assembly, input_path, output_path)
- temp_collage_paths: [pathlib.Path] = assembly.assemble_collage(input_properties, temp_output_dir)
+ input_properties, output_properties = \
+ calc.parse_cli_input(input_layout_cli,
+ output_layout_cli,
+ print_margin,
+ reverse_assembly,
+ input_path,
+ output_path)
+ temp_collage_paths: List[pathlib.Path] = \
+ assembly.assemble_collage(input_properties, temp_output_dir)
print(f"Successfully assembled collage from {input_path}.")
if output_properties.output_layout is not None:
- disassembly.create_output_files(temp_collage_paths, input_properties, output_properties)
+ disassembly.create_output_files(temp_collage_paths,
+ input_properties,
+ output_properties)
else: # default: no output_layout specified, print collage pdf
disassembly.write_collage(temp_collage_paths, output_properties)
except (errors.UsageError, click.BadParameter) as e:
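
Aside: the module rename from nobubo.py to cli.py keeps the click interface intact; only line wrapping changes. A minimal, runnable sketch of the same option pattern (nargs=3 with multiple=True collecting repeated triples, plus a flag); the command and option names here are illustrative, and is_flag is written as the documented boolean True rather than the string "True" used above, which click merely treats as truthy:

import click

@click.command()
@click.option("--il", "input_layout", nargs=3, type=click.INT, multiple=True,
              required=True, metavar="FIRSTPAGE COLUMNS ROWS",
              help="Input layout; may be given multiple times.")
@click.option("--reverse", "reverse_assembly", is_flag=True,
              help="Assemble from bottom left to top right.")
@click.argument("input_path", type=click.STRING)
def demo(input_layout, reverse_assembly, input_path):
    # multiple=True with nargs=3 yields a tuple of 3-tuples, e.g.
    # ((2, 8, 4), (35, 7, 3)) for "--il 2 8 4 --il 35 7 3".
    for first_page, columns, rows in input_layout:
        click.echo(f"{input_path}: start page {first_page}, "
                   f"{columns}x{rows}, reverse={reverse_assembly}")

if __name__ == "__main__":
    demo()
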
diff --git a/nobubo/core.py b/nobubo/core.py
index b3330f8..4a96852 100644
--- a/nobubo/core.py
+++ b/nobubo/core.py
@@ -20,6 +20,7 @@
"""
from dataclasses import dataclass
import pathlib
+from typing import List, Optional
@dataclass
@@ -36,7 +37,8 @@ class Layout:
"""
A Pattern layout.
- first_page: The number of the pdf page which marks the beginning of the pattern pages
+ first_page: The number of the pdf page
+ which marks the beginning of the pattern pages
that are covered by the columns and rows.
"""
first_page: int
@@ -62,7 +64,7 @@ class InputProperties:
output_path: pathlib.Path
number_of_pages: int
pagesize: PageSize
- layout: [Layout]
+ layout: List[Layout]
reverse_assembly: bool = False
@@ -72,7 +74,7 @@ class OutputProperties:
Holds all information of the output pdf.
"""
output_path: pathlib.Path
- output_layout: [int]
+ output_layout: Optional[List[int]]
@dataclass
@@ -81,4 +83,4 @@ class Factor:
Factor class for multiplication.
"""
x: int
- y: int
\ No newline at end of file
+ y: int
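
Aside: output_layout becomes Optional[List[int]] because parse_cli_input now stores None when no --ol option was given, and main branches on that to decide between chopping and writing the plain collage. A tiny sketch of the pattern (class and field names hypothetical):

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class OutputSpec:
    # None means "no output layout given": keep the collage as one page.
    layout: Optional[List[int]] = None

spec = OutputSpec()
if spec.layout is None:
    print("no layout: write the collage unchanged")
else:
    width, height = spec.layout
    print(f"chop into {width}x{height} mm pages")
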
diff --git a/nobubo/disassembly.py b/nobubo/disassembly.py
index cc22cf9..faf50e4 100644
--- a/nobubo/disassembly.py
+++ b/nobubo/disassembly.py
@@ -19,6 +19,7 @@
Contains functions for various output layouts.
"""
import pathlib
+from typing import List, Tuple, Optional
import pikepdf
@@ -26,70 +27,87 @@
from nobubo import core, calc, errors
-def create_output_files(temp_collage_paths: [pathlib.Path],
+def create_output_files(temp_collage_paths: List[pathlib.Path],
input_properties: core.InputProperties,
- output_properties: core.OutputProperties):
+ output_properties: core.OutputProperties) -> None:
for counter, collage_path in enumerate(temp_collage_paths):
try:
collage = pikepdf.Pdf.open(collage_path)
except OSError as e:
- raise errors.UsageError(f"Could not open collage file for disassembly:\n{e}.")
+ raise errors.UsageError("Could not open collage file for disassembly:"
+ f"\n{e}.")
new_outputpath = calc.new_outputpath(output_properties.output_path, counter)
- print(f"\nChopping up the collage...")
+ print("\nChopping up the collage...")
chopped_up_files = _create_output_files(collage, input_properties.pagesize,
- input_properties.layout[counter], output_properties.output_layout)
- print(f"Successfully chopped up the collage.\n")
+ input_properties.layout[counter],
+ output_properties.output_layout)
+ print("Successfully chopped up the collage.\n")
write_chops(chopped_up_files, new_outputpath)
print(f"Final pdf written to {new_outputpath}. Enjoy your sewing :)")
-def write_chops(collage: pikepdf.Pdf, output_path: pathlib.Path):
+def write_chops(collage: pikepdf.Pdf, output_path: pathlib.Path) -> None:
print("Writing file...")
try:
collage.save(output_path)
except OSError as e:
- raise errors.UsageError(f"An error occurred while writing the output file:\n{e}")
+ raise errors.UsageError(f"An error occurred "
+ f"while writing the output file:\n{e}")
-def write_collage(temp_collage_paths: [pathlib.Path], output_properties: core.OutputProperties):
+def write_collage(temp_collage_paths: List[pathlib.Path],
+ output_properties: core.OutputProperties) -> None:
for counter, collage_path in enumerate(temp_collage_paths):
new_outputpath = calc.new_outputpath(output_properties.output_path, counter)
try:
temp_collage = pikepdf.Pdf.open(collage_path)
temp_collage.save(new_outputpath)
except OSError as e:
- raise errors.UsageError(f"An error occurred while writing the collage:\n{e}")
+ raise errors.UsageError(f"An error occurred "
+ f"while writing the collage:\n{e}")
print(f"Collage written to {new_outputpath}. Enjoy your sewing :)")
def _create_output_files(collage: pikepdf.Pdf,
pagesize: core.PageSize,
current_layout: core.Layout,
- output_layout: [int]) -> pikepdf.Pdf:
+ output_layout: Optional[List[int]]) -> pikepdf.Pdf:
"""
- Chops up the collage that consists of all the pattern pages to individual pages of the desired output size.
+    Chops up the collage that consists of all the pattern pages into individual
+    pages of the desired output size.
:param collage: One pdf page that contains all assembled pattern pages.
:param input_properties: Properties of the pdf.
:param output_layout: The desired output layout.
:return: The pdf with several pages, ready to write to disk.
"""
+ assert output_layout is not None
n_up_factor = calc.nup_factors(pagesize, output_layout)
# only two points are needed to be cropped, lower left (x, y) and upper right (x, y)
lowerleft_factor = nobubo.core.Factor(x=0, y=0)
upperright_factor = nobubo.core.Factor(x=1, y=1)
- output = pikepdf.Pdf.new()
+ output = pikepdf.Pdf.new() # type: ignore [call-arg]
output.copy_foreign(collage.Root)
- # Root must be copied too, not only the page: thanks to https://github.com/cfcurtis/sewingutils for this!
+ # Root must be copied too, not only the page:
+ # thanks to https://github.com/cfcurtis/sewingutils
for i in range(0, calc.pages_needed(current_layout, n_up_factor)):
page = output.copy_foreign(collage.pages[0])
- lowerleft: core.Point = _calculate_lowerleft_point(lowerleft_factor, n_up_factor, pagesize)
- upperright: core.Point = _calculate_upperright_point(upperright_factor, n_up_factor, current_layout, pagesize)
+ lowerleft: core.Point = _calculate_lowerleft_point(lowerleft_factor,
+ n_up_factor,
+ pagesize)
+ upperright: core.Point = _calculate_upperright_point(upperright_factor,
+ n_up_factor,
+ current_layout,
+ pagesize)
# adjust multiplying factor
- colsleft = _calculate_colsrows_left(current_layout.columns, upperright_factor.x, n_up_factor.x)
- lowerleft_factor, upperright_factor = _adjust_factors(lowerleft_factor, upperright_factor, colsleft)
+ colsleft = _calculate_colsrows_left(current_layout.columns,
+ upperright_factor.x,
+ n_up_factor.x)
+ lowerleft_factor, upperright_factor = _adjust_factors(lowerleft_factor,
+ upperright_factor,
+ colsleft)
page.CropBox = [lowerleft.x, lowerleft.y, upperright.x, upperright.y]
output.pages.append(page)
@@ -105,7 +123,7 @@ def _calculate_lowerleft_point(lowerleft_factor: nobubo.core.Factor,
n_up_factor: nobubo.core.Factor,
pagesize: core.PageSize) -> core.Point:
return core.Point(x=lowerleft_factor.x * n_up_factor.x * pagesize.width,
- y=lowerleft_factor.y * n_up_factor.y * pagesize.height)
+ y=lowerleft_factor.y * n_up_factor.y * pagesize.height)
def _calculate_upperright_point(upperright_factor: nobubo.core.Factor,
@@ -114,14 +132,18 @@ def _calculate_upperright_point(upperright_factor: nobubo.core.Factor,
pagesize: core.PageSize) -> core.Point:
upperright = core.Point(x=0, y=0)
# Manage ROWS: apply transformation to upper right, y-value
- rowsleft = _calculate_colsrows_left(current_layout.rows, upperright_factor.y, n_up_factor.y)
+ rowsleft = _calculate_colsrows_left(current_layout.rows,
+ upperright_factor.y,
+ n_up_factor.y)
if rowsleft < 0: # end of pattern reached (full amount of rows reached)
upperright.y = current_layout.rows * pagesize.height
else:
upperright.y = upperright_factor.y * n_up_factor.y * pagesize.height
# Manage COLS: apply transformation to upper right, x-value
- colsleft = _calculate_colsrows_left(current_layout.columns, upperright_factor.x, n_up_factor.x) # COLS
+ colsleft = _calculate_colsrows_left(current_layout.columns,
+ upperright_factor.x,
+ n_up_factor.x) # COLS
if colsleft > 0: # still assembling the same horizontal line
upperright.x = upperright_factor.x * n_up_factor.x * pagesize.width
@@ -133,26 +155,30 @@ def _calculate_upperright_point(upperright_factor: nobubo.core.Factor,
return upperright
-def _adjust_factors(lowerleft_factor: nobubo.core.Factor, upperright_factor: nobubo.core.Factor, colsleft: int) -> (
- nobubo.core.Factor, nobubo.core.Factor):
+def _adjust_factors(lowerleft_factor: nobubo.core.Factor,
+ upperright_factor: nobubo.core.Factor,
+                    colsleft: int
+                    ) -> Tuple[nobubo.core.Factor, nobubo.core.Factor]:
if colsleft > 0: # still assembling the same horizontal line
return _advance_horizontally(lowerleft_factor, upperright_factor)
else: # end of line reached, need to go 1 row up
return _advance_vertically(lowerleft_factor, upperright_factor)
-def _advance_horizontally(lowerleft_factor: nobubo.core.Factor, upperright_factor: nobubo.core.Factor) -> (
- nobubo.core.Factor, nobubo.core.Factor):
+def _advance_horizontally(lowerleft_factor: nobubo.core.Factor,
+ upperright_factor: nobubo.core.Factor
+ ) -> Tuple[nobubo.core.Factor, nobubo.core.Factor]:
lowerleft_factor.x += 1
upperright_factor.x += 1
return lowerleft_factor, upperright_factor
-def _advance_vertically(lowerleft_factor: nobubo.core.Factor, upperright_factor: nobubo.core.Factor) -> (
-nobubo.core.Factor, nobubo.core.Factor):
+def _advance_vertically(lowerleft_factor: nobubo.core.Factor,
+ upperright_factor: nobubo.core.Factor
+ ) -> Tuple[nobubo.core.Factor, nobubo.core.Factor]:
lowerleft_factor.x = 0
lowerleft_factor.y += 1
upperright_factor.x = 1
upperright_factor.y += 1
- return lowerleft_factor, upperright_factor
\ No newline at end of file
+ return lowerleft_factor, upperright_factor
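
Aside: _create_output_files chops the collage without re-rendering anything: it copies the single collage page once per output sheet and narrows each copy's /CropBox. A condensed sketch of that mechanism using the same pikepdf calls the patch relies on (Pdf.open, Pdf.new, copy_foreign, pages, CropBox); the function and file names are hypothetical:

import pikepdf

def chop(collage_path: str, tile_w: float, tile_h: float,
         cols: int, rows: int) -> pikepdf.Pdf:
    collage = pikepdf.Pdf.open(collage_path)
    output = pikepdf.Pdf.new()
    output.copy_foreign(collage.Root)  # the Root must be copied too, not only the page
    for row in range(rows):
        for col in range(cols):
            page = output.copy_foreign(collage.pages[0])
            # CropBox is [llx, lly, urx, ury] in user space units;
            # viewers and printers display only this region of the page.
            page.CropBox = [col * tile_w, row * tile_h,
                            (col + 1) * tile_w, (row + 1) * tile_h]
            output.pages.append(page)
    return output
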
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..78df6f5
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,616 @@
+[[package]]
+name = "appdirs"
+version = "1.4.4"
+description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "astroid"
+version = "2.5.6"
+description = "An abstract syntax tree for Python with inference support."
+category = "dev"
+optional = false
+python-versions = "~=3.6"
+
+[package.dependencies]
+lazy-object-proxy = ">=1.4.0"
+wrapt = ">=1.11,<1.13"
+
+[[package]]
+name = "atomicwrites"
+version = "1.4.0"
+description = "Atomic file writes."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[[package]]
+name = "attrs"
+version = "21.2.0"
+description = "Classes Without Boilerplate"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[package.extras]
+dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"]
+docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
+tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"]
+tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"]
+
+[[package]]
+name = "click"
+version = "8.0.1"
+description = "Composable command line interface toolkit"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.4"
+description = "Cross-platform colored terminal text."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[[package]]
+name = "distlib"
+version = "0.3.2"
+description = "Distribution utilities"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "filelock"
+version = "3.0.12"
+description = "A platform independent file lock."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "flake8"
+version = "3.9.2"
+description = "the modular source code checker: pep8 pyflakes and co"
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[package.dependencies]
+mccabe = ">=0.6.0,<0.7.0"
+pycodestyle = ">=2.7.0,<2.8.0"
+pyflakes = ">=2.3.0,<2.4.0"
+
+[[package]]
+name = "iniconfig"
+version = "1.1.1"
+description = "iniconfig: brain-dead simple config-ini parsing"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "isort"
+version = "5.9.1"
+description = "A Python utility / library to sort Python imports."
+category = "dev"
+optional = false
+python-versions = ">=3.6.1,<4.0"
+
+[package.extras]
+pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
+requirements_deprecated_finder = ["pipreqs", "pip-api"]
+colors = ["colorama (>=0.4.3,<0.5.0)"]
+plugins = ["setuptools"]
+
+[[package]]
+name = "lazy-object-proxy"
+version = "1.6.0"
+description = "A fast and thorough lazy object proxy."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+
+[[package]]
+name = "lxml"
+version = "4.6.3"
+description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
+
+[package.extras]
+cssselect = ["cssselect (>=0.7)"]
+html5 = ["html5lib"]
+htmlsoup = ["beautifulsoup4"]
+source = ["Cython (>=0.29.7)"]
+
+[[package]]
+name = "mccabe"
+version = "0.6.1"
+description = "McCabe checker, plugin for flake8"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "mypy"
+version = "0.910"
+description = "Optional static typing for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+
+[package.dependencies]
+mypy-extensions = ">=0.4.3,<0.5.0"
+toml = "*"
+typing-extensions = ">=3.7.4"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+python2 = ["typed-ast (>=1.4.0,<1.5.0)"]
+
+[[package]]
+name = "mypy-extensions"
+version = "0.4.3"
+description = "Experimental type system extensions for programs checked with the mypy typechecker."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "packaging"
+version = "20.9"
+description = "Core utilities for Python packages"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[package.dependencies]
+pyparsing = ">=2.0.2"
+
+[[package]]
+name = "pikepdf"
+version = "2.13.0"
+description = "Read and write PDFs with Python, powered by qpdf"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+lxml = ">=4.0"
+Pillow = ">=6.0"
+
+[package.extras]
+docs = ["ipython", "matplotlib", "pybind11", "setuptools-scm", "Sphinx (>=3)", "sphinx-issues", "sphinx-rtd-theme"]
+test = ["attrs (>=20.2.0)", "hypothesis (>=5,<7)", "Pillow (>=7,<9)", "psutil (>=5,<6)", "pybind11 (>=2.6.0)", "pytest (>=6,<7)", "pytest-cov (>=2.10.1,<3)", "pytest-forked", "pytest-xdist (>=1.28,<3)", "pytest-timeout (>=1.4.2)", "python-dateutil (>=2.8.0)", "python-xmp-toolkit (>=2.0.1)"]
+
+[[package]]
+name = "pillow"
+version = "8.2.0"
+description = "Python Imaging Library (Fork)"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "pluggy"
+version = "0.13.1"
+description = "plugin and hook calling mechanisms for python"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+
+[[package]]
+name = "py"
+version = "1.10.0"
+description = "library with cross-python path, ini-parsing, io, code, log facilities"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[[package]]
+name = "pycodestyle"
+version = "2.7.0"
+description = "Python style guide checker"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[[package]]
+name = "pyflakes"
+version = "2.3.1"
+description = "passive checker of Python programs"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[[package]]
+name = "pylint"
+version = "2.8.3"
+description = "python code static checker"
+category = "dev"
+optional = false
+python-versions = "~=3.6"
+
+[package.dependencies]
+astroid = "2.5.6"
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+isort = ">=4.2.5,<6"
+mccabe = ">=0.6,<0.7"
+toml = ">=0.7.1"
+
+[[package]]
+name = "pyparsing"
+version = "2.4.7"
+description = "Python parsing module"
+category = "dev"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+
+[[package]]
+name = "pytest"
+version = "6.2.4"
+description = "pytest: simple powerful testing with Python"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
+attrs = ">=19.2.0"
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<1.0.0a1"
+py = ">=1.8.2"
+toml = "*"
+
+[package.extras]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+
+[[package]]
+name = "toml"
+version = "0.10.2"
+description = "Python Library for Tom's Obvious, Minimal Language"
+category = "dev"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+
+[[package]]
+name = "tox"
+version = "3.23.1"
+description = "tox is a generic virtualenv management and test command line tool"
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[package.dependencies]
+colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""}
+filelock = ">=3.0.0"
+packaging = ">=14"
+pluggy = ">=0.12.0"
+py = ">=1.4.17"
+six = ">=1.14.0"
+toml = ">=0.9.4"
+virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7"
+
+[package.extras]
+docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"]
+testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "pytest-xdist (>=1.22.2)", "pathlib2 (>=2.3.3)"]
+
+[[package]]
+name = "typing-extensions"
+version = "3.10.0.0"
+description = "Backported and Experimental Type Hints for Python 3.5+"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "virtualenv"
+version = "20.4.7"
+description = "Virtual Python Environment builder"
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
+
+[package.dependencies]
+appdirs = ">=1.4.3,<2"
+distlib = ">=0.3.1,<1"
+filelock = ">=3.0.0,<4"
+six = ">=1.9.0,<2"
+
+[package.extras]
+docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"]
+testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"]
+
+[[package]]
+name = "wrapt"
+version = "1.12.1"
+description = "Module for decorators, wrappers and monkey patching."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[metadata]
+lock-version = "1.1"
+python-versions = "^3.8"
+content-hash = "79f94f48ed41bcbf7aaf41dac4bf8d755dc30470b1b4a8fef0f0f225513e61c9"
+
+[metadata.files]
+appdirs = [
+ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
+ {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
+]
+astroid = [
+ {file = "astroid-2.5.6-py3-none-any.whl", hash = "sha256:4db03ab5fc3340cf619dbc25e42c2cc3755154ce6009469766d7143d1fc2ee4e"},
+ {file = "astroid-2.5.6.tar.gz", hash = "sha256:8a398dfce302c13f14bab13e2b14fe385d32b73f4e4853b9bdfb64598baa1975"},
+]
+atomicwrites = [
+ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
+ {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
+]
+attrs = [
+ {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"},
+ {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"},
+]
+click = [
+ {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"},
+ {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"},
+]
+colorama = [
+ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
+ {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
+]
+distlib = [
+ {file = "distlib-0.3.2-py2.py3-none-any.whl", hash = "sha256:23e223426b28491b1ced97dc3bbe183027419dfc7982b4fa2f05d5f3ff10711c"},
+ {file = "distlib-0.3.2.zip", hash = "sha256:106fef6dc37dd8c0e2c0a60d3fca3e77460a48907f335fa28420463a6f799736"},
+]
+filelock = [
+ {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"},
+ {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"},
+]
+flake8 = [
+ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"},
+ {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"},
+]
+iniconfig = [
+ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
+ {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
+]
+isort = [
+ {file = "isort-5.9.1-py3-none-any.whl", hash = "sha256:8e2c107091cfec7286bc0f68a547d0ba4c094d460b732075b6fba674f1035c0c"},
+ {file = "isort-5.9.1.tar.gz", hash = "sha256:83510593e07e433b77bd5bff0f6f607dbafa06d1a89022616f02d8b699cfcd56"},
+]
+lazy-object-proxy = [
+ {file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"},
+ {file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"},
+ {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win32.whl", hash = "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e"},
+ {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93"},
+ {file = "lazy_object_proxy-1.6.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741"},
+ {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587"},
+ {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4"},
+ {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f"},
+ {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3"},
+ {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981"},
+ {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2"},
+ {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd"},
+ {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837"},
+ {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653"},
+ {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3"},
+ {file = "lazy_object_proxy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8"},
+ {file = "lazy_object_proxy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf"},
+ {file = "lazy_object_proxy-1.6.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad"},
+ {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43"},
+ {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a"},
+ {file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"},
+ {file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"},
+]
+lxml = [
+ {file = "lxml-4.6.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2"},
+ {file = "lxml-4.6.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f"},
+ {file = "lxml-4.6.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d"},
+ {file = "lxml-4.6.3-cp27-cp27m-win32.whl", hash = "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106"},
+ {file = "lxml-4.6.3-cp27-cp27m-win_amd64.whl", hash = "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee"},
+ {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f"},
+ {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4"},
+ {file = "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51"},
+ {file = "lxml-4.6.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"},
+ {file = "lxml-4.6.3-cp35-cp35m-win32.whl", hash = "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2"},
+ {file = "lxml-4.6.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4"},
+ {file = "lxml-4.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4"},
+ {file = "lxml-4.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3"},
+ {file = "lxml-4.6.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d"},
+ {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec"},
+ {file = "lxml-4.6.3-cp36-cp36m-win32.whl", hash = "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04"},
+ {file = "lxml-4.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a"},
+ {file = "lxml-4.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654"},
+ {file = "lxml-4.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0"},
+ {file = "lxml-4.6.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3"},
+ {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2"},
+ {file = "lxml-4.6.3-cp37-cp37m-win32.whl", hash = "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade"},
+ {file = "lxml-4.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b"},
+ {file = "lxml-4.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa"},
+ {file = "lxml-4.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a"},
+ {file = "lxml-4.6.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927"},
+ {file = "lxml-4.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791"},
+ {file = "lxml-4.6.3-cp38-cp38-win32.whl", hash = "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28"},
+ {file = "lxml-4.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7"},
+ {file = "lxml-4.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0"},
+ {file = "lxml-4.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1"},
+ {file = "lxml-4.6.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23"},
+ {file = "lxml-4.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969"},
+ {file = "lxml-4.6.3-cp39-cp39-win32.whl", hash = "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f"},
+ {file = "lxml-4.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83"},
+ {file = "lxml-4.6.3.tar.gz", hash = "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468"},
+]
+mccabe = [
+ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
+ {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
+]
+mypy = [
+ {file = "mypy-0.910-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457"},
+ {file = "mypy-0.910-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b94e4b785e304a04ea0828759172a15add27088520dc7e49ceade7834275bedb"},
+ {file = "mypy-0.910-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:088cd9c7904b4ad80bec811053272986611b84221835e079be5bcad029e79dd9"},
+ {file = "mypy-0.910-cp35-cp35m-win_amd64.whl", hash = "sha256:adaeee09bfde366d2c13fe6093a7df5df83c9a2ba98638c7d76b010694db760e"},
+ {file = "mypy-0.910-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ecd2c3fe726758037234c93df7e98deb257fd15c24c9180dacf1ef829da5f921"},
+ {file = "mypy-0.910-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d9dd839eb0dc1bbe866a288ba3c1afc33a202015d2ad83b31e875b5905a079b6"},
+ {file = "mypy-0.910-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:3e382b29f8e0ccf19a2df2b29a167591245df90c0b5a2542249873b5c1d78212"},
+ {file = "mypy-0.910-cp36-cp36m-win_amd64.whl", hash = "sha256:53fd2eb27a8ee2892614370896956af2ff61254c275aaee4c230ae771cadd885"},
+ {file = "mypy-0.910-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b6fb13123aeef4a3abbcfd7e71773ff3ff1526a7d3dc538f3929a49b42be03f0"},
+ {file = "mypy-0.910-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e4dab234478e3bd3ce83bac4193b2ecd9cf94e720ddd95ce69840273bf44f6de"},
+ {file = "mypy-0.910-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:7df1ead20c81371ccd6091fa3e2878559b5c4d4caadaf1a484cf88d93ca06703"},
+ {file = "mypy-0.910-cp37-cp37m-win_amd64.whl", hash = "sha256:0aadfb2d3935988ec3815952e44058a3100499f5be5b28c34ac9d79f002a4a9a"},
+ {file = "mypy-0.910-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec4e0cd079db280b6bdabdc807047ff3e199f334050db5cbb91ba3e959a67504"},
+ {file = "mypy-0.910-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:119bed3832d961f3a880787bf621634ba042cb8dc850a7429f643508eeac97b9"},
+ {file = "mypy-0.910-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:866c41f28cee548475f146aa4d39a51cf3b6a84246969f3759cb3e9c742fc072"},
+ {file = "mypy-0.910-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6e0a6e27fb364fb3853389607cf7eb3a126ad335790fa1e14ed02fba50811"},
+ {file = "mypy-0.910-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a85e280d4d217150ce8cb1a6dddffd14e753a4e0c3cf90baabb32cefa41b59e"},
+ {file = "mypy-0.910-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42c266ced41b65ed40a282c575705325fa7991af370036d3f134518336636f5b"},
+ {file = "mypy-0.910-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3c4b8ca36877fc75339253721f69603a9c7fdb5d4d5a95a1a1b899d8b86a4de2"},
+ {file = "mypy-0.910-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c0df2d30ed496a08de5daed2a9ea807d07c21ae0ab23acf541ab88c24b26ab97"},
+ {file = "mypy-0.910-cp39-cp39-win_amd64.whl", hash = "sha256:c6c2602dffb74867498f86e6129fd52a2770c48b7cd3ece77ada4fa38f94eba8"},
+ {file = "mypy-0.910-py3-none-any.whl", hash = "sha256:ef565033fa5a958e62796867b1df10c40263ea9ded87164d67572834e57a174d"},
+ {file = "mypy-0.910.tar.gz", hash = "sha256:704098302473cb31a218f1775a873b376b30b4c18229421e9e9dc8916fd16150"},
+]
+mypy-extensions = [
+ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
+ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+]
+packaging = [
+ {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"},
+ {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"},
+]
+pikepdf = [
+ {file = "pikepdf-2.13.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b18a90b4c6abaa1bf4b2b39266fa02c833be87c219b0e7cdfdb02c43e77eefe7"},
+ {file = "pikepdf-2.13.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:502ce98b1bc3e96aee9b38f2f4dc1542452d32a98f351f198ee6e2b387257ee4"},
+ {file = "pikepdf-2.13.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:4331bbab201136b83bac6977b750477e83dbb8356080cfaf99b0eca98ec12ce4"},
+ {file = "pikepdf-2.13.0-cp36-cp36m-win32.whl", hash = "sha256:a6f0774e83a72175a166ac75dd3b75320efecf506a6e5b65c007f61bb9552376"},
+ {file = "pikepdf-2.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3bc9f6842865cfaa128802cdf15d7dcae60ba86b11af46ad65db5c3e9ecdbbb6"},
+ {file = "pikepdf-2.13.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e3c03319013ec00406c09671413c7b47ac1dd23e67a872c2386cd0538d7e371"},
+ {file = "pikepdf-2.13.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:66dd2be87c0ec317f80253d50296cfcf22c81667c69894ec7d4c892b1a613fbf"},
+ {file = "pikepdf-2.13.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d19b8f611d1c660d2048511921ed8ac26b2c6a694dbf96cd668c15397c857f63"},
+ {file = "pikepdf-2.13.0-cp37-cp37m-win32.whl", hash = "sha256:8afb75b9933b2cb7eb4cd34af4fff2b97e45ba9b7c3ce2cc42fbd72458ad3b0b"},
+ {file = "pikepdf-2.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:624e2d988627c1362bf83ff341b07ddee87809c9966da4eca48d8fca11099b0a"},
+ {file = "pikepdf-2.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:33643d8ff2339e7cef803227fdf9fe96461d38d79c9bf9fd26d910e27d3c49d1"},
+ {file = "pikepdf-2.13.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:a414898948ffb27ab797b41a19869e3b68a1476044a7e6cdd24e574077f1e9b3"},
+ {file = "pikepdf-2.13.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:8cd16afcc374cf5b122870603492e31a446285c3ef5f8389bf616cb85c504db2"},
+ {file = "pikepdf-2.13.0-cp38-cp38-win32.whl", hash = "sha256:7d156055a94fc0b01656afc49602311b40f0224013092ad27373826ea034aac2"},
+ {file = "pikepdf-2.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:d7426734b3f1f22637e1b3cf0cfaacbc7c9daab8ef558e1a299eebc8266ce8da"},
+ {file = "pikepdf-2.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfe796abe75e75659c5966332e08dda1a94b2f00ffddd5547802df454def6080"},
+ {file = "pikepdf-2.13.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:9c479fc03a68ad0d34d0de59d9264d0c9f4fd7d573c3173117f287123b19c244"},
+ {file = "pikepdf-2.13.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:394b56c095d45e9a312ced15e5f24c3d2b5f690702ff066bca5da169e66da27c"},
+ {file = "pikepdf-2.13.0-cp39-cp39-win32.whl", hash = "sha256:ae8b85cd77af424d0cc07a0aee539c5da11bd545bf72193841ad5f58bd26f979"},
+ {file = "pikepdf-2.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:1bc381015edd3793bd2f458b1b7fa0412550b0769f7371b4329481892cc482c8"},
+ {file = "pikepdf-2.13.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2a6689fc87f3886cf6d801a5cb606e4464b71f0408557010f503cf02f3ce95d2"},
+ {file = "pikepdf-2.13.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:6d665d4f1f98f8b264dbe677dd59f0c80bf3ad5c739a349186ab3f076bf7e3f0"},
+ {file = "pikepdf-2.13.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ed7b6f9fab3b4eef527301a0628ecc47586593d6c85fafc3d4b3e86206025e6e"},
+ {file = "pikepdf-2.13.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:bcfc8841d911b9c3a637ffa721ab83166876c6245ccb04fdeae1580cf2f7b3f9"},
+ {file = "pikepdf-2.13.0.tar.gz", hash = "sha256:9484921319aa072f7471b823bd196879f97945e811f9d9bc0a15e52ae05a25b6"},
+]
+pillow = [
+ {file = "Pillow-8.2.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:dc38f57d8f20f06dd7c3161c59ca2c86893632623f33a42d592f097b00f720a9"},
+ {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a013cbe25d20c2e0c4e85a9daf438f85121a4d0344ddc76e33fd7e3965d9af4b"},
+ {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8bb1e155a74e1bfbacd84555ea62fa21c58e0b4e7e6b20e4447b8d07990ac78b"},
+ {file = "Pillow-8.2.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c5236606e8570542ed424849f7852a0ff0bce2c4c8d0ba05cc202a5a9c97dee9"},
+ {file = "Pillow-8.2.0-cp36-cp36m-win32.whl", hash = "sha256:12e5e7471f9b637762453da74e390e56cc43e486a88289995c1f4c1dc0bfe727"},
+ {file = "Pillow-8.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5afe6b237a0b81bd54b53f835a153770802f164c5570bab5e005aad693dab87f"},
+ {file = "Pillow-8.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:cb7a09e173903541fa888ba010c345893cd9fc1b5891aaf060f6ca77b6a3722d"},
+ {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0d19d70ee7c2ba97631bae1e7d4725cdb2ecf238178096e8c82ee481e189168a"},
+ {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:083781abd261bdabf090ad07bb69f8f5599943ddb539d64497ed021b2a67e5a9"},
+ {file = "Pillow-8.2.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c6b39294464b03457f9064e98c124e09008b35a62e3189d3513e5148611c9388"},
+ {file = "Pillow-8.2.0-cp37-cp37m-win32.whl", hash = "sha256:01425106e4e8cee195a411f729cff2a7d61813b0b11737c12bd5991f5f14bcd5"},
+ {file = "Pillow-8.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3b570f84a6161cf8865c4e08adf629441f56e32f180f7aa4ccbd2e0a5a02cba2"},
+ {file = "Pillow-8.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:031a6c88c77d08aab84fecc05c3cde8414cd6f8406f4d2b16fed1e97634cc8a4"},
+ {file = "Pillow-8.2.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:66cc56579fd91f517290ab02c51e3a80f581aba45fd924fcdee01fa06e635812"},
+ {file = "Pillow-8.2.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c32cc3145928c4305d142ebec682419a6c0a8ce9e33db900027ddca1ec39178"},
+ {file = "Pillow-8.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:624b977355cde8b065f6d51b98497d6cd5fbdd4f36405f7a8790e3376125e2bb"},
+ {file = "Pillow-8.2.0-cp38-cp38-win32.whl", hash = "sha256:5cbf3e3b1014dddc45496e8cf38b9f099c95a326275885199f427825c6522232"},
+ {file = "Pillow-8.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:463822e2f0d81459e113372a168f2ff59723e78528f91f0bd25680ac185cf797"},
+ {file = "Pillow-8.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:95d5ef984eff897850f3a83883363da64aae1000e79cb3c321915468e8c6add5"},
+ {file = "Pillow-8.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b91c36492a4bbb1ee855b7d16fe51379e5f96b85692dc8210831fbb24c43e484"},
+ {file = "Pillow-8.2.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d68cb92c408261f806b15923834203f024110a2e2872ecb0bd2a110f89d3c602"},
+ {file = "Pillow-8.2.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f217c3954ce5fd88303fc0c317af55d5e0204106d86dea17eb8205700d47dec2"},
+ {file = "Pillow-8.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5b70110acb39f3aff6b74cf09bb4169b167e2660dabc304c1e25b6555fa781ef"},
+ {file = "Pillow-8.2.0-cp39-cp39-win32.whl", hash = "sha256:a7d5e9fad90eff8f6f6106d3b98b553a88b6f976e51fce287192a5d2d5363713"},
+ {file = "Pillow-8.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:238c197fc275b475e87c1453b05b467d2d02c2915fdfdd4af126145ff2e4610c"},
+ {file = "Pillow-8.2.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0e04d61f0064b545b989126197930807c86bcbd4534d39168f4aa5fda39bb8f9"},
+ {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_i686.whl", hash = "sha256:63728564c1410d99e6d1ae8e3b810fe012bc440952168af0a2877e8ff5ab96b9"},
+ {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:c03c07ed32c5324939b19e36ae5f75c660c81461e312a41aea30acdd46f93a7c"},
+ {file = "Pillow-8.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:4d98abdd6b1e3bf1a1cbb14c3895226816e666749ac040c4e2554231068c639b"},
+ {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:aac00e4bc94d1b7813fe882c28990c1bc2f9d0e1aa765a5f2b516e8a6a16a9e4"},
+ {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:22fd0f42ad15dfdde6c581347eaa4adb9a6fc4b865f90b23378aa7914895e120"},
+ {file = "Pillow-8.2.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:e98eca29a05913e82177b3ba3d198b1728e164869c613d76d0de4bde6768a50e"},
+ {file = "Pillow-8.2.0.tar.gz", hash = "sha256:a787ab10d7bb5494e5f76536ac460741788f1fbce851068d73a87ca7c35fc3e1"},
+]
+pluggy = [
+ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
+ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
+]
+py = [
+ {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"},
+ {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"},
+]
+pycodestyle = [
+ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"},
+ {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"},
+]
+pyflakes = [
+ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"},
+ {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"},
+]
+pylint = [
+ {file = "pylint-2.8.3-py3-none-any.whl", hash = "sha256:792b38ff30903884e4a9eab814ee3523731abd3c463f3ba48d7b627e87013484"},
+ {file = "pylint-2.8.3.tar.gz", hash = "sha256:0a049c5d47b629d9070c3932d13bff482b12119b6a241a93bc460b0be16953c8"},
+]
+pyparsing = [
+ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
+ {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
+]
+pytest = [
+ {file = "pytest-6.2.4-py3-none-any.whl", hash = "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"},
+ {file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"},
+]
+six = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+toml = [
+ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
+ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
+]
+tox = [
+ {file = "tox-3.23.1-py2.py3-none-any.whl", hash = "sha256:b0b5818049a1c1997599d42012a637a33f24c62ab8187223fdd318fa8522637b"},
+ {file = "tox-3.23.1.tar.gz", hash = "sha256:307a81ddb82bd463971a273f33e9533a24ed22185f27db8ce3386bff27d324e3"},
+]
+typing-extensions = [
+ {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"},
+ {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"},
+ {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"},
+]
+virtualenv = [
+ {file = "virtualenv-20.4.7-py2.py3-none-any.whl", hash = "sha256:2b0126166ea7c9c3661f5b8e06773d28f83322de7a3ff7d06f0aed18c9de6a76"},
+ {file = "virtualenv-20.4.7.tar.gz", hash = "sha256:14fdf849f80dbb29a4eb6caa9875d476ee2a5cf76a5f5415fa2f1606010ab467"},
+]
+wrapt = [
+ {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"},
+]
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..3e1e5b3
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,103 @@
+[tool.poetry]
+name = "nobubo"
+version = "1.3.0"
+description = "Nobubo assembles a digital pdf sewing pattern and cuts it up into a desired output print size. A specialized n-up tool also suited for non-sewing purposes."
+authors = ["bytinbit <[email protected]>"]
+license = "AGPL v3.0"
+readme = "README.md"
+homepage = "https://github.com/bytinbit/nobubo"
+repository = "https://github.com/bytinbit/nobubo"
+classifiers = [
+ "Topic :: Printing",
+ "Topic :: Utilities",
+ "Topic :: Multimedia :: Graphics :: Graphics Conversion",
+ "License :: OSI Approved :: GNU Affero General Public License v3",
+ ]
+
+
+[tool.poetry.scripts]
+nobubo = "nobubo.cli:main"
+
+[tool.poetry.dependencies]
+python = "^3.8"
+click = "^8.0.0"
+pikepdf = "^2.12.0"
+
+[tool.poetry.dev-dependencies]
+pytest = "^6.2.4"
+tox = "^3.23.1"
+mypy = "^0.910"
+pylint = "^2.8.2"
+flake8 = "^3.9.2"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.mypy]
+python_version = 3.8
+
+### --strict
+warn_unused_configs = true
+disallow_any_generics = true
+disallow_subclassing_any = true
+# disallow_untyped_calls = true
+# disallow_untyped_defs = true
+disallow_incomplete_defs = true
+check_untyped_defs = true
+disallow_untyped_decorators = true
+# no_implicit_optional = true
+warn_redundant_casts = true
+warn_unused_ignores = true
+# warn_return_any = true
+# no_implicit_reexport = true
+# strict_equality = true
+
+### Other strictness flags
+warn_unreachable = true
+disallow_any_unimported = true
+
+### Output
+show_error_codes = true
+show_error_context = true
+pretty = true
+
+[[tool.mypy.overrides]]
+module = "textract.*"
+ignore_missing_imports = true
+
+[tool.tox]
+legacy_tox_ini = """
+[tox]
+isolated_build = True
+envlist = py,mypy,linting
+testpaths = "tests"
+
+[testenv]
+description = Run pytest
+deps =
+ pytest
+ pytest-cov
+ textract
+commands = pytest {posargs}
+
+[testenv:mypy]
+description = Check typing with mypy
+deps = mypy
+ pytest
+ click
+ textract
+passenv = TERM
+commands = mypy {posargs} nobubo tests
+
+[testenv:flake8]
+description = Run flake8 for linting
+deps = flake8
+commands = flake8 --max-line-length 88 nobubo tests {posargs}
+
+[testenv:pikepdf-main]
+description = Run project tests against pikepdf-main branch
+deps =
+ git+https://github.com/pikepdf/pikepdf#master
+commands = pytest {posargs}
+"""
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index fc06e0b..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-click >= 7.1.2
-pikepdf >= 1.19.3
-
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 019dd3e..0000000
--- a/setup.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from setuptools import setup
-
-with open("README.md", "r", encoding="utf-8") as fh:
- long_description = fh.read()
-
-setup(
- name="nobubo-bytinbit",
- version="1.2.0",
- description="Nobubo assembles a digital pdf sewing pattern and chops it into a desired output size to be printed.",
- long_description=long_description,
- long_description_content_type="text/markdown",
- url="https://github.com/bytinbit/nobubo",
- author="Méline Sieber",
- license="AGPLv3",
- packages=setuptools.find_packages(),
- entry_points = {
- "console_scripts": ["nobubo = nobubo.nobubo:main"]
- },
- python_requires=">=3.7",
- install_requires=["click", "pikepdf"],
- classifiers=[
- "Topic :: Printing",
- "Topic :: Utilities",
- "Topic :: Multimedia :: Graphics :: Graphics Conversion",
- "License :: OSI Approved :: GNU Affero General Public License v3",
- ],
- keywords="sewing pdf printing",
-)
| Set up tox and CI/CD
The project is growing, and a proper development setup is a must.
| 2021-06-29T16:35:29 | 0.0 | [] | [] |
|||
bytinbit/nobubo | bytinbit__nobubo-24 | a8b13786be44f9ac06e7f47d49e7b0ef65d9433e | diff --git a/nobubo/calc.py b/nobubo/calc.py
index 3049515..d43e53f 100644
--- a/nobubo/calc.py
+++ b/nobubo/calc.py
@@ -24,7 +24,7 @@
from dataclasses import dataclass
from typing import List
-import PyPDF2
+import pikepdf
from nobubo import core
@@ -41,15 +41,13 @@ class Factor:
def parse_cli_input(input_layout: (int, int, int), output_layout_cli: str, print_margin: int,
reverse_assembly: bool, input_path: str, output_path: str
) -> (core.InputProperties, core.OutputProperties):
- with open(pathlib.Path(input_path), "rb") as inputfile:
- reader = PyPDF2.PdfFileReader(inputfile, strict=False)
-
- width, height = calculate_page_dimensions(
- reader.getPage(1)) # first page (getPage(0)) may contain overview
+ with pikepdf.open(pathlib.Path(input_path)) as inputfile:
+        # the first page (index 0) may contain an overview, so use the second one
+ width, height = calculate_page_dimensions(inputfile.pages[1])
input_properties = core.InputProperties(
input_filepath=pathlib.Path(input_path),
output_path=pathlib.Path(output_path),
- number_of_pages=reader.getNumPages(),
+ number_of_pages=len(inputfile.pages),
pagesize=core.PageSize(width=width, height=height),
layout=parse_input_layouts(input_layout),
reverse_assembly=reverse_assembly)
@@ -57,7 +55,7 @@ def parse_cli_input(input_layout: (int, int, int), output_layout_cli: str, print
output_properties = core.OutputProperties(output_path=pathlib.Path(output_path),
output_layout=parse_output_layout(output_layout_cli, print_margin),
)
- return input_properties, output_properties
+ return input_properties, output_properties
 def parse_input_layouts(input_layout: (int, int, int)) -> [core.Layout]:
@@ -83,15 +81,17 @@ def calculate_pages_needed(layout: core.Layout, n_up_factor: Factor) -> int:
return math.ceil(layout.columns/n_up_factor.x) * math.ceil(layout.rows/n_up_factor.y)
-def calculate_page_dimensions(page: PyPDF2.pdf.PageObject) -> (float, float):
+def calculate_page_dimensions(page: pikepdf.Page) -> (float, float):
"""
Calculates the x, y value for the offset in default user space units as defined in the pdf standard.
- Uses the cropBox value, since this is the area visible to the printer.
- :param page: A pattern page.
+ :param page: A PDF page.
:return: list with x, y value.
"""
- return round(float(page.cropBox[2])-float(page.cropBox[0]), 2), \
- round(float(page.cropBox[3])-float(page.cropBox[1]), 2)
+ if not hasattr(page, "CropBox"):
+ box = page.MediaBox
+ else:
+ box = page.CropBox
+ return round(float(box[2])-float(box[0]), 2), round(float(box[3])-float(box[1]), 2)
def convert_to_userspaceunits(width_height: [int, int]) -> core.PageSize:
diff --git a/nobubo/disassembly.py b/nobubo/disassembly.py
index c9a50aa..1d1d8fe 100644
--- a/nobubo/disassembly.py
+++ b/nobubo/disassembly.py
@@ -22,7 +22,7 @@
from copy import copy
import pathlib
-import PyPDF2
+from pikepdf import Pdf, Page
from nobubo import core, calc, output
@@ -31,25 +31,23 @@ def create_output_files(temp_collage_paths: [pathlib.Path],
input_properties: core.InputProperties,
output_properties: core.OutputProperties):
for counter, collage_path in enumerate(temp_collage_paths):
- with collage_path.open("rb") as collagefile:
- reader = PyPDF2.PdfFileReader(collagefile, strict=False)
- collage = reader.getPage(0)
- new_outputpath = calc.generate_new_outputpath(output_properties.output_path, counter)
- print(f"\nChopping up the collage...")
- chopped_up_files = _create_output_files(collage, input_properties.pagesize,
- input_properties.layout[counter], output_properties.output_layout)
- print(f"Successfully chopped up the collage.\n")
- output.write_chops(chopped_up_files, new_outputpath)
- print(f"Final pdf written to {new_outputpath}. Enjoy your sewing :)")
-
-
-def _create_output_files(assembled_collage: PyPDF2.pdf.PageObject,
+ collage = Pdf.open(collage_path)
+ new_outputpath = calc.generate_new_outputpath(output_properties.output_path, counter)
+ print(f"\nChopping up the collage...")
+ chopped_up_files = _create_output_files(collage, input_properties.pagesize,
+ input_properties.layout[counter], output_properties.output_layout)
+ print(f"Successfully chopped up the collage.\n")
+ output.write_chops(chopped_up_files, new_outputpath)
+ print(f"Final pdf written to {new_outputpath}. Enjoy your sewing :)")
+
+
+def _create_output_files(collage: Pdf,
pagesize: core.PageSize,
current_layout: core.Layout,
- output_layout: [int]) -> PyPDF2.PdfFileWriter:
+ output_layout: [int]) -> Pdf:
"""
Chops up the collage that consists of all the pattern pages to individual pages of the desired output size.
- :param assembled_collage: One pdf page that contains all assembled pattern pages.
+    :param collage: A PDF whose single page contains all assembled pattern pages.
:param input_properties: Properties of the pdf.
:param output_layout: The desired output layout.
:return: The pdf with several pages, ready to write to disk.
@@ -59,10 +57,11 @@ def _create_output_files(assembled_collage: PyPDF2.pdf.PageObject,
lowerleft_factor = calc.Factor(x=0, y=0)
upperright_factor = calc.Factor(x=1, y=1)
- writer = PyPDF2.PdfFileWriter()
- for x in range(0, calc.calculate_pages_needed(current_layout, n_up_factor)):
- page = copy(assembled_collage)
- # cf. https://stackoverflow.com/questions/52315259/pypdf2-cant-add-multiple-cropped-pages#
+ output = Pdf.new()
+ output.copy_foreign(collage.Root) # TODO must Root be updated if new pages are added?
+ # Root must be copied too, not only the page: thanks to https://github.com/cfcurtis/sewingutils for this!
+ for i in range(0, calc.calculate_pages_needed(current_layout, n_up_factor)):
+ page = output.copy_foreign(collage.pages[0])
lowerleft: core.Point = _calculate_lowerleft_point(lowerleft_factor, n_up_factor, pagesize)
upperright: core.Point = _calculate_upperright_point(upperright_factor, n_up_factor, current_layout, pagesize)
@@ -71,11 +70,10 @@ def _create_output_files(assembled_collage: PyPDF2.pdf.PageObject,
colsleft = _calculate_colsrows_left(current_layout.columns, upperright_factor.x, n_up_factor.x)
lowerleft_factor, upperright_factor = _adjust_factors(lowerleft_factor, upperright_factor, colsleft)
- page.cropBox.lowerLeft = (lowerleft.x, lowerleft.y)
- page.cropBox.upperRight = (upperright.x, upperright.y)
- writer.addPage(page)
+ page.CropBox = [lowerleft.x, lowerleft.y, upperright.x, upperright.y]
+ output.pages.append(page)
- return writer
+ return output
def _calculate_colsrows_left(layout_element: int, factor: int, nup_factor: int) -> int:
diff --git a/nobubo/output.py b/nobubo/output.py
index bb52b22..c74b1f5 100644
--- a/nobubo/output.py
+++ b/nobubo/output.py
@@ -14,18 +14,17 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with Nobubo. If not, see <https://www.gnu.org/licenses/>.
-import PyPDF2
+from pikepdf import Pdf
import pathlib
import sys
from nobubo import core, calc
-def write_chops(pypdf2_writer: PyPDF2.PdfFileWriter, output_path: pathlib.Path):
+def write_chops(collage: Pdf, output_path: pathlib.Path):
print("Writing file...")
try:
- with open(output_path, "wb") as output:
- pypdf2_writer.write(output)
+ collage.save(output_path)
except OSError as e:
print(f"While writing the file, this error occurred:\n{e}")
sys.exit(1)
@@ -33,10 +32,7 @@ def write_chops(pypdf2_writer: PyPDF2.PdfFileWriter, output_path: pathlib.Path):
def write_collage(temp_collage_paths: [pathlib.Path], output_properties: core.OutputProperties):
for counter, collage_path in enumerate(temp_collage_paths):
- writer = PyPDF2.PdfFileWriter()
new_outputpath = calc.generate_new_outputpath(output_properties.output_path, counter)
- with collage_path.open("rb") as collagefile:
- reader = PyPDF2.PdfFileReader(collagefile, strict=False)
- writer.addPage(reader.getPage(0))
- write_chops(writer, new_outputpath)
+ temp_collage = Pdf.open(collage_path)
+ temp_collage.save(new_outputpath)
print(f"Collage written to {new_outputpath}. Enjoy your sewing :)")
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 60a3076..fc06e0b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,3 @@
click >= 7.1.2
-PyPDF2 >= 1.26.0
+pikepdf >= 1.19.3
+
diff --git a/setup.py b/setup.py
index 4ed6e0b..019dd3e 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@
setup(
name="nobubo-bytinbit",
- version="1.1.0",
+ version="1.2.0",
description="Nobubo assembles a digital pdf sewing pattern and chops it into a desired output size to be printed.",
long_description=long_description,
long_description_content_type="text/markdown",
@@ -17,7 +17,7 @@
"console_scripts": ["nobubo = nobubo.nobubo:main"]
},
python_requires=">=3.7",
- install_requires=["click", "PyPDF2"],
+ install_requires=["click", "pikepdf"],
classifiers=[
"Topic :: Printing",
"Topic :: Utilities",
| Switch from PyPDF2 to another module
[PyPDF2 development has stalled](https://github.com/mstamy2/PyPDF2/wiki/State-of-PyPDF2-and-Future-Plans), which means it is wise to switch to another library for manipulating PDFs (a minimal migration sketch follows the list of alternatives below).
Possible Alternatives:
- [pikepdf](https://github.com/pikepdf/pikepdf)
- ~~convert to Pillow/PIL object with [pdf2image](https://pypi.org/project/pdf2image/), manipulate content, convert back to pdf using Pillow's built-in `save()`-method~~
- ~~[PyPDF3](https://github.com/mstamy2/PyPDF3)~~, last commit 2 years ago
- [PyPDF4](https://github.com/claird/PyPDF4), last commit May 2019
- [pdfrw](https://github.com/pmaupin/pdfrw)
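
For reference, the core of the pikepdf migration in the patch above boils down to opening the PDF, copying the collage page into a fresh `Pdf`, and setting `CropBox` directly. A rough sketch under those assumptions (file names and crop box values are placeholders):

```python
import pikepdf

# Open the one-page collage and crop it into a single output page
with pikepdf.open('collage.pdf') as collage:  # placeholder path
    output = pikepdf.Pdf.new()
    # The document Root must be copied along with the page (as the patch above does)
    output.copy_foreign(collage.Root)
    page = output.copy_foreign(collage.pages[0])
    # CropBox is [lower-left x, lower-left y, upper-right x, upper-right y] in PDF user space units
    page.CropBox = [0, 0, 595, 842]  # placeholder: an A4-sized crop
    output.pages.append(page)
    output.save('chopped.pdf')  # placeholder path
```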
| 2021-01-11T14:28:12 | 0.0 | [] | [] |
|||
ASFHyP3/burst2safe | ASFHyP3__burst2safe-73 | c025d3d6d3a2f071cc5b91add5b4b7e6873e561f | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9d8c21f..d1ddcd3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,19 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [1.1.0]
+
+### Added
+* Preview directory with all components except quick-look
+* KML and Preview SAFE components
+* KML, product-preview, and schema components to manifest
+
+### Changed
+* Creation time of measurement tiffs is now set to the end of SLC processing. This ensures consistent filenames of repeatedly created SAFEs because the name is dependent on measurement tiff checksums.
+
+### Fixed
+* KML preview file is now included to support processors that grab the SAFE footprint from this file.
+
## [1.0.0]
### Added
diff --git a/src/burst2safe/data/logo.png b/src/burst2safe/data/logo.png
new file mode 100644
index 0000000..8967fbc
Binary files /dev/null and b/src/burst2safe/data/logo.png differ
diff --git a/src/burst2safe/data/support_236/s1-level-1-quicklook.xsd b/src/burst2safe/data/support_236/s1-level-1-quicklook.xsd
new file mode 100644
index 0000000..e4dfe6a
--- /dev/null
+++ b/src/burst2safe/data/support_236/s1-level-1-quicklook.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-level-1-quicklook.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="ENTER_NAME_OF_ROOT_ELEMENT_HERE">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_236/s1-map-overlay.xsd b/src/burst2safe/data/support_236/s1-map-overlay.xsd
new file mode 100644
index 0000000..eb1bdc8
--- /dev/null
+++ b/src/burst2safe/data/support_236/s1-map-overlay.xsd
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-map-overlay.xsd 10288 2014-01-10 18:17:28Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:complexType name="kmlType">
+ <xsd:sequence>
+ <xsd:element name="Document" type="xsd:anyType"/>
+ </xsd:sequence>
+ </xsd:complexType>
+ <xsd:element name="kml" type="kmlType"/>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_236/s1-product-preview.xsd b/src/burst2safe/data/support_236/s1-product-preview.xsd
new file mode 100644
index 0000000..11dd98f
--- /dev/null
+++ b/src/burst2safe/data/support_236/s1-product-preview.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-product-preview.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="html">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_245/s1-level-1-quicklook.xsd b/src/burst2safe/data/support_245/s1-level-1-quicklook.xsd
new file mode 100644
index 0000000..e4dfe6a
--- /dev/null
+++ b/src/burst2safe/data/support_245/s1-level-1-quicklook.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-level-1-quicklook.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="ENTER_NAME_OF_ROOT_ELEMENT_HERE">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_245/s1-map-overlay.xsd b/src/burst2safe/data/support_245/s1-map-overlay.xsd
new file mode 100644
index 0000000..eb1bdc8
--- /dev/null
+++ b/src/burst2safe/data/support_245/s1-map-overlay.xsd
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-map-overlay.xsd 10288 2014-01-10 18:17:28Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:complexType name="kmlType">
+ <xsd:sequence>
+ <xsd:element name="Document" type="xsd:anyType"/>
+ </xsd:sequence>
+ </xsd:complexType>
+ <xsd:element name="kml" type="kmlType"/>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_245/s1-product-preview.xsd b/src/burst2safe/data/support_245/s1-product-preview.xsd
new file mode 100644
index 0000000..11dd98f
--- /dev/null
+++ b/src/burst2safe/data/support_245/s1-product-preview.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-product-preview.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="html">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_260/s1-level-1-quicklook.xsd b/src/burst2safe/data/support_260/s1-level-1-quicklook.xsd
new file mode 100644
index 0000000..e4dfe6a
--- /dev/null
+++ b/src/burst2safe/data/support_260/s1-level-1-quicklook.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-level-1-quicklook.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="ENTER_NAME_OF_ROOT_ELEMENT_HERE">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_260/s1-map-overlay.xsd b/src/burst2safe/data/support_260/s1-map-overlay.xsd
new file mode 100644
index 0000000..eb1bdc8
--- /dev/null
+++ b/src/burst2safe/data/support_260/s1-map-overlay.xsd
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-map-overlay.xsd 10288 2014-01-10 18:17:28Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:complexType name="kmlType">
+ <xsd:sequence>
+ <xsd:element name="Document" type="xsd:anyType"/>
+ </xsd:sequence>
+ </xsd:complexType>
+ <xsd:element name="kml" type="kmlType"/>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_260/s1-product-preview.xsd b/src/burst2safe/data/support_260/s1-product-preview.xsd
new file mode 100644
index 0000000..11dd98f
--- /dev/null
+++ b/src/burst2safe/data/support_260/s1-product-preview.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-product-preview.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="html">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_290/s1-level-1-quicklook.xsd b/src/burst2safe/data/support_290/s1-level-1-quicklook.xsd
new file mode 100644
index 0000000..e4dfe6a
--- /dev/null
+++ b/src/burst2safe/data/support_290/s1-level-1-quicklook.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-level-1-quicklook.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="ENTER_NAME_OF_ROOT_ELEMENT_HERE">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_290/s1-map-overlay.xsd b/src/burst2safe/data/support_290/s1-map-overlay.xsd
new file mode 100644
index 0000000..eb1bdc8
--- /dev/null
+++ b/src/burst2safe/data/support_290/s1-map-overlay.xsd
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-map-overlay.xsd 10288 2014-01-10 18:17:28Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:complexType name="kmlType">
+ <xsd:sequence>
+ <xsd:element name="Document" type="xsd:anyType"/>
+ </xsd:sequence>
+ </xsd:complexType>
+ <xsd:element name="kml" type="kmlType"/>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_290/s1-product-preview.xsd b/src/burst2safe/data/support_290/s1-product-preview.xsd
new file mode 100644
index 0000000..11dd98f
--- /dev/null
+++ b/src/burst2safe/data/support_290/s1-product-preview.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-product-preview.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="html">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_340/s1-level-1-quicklook.xsd b/src/burst2safe/data/support_340/s1-level-1-quicklook.xsd
new file mode 100644
index 0000000..e4dfe6a
--- /dev/null
+++ b/src/burst2safe/data/support_340/s1-level-1-quicklook.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-level-1-quicklook.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="ENTER_NAME_OF_ROOT_ELEMENT_HERE">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_340/s1-map-overlay.xsd b/src/burst2safe/data/support_340/s1-map-overlay.xsd
new file mode 100644
index 0000000..eb1bdc8
--- /dev/null
+++ b/src/burst2safe/data/support_340/s1-map-overlay.xsd
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-map-overlay.xsd 10288 2014-01-10 18:17:28Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:complexType name="kmlType">
+ <xsd:sequence>
+ <xsd:element name="Document" type="xsd:anyType"/>
+ </xsd:sequence>
+ </xsd:complexType>
+ <xsd:element name="kml" type="kmlType"/>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_340/s1-product-preview.xsd b/src/burst2safe/data/support_340/s1-product-preview.xsd
new file mode 100644
index 0000000..11dd98f
--- /dev/null
+++ b/src/burst2safe/data/support_340/s1-product-preview.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-product-preview.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="html">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_371/s1-level-1-quicklook.xsd b/src/burst2safe/data/support_371/s1-level-1-quicklook.xsd
new file mode 100644
index 0000000..e4dfe6a
--- /dev/null
+++ b/src/burst2safe/data/support_371/s1-level-1-quicklook.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-level-1-quicklook.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="ENTER_NAME_OF_ROOT_ELEMENT_HERE">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_371/s1-map-overlay.xsd b/src/burst2safe/data/support_371/s1-map-overlay.xsd
new file mode 100644
index 0000000..eb1bdc8
--- /dev/null
+++ b/src/burst2safe/data/support_371/s1-map-overlay.xsd
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-map-overlay.xsd 10288 2014-01-10 18:17:28Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:complexType name="kmlType">
+ <xsd:sequence>
+ <xsd:element name="Document" type="xsd:anyType"/>
+ </xsd:sequence>
+ </xsd:complexType>
+ <xsd:element name="kml" type="kmlType"/>
+</xsd:schema>
diff --git a/src/burst2safe/data/support_371/s1-product-preview.xsd b/src/burst2safe/data/support_371/s1-product-preview.xsd
new file mode 100644
index 0000000..11dd98f
--- /dev/null
+++ b/src/burst2safe/data/support_371/s1-product-preview.xsd
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- $Id: s1-product-preview.xsd 8756 2012-07-30 20:13:02Z [email protected] $ -->
+<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
+ <xsd:element name="html">
+ <xsd:annotation>
+ <xsd:documentation>Comment describing your root element</xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+</xsd:schema>
diff --git a/src/burst2safe/manifest.py b/src/burst2safe/manifest.py
index c0890d2..d9489f4 100644
--- a/src/burst2safe/manifest.py
+++ b/src/burst2safe/manifest.py
@@ -1,3 +1,4 @@
+import hashlib
from copy import deepcopy
from pathlib import Path
from typing import List
@@ -9,6 +10,40 @@
from burst2safe.utils import calculate_crc16
+SAFE_NS = 'http://www.esa.int/safe/sentinel-1.0'
+NAMESPACES = {
+ 'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
+ 'gml': 'http://www.opengis.net/gml',
+ 'xfdu': 'urn:ccsds:schema:xfdu:1',
+ 'safe': SAFE_NS,
+ 's1': f'{SAFE_NS}/sentinel-1',
+ 's1sar': f'{SAFE_NS}/sentinel-1/sar',
+ 's1sarl1': f'{SAFE_NS}/sentinel-1/sar/level-1',
+ 's1sarl2': f'{SAFE_NS}/sentinel-1/sar/level-2',
+ 'gx': 'http://www.google.com/kml/ext/2.2',
+}
+
+
+def get_footprint_string(bbox: Polygon, x_first=True) -> str:
+ """Get a string representation of the footprint of the product.
+
+ Args:
+ bbox: The bounding box of the product
+ x_first: Whether to put the x coordinate first or second
+
+ Returns:
+ A string representation of the product footprint
+ """
+ coords = [(np.round(y, 6), np.round(x, 6)) for x, y in bbox.exterior.coords]
+ # TODO: order assumes descending
+ coords = [coords[2], coords[3], coords[0], coords[1]]
+ if x_first:
+ coords_str = ' '.join([f'{x},{y}' for x, y in coords])
+ else:
+ coords_str = ' '.join([f'{y},{x}' for x, y in coords])
+ return coords_str
+
+
class Manifest:
"""Class representing a SAFE manifest."""
@@ -34,19 +69,6 @@ def __init__(
self.data_objects = data_objects
self.bbox = bbox
self.template = template_manifest
-
- safe_ns = 'http://www.esa.int/safe/sentinel-1.0'
- self.namespaces = {
- 'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
- 'gml': 'http://www.opengis.net/gml',
- 'xfdu': 'urn:ccsds:schema:xfdu:1',
- 'safe': safe_ns,
- 's1': f'{safe_ns}/sentinel-1',
- 's1sar': f'{safe_ns}/sentinel-1/sar',
- 's1sarl1': f'{safe_ns}/sentinel-1/sar/level-1',
- 's1sarl2': f'{safe_ns}/sentinel-1/sar/level-2',
- 'gx': 'http://www.google.com/kml/ext/2.2',
- }
self.version = 'esa/safe/sentinel-1.0/sentinel-1/sar/level-1/slc/standard/iwdp'
# Updated by methods
@@ -59,7 +81,7 @@ def __init__(
def create_information_package_map(self):
"""Create the information package map."""
- xdfu_ns = self.namespaces['xfdu']
+ xdfu_ns = NAMESPACES['xfdu']
information_package_map = ET.Element(f'{{{xdfu_ns}}}informationPackageMap')
parent_content_unit = ET.Element(
f'{{{xdfu_ns}}}contentUnit',
@@ -87,16 +109,22 @@ def create_metadata_section(self):
'generalProductInformation',
'acquisitionPeriod',
'measurementFrameSet',
+ 's1Level1ProductSchema',
+ 's1Level1NoiseSchema',
+ 's1Level1RfiSchema',
+ 's1Level1CalibrationSchema',
+ 's1ObjectTypesSchema',
+ 's1Level1MeasurementSchema',
+ 's1Level1ProductPreviewSchema',
+ 's1Level1QuicklookSchema',
+ 's1MapOverlaySchema',
]
- section = 'metadataSection'
- [metadata_section.append(deepcopy(x)) for x in self.template.find(section) if x.get('ID') in ids_to_keep]
+ for obj in self.template.find('metadataSection'):
+ if obj.get('ID') in ids_to_keep:
+ metadata_section.append(deepcopy(obj))
- new_coords = [(np.round(y, 6), np.round(x, 6)) for x, y in self.bbox.exterior.coords]
- # TODO: only works for descending
- new_coords = [new_coords[2], new_coords[3], new_coords[0], new_coords[1]]
- new_coords = ' '.join([f'{x},{y}' for x, y in new_coords])
coordinates = metadata_section.find('.//{*}coordinates')
- coordinates.text = new_coords
+ coordinates.text = get_footprint_string(self.bbox)
self.metadata_section = metadata_section
@@ -112,7 +140,7 @@ def assemble(self):
self.create_metadata_section()
self.create_data_object_section()
- manifest = ET.Element('{%s}XFDU' % self.namespaces['xfdu'], nsmap=self.namespaces)
+ manifest = ET.Element('{%s}XFDU' % NAMESPACES['xfdu'], nsmap=NAMESPACES)
manifest.set('version', self.version)
manifest.append(self.information_package_map)
manifest.append(self.metadata_section)
@@ -133,3 +161,206 @@ def write(self, out_path: Path, update_info: bool = True) -> None:
if update_info:
self.path = out_path
self.crc = calculate_crc16(self.path)
+
+
+class Kml:
+ """Class representing a SAFE manifest."""
+
+ def __init__(self, bbox: Polygon):
+ """Initialize a KML object.
+
+ Args:
+ bbox: The bounding box of the product
+ """
+ self.bbox = bbox
+ self.xml = None
+
+ def assemble(self):
+ """Assemble the components of the SAFE KML preview file."""
+ kml = ET.Element('kml', nsmap=NAMESPACES)
+ document = ET.SubElement(kml, 'Document')
+ doc_name = ET.SubElement(document, 'name')
+ doc_name.text = 'Sentinel-1 Map Overlay'
+
+ folder = ET.SubElement(document, 'Folder')
+ folder_name = ET.SubElement(folder, 'name')
+ folder_name.text = 'Sentinel-1 Scene Overlay'
+
+ ground_overlay = ET.SubElement(folder, 'GroundOverlay')
+ ground_overlay_name = ET.SubElement(ground_overlay, 'name')
+ ground_overlay_name.text = 'Sentinel-1 Image Overlay'
+ icon = ET.SubElement(ground_overlay, 'Icon')
+ href = ET.SubElement(icon, 'href')
+ # TODO: we intentionally don't create this image because we don't know how to.
+ href.text = 'quick-look.png'
+ lat_lon_quad = ET.SubElement(ground_overlay, f'{{{NAMESPACES["gx"]}}}LatLonQuad')
+ coordinates = ET.SubElement(lat_lon_quad, 'coordinates')
+ coordinates.text = get_footprint_string(self.bbox, x_first=False)
+
+ kml_tree = ET.ElementTree(kml)
+ ET.indent(kml_tree, space=' ')
+ self.xml = kml_tree
+
+ def write(self, out_path: Path, update_info: bool = True) -> None:
+ """Write the SAFE kml to a file.
+
+ Args:
+ out_path: The path to write the manifest to
+ update_info: Whether to update the path
+ """
+ self.xml.write(out_path, pretty_print=True, xml_declaration=True, encoding='utf-8')
+ if update_info:
+ self.path = out_path
+ with open(out_path, 'rb') as f:
+ file_bytes = f.read()
+ self.size_bytes = len(file_bytes)
+ self.md5 = hashlib.md5(file_bytes).hexdigest()
+
+ def update_path(self, safe_path: Path):
+ """Update the path based on new a SAFE path.
+
+ Args:
+ safe_path: The new SAFE path
+ """
+ parts = self.path.parts
+ parent_index = parts.index(safe_path.parent.name)
+ self.path = safe_path / Path(*parts[parent_index + 2 :])
+
+
+class Preview:
+ """Class representing a product preview HTML file."""
+
+ def __init__(
+ self,
+ name: str,
+ product: List[str],
+ calibration: List[str],
+ measurement: List[str],
+ rfi: List[str] = [],
+ ):
+ """Initialize a Preview object.
+
+ Args:
+ name: The name of the product
+ product: A list of product annotation files
+ calibration: A list of calibration annotation files
+            measurement: A list of measurement files
+ rfi: A list of rfi annotation files
+ """
+ self.name = '_'.join(name.split('_')[:-1])
+ self.product = product
+ self.calibration = calibration
+ self.measurement = measurement
+ self.rfi = rfi
+ self.preview = ['map-overlay.kml', 'product-preview.html', 'quick-look.png']
+ self.preview_icon = ['logo.png']
+ self.support = [
+ 's1-level-1-product.xsd',
+ 's1-level-1-noise.xsd',
+ 's1-level-1-calibration.xsd',
+ 's1-object-types.xsd',
+ 's1-map-overlay.xsd',
+ 's1-product-preview.xsd',
+ 's1-level-1-measurement.xsd',
+ 's1-level-1-quicklook.xsd',
+ ]
+ if len(self.rfi) > 0:
+ self.support.append('s1-level-1-rfi.xsd')
+ self.html = None
+ self.path = None
+
+ def create_base(self):
+ """Create the base HTML product preview."""
+        # Create the head section
+        nsmap = {'xsd': 'http://www.w3.org/2001/XMLSchema', 'fn': 'http://www.w3.org/2005/xpath-functions'}
+ html = ET.Element('html', nsmap=nsmap)
+ head = ET.SubElement(html, 'head')
+
+ # Meta element
+ ET.SubElement(head, 'meta', attrib={'http-equiv': 'Content-Type', 'content': 'text/html; charset=UTF-8'})
+
+ # Title element
+ title = ET.SubElement(head, 'title')
+ title.text = self.name
+
+ # Style element
+ style = ET.SubElement(head, 'style', attrib={'type': 'text/css'})
+ style.text = """
+ h1 {font-size:20px}
+ h2 {font-size: 18px}
+ """
+
+ # Create the body section
+ body = ET.SubElement(html, 'body')
+
+ # Add image and title
+ ET.SubElement(body, 'img', attrib={'src': 'icons/logo.png'})
+ h1 = ET.SubElement(body, 'h1')
+ h1.text = self.name
+
+ # Add manifest link
+ h2_manifest = ET.SubElement(body, 'h2')
+ a_manifest = ET.SubElement(h2_manifest, 'a', attrib={'href': '../manifest.safe'})
+ a_manifest.text = 'manifest.safe'
+
+ return html
+
+ def add_subsection(self, body, name, files):
+ """Add a file set subsection to the HTML preview."""
+
+ h2 = ET.SubElement(body, 'h2')
+ h2.text = name
+ ul = ET.SubElement(body, 'ul')
+ for file in files:
+ li = ET.SubElement(ul, 'li')
+ a = ET.SubElement(li, 'a', attrib={'href': f'../{name}/{file}'})
+ a.text = file
+
+ def add_img(self, body):
+ """Add the image to the HTML preview."""
+ ET.SubElement(body, 'img', attrib={'style': 'float:right', 'src': '../preview/quick-look.png'})
+
+ def assemble(self):
+ """Assemble the HTML preview."""
+ html = self.create_base()
+ body = html.find('.//body')
+
+ self.add_subsection(body, 'annotation', self.product)
+ self.add_subsection(body, 'annotation/calibration', self.calibration)
+ if len(self.rfi) > 0:
+ self.add_subsection(body, 'annotation/rfi', self.rfi)
+ self.add_subsection(body, 'measurement', self.measurement)
+ self.add_img(body)
+ self.add_subsection(body, 'preview', self.preview)
+ self.add_subsection(body, 'preview/icons', self.preview_icon)
+ self.add_subsection(body, 'support', self.support)
+
+ html_tree = ET.ElementTree(html)
+ ET.indent(html_tree, space=' ')
+ self.html = html_tree
+
+ def write(self, out_path: Path, update_info=True) -> None:
+ """Write the html to a file.
+
+ Args:
+ out_path: The path to write the annotation to.
+ update_info: Whether to update the size and md5 attributes of the html.
+ """
+ self.html.write(out_path, pretty_print=True, xml_declaration=True, encoding='utf-8')
+
+ if update_info:
+ self.path = out_path
+ with open(out_path, 'rb') as f:
+ file_bytes = f.read()
+ self.size_bytes = len(file_bytes)
+ self.md5 = hashlib.md5(file_bytes).hexdigest()
+
+ def update_path(self, safe_path: Path):
+ """Update the path based on new a SAFE path.
+
+ Args:
+ safe_path: The new SAFE path
+ """
+ parts = self.path.parts
+ parent_index = parts.index(safe_path.parent.name)
+ self.path = safe_path / Path(*parts[parent_index + 2 :])
diff --git a/src/burst2safe/measurement.py b/src/burst2safe/measurement.py
index d98421f..65f3ddf 100644
--- a/src/burst2safe/measurement.py
+++ b/src/burst2safe/measurement.py
@@ -15,17 +15,26 @@
class Measurement:
"""Class representing a measurement GeoTIFF."""
- def __init__(self, burst_infos: Iterable[BurstInfo], gcps: Iterable[GeoPoint], ipf_version: str, image_number: int):
+ def __init__(
+ self,
+ burst_infos: Iterable[BurstInfo],
+ gcps: Iterable[GeoPoint],
+ creation_time: datetime,
+ ipf_version: str,
+ image_number: int,
+ ):
"""Initialize a Measurement object.
Args:
burst_infos: A list of BurstInfo objects
gcps: A list of GeoPoint objects
+ creation_time: The creation time of the measurement
ipf_version: The IPF version of the measurement data
image_number: The image number of the measurement
"""
self.burst_infos = burst_infos
self.gcps = gcps
+ self.creation_time = creation_time
self.version = ipf_version
self.image_number = image_number
@@ -81,15 +90,6 @@ def get_burst_byte_offsets(self):
byte_offsets = [offsets[self.burst_length * i] for i in range(len(self.burst_infos))]
return byte_offsets
- def get_time_tag(self) -> str:
- """Get the current time as a time tag.
- This is a separate method to allow for easy mocking in tests.
-
- Returns:
- The time tag as a string
- """
- return datetime.strftime(datetime.now(), '%Y:%m:%d %H:%M:%S')
-
def add_metadata(self, dataset: gdal.Dataset):
"""Add metadata to an existing GDAL dataset.
@@ -101,7 +101,7 @@ def add_metadata(self, dataset: gdal.Dataset):
srs.ImportFromEPSG(4326)
dataset.SetGCPs(gdal_gcps, srs.ExportToWkt())
- dataset.SetMetadataItem('TIFFTAG_DATETIME', self.get_time_tag())
+ dataset.SetMetadataItem('TIFFTAG_DATETIME', datetime.strftime(self.creation_time, '%Y:%m:%d %H:%M:%S'))
dataset.SetMetadataItem('TIFFTAG_IMAGEDESCRIPTION', f'Sentinel-1{self.s1_platform} IW SLC L1')
dataset.SetMetadataItem('TIFFTAG_SOFTWARE', f'Sentinel-1 IPF {self.version}')
diff --git a/src/burst2safe/safe.py b/src/burst2safe/safe.py
index eed7ac0..5080bf6 100644
--- a/src/burst2safe/safe.py
+++ b/src/burst2safe/safe.py
@@ -1,5 +1,6 @@
import bisect
import shutil
+from datetime import datetime
from itertools import product
from pathlib import Path
from typing import Iterable, List, Optional, Tuple
@@ -7,7 +8,8 @@
import numpy as np
from shapely.geometry import MultiPolygon, Polygon
-from burst2safe.manifest import Manifest
+from burst2safe.base import create_content_unit, create_data_object, create_metadata_object
+from burst2safe.manifest import Kml, Manifest, Preview
from burst2safe.product import Product
from burst2safe.swath import Swath
from burst2safe.utils import BurstInfo, drop_duplicates, flatten, get_subxml_from_metadata, optional_wd
@@ -36,10 +38,30 @@ def __init__(self, burst_infos: Iterable[BurstInfo], all_anns: bool = False, wor
self.swaths = []
self.blank_products = []
self.manifest = None
+ self.kml = None
self.version = self.get_ipf_version(self.burst_infos[0].metadata_path)
self.major_version, self.minor_version = [int(x) for x in self.version.split('.')]
self.support_dir = self.get_support_dir()
+ self.creation_time = self.get_creation_time()
+
+ def get_creation_time(self) -> datetime:
+ """Get the creation time of the SAFE file.
+ Always set to the latest SLC processing stop time.
+
+ Returns:
+ The creation time of the SAFE file
+ """
+ metadata_paths = list(set([x.metadata_path for x in self.burst_infos]))
+ manifests = [get_subxml_from_metadata(metadata_path, 'manifest') for metadata_path in metadata_paths]
+ manifest = manifests[0]
+ desired_tag = './/{http://www.esa.int/safe/sentinel-1.0}processing'
+ creation_times = []
+ for manifest in manifests:
+ slc_processing = [elem for elem in manifest.findall(desired_tag) if elem.get('name') == 'SLC Processing'][0]
+ creation_times.append(datetime.strptime(slc_processing.get('stop'), '%Y-%m-%dT%H:%M:%S.%f'))
+ creation_time = max(creation_times)
+ return creation_time
def get_support_dir(self) -> Path:
"""Find the support directory version closest to but not exceeding the IPF major.minor verion"""
@@ -50,7 +72,8 @@ def get_support_dir(self) -> Path:
if safe_version in support_versions:
support_version = safe_version
- support_version = support_versions[bisect.bisect_left(support_versions, safe_version) - 1]
+ else:
+ support_version = support_versions[bisect.bisect_left(support_versions, safe_version) - 1]
return data_dir / f'support_{support_version}'
@@ -187,15 +210,19 @@ def create_dir_structure(self) -> Path:
"""
measurements_dir = self.safe_path / 'measurement'
annotations_dir = self.safe_path / 'annotation'
+ preview_dir = self.safe_path / 'preview'
+ icon_dir = preview_dir / 'icons'
calibration_dir = annotations_dir / 'calibration'
rfi_dir = annotations_dir / 'rfi'
calibration_dir.mkdir(parents=True, exist_ok=True)
measurements_dir.mkdir(parents=True, exist_ok=True)
+ icon_dir.mkdir(parents=True, exist_ok=True)
if self.major_version >= 3 and self.minor_version >= 40:
rfi_dir.mkdir(parents=True, exist_ok=True)
shutil.copytree(self.support_dir, self.safe_path / 'support', dirs_exist_ok=True)
+ shutil.copy(self.support_dir.parent / 'logo.png', icon_dir / 'logo.png')
@staticmethod
def create_representative_burst_set(template_bursts: Iterable[BurstInfo], swath: str, pol: str) -> List[BurstInfo]:
@@ -268,7 +295,7 @@ def create_safe_components(self) -> None:
for swath, polarization in product(swaths, polarizations):
image_number += 1
burst_infos = self.grouped_burst_infos[swath][polarization]
- swath = Swath(burst_infos, self.safe_path, self.version, image_number)
+ swath = Swath(burst_infos, self.safe_path, self.version, self.creation_time, image_number)
swath.assemble()
swath.write()
self.swaths.append(swath)
@@ -280,6 +307,48 @@ def create_safe_components(self) -> None:
blank_product.write(product_name)
self.blank_products.append(blank_product)
+ def add_preview_components(self, content_units: List, metadata_objects: List, data_objects: List) -> List:
+ """Add the preview components to unit lists.
+
+ Args:
+ content_units: A list of content units
+ metadata_objects: A list of metadata objects
+ data_objects: A list of data objects
+
+ Returns:
+ The updated content_units, metadata_objects, and data_objects lists
+ """
+ overlay_repid = 's1Level1MapOverlaySchema'
+ preview_repid = 's1Level1ProductPreviewSchema'
+ quicklook_repid = 's1Level1QuicklookSchema'
+ overlay_content_unit = create_content_unit('mapoverlay', 'Metadata Unit', overlay_repid)
+ preview_content_unit = create_content_unit('productpreview', 'Metadata Unit', preview_repid)
+ quicklook_content_unit = create_content_unit('quicklook', 'Measurement Data Unit', quicklook_repid)
+ content_units += [overlay_content_unit, preview_content_unit, quicklook_content_unit]
+
+ metadata_objects += [create_metadata_object('mapoverlay'), create_metadata_object('productpreview')]
+
+        # TODO: add quicklook data object someday
+ overlay_data_object = create_data_object(
+ 'mapoverlay',
+ './preview/map-overlay.kml',
+ overlay_repid,
+ 'text/xml',
+ self.kml.size_bytes,
+ self.kml.md5,
+ )
+ preview_data_object = create_data_object(
+ 'productpreview',
+ './preview/product-preview.html',
+ preview_repid,
+ 'text/html',
+ self.preview.size_bytes,
+ self.preview.md5,
+ )
+ data_objects += [overlay_data_object, preview_data_object]
+
+ return content_units, metadata_objects, data_objects
+
def compile_manifest_components(self) -> Tuple[List, List, List]:
"""Compile the manifest components for all files within the SAFE file.
@@ -305,6 +374,9 @@ def compile_manifest_components(self) -> Tuple[List, List, List]:
metadata_objects.append(metadata_object)
data_objects.append(date_object)
+ content_units, metadata_objects, data_objects = self.add_preview_components(
+ content_units, metadata_objects, data_objects
+ )
return content_units, metadata_objects, data_objects
def create_manifest(self) -> None:
@@ -317,6 +389,22 @@ def create_manifest(self) -> None:
manifest.write(manifest_name)
self.manifest = manifest
+ def create_preview(self):
+ """Create the support files for the SAFE file."""
+ kml = Kml(self.get_bbox())
+ kml.assemble()
+ kml.write(self.safe_path / 'preview' / 'map-overlay.kml')
+ self.kml = kml
+
+ product_names = [s.product_name.name for s in self.swaths]
+ calibration_names = [s.noise_name.name for s in self.swaths] + [s.calibration_name.name for s in self.swaths]
+ measurement_names = [s.measurement_name.name for s in self.swaths]
+ rfi_names = [s.rfi_name.name for s in self.swaths if s.has_rfi]
+ preview = Preview(self.name, product_names, calibration_names, measurement_names, rfi_names)
+ preview.assemble()
+ preview.write(self.safe_path / 'preview' / 'product-preview.html')
+ self.preview = preview
+
def update_product_identifier(self) -> None:
"""Update the product identifier using the CRC of the manifest file."""
new_new = self.get_name(unique_id=self.manifest.crc)
@@ -324,8 +412,12 @@ def update_product_identifier(self) -> None:
if new_path.exists():
shutil.rmtree(new_path)
shutil.move(self.safe_path, new_path)
+
self.name = new_new
self.safe_path = new_path
+
+ self.kml.update_path(self.safe_path)
+ self.preview.update_path(self.safe_path)
for swath in self.swaths:
swath.update_paths(self.safe_path)
@@ -333,6 +425,7 @@ def create_safe(self) -> Path:
"""Create the SAFE file."""
self.create_dir_structure()
self.create_safe_components()
+ self.create_preview()
self.create_manifest()
self.update_product_identifier()
return self.safe_path
diff --git a/src/burst2safe/swath.py b/src/burst2safe/swath.py
index b1b67e5..bb1a1bb 100644
--- a/src/burst2safe/swath.py
+++ b/src/burst2safe/swath.py
@@ -15,17 +15,33 @@
class Swath:
"""Class representing a single swath (and polarization) of a SAFE file."""
- def __init__(self, burst_infos: Iterable[BurstInfo], safe_path: Path, version: str, image_number: int):
- """Initialize a Swath object."""
+ def __init__(
+ self,
+ burst_infos: Iterable[BurstInfo],
+ safe_path: Path,
+ version: str,
+ creation_time: datetime,
+ image_number: int,
+ ):
+ """Initialize a Swath object.
+
+ Args:
+ burst_infos: A list of BurstInfo objects
+ safe_path: The path to the SAFE directory
+ version: The IPF version of the SAFE file
+ creation_time: The creation time of the SAFE file
+ image_number: The image number of the swath
+ """
self.check_burst_group_validity(burst_infos)
self.burst_infos = sorted(burst_infos, key=lambda x: x.burst_id)
self.safe_path = safe_path
+ self.version = version
+ self.creation_time = creation_time
self.image_number = image_number
self.swath = self.burst_infos[0].swath
self.polarization = self.burst_infos[0].polarization
self.name = self.get_swath_name(self.burst_infos, self.safe_path, self.image_number)
- self.version = version
self.major_version, self.minor_version = [int(x) for x in self.version.split('.')]
self.measurement_name = self.safe_path / 'measurement' / f'{self.name}.tiff'
@@ -125,7 +141,9 @@ def assemble(self):
for component in self.annotations:
component.assemble()
- self.measurement = Measurement(self.burst_infos, self.product.gcps, self.version, self.image_number)
+ self.measurement = Measurement(
+ self.burst_infos, self.product.gcps, self.creation_time, self.version, self.image_number
+ )
def write(self, update_info: bool = True):
"""Write the Swath componets to the SAFE directory.
| Missing KMLs for topsStack in ISCE 2
Thank you @forrestfwilliams for this project! The burst SAFE files work perfectly well with `topsApp`. I'm now trying to get them to work with `topsStack` using `stackSentinel.py`, but it seems like `topsStack.Stack` is looking for a `map-overlay.kml` file that doesn't exist in the burst SAFE files. For example:
```
stackSentinel.py -s safe -o orbits -a aux -W offset -b '59.919 60.301 -140.829 -140.169' -d nasadem_wgs84.dem
Using default ISCE Path: /Users/briel/apps/isce2/packages/isce
Number of SAFE files found: 4
...
File "/Users/briel/src/isce2/contrib/stack/topsStack/stackSentinel.py", line 312, in get_dates
pnts = safeObj.getkmlQUAD(safe)
File "/Users/briel/src/isce2/contrib/stack/topsStack/Stack.py", line 1765, in getkmlQUAD
kmlFile = open( file, 'r' ).read(-1)
FileNotFoundError: [Errno 2] No such file or directory: 'safe/S1A_IW_SLC__1SSV_20190410T025508_20190410T025513_026722_030014_75EA.SAFE/preview/map-overlay.kml'
```
Are you currently planning on generating something like an empty KML with the burst boundaries, or do you have another strategy for making `burst2stack` compatible with `topsStack`?
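
For context, the `preview/map-overlay.kml` that topsStack opens is a small file whose only essential payload is the scene footprint in a `gx:LatLonQuad` element. A rough sketch of generating one with lxml, in the spirit of the `Kml` class in the patch above (the corner coordinates below are placeholders):

```python
from lxml import etree as ET

GX = 'http://www.google.com/kml/ext/2.2'

def make_map_overlay(corner_coords):
    """Build a minimal map-overlay.kml carrying the scene footprint."""
    kml = ET.Element('kml', nsmap={'gx': GX})
    document = ET.SubElement(kml, 'Document')
    folder = ET.SubElement(document, 'Folder')
    overlay = ET.SubElement(folder, 'GroundOverlay')
    icon = ET.SubElement(overlay, 'Icon')
    ET.SubElement(icon, 'href').text = 'quick-look.png'  # referenced, but need not exist
    quad = ET.SubElement(overlay, f'{{{GX}}}LatLonQuad')
    ET.SubElement(quad, 'coordinates').text = corner_coords
    return ET.ElementTree(kml)

# Four footprint corners as comma-separated coordinate pairs (placeholder values)
tree = make_map_overlay('-140.8,59.9 -140.2,59.9 -140.2,60.3 -140.8,60.3')
tree.write('map-overlay.kml', pretty_print=True, xml_declaration=True, encoding='utf-8')
```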
| Hey @bryanvriel thanks for the issue. Apologies, I've been on vacation. We have two options to add support for topsStack:
1. Modify topsStack to grab boundaries from the manifest.safe file instead of the overlay.kml (see [hyp3-isce2](https://github.com/ASFHyP3/hyp3-isce2/blob/6176b5cf04142511130b74f3c5fe79cf2e34f094/src/hyp3_isce2/slc.py#L49) for an example, and the sketch after this list)
2. As you suggest, include the KML file in the re-created SAFEs.
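
For option 1, the footprint is already available in `manifest.safe`, so a processor can read it from there instead of the KML. A rough sketch with lxml (the manifest path is a placeholder; the `gml:coordinates` element holds space-separated coordinate pairs):

```python
from lxml import etree

def footprint_from_manifest(manifest_path):
    """Read the footprint corner coordinates from a SAFE manifest."""
    root = etree.parse(manifest_path).getroot()
    # Same element the patch above writes: <gml:coordinates> inside the measurement frame set
    coords_text = root.find('.//{*}coordinates').text
    return [tuple(float(value) for value in pair.split(',')) for pair in coords_text.split()]

corners = footprint_from_manifest('S1A_IW_SLC__..._75EA.SAFE/manifest.safe')  # placeholder path
```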
I can start looking into option 2 to see how feasible it is, and we can fall back to 1 if need be. | 2024-08-07T13:26:09 | 0.0 | [] | [] |
||
ASFHyP3/burst2safe | ASFHyP3__burst2safe-43 | 1cb72723010e6e3abb8e39713c6318a3c76c9444 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8d7a8eb..57e8c58 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [0.3.5]
+
+### Fixed
+* Polarization code now accurately reflects bursts contained in SAFE.
+* Measurement GeoTiff metadata now correctly specifies Sentinel-1 A or B.
+
+### Added
+* CLI argument for specifying output directory.
+
## [0.3.4]
### Added
diff --git a/src/burst2safe/burst2safe.py b/src/burst2safe/burst2safe.py
index 18f430b..af89d71 100644
--- a/src/burst2safe/burst2safe.py
+++ b/src/burst2safe/burst2safe.py
@@ -12,9 +12,9 @@
from shapely import box
from shapely.geometry import Polygon
+from burst2safe.auth import get_earthdata_credentials
from burst2safe.safe import Safe
from burst2safe.utils import BurstInfo, download_url_with_retries, get_burst_infos, optional_wd
-from burst2safe.auth import get_earthdata_credentials
warnings.filterwarnings('ignore')
@@ -167,6 +167,7 @@ def main() -> None:
parser.add_argument('--orbit', type=int, help='The absolute orbit number of the bursts')
parser.add_argument('--bbox', type=float, nargs=4, help='Bounding box of the bursts (W S E N in lat/lon)')
parser.add_argument('--pols', type=str, nargs='+', help='The polarizations of the bursts (i.e., VV VH)')
+ parser.add_argument('--output-dir', type=str, default=None, help='Output directory to save to')
parser.add_argument('--keep-files', action='store_true', default=False, help='Keep the intermediate files')
args = parser.parse_args()
@@ -181,4 +182,5 @@ def main() -> None:
footprint=args.bbox,
polarizations=args.pols,
keep_files=args.keep_files,
+ work_dir=args.output_dir,
)
diff --git a/src/burst2safe/measurement.py b/src/burst2safe/measurement.py
index 3a7e70a..d98421f 100644
--- a/src/burst2safe/measurement.py
+++ b/src/burst2safe/measurement.py
@@ -21,6 +21,7 @@ def __init__(self, burst_infos: Iterable[BurstInfo], gcps: Iterable[GeoPoint], i
Args:
burst_infos: A list of BurstInfo objects
gcps: A list of GeoPoint objects
+ ipf_version: The IPF version of the measurement data
image_number: The image number of the measurement
"""
self.burst_infos = burst_infos
@@ -29,6 +30,7 @@ def __init__(self, burst_infos: Iterable[BurstInfo], gcps: Iterable[GeoPoint], i
self.image_number = image_number
self.swath = self.burst_infos[0].swath
+ self.s1_platform = self.burst_infos[0].slc_granule[2].upper()
burst_lengths = sorted(list(set([info.length for info in burst_infos])))
if len(burst_lengths) != 1:
@@ -100,8 +102,7 @@ def add_metadata(self, dataset: gdal.Dataset):
dataset.SetGCPs(gdal_gcps, srs.ExportToWkt())
dataset.SetMetadataItem('TIFFTAG_DATETIME', self.get_time_tag())
- # TODO make sure A/B is being set correctly.
- dataset.SetMetadataItem('TIFFTAG_IMAGEDESCRIPTION', 'Sentinel-1A IW SLC L1')
+ dataset.SetMetadataItem('TIFFTAG_IMAGEDESCRIPTION', f'Sentinel-1{self.s1_platform} IW SLC L1')
dataset.SetMetadataItem('TIFFTAG_SOFTWARE', f'Sentinel-1 IPF {self.version}')
def create_geotiff(self, out_path: Path, update_info=True):
diff --git a/src/burst2safe/safe.py b/src/burst2safe/safe.py
index 0e27d6b..f71bfa2 100644
--- a/src/burst2safe/safe.py
+++ b/src/burst2safe/safe.py
@@ -28,7 +28,7 @@ def __init__(self, burst_infos: Iterable[BurstInfo], work_dir: Optional[Path] =
self.check_group_validity(self.burst_infos)
self.grouped_burst_infos = self.group_burst_infos(self.burst_infos)
- self.name = self.get_name(self.burst_infos)
+ self.name = self.get_name()
self.safe_path = self.work_dir / self.name
self.swaths = []
self.manifest = None
@@ -101,8 +101,7 @@ def check_group_validity(burst_infos: Iterable[BurstInfo]):
if np.abs(max_diff) > 1:
raise ValueError(f'Products from swaths {swath1} and {swath2} do not overlap')
- @staticmethod
- def get_name(burst_infos: Iterable[BurstInfo], unique_id: str = '0000') -> str:
+ def get_name(self, unique_id: str = '0000') -> str:
"""Create a name for the SAFE file.
Args:
@@ -112,12 +111,18 @@ def get_name(burst_infos: Iterable[BurstInfo], unique_id: str = '0000') -> str:
Returns:
The name of the SAFE file
"""
- platform, beam_mode, product_type = burst_infos[0].slc_granule.split('_')[:3]
- product_info = f'1SS{burst_infos[0].polarization[0]}'
- min_date = min([x.date for x in burst_infos]).strftime('%Y%m%dT%H%M%S')
- max_date = max([x.date for x in burst_infos]).strftime('%Y%m%dT%H%M%S')
- absolute_orbit = f'{burst_infos[0].absolute_orbit:06d}'
- mission_data_take = burst_infos[0].slc_granule.split('_')[-2]
+
+ platform, beam_mode, product_type = self.burst_infos[0].slc_granule.split('_')[:3]
+
+ pol_codes = {'HH': 'SH', 'VV': 'SV', 'HH_HV': 'DH', 'VH_VV': 'DV'}
+ pols = sorted(list(set([x.polarization for x in self.burst_infos])))
+ pol_code = pol_codes['_'.join(pols)]
+ product_info = f'1S{pol_code}'
+
+ min_date = min([x.date for x in self.burst_infos]).strftime('%Y%m%dT%H%M%S')
+ max_date = max([x.date for x in self.burst_infos]).strftime('%Y%m%dT%H%M%S')
+ absolute_orbit = f'{self.burst_infos[0].absolute_orbit:06d}'
+ mission_data_take = self.burst_infos[0].slc_granule.split('_')[-2]
product_name = f'{platform}_{beam_mode}_{product_type}__{product_info}_{min_date}_{max_date}_{absolute_orbit}_{mission_data_take}_{unique_id}.SAFE'
return product_name
@@ -232,7 +237,7 @@ def create_manifest(self):
def update_product_identifier(self):
"""Update the product identifier using the CRC of the manifest file."""
- new_new = self.get_name(self.burst_infos, unique_id=self.manifest.crc)
+ new_new = self.get_name(unique_id=self.manifest.crc)
new_path = self.work_dir / new_new
if new_path.exists():
shutil.rmtree(new_path)
| Fix measurement Sentinel-1 A or B metadata
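For reference, the fix derives both values directly from the burst metadata: the platform letter comes from the third character of the SLC granule name, and the SAFE polarization code comes from the set of burst polarizations. A minimal sketch (the granule name and polarizations are illustrative):

```python
slc_granule = 'S1B_IW_SLC__1SDV_20200604T022251_20200604T022318_021902_029766_3EA8'  # illustrative
platform = slc_granule[2].upper()  # 'B', so the image description is 'Sentinel-1B IW SLC L1'

pol_codes = {'HH': 'SH', 'VV': 'SV', 'HH_HV': 'DH', 'VH_VV': 'DV'}
pols = sorted({'VV', 'VH'})           # polarizations present in the bursts (illustrative)
pol_code = pol_codes['_'.join(pols)]  # 'VH_VV' -> 'DV'
```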
| 2024-05-16T13:46:50 | 0.0 | [] | [] |
|||
ASFHyP3/burst2safe | ASFHyP3__burst2safe-2 | d5a5be9a506de9267a4589df52f8e48290b762f0 | diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..d138f91
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,1 @@
+* @forrestfwilliams
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..bb7aab6
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,8 @@
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "weekly"
+ labels:
+ - "bumpless"
diff --git a/.github/workflows/build-and-deploy.yml b/.github/workflows/build-and-deploy.yml
new file mode 100644
index 0000000..7a914a3
--- /dev/null
+++ b/.github/workflows/build-and-deploy.yml
@@ -0,0 +1,37 @@
+name: Build and upload to PyPI
+
+on:
+ workflow_dispatch:
+
+jobs:
+ build-n-publish:
+ name: Build and publish Python distributions to PyPI and TestPyPI
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ - name: Checkout lastest tagged version
+ run: git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.x"
+ - name: Install pypa/build
+ run: >-
+ python3 -m
+ pip install
+ build
+ --user
+ - name: Build a binary wheel and a source tarball
+ run: >-
+ python3 -m
+ build
+ --sdist
+ --wheel
+ --outdir dist/
+ .
+ - name: Publish distribution to PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ password: ${{ secrets.PYPI_API_TOKEN }}
diff --git a/.github/workflows/bump-version.yml b/.github/workflows/bump-version.yml
new file mode 100644
index 0000000..723a20b
--- /dev/null
+++ b/.github/workflows/bump-version.yml
@@ -0,0 +1,16 @@
+name: Tag New Version
+
+on:
+ push:
+ branches:
+ - main
+
+jobs:
+ call-bump-version-workflow:
+ uses: ASFHyP3/actions/.github/workflows/[email protected]
+ with:
+ user: forrest-bot
+ email: [email protected]
+ secrets:
+ USER_TOKEN: ${{ secrets.FORREST_BOT_PAK }}
+
diff --git a/.github/workflows/changelog-check.yml b/.github/workflows/changelog-check.yml
new file mode 100644
index 0000000..3ce6f42
--- /dev/null
+++ b/.github/workflows/changelog-check.yml
@@ -0,0 +1,18 @@
+name: Changelog updated?
+
+on:
+ pull_request:
+ types:
+ - opened
+ - labeled
+ - unlabeled
+ - synchronize
+ branches:
+ - main
+ - develop
+
+jobs:
+ call-changelog-check-workflow:
+ uses: ASFHyP3/actions/.github/workflows/[email protected]
+ secrets:
+ USER_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/labeled-pr-check.yml b/.github/workflows/labeled-pr-check.yml
new file mode 100644
index 0000000..66ba502
--- /dev/null
+++ b/.github/workflows/labeled-pr-check.yml
@@ -0,0 +1,15 @@
+name: Is PR labeled?
+
+on:
+ pull_request:
+ types:
+ - opened
+ - labeled
+ - unlabeled
+ - synchronize
+ branches:
+ - main
+
+jobs:
+ call-labeled-pr-check-workflow:
+ uses: ASFHyP3/actions/.github/workflows/[email protected]
diff --git a/.github/workflows/release-checklist-comment.yml b/.github/workflows/release-checklist-comment.yml
new file mode 100644
index 0000000..2b2268e
--- /dev/null
+++ b/.github/workflows/release-checklist-comment.yml
@@ -0,0 +1,16 @@
+name: Checklist comment
+
+on:
+ pull_request:
+ types:
+ - opened
+ branches:
+ - main
+
+jobs:
+ call-release-workflow:
+ uses: ASFHyP3/actions/.github/workflows/[email protected]
+ permissions:
+ pull-requests: write
+ secrets:
+ USER_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..f00225e
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,17 @@
+name: Create Release
+
+on:
+ push:
+ tags:
+ - 'v*'
+
+jobs:
+ call-release-workflow:
+ uses: ASFHyP3/actions/.github/workflows/[email protected]
+ with:
+ release_prefix: burst2safe
+ release_branch: main # Optional; default shown
+ develop_branch: develop # Optional; default shown
+ sync_pr_label: forrest-bot # Optional; default shown
+ secrets:
+ USER_TOKEN: ${{ secrets.FORREST_BOT_PAK }}
diff --git a/.github/workflows/static-analysis.yml b/.github/workflows/static-analysis.yml
new file mode 100644
index 0000000..f8ee9c9
--- /dev/null
+++ b/.github/workflows/static-analysis.yml
@@ -0,0 +1,32 @@
+name: Static analysis
+
+on: [pull_request]
+
+jobs:
+ call-secrets-analysis-workflow:
+ # Docs: https://github.com/ASFHyP3/actions
+ uses: ASFHyP3/actions/.github/workflows/[email protected]
+
+ check-with-black:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: psf/black@stable
+ with:
+ options: "--check --diff --color"
+ src: "."
+
+ check-with-ruff:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Install Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install ruff
+ - name: Run Ruff
+ run: ruff check --output-format github . src/burst2safe/*.py
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..bc81a4b
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,19 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
+and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+
+## [0.0.1]
+
+### Added
+* Create project structure and CI/CD tooling
+
+## [0.0.0]
+
+### Added
+* Initial version of project
+
diff --git a/environment.yml b/environment.yml
new file mode 100644
index 0000000..c122f5d
--- /dev/null
+++ b/environment.yml
@@ -0,0 +1,17 @@
+name: burst2safe
+channels:
+ - conda-forge
+ - nodefaults
+dependencies:
+ - python>=3.9
+ - pip
+ - gdal
+ - boto3
+ - tqdm
+ - numpy
+ - requests
+ - lxml
+ - asf_search
+ # For packaging, and testing
+ - pytest
+ - pytest-cov
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..82b41a4
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,67 @@
+[build-system]
+requires = ["setuptools>=61.0", "setuptools_scm[toml]>=6.2"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools]
+include-package-data = true
+zip-safe = false
+
+[tool.setuptools.packages.find]
+where = ["src"]
+
+[tool.setuptools_scm]
+
+[project]
+name = "burst2safe"
+requires-python = ">=3.9"
+dynamic = ["version"]
+authors = [
+ {name="Forrest Williams", email="[email protected]"},
+]
+description = "A package for converting ASF-derived Sentinel-1 burst SLC products to the ESA SAFE format"
+readme = "README.md"
+classifiers=[
+ "Intended Audience :: Science/Research",
+ "Natural Language :: English",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+]
+dependencies = [
+ "gdal",
+ "boto3",
+ "tqdm",
+ "numpy",
+ "requests",
+ "lxml",
+ "asf_search",
+]
+
+[project.urls]
+Homepage = "https://github.com/forrestfwilliams/burst2safe"
+"Bug Tracker" ="https://github.com/forrestfwilliams/burst2safe/issues"
+
+[project.scripts]
+burst2safe = "burst2safe.burst2safe:main"
+
+[project.optional-dependencies]
+develop = [
+ "pytest",
+]
+
+[tool.pytest.ini_options]
+minversion = "6.0"
+addopts = '-ra -q -m "not integration"'
+markers = ["integration"]
+testpaths = ["tests"]
+
+[tool.black]
+line-length = 120
+skip-string-normalization = true
+include = '\.pyx?$'
+
+[tool.ruff]
+line-length = 120
diff --git a/src/burst2safe/burst2safe.py b/src/burst2safe/burst2safe.py
new file mode 100644
index 0000000..654e2c7
--- /dev/null
+++ b/src/burst2safe/burst2safe.py
@@ -0,0 +1,2 @@
+def main():
+ print('hello world')
| Create project structure and CI/CD
| 2024-03-07T21:22:04 | 0.0 | [] | [] |
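As a quick sanity check of the scaffolding above (hedged: this assumes an editable install via `pip install -e .`), the console-script entry point declared under `[project.scripts]` resolves to the placeholder `main`:

```python
# pyproject.toml registers the 'burst2safe' script as burst2safe.burst2safe:main.
from burst2safe.burst2safe import main

main()  # prints: hello world
```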
|||
georgebv/drf-pydantic | georgebv__drf-pydantic-30 | b5b8e663bf5298750d5afda72b425c779edc3da6 | diff --git a/pyproject.toml b/pyproject.toml
index f12cd5f..e1f99f0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "drf-pydantic"
-version = "2.5.0"
+version = "2.5.1"
description = "Use pydantic with the Django REST framework"
license = "MIT"
authors = ["George Bocharov <[email protected]>"]
diff --git a/src/drf_pydantic/parse.py b/src/drf_pydantic/parse.py
index eaf7c69..156fac0 100644
--- a/src/drf_pydantic/parse.py
+++ b/src/drf_pydantic/parse.py
@@ -46,6 +46,10 @@
datetime.date: serializers.DateField,
datetime.time: serializers.TimeField,
datetime.timedelta: serializers.DurationField,
+ # Scalar collections
+ list: serializers.ListField,
+ tuple: serializers.ListField,
+ dict: serializers.DictField,
}
| Dict as JSONField
I have a pydantic model with a `dict` field that I would usually map to a DRF `JSONField`, but instead I get this error:
`dict is not a supported scalar type.`
How come this mapping between dict and [JSONField](https://www.django-rest-framework.org/api-guide/fields/#jsonfield) is not supported?
**Edit**: ...or even a [DictField](https://www.django-rest-framework.org/api-guide/fields/#dictfield), though that would assume the ability to detect the `child` attribute, while JSONField would allow anything compatible with JSON primitives 🤔
| @ragecryx Can you provide a code sample to reproduce this error?
Yes, if you copy-paste the following code into an interactive Python shell (assuming `drf_pydantic` is installed):
```python
from datetime import datetime
from drf_pydantic import BaseModel as DRFBaseModel
class AvailabilityRequest(DRFBaseModel):
start_time: datetime
end_time: datetime
poi: dict
```
produces the error/exception I mentioned.
My intention is to accept [geojson](https://geojson.org/) in the `poi` field. I have done this in the past with JSONField in DRF serializers, but in this case my `AvailabilityRequest` does not correspond to a DB model, so I can't use a DRF `ModelSerializer`, and a plain DRF `Serializer` doesn't help either because I want to pass the `AvailabilityRequest` instance around to the various functions that do the actual work. | 2024-08-28T00:08:22 | 0.0 | [] | []
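With the patch above applied, a sketch of the behavior the reporter asked for: `dict` now resolves to a DRF `DictField`. The `.drf_serializer` attribute is drf-pydantic's generated serializer class; treat the exact attribute name as an assumption to verify against your installed version:

```python
from datetime import datetime

from drf_pydantic import BaseModel


class AvailabilityRequest(BaseModel):
    start_time: datetime
    end_time: datetime
    poi: dict  # e.g. a GeoJSON payload


# `poi` now becomes a DictField instead of raising
# "dict is not a supported scalar type."
serializer = AvailabilityRequest.drf_serializer()
print(serializer.fields['poi'])
```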
||
Alir3z4/html2text | Alir3z4__html2text-410 | ff0db816d31eeb711cf0299e94cbd99526325bdd | diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index fdc7048..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-language: python
-cache: pip
-
-matrix:
- include:
- - python: 3.12
- env: TOXENV=black
- - python: 3.12
- env: TOXENV=flake8
- - python: 3.12
- env: TOXENV=mypy
- - python: 3.12
- env: TOXENV=isort
- - python: 3.8
- env: TOXENV=py38
- - python: 3.9
- env: TOXENV=py39
- - python: 3.10
- env: TOXENV=py310
- - python: 3.11
- env: TOXENV=py310
- - python: 3.12
- env: TOXENV=py310
- - python: pypy3
- env: TOXENV=pypy3
-
-install:
- - pip install tox
-script:
- - tox
-after_success:
- - pip install coveralls
- - coveralls
diff --git a/ChangeLog.rst b/ChangeLog.rst
index 99d125d..48a62c1 100644
--- a/ChangeLog.rst
+++ b/ChangeLog.rst
@@ -1,3 +1,10 @@
+Unreleased
+==========
+----
+
+* Fixes #409: IndexError on empty strong mark.
+
+
2024.2.25
=========
----
diff --git a/html2text/__init__.py b/html2text/__init__.py
index 937ea9d..bd28a9e 100644
--- a/html2text/__init__.py
+++ b/html2text/__init__.py
@@ -438,6 +438,10 @@ def handle_tag(
if (
start
and self.preceding_data
+ # When `self.strong_mark` is set to empty, the next condition
+ # will cause IndexError since it's trying to match the data
+ # with the first character of the `self.strong_mark`.
+ and len(self.strong_mark) > 0
and self.preceding_data[-1] == self.strong_mark[0]
):
strong = " " + self.strong_mark
diff --git a/setup.cfg b/setup.cfg
index 7b0fab2..d2d5921 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -42,10 +42,12 @@ html2text = py.typed
[flake8]
max_line_length = 88
extend-ignore = E203
+extend-exclude = env/
[isort]
combine_as_imports = True
profile = black
+extend_skip = env/
[mypy]
python_version = 3.8
| IndexError on empty strong mark on version 2024.2.25
- html2text 2024.2.25
- Python 3.9+
- Test script
✅ default
```python
converter = html2text.HTML2Text()
converter.emphasis_mark = "_"
converter.strong_mark = "**"
string = "A <b>B</b> <i>C</i>."
result = converter.handle(string)
print(result)
# output: A **B** _C_.
```
✅ emphasis emptied `''`
```python
converter = html2text.HTML2Text()
converter.emphasis_mark = ""
converter.strong_mark = "**"
string = "A <b>B</b> <i>C</i>."
result = converter.handle(string)
print(result)
# output: A **B** C.
```
❌ strong emptied `''`
```python
converter = html2text.HTML2Text()
converter.emphasis_mark = "_"
converter.strong_mark = ""
string = "A <b>B</b> <i>C</i>."
result = converter.handle(string)
print(result)
# expected output: A B _C_.
```
```text
Traceback (most recent call last):
File "script.py", line 29, in <module>
main()
File "script.py", line 24, in main
result = converter.handle(string)
File "test/venv/lib/python3.9/site-packages/html2text/__init__.py", line 145, in handle
self.feed(data)
File "test/venv/lib/python3.9/site-packages/html2text/__init__.py", line 141, in feed
super().feed(data)
File "/opt/homebrew/Cellar/[email protected]/3.9.18_1/Frameworks/Python.framework/Versions/3.9/lib/python3.9/html/parser.py", line 110, in feed
self.goahead(0)
File "/opt/homebrew/Cellar/[email protected]/3.9.18_1/Frameworks/Python.framework/Versions/3.9/lib/python3.9/html/parser.py", line 170, in goahead
k = self.parse_starttag(i)
File "/opt/homebrew/Cellar/[email protected]/3.9.18_1/Frameworks/Python.framework/Versions/3.9/lib/python3.9/html/parser.py", line 344, in parse_starttag
self.handle_starttag(tag, attrs)
File "test/venv/lib/python3.9/site-packages/html2text/__init__.py", line 194, in handle_starttag
self.handle_tag(tag, dict(attrs), start=True)
File "test/venv/lib/python3.9/site-packages/html2text/__init__.py", line 441, in handle_tag
and self.preceding_data[-1] == self.strong_mark[0]
IndexError: string index out of range
```
| 2024-02-26T20:20:08 | 0.0 | [] | [] |
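A minimal sketch of the behavior after the guard above is in place, reusing the reporter's failing script: with an empty `strong_mark`, the space-insertion heuristic is skipped instead of indexing into an empty string.

```python
import html2text

converter = html2text.HTML2Text()
converter.emphasis_mark = "_"
converter.strong_mark = ""  # previously raised IndexError in handle_tag

result = converter.handle("A <b>B</b> <i>C</i>.")
print(result)  # expected output: A B _C_.
```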
|||
kjappelbaum/mofdscribe | kjappelbaum__mofdscribe-446 | f95a6a2df106ff8d8116758a0de4b7c41f2730e4 | diff --git a/setup.cfg b/setup.cfg
index ef2f8b0e..65260a9c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -101,9 +101,9 @@ all =
pyeqeq
nglview
lint =
- isort < 5
+ isort
black
- flake8 < 5
+ flake8
pre-commit
bumpversion
tests =
@@ -177,17 +177,17 @@ strictness = short
#########################
[flake8]
ignore =
- S301 # pickle
- S403 # pickle
+ S301
+ S403
S404
S603
- W503 # Line break before binary operator (flake8 is wrong)
- E203 # whitespace before ':'
- S101 # Complaining about assert statements
- D101 # Docstring missing
- D102 # Docstring missing
- D103 # Docstring missing
- D104 # Docstring missing
+ W503
+ E203
+ S101
+ D101
+ D102
+ D103
+ D104
D400
exclude =
.tox,
diff --git a/src/mofdscribe/featurizers/bu/bu_featurizer.py b/src/mofdscribe/featurizers/bu/bu_featurizer.py
index 9e31461e..080bafd5 100644
--- a/src/mofdscribe/featurizers/bu/bu_featurizer.py
+++ b/src/mofdscribe/featurizers/bu/bu_featurizer.py
@@ -103,7 +103,6 @@ def _extract_bbs(
nodes = [boxed_molecule(node.molecule) for node in fragments.nodes]
if mofbbs is not None:
-
linkers = list(mofbbs.linkers) if mofbbs.linkers is not None else []
nodes = list(mofbbs.nodes) if mofbbs.nodes is not None else []
types = [type(node) for node in nodes] + [type(linker) for linker in linkers]
diff --git a/src/mofdscribe/featurizers/chemistry/racs.py b/src/mofdscribe/featurizers/chemistry/racs.py
index fe6981f3..42739df1 100644
--- a/src/mofdscribe/featurizers/chemistry/racs.py
+++ b/src/mofdscribe/featurizers/chemistry/racs.py
@@ -49,7 +49,6 @@ def _compute_racs(
site = structure_graph.structure[start_atom]
for neighbor in neighbors:
-
n = structure_graph.structure[neighbor]
for prop in properties:
if prop in ("I", 1):
diff --git a/src/mofdscribe/featurizers/topology/_tda_helpers.py b/src/mofdscribe/featurizers/topology/_tda_helpers.py
index 6877bab1..b2d58744 100644
--- a/src/mofdscribe/featurizers/topology/_tda_helpers.py
+++ b/src/mofdscribe/featurizers/topology/_tda_helpers.py
@@ -9,6 +9,7 @@
from pymatgen.core import Structure
from pymatgen.transformations.advanced_transformations import CubicSupercellTransformation
+from mofdscribe.featurizers.utils import flat
from mofdscribe.featurizers.utils.aggregators import MA_ARRAY_AGGREGATORS
from mofdscribe.featurizers.utils.substructures import filter_element
@@ -20,15 +21,135 @@ def construct_pds_cached(coords, periodic=False, weights: Optional[Collection] =
return construct_pds(coords, periodic=periodic, weights=weights)
+# def _get_homology_generators(
+# filtration, persistence: Optional["dionysus._dionysus.ReducedMatrix"] = None
+# ) -> dict:
+# import dionysus as d
+# from moleculetda.construct_pd import get_persistence
+
+# if persistence is None:
+# persistence = get_persistence(filtration)
+
+# homology_generators = defaultdict(lambda: defaultdict(list))
+
+# for i, c in tqdm(enumerate(persistence), total=len(persistence)):
+# try:
+
+
+# death = filtration[i].data
+# points_a = list(filtration[i])
+# points_b = [list(filtration[x.index]) for x in c]
+# dim = len(points_b[-1]) - 1
+# data_b = [filtration[x.index].data for x in c]
+# birth = data_b[-1]
+
+# all_points = points_a + points_b
+# all_points = list(set(flat(all_points)))
+# if birth < death:
+# homology_generators[dim][(birth, death)].append(all_points)
+# except Exception as e:
+# pass
+
+# return homology_generators
+
+
+def _get_representative_cycles(filtration, persistence, dimension):
+ import dionysus as d
+
+ def data_representation_of_cycle(filtration, cycle):
+ return np.array(flat([list(filtration[s.index]) for s in cycle]))
+
+ diagrams = d.init_diagrams(persistence, filtration)
+ diagram = diagrams[dimension]
+ cycles = {}
+
+ intervals = sorted(diagram, key=lambda d: d.death - d.birth, reverse=True)
+
+ for interval in intervals:
+ if persistence.pair(interval.data) != persistence.unpaired:
+ cycle_raw = persistence[persistence.pair(interval.data)]
+
+ # Break dionysus iterator representation so it becomes a list
+ cycle = [s for s in cycle_raw]
+ cycle = data_representation_of_cycle(filtration, cycle)
+ cycles[interval.data] = cycle
+
+ return cycles
+
+
+def make_supercell(
+ coords: np.ndarray,
+ lattice: List[np.array],
+ size: float,
+ elements: Optional[List[str]] = None,
+ min_size: float = -5,
+) -> np.ndarray:
+ """
+ Generate cubic supercell of a given size.
+
+ Args:
+ coords (np.ndarray): matrix of xyz coordinates of the system
+ lattice (Tuple[np.array]): lattice vectors of the system
+ elements (List[str]): list of elements in the system.
+ If None, will create a list of 'X' of the same length as coords
+ size (float): dimension size of cubic cell, e.g., 10x10x10
+ min_size (float): minimum axes size to keep negative xyz coordinates from the original cell
+
+ Returns:
+ new_cell: supercell array
+ """
+ # handle potential weights that we want to carry over but not change
+ a, b, c = lattice
+
+ xyz_periodic_copies = []
+ element_copies = []
+
+ # xyz_periodic_copies.append(coords)
+ # element_copies.append(np.array(elements).reshape(-1,1))
+ min_range = -3 # we aren't going in the minimum direction too much, so can make this small
+ max_range = 20 # make this large enough, but can modify if wanting an even larger cell
+
+ if elements is None:
+ elements = ["X"] * len(coords)
+
+ for x in range(-min_range, max_range):
+ for y in range(0, max_range):
+ for z in range(0, max_range):
+ if x == y == z == 0:
+ continue
+ add_vector = x * a + y * b + z * c
+ xyz_periodic_copies.append(coords + add_vector)
+ assert len(elements) == len(
+ coords
+ ), f"Elements and coordinates are not the same length. \
+ Found {len(coords)} coordinates and {len(elements)} elements."
+ element_copies.append(np.array(elements).reshape(-1, 1))
+
+ # Combine into one array
+ xyz_periodic_total = np.vstack(xyz_periodic_copies)
+
+ element_periodic_total = np.vstack(element_copies)
+ assert len(xyz_periodic_total) == len(
+ element_periodic_total
+ ), f"Elements and coordinates are not the same length. \
+ Found {len(xyz_periodic_total)} coordinates and {len(element_periodic_total)} elements."
+ # Filter out all atoms outside of the cubic box
+ filter_a = np.max(xyz_periodic_total, axis=1) < size
+ new_cell = xyz_periodic_total[filter_a]
+ filter_b = np.min(new_cell[:], axis=1) > min_size
+ new_cell = new_cell[filter_b]
+ new_elements = element_periodic_total[filter_a][filter_b]
+
+ return new_cell, new_elements.flatten()
+
+
def _coords_for_structure(
structure: Structure,
min_size: int = 50,
periodic: bool = False,
no_supercell: bool = False,
weighting: Optional[str] = None,
-) -> Tuple[np.ndarray, np.ndarray]:
- from moleculetda.read_file import make_supercell
-
+) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
if no_supercell:
if weighting is not None:
weighting = encode_many([str(s.symbol) for s in structure.species], weighting)
@@ -36,9 +157,9 @@ def _coords_for_structure(
else:
if periodic:
- transformed_s = CubicSupercellTransformation(min_size=min_size).apply_transformation(
- structure
- )
+ transformed_s = CubicSupercellTransformation(
+ min_length=min_size, force_90_degrees=True
+ ).apply_transformation(structure)
if weighting is not None:
weighting = encode_many([str(s.symbol) for s in transformed_s.species], weighting)
return transformed_s.cart_coords, weighting
@@ -48,16 +169,23 @@ def _coords_for_structure(
encode_many([str(s.symbol) for s in structure.species], weighting)
)
# we can add the weighing as additional column for the cooords
- coords_w_weight = make_supercell(
+ coords_w_weight, elements = make_supercell(
np.hstack([structure.cart_coords, weighting_arr.reshape(-1, 1)]),
structure.lattice.matrix,
min_size,
)
- return coords_w_weight[:, :-1], coords_w_weight[:, -1]
+ return coords_w_weight[:, :-1], coords_w_weight[:, -1], elements
else:
+ sc, elements = make_supercell(
+ structure.cart_coords,
+ structure.lattice.matrix,
+ min_size,
+ elements=structure.species,
+ )
return (
- make_supercell(structure.cart_coords, structure.lattice.matrix, min_size),
+ sc,
None,
+ elements,
)
@@ -75,6 +203,27 @@ def _pd_arrays_from_coords(
return pd
+def get_images(
+ pd,
+ spread: float = 0.2,
+ weighting: str = "identity",
+ pixels: List[int] = (50, 50),
+ specs: List[dict] = None,
+ dimensions: Collection[int] = (0, 1, 2),
+):
+ from moleculetda.vectorize_pds import pd_vectorization
+
+ images = []
+ for dim in dimensions:
+ dgm = pd[f"dim{dim}"]
+ images.append(
+ pd_vectorization(
+ dgm, spread=spread, weighting=weighting, pixels=pixels, specs=specs[dim]
+ )
+ )
+ return images
+
+
# ToDo: only do this for selected elements
# ToDo: only do this for all if we want
def get_persistent_images_for_structure(
@@ -120,8 +269,6 @@ def get_persistent_images_for_structure(
persistent_images (dict): dictionary of persistent images and their
barcode representations
"""
- from moleculetda.vectorize_pds import get_images
-
element_images: Dict[dict] = defaultdict(dict)
specs = []
for mb, mp in zip(max_b, max_p):
@@ -129,46 +276,70 @@ def get_persistent_images_for_structure(
for element in elements:
try:
filtered_structure = filter_element(structure, element)
- coords, weights = _coords_for_structure(
+ coords, _weights, _elements = _coords_for_structure(
filtered_structure,
min_size=min_size,
periodic=periodic,
no_supercell=no_supercell,
weighting=alpha_weighting,
)
- pd = _pd_arrays_from_coords(coords, periodic=periodic)
+ persistent_dia = _pd_arrays_from_coords(coords, periodic=periodic)
images = get_images(
- pd,
+ persistent_dia,
spread=spread,
weighting=weighting,
pixels=pixels,
specs=specs,
+ dimensions=(0, 1, 2),
)
- except ValueError:
+ except Exception:
logger.exception(f"Error computing persistent images for {element}")
- images = np.zeros((0, pixels[0], pixels[1]))
- images[:] = np.nan
- pd = np.zeros((0, max_p + 1))
- pd[:] = np.nan
+ images = {}
+ for dim in [0, 1, 2]:
+ im = np.zeros((pixels[0], pixels[1]))
+ im[:] = np.nan
+ images[dim] = im
+ persistent_dia = np.zeros((0, max(max_p) + 1))
+ persistent_dia[:] = np.nan
# ToDo: make sure that we have the correct length
element_images["image"][element] = images
- element_images["array"][element] = pd
+ element_images["array"][element] = persistent_dia
if compute_for_all_elements:
- coords, weights = _coords_for_structure(
- structure,
- min_size=min_size,
- periodic=periodic,
- no_supercell=no_supercell,
- weighting=alpha_weighting,
- )
- pd = _pd_arrays_from_coords(coords, periodic=periodic)
+ try:
+ coords, weights, _elements = _coords_for_structure(
+ structure,
+ min_size=min_size,
+ periodic=periodic,
+ no_supercell=no_supercell,
+ weighting=alpha_weighting,
+ )
+ persistent_dia = _pd_arrays_from_coords(coords, periodic=periodic)
- images = get_images(pd, spread=spread, weighting=weighting, pixels=pixels, specs=specs)
- element_images["image"]["all"] = images
- element_images["array"]["all"] = pd
+ images = get_images(
+ persistent_dia,
+ spread=spread,
+ weighting=weighting,
+ pixels=pixels,
+ specs=specs,
+ dimensions=(0, 1, 2),
+ )
+ element_images["image"]["all"] = images
+ element_images["array"]["all"] = persistent_dia
+ except Exception:
+ logger.exception("Error computing persistent images for all elements")
+ images = {}
+ for dim in [0, 1, 2]:
+ im = np.zeros((pixels[0], pixels[1]))
+ im[:] = np.nan
+ images[dim] = im
+ persistent_dia = np.zeros((0, max(max_p) + 1))
+ persistent_dia[:] = np.nan
+
+ element_images["image"]["all"] = images
+ element_images["array"]["all"] = persistent_dia
return element_images
@@ -189,7 +360,6 @@ def diagrams_to_bd_arrays(dgms):
"""Convert persistence diagram objects to persistence diagram arrays."""
dgm_arrays = {}
for dim, dgm in enumerate(dgms):
-
if dgm:
arr = np.array(
[[np.sqrt(dgm[i].birth), np.sqrt(dgm[i].death)] for i in range(len(dgm))]
@@ -222,7 +392,7 @@ def get_diagrams_for_structure(
for element in elements:
try:
filtered_structure = filter_element(structure, element)
- coords, weights = _coords_for_structure(
+ coords, weights, _elements = _coords_for_structure(
filtered_structure,
min_size=min_size,
periodic=periodic,
@@ -242,7 +412,7 @@ def get_diagrams_for_structure(
element_dias[element] = arrays
if compute_for_all_elements:
- coords, weights = _coords_for_structure(
+ coords, weights, _elements = _coords_for_structure(
structure,
min_size=min_size,
periodic=periodic,
@@ -274,7 +444,7 @@ def get_persistence_image_limits_for_structure(
try:
filtered_structure = filter_element(structure, element)
- coords, weights = _coords_for_structure(
+ coords, weights, _elements = _coords_for_structure(
filtered_structure,
min_size=min_size,
periodic=periodic,
@@ -289,7 +459,7 @@ def get_persistence_image_limits_for_structure(
pass
if compute_for_all_elements:
- coords, weights = _coords_for_structure(
+ coords, weights, _elements = _coords_for_structure(
structure,
min_size=min_size,
periodic=periodic,
diff --git a/src/mofdscribe/featurizers/topology/ph_hist.py b/src/mofdscribe/featurizers/topology/ph_hist.py
index 74d1cfcc..4a814ef8 100644
--- a/src/mofdscribe/featurizers/topology/ph_hist.py
+++ b/src/mofdscribe/featurizers/topology/ph_hist.py
@@ -135,7 +135,6 @@ def _featurize(
flat_results = []
for atom_type in self.atom_types:
for dim in self.dimensions:
-
dimname = f"dim{dim}"
diagram = res[atom_type][dimname]
diff --git a/src/mofdscribe/featurizers/topology/ph_image.py b/src/mofdscribe/featurizers/topology/ph_image.py
index 9a406d9c..cb637ff8 100644
--- a/src/mofdscribe/featurizers/topology/ph_image.py
+++ b/src/mofdscribe/featurizers/topology/ph_image.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-"""Implements persistent homology images"""
+"""Implements persistent homology images."""
from collections import defaultdict
from typing import List, Optional, Tuple, Union
@@ -146,8 +146,8 @@ def __init__(
else:
max_p = [max_p] * len(dimensions)
- max_p_ = [0, 0, 0, 0]
- max_b_ = [0, 0, 0, 0]
+ max_p_ = [0, 0, 0]
+ max_b_ = [0, 0, 0]
for i, dim in enumerate(dimensions):
max_p_[dim] = max_p[i]
@@ -163,6 +163,22 @@ def __init__(
super().__init__(primitive=primitive)
+ def get_birth_persistance_death_from_pixel(self, dimension: int, x: int, y: int):
+ """Get birth, persistence, and death from pixel coordinates.
+
+ Args:
+ dimension (int): Dimension of the topological feature.
+ x (int): x coordinate.
+ y (int): y coordinate.
+
+ Returns:
+ Tuple[float, float, float]: Birth, persistence, and death.
+ """
+ birth_values = np.linspace(0, self.max_b[dimension], self.image_size[0])
+ persistance_values = np.linspace(0, self.max_p[dimension], self.image_size[1])
+
+ return birth_values[x], persistance_values[y], birth_values[x] + persistance_values[y]
+
def _get_feature_labels(self) -> List[str]:
labels = []
_elements = list(self.atom_types)
@@ -176,6 +192,86 @@ def _get_feature_labels(self) -> List[str]:
return labels
+ def find_relevant_substructure(self, structure, feature_name):
+ parts = feature_name.split("_")
+ # 'phimage_C-H-N-O_1_19_0'
+ dim = int(parts[2])
+ birth, persistance, death = self.get_birth_persistance_death_from_pixel(
+ dim, int(parts[4]), int(parts[3])
+ )
+ return self._find_relevant_substructure(structure, parts[1], dim, birth, persistance)
+
+ def _find_relevant_substructure(
+ self, structure: Structure, elements: str, dimension: int, birth, persistance
+ ) -> List[Molecule]:
+ """Find the substructure that matches a representative cycle.
+
+ Done for the point on the persistence diagram
+ that is closest to the given birth and persistence values.
+
+ Args:
+ structure (Structure): Structure to find the substructure in.
+ elements (str): Element to find the substructure for.
+ dimension (int): Dimension of the homology generator.
+ birth (float): Birth of the homology generator.
+ persistance (float): Persistence of the representative cycle.
+
+ Returns:
+ Molecule: Representative substructure.
+ """
+ import dionysus as d
+ from moleculetda.construct_pd import get_alpha_shapes, get_persistence
+
+ from mofdscribe.featurizers.topology._tda_helpers import (
+ _coords_for_structure,
+ _get_representative_cycles,
+ )
+ from mofdscribe.featurizers.utils.substructures import filter_element
+
+ if elements != "all":
+ structure = filter_element(structure, elements.split("-"))
+ coords, _weights, species = _coords_for_structure(
+ structure,
+ min_size=self.min_size,
+ periodic=self.periodic,
+ no_supercell=self.no_supercell,
+ weighting=self.alpha_weight,
+ )
+
+ f = get_alpha_shapes(coords, True, periodic=False)
+ f = d.Filtration(f)
+ m = get_persistence(f)
+
+ cycles = _get_representative_cycles(f, m, dimension)
+
+ dgms = d.init_diagrams(m, f)
+ diagram = dgms[dimension]
+
+ births, deaths, persistances, indices = [], [], [], []
+ for interval in diagram:
+ births.append(interval.birth)
+ deaths.append(interval.death)
+ indices.append(interval.data)
+ persistances.append(interval.death - interval.birth)
+ births = np.array(births)
+ deaths = np.array(deaths)
+ indices = np.array(indices)
+ persistances = np.array(persistances)
+
+ distances = np.sqrt((births - birth) ** 2 + (persistances - persistance) ** 2)
+
+ min_index = np.argmin(distances)
+ point = indices[min_index]
+
+ cycle = cycles[point]
+
+ molecule = Molecule(
+ species[cycle],
+ coords[cycle],
+ )
+
+ return molecule
+
def feature_labels(self) -> List[str]:
return self._get_feature_labels()
@@ -202,8 +298,8 @@ def _featurize(
elements.append("all")
for element in elements:
for dim in self.dimensions:
-
features.append(np.array(results["image"][element][dim]).flatten())
+
return np.concatenate(features)
def _fit(self, structures: List[Union[Structure, IStructure, Molecule, IMolecule]]) -> None:
diff --git a/src/mofdscribe/featurizers/topology/ph_stats.py b/src/mofdscribe/featurizers/topology/ph_stats.py
index 60dc70fb..841a264d 100644
--- a/src/mofdscribe/featurizers/topology/ph_stats.py
+++ b/src/mofdscribe/featurizers/topology/ph_stats.py
@@ -122,7 +122,6 @@ def _featurize(
flat_results = []
for atom_type in self.atom_types:
for dim in self.dimensions:
-
dimname = f"dim{dim}"
stats = persistent_diagram_stats(
res[atom_type][dimname], self.aggregation_functions
diff --git a/src/mofdscribe/featurizers/topology/ph_vect.py b/src/mofdscribe/featurizers/topology/ph_vect.py
index 7566f793..9bc21475 100644
--- a/src/mofdscribe/featurizers/topology/ph_vect.py
+++ b/src/mofdscribe/featurizers/topology/ph_vect.py
@@ -71,7 +71,6 @@ def _fit_transform_structures(
if len(diagrams[element][dim]) == 0:
raise ValueError(f"{element} dimension {dim} has no diagrams")
try:
-
results[element][dim] = _apply_and_fill(
transformer.fit_transform, diagrams[element][dim]
)
@@ -118,7 +117,6 @@ def _transform_structures(
for element, element_transformers in transformers.items():
for dim, transformer in element_transformers.items():
-
results[element][dim] = _apply_and_fill(
transformer.fit_transform, diagrams[element][dim]
)
diff --git a/src/mofdscribe/featurizers/utils/__init__.py b/src/mofdscribe/featurizers/utils/__init__.py
index 57db31df..4aaf2a77 100644
--- a/src/mofdscribe/featurizers/utils/__init__.py
+++ b/src/mofdscribe/featurizers/utils/__init__.py
@@ -13,6 +13,18 @@
from collections import MutableMapping
+def flat(my_list: list) -> list:
+ if not my_list:
+ return my_list
+ f = my_list[0]
+ try:
+ p = [i for i in f]
+ except TypeError:
+ # f is not iterable, so put it in a list.
+ p = [f]
+ return p + flat(my_list[1:])
+
+
def nan_array(size):
return np.full(size, np.nan)
diff --git a/src/mofdscribe/featurizers/utils/raspa/base_parser.py b/src/mofdscribe/featurizers/utils/raspa/base_parser.py
index 1522eb46..df6299e0 100644
--- a/src/mofdscribe/featurizers/utils/raspa/base_parser.py
+++ b/src/mofdscribe/featurizers/utils/raspa/base_parser.py
@@ -173,7 +173,6 @@ def parse_base_output(output_abs_path, system_name, ncomponents): # noqa: C901
result_dict = {"exceeded_walltime": False}
with open(output_abs_path, "r") as fobj:
-
# 1st parsing part: input settings
# --------------------------------
# from: start of file
diff --git a/tox.ini b/tox.ini
index 2616e35e..59d81226 100644
--- a/tox.ini
+++ b/tox.ini
@@ -56,12 +56,12 @@ description = Run linters.
skip_install = true
deps =
darglint
- flake8 < 5
+ flake8
flake8-black
flake8-bugbear
flake8-colors
flake8-docstrings
- flake8-isort < 5
+ flake8-isort
flake8-print
pep8-naming
pydocstyle
| PHImage: implement helper that maps pixel indices to birth/death/persistence
| 2023-03-22T11:51:14 | 0.0 | [] | [] |
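A standalone numpy sketch of the pixel-to-filtration mapping added in `get_birth_persistance_death_from_pixel` above: each image axis is a uniform `linspace` grid over `[0, max]`, so pixel indices map straight back to birth/persistence values. The limits below are illustrative assumptions, not values from any fitted featurizer:

```python
import numpy as np

max_birth, max_persistence = 18.0, 18.0  # assumed per-dimension limits
pixels = (50, 50)  # persistence-image resolution

birth_values = np.linspace(0, max_birth, pixels[0])
persistence_values = np.linspace(0, max_persistence, pixels[1])

x, y = 19, 7  # pixel coordinates of interest in the image
birth = birth_values[x]
persistence = persistence_values[y]
death = birth + persistence  # persistence = death - birth, by definition
print(birth, persistence, death)
```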
|||
kjappelbaum/mofdscribe | kjappelbaum__mofdscribe-441 | b5ce7a504a959a8ac0483f1e2aae3ca8b2592936 | diff --git a/docs/source/api/featurizers.rst b/docs/source/api/featurizers.rst
index 954b8b6f..ef26b4fa 100644
--- a/docs/source/api/featurizers.rst
+++ b/docs/source/api/featurizers.rst
@@ -94,4 +94,11 @@ Host Guest featurization
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: mofdscribe.featurizers.hostguest.host_guest_featurizer
+ :members:
+
+
+Text description
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. automodule:: mofdscribe.featurizers.text.mofdscriber
:members:
\ No newline at end of file
diff --git a/docs/source/featurizers/global/text.rst b/docs/source/featurizers/global/text.rst
new file mode 100644
index 00000000..5afb0c26
--- /dev/null
+++ b/docs/source/featurizers/global/text.rst
@@ -0,0 +1,12 @@
+Text-based
+---------------
+
+.. featurizer:: MOFDescriber
+ :id: MOFDescriber
+ :considers_geometry: True
+ :considers_structure_graph: True
+ :encodes_chemistry: True
+ :scope: global
+ :scalar: False
+
+ This describes MOF structures in natural language using robocrystallographer augmented using MOF-specific features.
diff --git a/src/mofdscribe/featurizers/text/__init__.py b/src/mofdscribe/featurizers/text/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/mofdscribe/featurizers/text/mofdscriber.py b/src/mofdscribe/featurizers/text/mofdscriber.py
new file mode 100644
index 00000000..d76406ce
--- /dev/null
+++ b/src/mofdscribe/featurizers/text/mofdscriber.py
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -*-
+"""Describe MOF structures in natural language."""
+
+from collections import Counter
+from typing import Dict, Optional, Union
+
+from moffragmentor import MOF as MOFFragmentorMOF # noqa: N811
+from pymatgen.analysis.graphs import StructureGraph
+from pymatgen.core import IStructure, Structure
+from robocrys import StructureCondenser, StructureDescriber
+
+from mofdscribe.featurizers.base import BaseFeaturizer, MOFMultipleFeaturizer
+from mofdscribe.featurizers.pore import AccessibleVolume, PoreDiameters, SurfaceArea
+from mofdscribe.featurizers.utils.structure_graph import get_sg
+
+_pore_formatters = {
+ "lis": lambda x: "largest included sphere {:.2f} A".format(x),
+ "density_0.1": lambda x: "density {:.2f} g/cm3".format(x),
+ "asa_m2g_0.1": lambda x: "surface area {:.2f} m2/g".format(x),
+ "av_cm3g_0.1": lambda x: "accessible volume {:.2f} cm3/g".format(x),
+}
+
+
+class MOFDescriber(BaseFeaturizer):
+ """Describe a metal-organic framework in natural language.
+
+ Uses robocrystallographer [Robocrys]_ as well as MOF-specific descriptions.
+
+ References:
+ .. [Robocrys] Ganose, A., & Jain, A. (2019).
+ Robocrystallographer: Automated crystal structure text descriptions and analysis.
+ MRS Communications, 9(3), 874-881.
+ https://doi.org/10.1557/mrc.2019.94
+ """
+
+ def __init__(
+ self,
+ condenser_kwargs: Optional[Dict] = None,
+ describer_kwargs: Optional[Dict] = None,
+ incorporate_smiles: bool = True,
+ describe_pores: bool = True,
+ ) -> None:
+ """Construct an instance of the MOFDescriber.
+
+ Args:
+ condenser_kwargs (Dict): Arguments to pass to the
+ StructureCondenser.
+ describer_kwargs (Dict): Arguments to pass to the
+ StructureDescriber
+ incorporate_smiles (bool): If True, describe building blocks.
+ describe_pores (bool): If True, add description of the geometry
+ of the MOF pores.
+ """
+ describer_defaults = {"describe_oxidation_states": False, "describe_bond_lengths": True}
+ self.condenser_kwargs = condenser_kwargs or {}
+ self.describer_kwargs = {**describer_defaults, **(describer_kwargs or {})}
+ self.incorporate_smiles = incorporate_smiles
+ self.describe_pores = describe_pores
+
+ def _get_bb_description(self, structure: Structure, structure_graph: StructureGraph) -> str:
+ moffragmentor_mof = MOFFragmentorMOF(structure, structure_graph)
+ fragments = moffragmentor_mof.fragment()
+ linker_counter = Counter(fragments.linkers.smiles)
+ metal_counter = Counter(fragments.nodes.smiles)
+
+ linker_smiles = " ,".join("{} {}".format(v, k) for k, v in linker_counter.items())
+ metal_smiles = " ,".join("{} {}".format(v, k) for k, v in metal_counter.items())
+ return "Linkers: {}. Metal clusters: {}.".format(linker_smiles, metal_smiles)
+
+ def _get_pore_description(self, structure):
+ pore_featurizer = MOFMultipleFeaturizer(
+ [PoreDiameters(), SurfaceArea(), AccessibleVolume()]
+ )
+
+ features = pore_featurizer.featurize(structure)
+ feature_names = pore_featurizer.feature_labels()
+
+ d = dict(zip(feature_names, features))
+ return "The MOF has " + ", ".join([v(d[k]) for k, v in _pore_formatters.items()])
+
+ def _get_robocrys_description(self, structure):
+ sc = StructureCondenser(**self.condenser_kwargs)
+ sd = StructureDescriber(**self.describer_kwargs)
+ structure = Structure.from_sites(structure.sites)
+ condensed_structure = sc.condense_structure(structure)
+ return sd.describe(condensed_structure)
+
+ def _featurize(self, structure: Structure, structure_graph: StructureGraph):
+ description = self._get_robocrys_description(structure)
+ if self.incorporate_smiles:
+ description += " " + self._get_bb_description(structure, structure_graph)
+ if self.describe_pores:
+ description += " " + self._get_pore_description(structure)
+ return description
+
+ def featurize(self, structure: Union[Structure, IStructure]):
+ return self._featurize(structure, get_sg(structure))
+
+ def feature_labels(self):
+ return ["description"]
+
+ def citations(self):
+ return [
+ "@article{Ganose_2019,"
+ " doi = {10.1557/mrc.2019.94},"
+ " url = {https://doi.org/10.1557%2Fmrc.2019.94},"
+ " year = 2019,"
+ " month = {sep},"
+ " publisher = {Springer Science and Business Media {LLC}},"
+ " volume = {9},"
+ " number = {3},"
+ " pages = {874--881},"
+ " author = {Alex M. Ganose and Anubhav Jain},"
+ " title = {Robocrystallographer: automated crystal structure text descriptions and analysis},"
+ " journal = {MRS Communications} Communications}"
+ "}"
+ ]
+
+ def implementors(self):
+ return ["Kevin Maik Jablonka"]
| implement "text" featurizer
In a LIFT-like framework, combine different features into a text prompt, starting with Robocrystallographer.
| 2023-03-19T20:28:33 | 0.0 | [] | [] |
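A hedged usage sketch of the new featurizer: it returns a single natural-language string per structure. The CIF path is a placeholder, and robocrys, moffragmentor, and zeo++ all need to be installed for the three description parts to run:

```python
from pymatgen.core import Structure

from mofdscribe.featurizers.text.mofdscriber import MOFDescriber

structure = Structure.from_file("HKUST-1.cif")  # placeholder input file

describer = MOFDescriber(incorporate_smiles=True, describe_pores=True)
description = describer.featurize(structure)
# robocrys text + SMILES of building blocks + pore geometry
print(description)
```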
|||
kjappelbaum/mofdscribe | kjappelbaum__mofdscribe-415 | 6681e6ee1602aca5779db02ac6896828ebd53d6f | diff --git a/README.md b/README.md
index 75fc3a40..be61a13f 100644
--- a/README.md
+++ b/README.md
@@ -26,6 +26,9 @@
<a href='http://commitizen.github.io/cz-cli/'>
<img src='https://img.shields.io/badge/commitizen-friendly-brightgreen.svg' alt='Commitizen friendly' />
</a>
+ <a href="https://colab.research.google.com/github/kjappelbaum/mofdscribe/">
+ <img src=https://colab.research.google.com/assets/colab-badge.svg >
+ </a>
</p>
Featurizing metal-organic frameworks (MOFs) made simple! This package builds on the power of [matminer](https://hackingmaterials.lbl.gov/matminer/) to make featurization of MOFs as easy as possible. Now, you can use features that are mostly used for porous materials in the same way as all other matminer featurizers.
@@ -73,18 +76,10 @@ pip install -e .
if you want to use all utilities, you can use the `all` extra: `pip install -e ".[all]"`
-We depend on many other external tools. Currently, you need to manually install these dependencies (due to pending merges for conda-recipies):
+We depend on many other external tools. Most external tools are automatically installed if you install mofdscribe via conda:
```bash
-# RASPA and Zeo++ (if you want to use energy grid/Henry coefficient and pore descriptors)
-conda install -c conda-forge raspa2 zeopp-lsmo
-
-# cgal dependency for moltda (if you want to use persistent-homology based features)
-# on some systems, you might need to replace this with sudo apt-get install libcgal-dev or brew install cgal
-conda install -c conda-forge cgal dionysus
-
-# openbabel dependency for moffragmentor (if you want to use SBU-centered features)
-conda install -c conda-forge openbabel
+conda install -c conda-forge mofdscribe
```
## 👐 Contributing
@@ -105,13 +100,13 @@ See the [ChemRxiv preprint](https://chemrxiv.org/engage/chemrxiv/article-details
```
@article{Jablonka_2022,
- doi = {10.26434/chemrxiv-2022-4g7rx},
- url = {https://doi.org/10.26434%2Fchemrxiv-2022-4g7rx},
- year = 2022,
- month = {sep},
- publisher = {American Chemical Society ({ACS})},
- author = {Kevin Maik Jablonka and Andrew S. Rosen and Aditi S. Krishnapriyan and Berend Smit},
- title = {An ecosystem for digital reticular chemistry}
+ doi = {10.26434/chemrxiv-2022-4g7rx},
+ url = {https://doi.org/10.26434%2Fchemrxiv-2022-4g7rx},
+ year = 2022,
+ month = {sep},
+ publisher = {American Chemical Society ({ACS})},
+ author = {Kevin Maik Jablonka and Andrew S. Rosen and Aditi S. Krishnapriyan and Berend Smit},
+ title = {An ecosystem for digital reticular chemistry}
}
```
diff --git a/docs/source/api.rst b/docs/source/api.rst
index 83ae1a71..537a6680 100644
--- a/docs/source/api.rst
+++ b/docs/source/api.rst
@@ -10,3 +10,4 @@ API documentation
api/splitters
api/metrics
api/bench
+ api/helpers
\ No newline at end of file
diff --git a/docs/source/api/helpers.rst b/docs/source/api/helpers.rst
new file mode 100644
index 00000000..57bf0e6a
--- /dev/null
+++ b/docs/source/api/helpers.rst
@@ -0,0 +1,5 @@
+Helpers
+-----------
+
+.. automodule:: mofdscribe.helpers
+ :members:
\ No newline at end of file
diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst
index 467cf17e..4c9c5d9c 100644
--- a/docs/source/getting_started.rst
+++ b/docs/source/getting_started.rst
@@ -318,3 +318,38 @@ If you use a dataset or featurizers please cite all the references you find in
the `citations` property of the featurizer/dataset.
+Logging
+---------
+
+mofdscribe uses the `loguru <https://loguru.readthedocs.io/en/stable/index.html>`_ for logging.
+By default, logging from mofdscribe is disabled to not interfere with your logs.
+
+However, you can easily customize the logging:
+
+.. code-block:: python
+
+ import sys
+ from loguru import logger
+
+ # enable mofdscribe logging
+ logger.enable("mofdscribe")
+
+ # define the logging level
+ LEVEL = "INFO || DEBUG || WARNING || etc."
+
+ # set the handler
+ # for logging to stdout
+ logger.add(sys.stdout, level=LEVEL)
+ # or for logging to a file
+ logger.add("my_log_file.log", level=LEVEL, enqueue=True)
+
+
+In many cases, however, you might find it convenient to simply call :py:meth:`~mofdscribe.helpers.enable_logging`
+
+.. code-block:: python
+
+ from mofdscribe.helpers import enable_logging
+
+ enable_logging()
+
+which will enable logging with sane defaults (i.e. logging to ``stderr`` for ``INFO`` and ``WARNING`` levels).
\ No newline at end of file
diff --git a/examples/build_model_using_mofdscribe.ipynb b/examples/build_model_using_mofdscribe.ipynb
index 4934892d..a47f2ce5 100644
--- a/examples/build_model_using_mofdscribe.ipynb
+++ b/examples/build_model_using_mofdscribe.ipynb
@@ -7,16 +7,6 @@
"# Build a model on experimental data using mofdscribe\n"
]
},
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [],
- "source": [
- "%load_ext autoreload\n",
- "%autoreload 2"
- ]
- },
{
"cell_type": "code",
"execution_count": null,
diff --git a/src/mofdscribe/__init__.py b/src/mofdscribe/__init__.py
index 1fbb3358..882af8c2 100644
--- a/src/mofdscribe/__init__.py
+++ b/src/mofdscribe/__init__.py
@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
-
"""Compute features for MOFs."""
+from loguru import logger
+
+logger.disable("mofdscribe")
diff --git a/src/mofdscribe/featurizers/bu/rdkitadaptor.py b/src/mofdscribe/featurizers/bu/rdkitadaptor.py
index 370f77e6..8370871a 100644
--- a/src/mofdscribe/featurizers/bu/rdkitadaptor.py
+++ b/src/mofdscribe/featurizers/bu/rdkitadaptor.py
@@ -51,10 +51,13 @@ def __init__(
self._force_sanitize = force_sanitize
def __repr__(self) -> str:
+ """Print string representation of the featurizer."""
return "RDKitAdaptor(featurizer={}, feature_labels={})".format(
- self._featurizer, self._feature_labels)
+ self._featurizer, self._feature_labels
+ )
def _repr_mimebundle_(self, include=None, exclude=None):
+ """Print string representation in Ipyython notebooks."""
return self.__repr__()
def feature_labels(self) -> List[str]:
diff --git a/src/mofdscribe/helpers.py b/src/mofdscribe/helpers.py
new file mode 100644
index 00000000..9903c7ca
--- /dev/null
+++ b/src/mofdscribe/helpers.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+"""Convenience functions for mofdscribe."""
+
+import sys
+from typing import List
+
+from loguru import logger
+
+__all__ = ["enable_logging"]
+
+
+def enable_logging() -> List[int]:
+ """Set up the mofdscribe logging with sane defaults."""
+ logger.enable("mofdscribe")
+
+ config = dict(
+ handlers=[
+ dict(
+ sink=sys.stderr,
+ format="<green>{time:YYYY-MM-DD HH:mm:ss.SSS Z UTC}</>"
+ " <red>|</> <lvl>{level}</> <red>|</> <cyan>{name}:{function}:{line}</>"
+ " <red>|</> <lvl>{message}</>",
+ level="INFO",
+ ),
+ dict(
+ sink=sys.stderr,
+ format="<red>{time:YYYY-MM-DD HH:mm:ss.SSS Z UTC} | {level} | {name}:{function}:{line} | {message}</>",
+ level="WARNING",
+ ),
+ ]
+ )
+ return logger.configure(**config)
| change default logging level to `info`
no reason to annoy users with `debug` output by default
| 2022-12-18T22:15:53 | 0.0 | [] | [] |
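The one-liner alternative to the manual loguru setup documented above; this is the convenience helper this PR adds:

```python
from mofdscribe.helpers import enable_logging

# Enables mofdscribe's logger and attaches INFO and WARNING handlers on stderr.
enable_logging()
```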
|||
kjappelbaum/mofdscribe | kjappelbaum__mofdscribe-411 | 97fa791bc7447aa7b140b8f0f16b5f6b7c097b6a | diff --git a/docs/source/api/data.rst b/docs/source/api/data.rst
index 35c0e308..98dbbe8f 100644
--- a/docs/source/api/data.rst
+++ b/docs/source/api/data.rst
@@ -24,5 +24,9 @@ Structure datasets
:members:
+.. automodule:: mofdscribe.datasets.thermal_stability_dataset
+ :members:
+
+
.. automodule:: mofdscribe.datasets.structuredataset
:members:
\ No newline at end of file
diff --git a/docs/source/featurizers/host_guest/host_guest_aprdf.rst b/docs/source/featurizers/host_guest/host_guest_aprdf.rst
index 6d1d5937..2fba836e 100644
--- a/docs/source/featurizers/host_guest/host_guest_aprdf.rst
+++ b/docs/source/featurizers/host_guest/host_guest_aprdf.rst
@@ -1,6 +1,9 @@
Guest-centered atomic-property labeled radial distribution function (APRDF)
............................................................................
+This featurizer builds on the :ref:`APRDF` featurizer, but instead of using the
+correlations between all atoms, it only considers the ones between the guest and all host atoms
+(within some cutoff distance).
.. math::
diff --git a/docs/source/references.rst b/docs/source/references.rst
index 49416a26..2c1201fe 100644
--- a/docs/source/references.rst
+++ b/docs/source/references.rst
@@ -111,4 +111,4 @@ References
.. [Trappe] `Potoff, J. J.; Siepmann, J. I. Vapor–Liquid Equilibria of Mixtures Containing Alkanes, Carbon Dioxide, and Nitrogen. AIChE Journal 2001, 47 (7), 1676–1682. <https://doi.org/10.1002/aic.690470719>`_
-.. [Varoquaux] `Varoquaux, G. Cross-Validation Failure: Small Sample Sizes Lead to Large Error Bars. NeuroImage 2018, 180, 68–77. <https://doi.org/10.1016/j.neuroimage.2017.06.061>`_
\ No newline at end of file
+.. [Varoquaux] `Varoquaux, G. Cross-Validation Failure: Small Sample Sizes Lead to Large Error Bars. NeuroImage 2018, 180, 68–77. <https://doi.org/10.1016/j.neuroimage.2017.06.061>`_
diff --git a/src/mofdscribe/datasets/core_dataset.py b/src/mofdscribe/datasets/core_dataset.py
index 9ae6d3a4..fd6852be 100644
--- a/src/mofdscribe/datasets/core_dataset.py
+++ b/src/mofdscribe/datasets/core_dataset.py
@@ -49,7 +49,6 @@ class CoREDataset(AbstractStructureDataset):
The available labels are:
-
* 'pure_CO2_kH': Henry coefficient of CO2 obtained by Widom method in mol kg-1 Pa-1
* 'pure_CO2_widomHOA': Heat of adsorption of CO2 obtained by Widom method in
* 'pure_methane_kH': Henry coefficient of methane obtained by Widom method in mol kg-1 Pa-1
@@ -256,5 +255,5 @@ def citations(self) -> Tuple[str]:
"title = {Computation-Ready, Experimental Metal{\textendash}Organic Frameworks: "
"A Tool To Enable High-Throughput Screening of Nanoporous Crystals},"
"journal = {Chemistry of Materials}"
- "}"
+ "}",
]
diff --git a/src/mofdscribe/datasets/thermal_stability_dataset.py b/src/mofdscribe/datasets/thermal_stability_dataset.py
new file mode 100644
index 00000000..2e891b41
--- /dev/null
+++ b/src/mofdscribe/datasets/thermal_stability_dataset.py
@@ -0,0 +1,239 @@
+# -*- coding: utf-8 -*-
+"""Thermal Stability Dataset."""
+import os
+from typing import Collection, Optional, Tuple
+
+import numpy as np
+import pandas as pd
+from loguru import logger
+
+from mofdscribe.constants import MOFDSCRIBE_PYSTOW_MODULE
+from mofdscribe.datasets.checks import check_all_file_exists, length_check
+from mofdscribe.datasets.dataset import AbstractStructureDataset
+from mofdscribe.datasets.utils import compress_dataset
+
+__all__ = ["ThermalStabilityDataset"]
+
+
+class ThermalStabilityDataset(AbstractStructureDataset):
+ """Thermal stability for a subset of CoRE MOFs.
+
+ Reproduced from [Nandy2022]_.
+ Nandy et al. (2022) digitized traces from thermogravimetric analysis.
+ The decomposition temperature they determined in this way is reported in
+ `outputs.assigned_T_decomp`.
+
+ To reduce the risk of data leakage, we (by default) also only keep one representative
+ structure for a "base refcode" (i.e. the first five letters of a refcode).
+ For instance, the base refcode for IGAHED001 is IGAHED. Structures with same
+ base refcode but different refcodes are often different refinements, or measurements
+ at different temperatures and hence chemically quite similar. For instance,
+ the base refcode `UMODEH` would appear 21 times, `KEDJAG` 17 times, and `UMOYOM` 17 times
+ in the CoRE dataset used by Moosavi et al.
+ Additionally, we (by default) only keep one structure per "structure hash"
+ which is an approximate graph-isomoprhism check, assuming the VESTA bond thresholds
+ for the derivation of the structure graph (e.g. the structure
+ graph of ULOMAL occurs 59 in the CoRE database used by Moosavi et al.).
+
+ The years refer to the publication dates of the paper crossreferenced
+ in the CSD entry of the structure.
+
+ The available labels are:
+
+ * `outputs.assigned_T_decomp`: Decomposition temperature in Kelvin.
+
+ References::
+ .. [Nandy2022] `Nandy, A.; Terrones, G.; Arunachalam, N.; Duan, C.;
+ Kastner, D. W.; Kulik, H. J.
+ MOFSimplify, Machine Learning Models with Extracted Stability Data
+ of Three Thousand Metal–Organic Frameworks.
+ Scientific Data 2022, 9 (1). <https://doi.org/10.1038/s41597-022-01181-0>`_
+ """
+
+ _files = {
+ "v0.0.1": {
+ "df": "https://zenodo.org/record/7428485/files/data.json?download=1",
+ "structures": "https://zenodo.org/record/7428485/files/structures.tar.gz?download=1",
+ "expected_length": 2039,
+ }
+ }
+
+ def __init__(
+ self,
+ version: str = "v0.0.1",
+ drop_basename_duplicates: bool = True,
+ drop_graph_duplicates: bool = True,
+ subset: Optional[Collection[int]] = None,
+ drop_nan: bool = False,
+ ):
+ """Construct an instance of the ThermalStabilityDataset.
+
+ Args:
+ version (str): version number to use.
+ Defaults to "v0.0.1".
+ drop_basename_duplicates (bool): If True, keep only one structure
+ per CSD basename. Defaults to True.
+ drop_graph_duplicates (bool): If True, keep only one structure
+ per decorated graph hash. Defaults to True.
+ subset (Collection[int], optional): indices of the structures to include.
+ Defaults to None.
+ drop_nan (bool): If True, drop rows with NaN values in features or hashes.
+ Defaults to True.
+
+ Raises:
+ ValueError: If the provided version number is not available.
+ """
+ self._drop_basename_duplicates = drop_basename_duplicates
+ self._drop_nan = drop_nan
+ self._drop_graph_duplicates = drop_graph_duplicates
+ if version not in self._files:
+ raise ValueError(
+ f"Version {version} not available. Available versions: {list(self._files.keys())}"
+ )
+ self.version = version
+
+ self._structure_dir = MOFDSCRIBE_PYSTOW_MODULE.ensure_untar(
+ "thermal-stability",
+ self.version,
+ name="structures.tar.gz",
+ url=self._files[version]["structures"],
+ )
+
+ self._df = pd.DataFrame(
+ MOFDSCRIBE_PYSTOW_MODULE.ensure_json(
+ "thermal-stability", self.version, name="data.json", url=self._files[version]["df"]
+ )
+ ).reset_index(drop=True)
+
+ compress_dataset(self._df)
+
+ length_check(self._df, self._files[version]["expected_length"])
+
+ if drop_basename_duplicates:
+ old_len = len(self._df)
+ self._df = self._df.drop_duplicates(subset=["info.basename"])
+ logger.debug(
+ f"Dropped {old_len - len(self._df)} duplicate basenames. New length {len(self._df)}"
+ )
+ if drop_graph_duplicates:
+ old_len = len(self._df)
+ self._df = self._df.drop_duplicates(subset=["info.decorated_graph_hash"])
+ logger.debug(
+ f"Dropped {old_len - len(self._df)} duplicate graphs. New length {len(self._df)}"
+ )
+ self._df = self._df.reset_index(drop=True)
+ if drop_nan:
+ self._df.dropna(
+ subset=[c for c in self._df.columns if c.startswith("features.")]
+ + [c for c in self._df.columns if c.startswith("info.")],
+ inplace=True,
+ )
+ self._df.reset_index(drop=True, inplace=True)
+
+ if subset is not None:
+ self._df = self._df.iloc[subset]
+ self._df = self._df.reset_index(drop=True)
+
+ self._structures = [
+ os.path.join(self._structure_dir, f + ".cif") for f in self._df["info.CoRE_name"]
+ ]
+
+ check_all_file_exists(self._structures)
+
+ self._years = self._df["info.year"].values
+ self._decorated_graph_hashes = self._df["info.decorated_graph_hash"].values
+ self._undecorated_graph_hashes = self._df["info.undecorated_graph_hash"].values
+ self._decorated_scaffold_hashes = self._df["info.decorated_scaffold_hash"].values
+ self._undecorated_scaffold_hashes = self._df["info.undecorated_scaffold_hash"].values
+ self._densities = self._df["info.density"].values
+ self._labelnames = (c for c in self._df.columns if c.startswith("outputs."))
+ self._featurenames = (c for c in self._df.columns if c.startswith("features."))
+ self._infonames = (c for c in self._df.columns if c.startswith("info."))
+
+ def get_subset(self, indices: Collection[int]) -> "AbstractStructureDataset":
+ """Get a subset of the dataset.
+
+ Args:
+ indices (Collection[int]): indices of the structures to include.
+
+ Returns:
+ AbstractStructureDataset: a new dataset containing only the structures
+ specified by the indices.
+ """
+ return ThermalStabilityDataset(
+ version=self.version,
+ drop_basename_duplicates=self._drop_basename_duplicates,
+ drop_graph_duplicates=self._drop_graph_duplicates,
+ subset=indices,
+ drop_nan=self._drop_nan,
+ )
+
+ @property
+ def available_info(self) -> Tuple[str]:
+ return self._infonames
+
+ @property
+ def available_features(self) -> Tuple[str]:
+ return self._featurenames
+
+ @property
+ def available_labels(self) -> Tuple[str]:
+ return self._labelnames
+
+ def get_labels(self, idx: Collection[int], labelnames: Collection[str] = None) -> np.ndarray:
+ labelnames = labelnames if labelnames is not None else self._labelnames
+ return self._df.iloc[idx][list(labelnames)].values
+
+ @property
+ def citations(self) -> Tuple[str]:
+ return [
+ "@article{Chung2019,"
+ "doi = {10.1021/acs.jced.9b00835},"
+ "url = {https://doi.org/10.1021/acs.jced.9b00835},"
+ "year = {2019},"
+ "month = nov,"
+ "publisher = {American Chemical Society ({ACS})},"
+ "volume = {64},"
+ "number = {12},"
+ "pages = {5985--5998},"
+ "author = {Yongchul G. Chung and Emmanuel Haldoupis and Benjamin J. Bucior "
+ "and Maciej Haranczyk and Seulchan Lee and Hongda Zhang and "
+ "Konstantinos D. Vogiatzis and Marija Milisavljevic and Sanliang Ling "
+ "and Jeffrey S. Camp and Ben Slater and J. Ilja Siepmann and "
+ "David S. Sholl and Randall Q. Snurr},"
+ "title = {Advances, Updates, and Analytics for the Computation-Ready, "
+ "Experimental Metal{\textendash}Organic Framework Database: {CoRE} {MOF} 2019},"
+ r"journal = {Journal of Chemical {\&}amp$\mathsemicolon$ Engineering Data}"
+ "}",
+ "@article{Chung2014,"
+ "doi = {10.1021/cm502594j},"
+ "url = {https://doi.org/10.1021/cm502594j},"
+ "year = {2014},"
+ "month = oct,"
+ "publisher = {American Chemical Society ({ACS})},"
+ "volume = {26},"
+ "number = {21},"
+ "pages = {6185--6192},"
+ "author = {Yongchul G. Chung and Jeffrey Camp and "
+ "Maciej Haranczyk and Benjamin J. Sikora and Wojciech Bury "
+ "and Vaiva Krungleviciute and Taner Yildirim and Omar K. Farha "
+ "and David S. Sholl and Randall Q. Snurr},"
+ "title = {Computation-Ready, Experimental Metal{\textendash}Organic Frameworks: "
+ "A Tool To Enable High-Throughput Screening of Nanoporous Crystals},"
+ "journal = {Chemistry of Materials}"
+ "}",
+ "@article{Nandy_2022,"
+ "doi = {10.1038/s41597-022-01181-0},"
+ "url = {https://doi.org/10.1038%2Fs41597-022-01181-0},"
+ "year = 2022,"
+ "month = {mar},"
+ "publisher = {Springer Science and Business Media {LLC}},"
+ "volume = {9},"
+ "number = {1},"
+ "author = {Aditya Nandy and Gianmarco Terrones and "
+ "Naveen Arunachalam and Chenru Duan and David W. Kastner and Heather J. Kulik},"
+ "title = {{MOFSimplify}, machine learning models with extracted stability data "
+ "of three thousand metal{\textendash}organic frameworks},"
+ "journal = {Sci Data}"
+ "}",
+ ]
| Interface thermal stability dataset
I'm not sure the solvent-removal one is that useful, as it does not indicate which solvent was used or how much of it.
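For context, a minimal usage sketch of the dataset interface added in the patch above (the import path and version string are assumptions, not taken from the patch):

```python
# Hypothetical usage; the import path and version string are assumptions.
from mofdscribe.datasets import ThermalStabilityDataset

# Defaults: deduplicate by basename and decorated graph hash, drop NaN rows.
ds = ThermalStabilityDataset(version="v0.0.1")

# Restrict to the first ten structures and inspect the available columns.
subset = ds.get_subset(range(10))
print(list(subset.available_labels))    # columns prefixed with "outputs."
print(list(subset.available_features))  # columns prefixed with "features."
```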
| 2022-12-12T12:24:13 | 0.0 | [] | [] |
|||
ppb/pursuedpybear | ppb__pursuedpybear-706 | 5d8805045fb579bd3c50a8b51cbedc0fe05429ba | diff --git a/ppb/assets.py b/ppb/assets.py
index 09435ce5..037e9b5d 100644
--- a/ppb/assets.py
+++ b/ppb/assets.py
@@ -106,7 +106,7 @@ class Rectangle(Shape):
def _draw_shape(self, renderer, rgb, **_):
sdl_call(
- SDL_SetRenderDrawColor, renderer, *rgb, 255,
+ SDL_SetRenderDrawColor, renderer, *(int(c) for c in rgb), 255,
_check_error=lambda rv: rv < 0
)
sdl_call(
diff --git a/ppb/systems/renderer.py b/ppb/systems/renderer.py
index 9361f913..83c52246 100644
--- a/ppb/systems/renderer.py
+++ b/ppb/systems/renderer.py
@@ -224,7 +224,7 @@ def on_render(self, render_event, signal):
def render_background(self, scene):
bg = scene.background_color
sdl_call(
- SDL_SetRenderDrawColor, self.renderer, bg[0], bg[1], bg[2], 255,
+ SDL_SetRenderDrawColor, self.renderer, int(bg[0]), int(bg[1]), int(bg[2]), 255,
_check_error=lambda rv: rv < 0
)
sdl_call(SDL_RenderClear, self.renderer, _check_error=lambda rv: rv < 0)
| Can't handle float colors
If colors are given as a float, things crash:
```
Traceback (most recent call last):
File "viztests/text.py", line 29, in <module>
ppb.run(starting_scene=TextScene)
File "/home/astraluma/src/ppb/pursuedpybear/ppb/__init__.py", line 73, in run
eng.run()
File "/home/astraluma/src/ppb/pursuedpybear/ppb/engine.py", line 142, in run
self.main_loop()
File "/home/astraluma/src/ppb/pursuedpybear/ppb/engine.py", line 167, in main_loop
self.loop_once()
File "/home/astraluma/src/ppb/pursuedpybear/ppb/engine.py", line 183, in loop_once
self.publish()
File "/home/astraluma/src/ppb/pursuedpybear/ppb/engine.py", line 232, in publish
method(event, self.signal)
File "/home/astraluma/src/ppb/pursuedpybear/ppb/systems/renderer.py", line 166, in on_render
self.render_background(render_event.scene)
File "/home/astraluma/src/ppb/pursuedpybear/ppb/systems/renderer.py", line 185, in render_background
sdl_call(
File "/home/astraluma/src/ppb/pursuedpybear/ppb/systems/_sdl_utils.py", line 51, in sdl_call
rv = func(*pargs, **kwargs)
ctypes.ArgumentError: argument 2: <class 'TypeError'>: wrong type
```
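For context, a minimal sketch of the failure mode and the cast that avoids it (a standalone ctypes illustration, not ppb's actual call chain):

```python
import ctypes

# SDL_SetRenderDrawColor takes C uint8 channels; ctypes integer types
# refuse implicit float conversion, hence the ArgumentError above.
ctypes.c_uint8(255)  # fine
try:
    ctypes.c_uint8(127.5)
except TypeError as e:
    print(e)  # int expected instead of float

# Casting each channel first sidesteps the problem:
rgb = (127.5, 64.0, 0.0)
safe_rgb = tuple(int(c) for c in rgb)
```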
| This does dig into the renderer and messes with the ctypes call a bit, but it should also be an extremely simple change.
Hey, I would like to work on this issue. | 2024-04-08T18:58:01 | 0.0 | [] | []
||
jwodder/javaproperties-cli | jwodder__javaproperties-cli-43 | 3f22441b56ad36e37058944baeedf97967517bd2 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3f9b047..6901b01 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,8 @@
+v0.9.0 (in development)
+-----------------------
+- `json2properties` now supports input in UTF-16 and UTF-32 in addition to
+ UTF-8
+
v0.8.1 (2024-12-01)
-------------------
- Support Python 3.10, 3.11, 3.12, and 3.13
diff --git a/docs/changelog.rst b/docs/changelog.rst
index 67f1cfe..11de6b9 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -1,6 +1,12 @@
Changelog
=========
+v0.9.0 (in development)
+-----------------------
+- :program:`json2properties` now supports input in UTF-16 and UTF-32 in
+ addition to UTF-8
+
+
v0.8.1 (2024-12-01)
-------------------
- Support Python 3.10, 3.11, 3.12, and 3.13
diff --git a/src/javaproperties_cli/__init__.py b/src/javaproperties_cli/__init__.py
index 59b4952..a845c6b 100644
--- a/src/javaproperties_cli/__init__.py
+++ b/src/javaproperties_cli/__init__.py
@@ -10,7 +10,7 @@
<http://javaproperties-cli.rtfd.io> for more information.
"""
-__version__ = "0.8.1"
+__version__ = "0.9.0.dev1"
__author__ = "John Thorvald Wodder II"
__author_email__ = "[email protected]"
__license__ = "MIT"
diff --git a/src/javaproperties_cli/fromjson.py b/src/javaproperties_cli/fromjson.py
index e79f3c9..addb17a 100644
--- a/src/javaproperties_cli/fromjson.py
+++ b/src/javaproperties_cli/fromjson.py
@@ -119,7 +119,7 @@
help="Key-value separator to use in output",
)
@click.option("-S", "--sort-keys", is_flag=True, help="Sort entries in output by key")
[email protected]("infile", type=click.File("r"), default="-")
[email protected]("infile", type=click.File("rb"), default="-")
@click.argument("outfile", type=outfile_type, default="-")
@click.pass_context
def json2properties(
| `json2properties`: Open input file in binary mode
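For reference, a short sketch of why binary mode enables multi-encoding input (my reasoning, not from the changelog): `json.loads` auto-detects UTF-8, UTF-16, and UTF-32 when handed `bytes`, whereas a text-mode file is already decoded with one fixed encoding.

```python
import json

payload = '{"key": "value"}'

# json.loads detects UTF-8, UTF-16, and UTF-32 on bytes input, so opening
# the infile with click.File("rb") defers all decoding to the json module.
for enc in ("utf-8", "utf-16", "utf-32"):
    assert json.loads(payload.encode(enc)) == {"key": "value"}
```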
| 2024-12-01T14:11:02 | 0.0 | [] | [] |
|||
neuromorphs/NIR | neuromorphs__NIR-116 | 6c9775fbf339df92873ad1e7be9a42444b1dbff5 | diff --git a/README.md b/README.md
index d5e2a10..818c3ac 100644
--- a/README.md
+++ b/README.md
@@ -5,41 +5,39 @@
# NIR - Neuromorphic Intermediate Representation
+[](https://doi.org/10.1038/s41467-024-52259-9)
+[](https://pypi.org/project/nir/)
+[](https://github.com/neuromorphs/NIR/releases)
+[](https://discord.gg/JRMRGP9h3c)
+
NIR is a set of computational primitives, shared across different neuromorphic frameworks and technology stacks.
**NIR is currently supported by 7 simulators and 4 hardware platforms**, allowing users to seamlessly move between any of these platforms.
-The goal of NIR is to decouple the evolution of neuromorphic hardware and software, ultimately increasing the interoperability between platforms and improving accessibility to neuromorphic technologies.
-## Installation
-NIR is installable via [pip](https://pypi.org/)
-```bash
-pip install nir
-```
+NIR is useful when you want to move a model from one platform to another, for instance from a simulator to a hardware platform.
+
+> Read more about NIR in our [documentation about NIR primitives](https://neuroir.org/docs/primitives.html)
-Check your [local framework]([https://neuroir.org/docs](https://neuroir.org/docs/support.html)) for NIR support.
+> See [which frameworks are currently supported by NIR](https://neuroir.org/docs/support.html).
## Usage
-> Read more in our [documentation about NIR usage](https://neuroir.org/docs)
+> Read more in our [documentation about NIR usage](https://neuroir.org/docs) and see more examples in our [examples section](https://neuroir.org/docs/examples)
-To end-users, NIR is just a declarative format that sits between formats and will hopefully be as invisible as possible.
-However, it is possible to export Python objects or NIR files.
+NIR serves as a format between neuromorphic platforms and will be installed alongside your framework of choice.
+Using NIR is typically a part of your favorite framework's workflow, but follows the same pattern when you want to move from a *source* to a *target* platform:
```python
-import nir
-# Write to file
-nir.write("my_graph.nir", nir_graph)
-
-# Read file
+# Define a model
+my_model = ...
+# Save the model (source platform)
+nir.write("my_graph.nir", my_model)
+# Load the model (target platform)
imported_graph = nir.read("my_graph.nir")
```
-## About NIR
-> Read more in our [documentation about NIR primitives](https://neuroir.org/docs/primitives.html)
-
-On top of popular primitives such as convolutional or fully connected/linear computations, we define additional compuational primitives that are specific to neuromorphic computing and hardware implementations thereof.
-Computational units that are not specifically neuromorphic take inspiration from the Pytorch ecosystem in terms of naming and parameters (such as Conv2d that uses groups/strides).
-
+See our [example section](https://neuroir.org/docs/examples) for how to use NIR with your favorite framework.
## Frameworks that currently support NIR
+> Read more in our [documentation about NIR support](https://neuroir.org/docs/support.html)
| **Framework** | **Write to NIR** | **Read from NIR** | **Examples** |
| --------------- | :--: | :--: | :------: |
@@ -54,7 +52,7 @@ Computational units that are not specifically neuromorphic take inspiration from
## Acknowledgements
-This work was originally conceived at the [Telluride Neuromorphic Workshop 2023](tellurideneuromorphic.org) by the authors below (in alphabetical order):
+This work was originally conceived at the [Telluride Neuromorphic Workshop 2023](https://tellurideneuromorphic.org) by the authors below (in alphabetical order):
* [Steven Abreu](https://github.com/stevenabreu7)
* [Felix Bauer](https://github.com/bauerfe)
* [Jason Eshraghian](https://github.com/jeshraghian)
@@ -64,7 +62,7 @@ This work was originally conceived at the [Telluride Neuromorphic Workshop 2023]
* [Sadique Sheik](https://github.com/sheiksadique)
* [Peng Zhou](https://github.com/pengzhouzp)
-If you use NIR in your work, please cite the [following arXiv preprint](https://arxiv.org/abs/2311.14641)
+If you use NIR in your work, please cite the [following paper](https://www.nature.com/articles/s41467-024-52259-9)
```
article{NIR2024,
diff --git a/docs/source/_config.yml b/docs/source/_config.yml
index 3d06869..a705a71 100644
--- a/docs/source/_config.yml
+++ b/docs/source/_config.yml
@@ -10,7 +10,16 @@ repository:
execute:
execute_notebooks: off
+parse:
+ myst_enable_extensions:
+ - amsmath
+
launch_buttons:
notebook_interface: "jupyterlab"
binderhub_url: "https://mybinder.org/v2/gh/neuromorphs/nir/main?urlpath=lab"
- colab_url: "https://colab.research.google.com"
\ No newline at end of file
+ colab_url: "https://colab.research.google.com"
+
+sphinx:
+ extra_extensions:
+ - 'sphinx.ext.autodoc'
+
\ No newline at end of file
diff --git a/docs/source/_toc.yml b/docs/source/_toc.yml
index 80178af..c9e4b3a 100644
--- a/docs/source/_toc.yml
+++ b/docs/source/_toc.yml
@@ -14,19 +14,24 @@ parts:
- file: primitives
- file: support
title: Platform support
-- caption: Examples
- chapters:
- - file: examples/lava/nir-conversion
- - file: examples/nengo/nir-conversion
- - file: examples/norse/nir-conversion
- - file: examples/rockpool/nir-conversion
- - file: examples/sinabs/nir-conversion
- - file: examples/snntorch/nir-conversion
- - file: examples/spinnaker2/import
- - file: examples/spyx/conversion
- - file: examples/snntorch_to_norse
+ - file: examples/index
+ sections:
+ - file: examples/lava/nir-conversion
+ - file: examples/nengo/nir-conversion
+ - file: examples/norse/nir-conversion
+ - file: examples/rockpool/nir-conversion
+ - file: examples/sinabs/nir-conversion
+ - file: examples/snntorch/nir-conversion
+ - file: examples/spinnaker2/import
+ - file: examples/spyx/conversion
+ - file: examples/snntorch_to_norse
- caption: Developer guide
chapters:
- file: porting_nir
- - file: api_design
+ - file: dev_pytorch
+ - file: dev_jax
- file: contributing
+- caption: API documentation
+ chapters:
+ - file: api_design
+ - file: doctrees
\ No newline at end of file
diff --git a/docs/source/contributing.md b/docs/source/contributing.md
index 3a3a638..3b1c6d3 100644
--- a/docs/source/contributing.md
+++ b/docs/source/contributing.md
@@ -1,5 +1,9 @@
# Contributing
+NIR is a community-led initiative, and we welcome contributions from everyone.
+Here, we outline some technical details on getting started.
+Join the conversation on our [Discord server](https://discord.gg/JRMRGP9h3c) or [GitHub](https://github.com/neuromorphs/nir) if you have any questions.
+
## Developer guide: Getting started
Use the standard github workflow.
diff --git a/docs/source/dev_jax.md b/docs/source/dev_jax.md
new file mode 100644
index 0000000..0b91cc1
--- /dev/null
+++ b/docs/source/dev_jax.md
@@ -0,0 +1,6 @@
+# Developing JAX extensions
+
+JAX is a popular deep learning framework that more and more of the NIR-supported libraries are built on.
+For PyTorch, we have built the [`nirtorch` package](https://github.com/neuromorphs/nirtorch), but *no such package exists for JAX*.
+If you're interested in developing such a package, please reach out to us!
+Either on [Discord](https://discord.gg/JRMRGP9h3c) or by [opening an issue](https://github.com/neuromorphs/NIR/issues).
\ No newline at end of file
diff --git a/docs/source/dev_pytorch.md b/docs/source/dev_pytorch.md
new file mode 100644
index 0000000..883cce2
--- /dev/null
+++ b/docs/source/dev_pytorch.md
@@ -0,0 +1,69 @@
+# Developing PyTorch extensions
+
+PyTorch is a popular deep learning framework that many of the NIR-supported libraries are built on.
+We have built the [`nirtorch` package](https://github.com/neuromorphs/nirtorch) to make it easier to develop PyTorch extensions for the NIR-supported libraries.
+`nirtorch` helps you write PyTorch code that (1) exports NIR models from PyTorch and (2) imports NIR models into PyTorch.
+
+## Exporting NIR models from PyTorch
+Exporting a NIR model requires two things: exporting the model's nodes and edges.
+
+### Exporting edges
+Exporting edges is slightly complicated because PyTorch modules can have multiple inputs and outputs,
+and because PyTorch modules are connected via function calls, which only happen at runtime.
+Therefore, we need to trace the PyTorch module to get the edges with some sample input.
+Luckily, the `nirtorch` package helps you do exactly that.
+It works behind the scenes, but you can read about it in the [`to_nir.py` file in `nirtorch`](https://github.com/neuromorphs/NIRTorch/blob/main/nirtorch/to_nir.py#L11).
+
+### Exporting nodes
+The only thing we really have to do to use `nirtorch` is to export modules.
+Since all PyTorch modules inherit from the `torch.nn.Module` class, exporting the nodes is straightforward: we simply need a function that looks at a PyTorch module and returns the corresponding NIR node.
+Assume this is done in a function called `export_node`.
+
+```python
+import nir
+import torch
+
+class MyModule(torch.nn.Module):
+ weight: torch.Tensor
+ bias: torch.Tensor
+
+
+def export_node(module: torch.nn.Module) -> nir.NIRNode:
+ # Export the module to a NIR node
+ if isinstance(module, MyModule):
+ return nir.Linear(module.weight, module.bias)
+ ...
+```
+This example converts a custom Linear module to a NIR Linear node.
+
+### Putting it all together
+The following code is a snippet taken from the [Norse library](https://github.com/norse/norse) that demonstrates how to export custom PyTorch models to a NIR using the `nirtorch` package.
+Note that we only have to declare the `export_node` function for each custom module we want to export.
+The edges are traced automatically by the `nirtorch` package.
+
+```python
+def _extract_norse_module(module: torch.nn.Module) -> Optional[nir.NIRNode]:
+ if isinstance(module, LIFBoxCell):
+ return nir.LIF(
+ tau=module.p.tau_mem_inv,
+ v_th=module.p.v_th,
+ v_leak=module.p.v_leak,
+ r=torch.ones_like(module.p.v_leak),
+ )
+ elif isinstance(module, torch.nn.Linear):
+ return nir.Linear(module.weight, module.bias)
+ elif ...
+
+ return None
+
+def to_nir(
+ module: torch.nn.Module, sample_data: torch.Tensor, model_name: str = "norse"
+) -> nir.NIRNode:
+ return extract_nir_graph(
+ module, _extract_norse_module, sample_data, model_name=model_name
+ )
+```
+
+## Importing NIR models into PyTorch
+Importing NIR models into PyTorch with `nirtorch` is also straightforward.
+Assuming you have a NIR graph in the Python object `nir_graph` (see [Usage](#usage))
\ No newline at end of file
diff --git a/docs/source/examples/index.md b/docs/source/examples/index.md
new file mode 100644
index 0000000..5568f19
--- /dev/null
+++ b/docs/source/examples/index.md
@@ -0,0 +1,33 @@
+# Code examples
+
+NIR can be used to *export* or *import* models.
+*Exporting* is when you convert a model from a source platform to NIR, and *importing* is when you convert a model from NIR to a target platform.
+One typical workflow is to *export* a model from a simulator and *import* it to a hardware platform.
+
+In the menu, you see examples for how to use NIR with your favorite framework.
+But note that some frameworks only support importing or exporting.
+
+## Writing to and reading from files with NIR
+While NIR is typically integrated into your favorite framework, NIR supports writing to and reading from files directly.
+This is useful when you want to send a model over email, store it for later, or share it with a colleague.
+
+### Writing to a file
+To write a model to a file, use the `nir.write` function.
+Note that this requires you to provide a NIR model, so you need to find a way to convert your model to NIR within your framework.
+The `nir.write` function takes two arguments: the file path and the model to write.
+```python
+import nir
+my_nir_graph = ...
+nir.write("my_graph.nir", my_model)
+```
+
+### Reading from a file
+To read a model from a file, use the `nir.read` function.
+This function takes a single argument: the file path.
+```python
+import nir
+imported_graph = nir.read("my_graph.nir")
+```
+
+This gives you a NIR model, which then needs to be converted to your framework's model format.
+The NIR graph itself is just a data structure.
\ No newline at end of file
diff --git a/docs/source/primitives.md b/docs/source/primitives.md
index 7240268..b8d3c34 100644
--- a/docs/source/primitives.md
+++ b/docs/source/primitives.md
@@ -10,24 +10,25 @@ But, if you plan to execute the graph on restricted neuromorphic hardware, pleas
NIR defines 16 fundamental primitives listed in the table below, which backends are free to implement as they want, leading to varying outputs across platforms. While discrepancies could be minimized by constraining implementations or making backends aware of each other's discretization choices, NIR does not do this since it is declarative, specifying only the necessary inputs and outputs. Constraining implementations would cause hardware incompatibilities and making backends aware of each other could create large O(N^2) overhead for N backends. The primitives are already computationally expressive and able to solve complex PDEs.
-| Primitive | Parameters | Computation | Reset |
-|-|-|-|-|
-| **Input** | Input shape | - | - |
-| **Output** | Output shape | - | - |
-| **Affine** | $W, b$ | $ W*I + b$ | - |
-| **Convolution** | $W$, Stride, Padding, Dilation, Groups, Bias | $f \star g$ | - |
-| **Current-based leaky integrate-and-fire** | $\tau_\text{syn}$, $\tau_\text{mem}$, R, $v_\text{leak}$, $v_\text{thr}$, $w_\text{in}$ | **LI**_1_; **Linear**; **LIF**_2_ | $\begin{cases} v_\text{LI\_2}-v_\text{thr} & \text{Spike} \\ v & \text{else} \end{cases}$ |
-| **Delay** | $\tau$ | $I(t - \tau)$ | - |
-| **Flatten** | Input shape, Start dim., End dim. | - | - |
-| **Integrator** | $\text{R}$ | $\dot{v} = R I$ | - |
-| **Integrate-and-fire** | $\text{R}, v_\text{thr}$ | **Integrator**; **Threshold** | $\begin{cases} v-v_\text{thr} & \text{Spike} \\ v & \text{else} \end{cases}$ |
-| **Leaky integrator (LI)** | $\tau, \text{R}, v_\text{leak}$ | $\tau \dot{v} = (v_\text{leak} - v) + R I$ | - |
-| **Linear** | $W$ | $W I$ | - |
-| **Leaky integrate-fire (LIF)** | $\tau, \text{R}, v_\text{leak}, v_\text{thr}$ | **LI**; **Threshold** | $\begin{cases} v-v_\text{thr} & \text{Spike} \\ v & \text{else} \end{cases}$ |
-| **Scale** | $s$ | $s I$ | - |
-| **SumPooling** | $p$ | $\sum_{j} x_j$ | |
-| **AvgPooling** | $p$ | **SumPooling**; **Scale** | - |
-| **Threshold** | $\theta_\text{thr}$ | $H(I - \theta_\text{thr})$ | - |
+| Primitive | Parameters | Computation | Reset |
+|------------------------------------|---------------------------------------------------------------------------|----------------------------------------------------------|----------------------------------------------------------------------------------------|
+| **Input** | Input shape | - | - |
+| **Output** | Output shape | - | - |
+| **Affine** | $W, b$ | $W \cdot I + b$ | - |
+| **Convolution** | $W$, Stride, Padding, Dilation, Groups, Bias | $f \star g$ | - |
+| **Current-based leaky integrate-and-fire** | $\tau_\text{syn}, \tau_\text{mem}, R, v_\text{leak}, v_\text{thr}, w_\text{in}$ | **LI**; **Linear**; **LIF** | $\begin{cases} v_\text{LIF} - v_\text{thr} & \text{Spike} \\ v_\text{LIF} & \text{else} \end{cases}$ |
+| **Delay** | $\tau$ | $I(t - \tau)$ | - |
+| **Flatten** | Input shape, Start dim., End dim. | - | - |
+| **Integrator** | $R$ | $\dot{v} = R I$ | - |
+| **Integrate-and-fire** | $R, v_\text{thr}$ | **Integrator**; **Threshold** | $\begin{cases} v - v_\text{thr} & \text{Spike} \\ v & \text{else} \end{cases}$ |
+| **Leaky integrator (LI)** | $\tau, R, v_\text{leak}$ | $\tau \dot{v} = (v_\text{leak} - v) + R I$ | - |
+| **Linear** | $W$ | $W I$ | - |
+| **Leaky integrate-fire (LIF)** | $\tau, R, v_\text{leak}, v_\text{thr}$ | **LI**; **Threshold** | $\begin{cases} v - v_\text{thr} & \text{Spike} \\ v & \text{else} \end{cases}$ |
+| **Scale** | $s$ | $s I$ | - |
+| **SumPooling** | $p$ | $\sum_{j} x_j$ | - |
+| **AvgPooling** | $p$ | **SumPooling**; **Scale** | - |
+| **Spike** | $\theta_\text{thr}$ | $\delta(I - \theta_\text{thr})$ | - |
+
Each primitive is defined by their own dynamical equation, specified in the [API docs](https://nnir.readthedocs.io/en/latest/).
diff --git a/docs/source/usage.md b/docs/source/usage.md
index 120702f..8052cf9 100644
--- a/docs/source/usage.md
+++ b/docs/source/usage.md
@@ -13,6 +13,9 @@ Please refer to the **Examples** section in the sidebar for code for each suppor
More code examples are available [in the repository for our paper](https://github.com/neuromorphs/NIR/tree/main/paper/).
## Example: Norse model to Sinabs Speck
+This example demonstrates how to convert a Norse model to a Sinabs model and then to a Speck chip.
+Note that Norse is based on PyTorch and uses [NIRTorch](#dev_pytorch) to convert PyTorch models to NIR.
+You can also do this manually by constructing your own NIR graphs, as shown in our [API design documentation](#api_design).
### Part 1: Convert Norse model to NIR
```python
@@ -20,10 +23,8 @@ import torch
import norse.torch as norse
# Define our neural network model
-model = norse.SequentialState(
- norse.LIFCell(),
- ...
-)
+model = ...
+
# Convert model to NIR
# Note that we use some sample data to "trace" the graph in PyTorch.
# You need to ensure that shape of the data fits your model
@@ -44,3 +45,30 @@ dynapcnn_model = DynapcnnNetwork(sinabs_model, input_shape=sample_data.shape[-1]
# Move model to chip!
dynapcnn_model.to("speck2fdevkit")
```
+
+## Example: Manually writing and reading NIR files
+You can also manually write and read NIR files.
+This is useful if you want to save a model to disk and use it later,
+or to load a model that someone else has created.
+
+### Writing a NIR file
+[NIR consists of graphs](#primitives) that describe the structure of a neural network.
+Our reference implementation uses Python to describe these graphs, so you can imagine having a graph in an object, say `nir_model`.
+To write this graph to file, you can use
+
+```python
+import nir
+nir.write("my_model.nir", nir_model)
+```
+
+### Reading a NIR file
+Reading a NIR file is similarly easy and will give you a graph object that you can use in your code.
+
+```python
+import nir
+nir_model = nir.read("my_model.nir")
+```
+
+Note that the graph object (`nir_model`) doesn't do anything by itself.
+You still need to convert it to a format that your hardware or simulator can understand.
+Read more about this in the [Using NIR in hardware guide](#porting_nir).
\ No newline at end of file
| Upload example code in the documentation `docs/source/examples`
This issue tracks example code for each of the frameworks:
- [x] SpiNNaker2
- [ ] snnTorch
- [x] Norse
- [x] Sinabs
- [ ] Rockpool
- [ ] Lava
- [x] Nengo
| 2024-11-11T17:28:31 | 0.0 | [] | [] |
|||
neuromorphs/NIR | neuromorphs__NIR-101 | 8f9177e9f74374de7bd72e49b42217a24df716ec | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 2a40513..4ee5753 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -28,9 +28,9 @@ jobs:
- name: Lint with ruff
run: |
# stop the build if there are Python syntax errors or undefined names
- ruff --output-format=github --select=E9,F63,F7,F82 --target-version=py37 .
+ ruff check . --output-format=github --select=E9,F63,F7,F82 --target-version=py37
# default set of ruff rules with GitHub Annotations
- ruff --output-format=github --target-version=py37 --exclude=docs/ --exclude=paper/ .
+ ruff check . --output-format=github --target-version=py37 --exclude=docs/ --exclude=paper/
- name: Test with pytest
run: |
pytest
diff --git a/nir/ir/utils.py b/nir/ir/utils.py
index c003af3..3e864fd 100644
--- a/nir/ir/utils.py
+++ b/nir/ir/utils.py
@@ -50,7 +50,7 @@ def calculate_conv_output(
shapes = []
for i in range(ndim):
if isinstance(padding, str) and padding == "same":
- shape = input_shape[i]
+ shape = _index_tuple(input_shape, i)
else:
shape = np.floor(
(
@@ -87,19 +87,22 @@ def calc_flatten_output(input_shape: Sequence[int], start_dim: int, end_dim: int
)
-def _index_tuple(
- tuple: Union[int, Sequence[int]], index: int
-) -> Union[int, np.ndarray]:
+def _index_tuple(tuple: Union[int, Sequence[int]], index: int) -> np.ndarray:
"""If the input is a tuple/array, index it.
Otherwise, return it as-is.
"""
- if isinstance(tuple, np.ndarray) or isinstance(tuple, Sequence):
+ if isinstance(tuple, np.ndarray):
return tuple[index]
+ elif isinstance(tuple, Sequence):
+ return np.array(tuple[index])
elif isinstance(tuple, (int, np.integer)):
return np.array([tuple])
else:
- raise TypeError(f"tuple must be int or np.ndarray, not {type(tuple)}")
+ try:
+ return tuple[index]
+ except TypeError:
+ raise TypeError(f"tuple must be int or np.ndarray, not {type(tuple)}")
def ensure_str(a: Union[str, bytes]) -> str:
diff --git a/pyproject.toml b/pyproject.toml
index 90be725..e1865a5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -47,5 +47,5 @@ find={include = ["nir*"]}
[tool.ruff]
line-length = 100
-per-file-ignores = {"docs/conf.py" = ["E402"]}
+lint.per-file-ignores = {"docs/conf.py" = ["E402"]}
exclude = ["paper/"]
| 'int' object has no attribute 'item' on conv shapes calculation
This line assumes that the result of the operation/indexing is going to be a tensor/array:
https://github.com/neuromorphs/NIR/blob/df2f1fc863557af1c318dfe20f9c3f2b6e5e54db/nir/ir/utils.py#L65
However, I don't know how, but I end up with lists here, and the elements do not have the `item` method available. You could just check whether the `shape` object is an `ndarray`/`tensor` or has the `item` attribute before calling it, and otherwise just grab the element?
| Hi there, thanks a lot for catching this! Could you add a simple test input that gives you this failure? And is the `shape` variable a `list`, or a single `int`? I don't immediately see why it would be a `list`, since we make sure that it would be a `np.array` here:
https://github.com/neuromorphs/NIR/blob/df2f1fc863557af1c318dfe20f9c3f2b6e5e54db/nir/ir/utils.py#L100
I just looked again and the `input_shape` type is neither an `int`, a `list`, nor a `tuple`; it's a lovely `torch.Size` type. ~~I would have thought that type would get bundled with the `Sequence` type in Line 91?~~
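A minimal reproduction of that path (a hedged sketch; it only assumes `numpy` and `torch` are installed):

```python
import numpy as np
import torch

input_shape = torch.Size([1, 28, 28])

# With padding == "same", the old code did `shape = input_shape[i]`,
# which yields a plain Python int here, so a later `shape.item()` call
# fails: AttributeError: 'int' object has no attribute 'item'.
shape = input_shape[1]

# Routing through np.array (what _index_tuple now guarantees) restores it:
assert np.array(shape).item() == 28
```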
I think this comes from
https://github.com/neuromorphs/NIR/blob/df2f1fc863557af1c318dfe20f9c3f2b6e5e54db/nir/ir/utils.py#L53
when `padding == "same"` and there is no conversion to `np.array([input_shape[i]])` | 2024-04-28T08:42:05 | 0.0 | [] | [] |
||
neuromorphs/NIR | neuromorphs__NIR-79 | 2ce271e3a8ff977a6b45b01c0356ab56729c88b5 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 9470563..2a40513 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -9,7 +9,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest]
- python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
+ python-version: ["3.9", "3.10", "3.11", "3.12"]
runs-on: ${{ matrix.os }}
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 14a99b6..ba13650 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -56,7 +56,7 @@
]
# MyST settings
-nb_execution_mode = "off" # this can be turned to 'auto' once the package is stable
+nb_execution_mode = "off" # this can be turned to 'auto' once the package is stable
nb_execution_timeout = 300
nb_execution_show_tb = True
diff --git a/docs/source/primitives.md b/docs/source/primitives.md
index 49e5391..7240268 100644
--- a/docs/source/primitives.md
+++ b/docs/source/primitives.md
@@ -1,6 +1,14 @@
# Primitives
-NIR defines 16 fundamental primitives listed in the table below, which backends are free to implement as they want, leading to varying outputs across platforms. While discrepancies could be minimized by constraining implementations or making backends aware of each other's discretization choices, NIR does not do this since it is declarative, specifying only the necessary inputs and outputs. Constraining implementations would cause hardware incompatibilities and making backends aware of each other could create large O(N^2) overhead for N backends. The primitives are already computationally expressive and able to solve complex PDEs.
+At its core, NIR is simply a [directed graph](https://en.wikipedia.org/wiki/Directed_graph) (using the [`NIRGraph` primitive](https://github.com/neuromorphs/NIR/blob/main/nir/ir/graph.py)).
+The nodes of the graph are computational units, and the edges are the (directed) connections between them.
+There are no restrictions on the graph structure, so it can be a simple feedforward network, a recurrent network, a graph with cycles, and even with duplicated connections, if needed.
+
+But, if you plan to execute the graph on restricted neuromorphic hardware, please **verify that the graph is compatible with the hardware**.
+
+## NIR computational primitives
+
+NIR defines 16 fundamental primitives listed in the table below, which backends are free to implement as they want, leading to varying outputs across platforms. While discrepancies could be minimized by constraining implementations or making backends aware of each other's discretization choices, NIR does not do this since it is declarative, specifying only the necessary inputs and outputs. Constraining implementations would cause hardware incompatibilities and making backends aware of each other could create large O(N^2) overhead for N backends. The primitives are already computationally expressive and able to solve complex PDEs.
| Primitive | Parameters | Computation | Reset |
|-|-|-|-|
@@ -21,17 +29,89 @@ NIR defines 16 fundamental primitives listed in the table below, which backends
| **AvgPooling** | $p$ | **SumPooling**; **Scale** | - |
| **Threshold** | $\theta_\text{thr}$ | $H(I - \theta_\text{thr})$ | - |
-Each primitive is defined by their own dynamical equation, specified in the [API docs](https://nnir.readthedocs.io/en/latest/modindex.html).
+Each primitive is defined by their own dynamical equation, specified in the [API docs](https://nnir.readthedocs.io/en/latest/).
## Connectivity
-Each computational unit is a node in a static graph.
-Given 3 nodes $A$ which is a LIF node, $B$ which is a Linear node and $C$ which is another LIF node, we can define edges in the graph such as:
+In the graph, each node has a name like "Neuron 1" or, in some cases, simply an index "1".
+Connections between nodes are simply tuples of strings describing the source and target.
+As an example, `("A", "B")` tells us that the output of node `A` is sent to node `B`.
+
+Describing the full connectivity in a graph is as simple as listing all the connections in the graph:
+```
+[
+ ("A", "B"),
+ ("B", "C"),
+ ("C", "D"),
+ ...
+]
+```
+
+## Input and output nodes
+Given a graph, how do we know which nodes should receive inputs? And which nodes should provide outputs?
+For that, we define two special nodes: `Input` and `Output`.
+Both nodes are "dummies" in the sense that they do not provide any function, apart from marking the beginning and end of the graph.
+Note that a single node can be both an input and an output node.
+
+To clarify the dimensionality/input types of the input and output nodes, we require the user to specify the shape *and* name of the input, like so:
+```python
+import numpy as np
+nir.Input(
+ input_type = {"input": np.array([28, 28])}
+)
+nir.Output(
+ output_type = {"output": np.array([2])}
+)
+```
+
+## A Graph Example in Python
+To illustrate how a computational graph can be defined using the NIR Python primitives, here is an example of a graph with a single `LIF` neuron with input and output nodes:
+
+```python
+import nir
+
+nir.NIRGraph(
+ nodes = {
+ "input" : nir.Input({"input": np.array([1])}),
+ "lif" : nir.LIF(...),
+ "output": nir.Output{"output": np.array([1])}
+ },
+ edges = [
+ ("Input", "LIF"),
+ ("LIF" , "Output"),
+ ],
+)
+```
+
+## Metadata
+
+Each node in the graph can have metadata attached to it.
+The metadata is a dictionary that can contain any information that may be helpful for the user or backend.
+Any dictionary entries can be added, although we recommend restricting the entries to strings, numbers, and arrays.
+Here is an example of a metadata dictionary attached to a graph:
+
+```python
+import nir
+
+nir.NIRGraph(
+ ...,
+ metadata = {"some": "metadata", "info": 1}
+)
+```
+
+
+```{admonition} Do not rely on the metadata
+:class: warning
+It's vital to ensure that **no backend relies on this metadata**.
+Metadata entries should contain non-essential meta-information about nodes or graphs, such as the discretization scheme with which the graph was trained, timestamps, etc.
+Tidbits that can improve the model or execution, but are not necessary for the execution itself.
+
+If a backend strictly relied on this metadata, it would require everyone else to adhere to this non-enforced standard.
+NIR graphs should be self-contained and unambiguous, such that the graph itself (without the metadata) contains all the necessary information to execute the graph.
+```
-$$
- A \rightarrow B \\
- B \rightarrow C
-$$
+## Importing and exporting
+While the NIR library is written in Python, the graph can be defined and used in any language.
+We provide import and export functions to and from the [Hierarchical Data Format](https://en.wikipedia.org/wiki/Hierarchical_Data_Format), which allows for easy storage and retrieval of the graph.
-## Format
-The intermediate represenation can be stored as hdf5 file, which benefits from compression.
+See [the usage page](usage) for more information.
diff --git a/nir/ir/conv.py b/nir/ir/conv.py
index e51e39d..ce7c452 100644
--- a/nir/ir/conv.py
+++ b/nir/ir/conv.py
@@ -1,5 +1,5 @@
-from dataclasses import dataclass
-from typing import Optional, Tuple, Union
+from dataclasses import dataclass, field
+from typing import Any, Dict, Optional, Tuple, Union
import numpy as np
@@ -41,6 +41,9 @@ class Conv1d(NIRNode):
dilation: int # Dilation
groups: int # Groups
bias: np.ndarray # Bias C_out
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
if isinstance(self.padding, str) and self.padding not in ["same", "valid"]:
diff --git a/nir/ir/delay.py b/nir/ir/delay.py
index e244e4e..5d355a8 100644
--- a/nir/ir/delay.py
+++ b/nir/ir/delay.py
@@ -1,4 +1,5 @@
-from dataclasses import dataclass
+from dataclasses import dataclass, field
+from typing import Any, Dict, Optional
import numpy as np
@@ -16,6 +17,9 @@ class Delay(NIRNode):
"""
delay: np.ndarray # Delay
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
# set input and output shape, if not set by user
diff --git a/nir/ir/flatten.py b/nir/ir/flatten.py
index dc350f3..b1c5ba7 100644
--- a/nir/ir/flatten.py
+++ b/nir/ir/flatten.py
@@ -1,5 +1,5 @@
-from dataclasses import dataclass
-from typing import Any, Dict
+from dataclasses import dataclass, field
+from typing import Any, Dict, Optional
import numpy as np
@@ -21,6 +21,9 @@ class Flatten(NIRNode):
input_type: Types
start_dim: int = 1 # First dimension to flatten
end_dim: int = -1 # Last dimension to flatten
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
self.input_type = parse_shape_argument(self.input_type, "input")
@@ -41,7 +44,6 @@ def __post_init__(self):
def to_dict(self) -> Dict[str, Any]:
ret = super().to_dict()
- del ret["input_type"]
ret["input_type"] = self.input_type["input"]
return ret
diff --git a/nir/ir/graph.py b/nir/ir/graph.py
index 5e01701..f4315c8 100644
--- a/nir/ir/graph.py
+++ b/nir/ir/graph.py
@@ -1,6 +1,6 @@
from collections import Counter
-from dataclasses import dataclass
-from typing import Any, Dict
+from dataclasses import dataclass, field
+from typing import Any, Dict, Optional
import numpy as np
@@ -27,6 +27,9 @@ class NIRGraph(NIRNode):
nodes: Nodes # List of computational nodes
edges: Edges # List of edges between nodes
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
@property
def inputs(self):
@@ -456,7 +459,6 @@ def __post_init__(self):
def to_dict(self) -> Dict[str, Any]:
ret = super().to_dict()
- del ret["input_type"]
ret["shape"] = self.input_type["input"]
return ret
@@ -484,7 +486,6 @@ def __post_init__(self):
def to_dict(self) -> Dict[str, Any]:
ret = super().to_dict()
- del ret["output_type"]
ret["shape"] = self.output_type["output"]
return ret
diff --git a/nir/ir/linear.py b/nir/ir/linear.py
index 2b4ea4b..ccd45bd 100644
--- a/nir/ir/linear.py
+++ b/nir/ir/linear.py
@@ -1,4 +1,5 @@
-from dataclasses import dataclass
+from dataclasses import dataclass, field
+from typing import Any, Dict, Optional
import numpy as np
@@ -20,6 +21,9 @@ class Affine(NIRNode):
weight: np.ndarray # Weight term
bias: np.ndarray # Bias term
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
assert len(self.weight.shape) >= 2, "Weight must be at least 2D"
diff --git a/nir/ir/neuron.py b/nir/ir/neuron.py
index d152260..f397475 100644
--- a/nir/ir/neuron.py
+++ b/nir/ir/neuron.py
@@ -1,4 +1,5 @@
-from dataclasses import dataclass
+from dataclasses import dataclass, field
+from typing import Any, Dict, Optional
import numpy as np
@@ -45,6 +46,9 @@ class CubaLIF(NIRNode):
v_leak: np.ndarray # Leak voltage
v_threshold: np.ndarray # Firing threshold
w_in: np.ndarray = 1.0 # Input current weight
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
assert (
@@ -71,6 +75,9 @@ class I(NIRNode): # noqa: E742
"""
r: np.ndarray
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
self.input_type = {"input": np.array(self.r.shape)}
@@ -101,6 +108,9 @@ class IF(NIRNode):
r: np.ndarray # Resistance
v_threshold: np.ndarray # Firing threshold
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
assert (
@@ -127,6 +137,9 @@ class LI(NIRNode):
tau: np.ndarray # Time constant
r: np.ndarray # Resistance
v_leak: np.ndarray # Leak voltage
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
assert (
@@ -166,6 +179,9 @@ class LIF(NIRNode):
r: np.ndarray # Resistance
v_leak: np.ndarray # Leak voltage
v_threshold: np.ndarray # Firing threshold
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
assert (
diff --git a/nir/ir/node.py b/nir/ir/node.py
index f572385..dc591bd 100644
--- a/nir/ir/node.py
+++ b/nir/ir/node.py
@@ -10,9 +10,11 @@ class NIRNode:
instantiated.
"""
- # Note: Adding input/output types as follows is ideal, but requires Python 3.10
- # input_type: Types = field(init=False, kw_only=True)
- # output_type: Types = field(init=False, kw_only=True)
+ # Note: Adding input/output types and metadata as follows is ideal, but requires Python 3.10
+ # TODO: implement this in 2025 when 3.9 is EOL
+ # input_type: Dict[str, np.ndarray] = field(init=False, kw_only=True)
+ # output_type: Dict[str, np.ndarray] = field(init=False, kw_only=True)
+ # metadata: Dict[str, Any] = field(init=True, default_factory=dict)
def __eq__(self, other):
return self is other
@@ -20,6 +22,10 @@ def __eq__(self, other):
def to_dict(self) -> Dict[str, Any]:
"""Serialize into a dictionary."""
ret = asdict(self)
+ if "input_type" in ret.keys():
+ del ret["input_type"]
+ if "output_type" in ret.keys():
+ del ret["output_type"]
# Note: The customization below won't be automatically done recursively for nested NIRNode.
# Therefore, classes with nested NIRNode e.g. NIRGraph must implement its own to_dict
ret["type"] = type(self).__name__
diff --git a/nir/ir/pooling.py b/nir/ir/pooling.py
index 1329e8f..2f2ae47 100644
--- a/nir/ir/pooling.py
+++ b/nir/ir/pooling.py
@@ -1,4 +1,5 @@
-from dataclasses import dataclass
+from dataclasses import dataclass, field
+from typing import Any, Dict, Optional
import numpy as np
@@ -12,6 +13,9 @@ class SumPool2d(NIRNode):
kernel_size: np.ndarray # (Height, Width)
stride: np.ndarray # (Height, width)
padding: np.ndarray # (Height, width)
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
self.input_type = {"input": None}
diff --git a/nir/ir/surrogate_gradient.py b/nir/ir/surrogate_gradient.py
index 4600047..9bc81e8 100644
--- a/nir/ir/surrogate_gradient.py
+++ b/nir/ir/surrogate_gradient.py
@@ -1,4 +1,5 @@
-from dataclasses import dataclass
+from dataclasses import dataclass, field
+from typing import Any, Dict, Optional
import numpy as np
@@ -19,6 +20,9 @@ class Threshold(NIRNode):
"""
threshold: np.ndarray # Firing threshold
+ input_type: Optional[Dict[str, np.ndarray]] = None
+ output_type: Optional[Dict[str, np.ndarray]] = None
+ metadata: Dict[str, Any] = field(default_factory=dict)
def __post_init__(self):
self.input_type = {"input": np.array(self.threshold.shape)}
diff --git a/nir/ir/utils.py b/nir/ir/utils.py
index afdf359..c003af3 100644
--- a/nir/ir/utils.py
+++ b/nir/ir/utils.py
@@ -62,7 +62,7 @@ def calculate_conv_output(
/ _index_tuple(stride, i)
+ 1
)
- shapes.append(int(shape))
+ shapes.append(int(shape.item()))
return np.array(shapes)
diff --git a/nir/serialization.py b/nir/serialization.py
index ca50c9b..545e933 100644
--- a/nir/serialization.py
+++ b/nir/serialization.py
@@ -1,3 +1,4 @@
+import io
import pathlib
from typing import Any, Dict, Union
@@ -7,43 +8,55 @@
import nir
+def _read_metadata(node: Any) -> Dict[str, Any]:
+ if "metadata" in node.keys():
+ return {"metadata": {k: v[()] for k, v in node["metadata"].items()}}
+ else:
+ return {}
+
+
def try_byte_to_str(a: Union[bytes, Any]) -> Union[str, Any]:
return a.decode("utf8") if isinstance(a, bytes) else a
-def read_node(node: Any) -> nir.typing.NIRNode:
+def read_node(node: Any) -> nir.NIRNode:
"""Read a graph from a HDF/conn5 file."""
if node["type"][()] == b"Affine":
- return nir.Affine(weight=node["weight"][()], bias=node["bias"][()])
+ return nir.Affine(
+ weight=node["weight"][()], bias=node["bias"][()], **_read_metadata(node)
+ )
elif node["type"][()] == b"Conv1d":
return nir.Conv1d(
- input_shape=node["input_shape"][()]
- if "input_shape" in node.keys()
- else None,
+ input_shape=(
+ node["input_shape"][()] if "input_shape" in node.keys() else None
+ ),
weight=node["weight"][()],
stride=node["stride"][()],
padding=node["padding"][()],
dilation=node["dilation"][()],
groups=node["groups"][()],
bias=node["bias"][()],
+ **_read_metadata(node),
)
elif node["type"][()] == b"Conv2d":
return nir.Conv2d(
- input_shape=node["input_shape"][()]
- if "input_shape" in node.keys()
- else None,
+ input_shape=(
+ node["input_shape"][()] if "input_shape" in node.keys() else None
+ ),
weight=node["weight"][()],
stride=node["stride"][()],
padding=node["padding"][()],
dilation=node["dilation"][()],
groups=node["groups"][()],
bias=node["bias"][()],
+ **_read_metadata(node),
)
elif node["type"][()] == b"SumPool2d":
return nir.SumPool2d(
kernel_size=node["kernel_size"][()],
stride=node["stride"][()],
padding=node["padding"][()],
+ **_read_metadata(node),
)
elif node["type"][()] == b"AvgPool2d":
return nir.AvgPool2d(
@@ -60,27 +73,32 @@ def read_node(node: Any) -> nir.typing.NIRNode:
input_type={
"input": node["input_type"][()] if "input_type" in node.keys() else None
},
+ **_read_metadata(node),
)
elif node["type"][()] == b"I":
- return nir.I(r=node["r"][()])
+ return nir.I(r=node["r"][()], **_read_metadata(node))
elif node["type"][()] == b"IF":
- return nir.IF(r=node["r"][()], v_threshold=node["v_threshold"][()])
+ return nir.IF(
+ r=node["r"][()], v_threshold=node["v_threshold"][()], **_read_metadata(node)
+ )
elif node["type"][()] == b"Input":
- return nir.Input(input_type={"input": node["shape"][()]})
+ return nir.Input(input_type={"input": node["shape"][()]}, **_read_metadata(node))
elif node["type"][()] == b"LI":
return nir.LI(
tau=node["tau"][()],
r=node["r"][()],
v_leak=node["v_leak"][()],
+ **_read_metadata(node),
)
elif node["type"][()] == b"Linear":
- return nir.Linear(weight=node["weight"][()])
+ return nir.Linear(weight=node["weight"][()], **_read_metadata(node))
elif node["type"][()] == b"LIF":
return nir.LIF(
tau=node["tau"][()],
r=node["r"][()],
v_leak=node["v_leak"][()],
v_threshold=node["v_threshold"][()],
+ **_read_metadata(node),
)
elif node["type"][()] == b"CubaLIF":
return nir.CubaLIF(
@@ -90,18 +108,22 @@ def read_node(node: Any) -> nir.typing.NIRNode:
v_leak=node["v_leak"][()],
v_threshold=node["v_threshold"][()],
w_in=node["w_in"][()],
+ **_read_metadata(node),
)
elif node["type"][()] == b"NIRGraph":
return nir.NIRGraph(
nodes={k: read_node(n) for k, n in node["nodes"].items()},
edges=[(a.decode("utf8"), b.decode("utf8")) for a, b in node["edges"][()]],
+ **_read_metadata(node),
)
elif node["type"][()] == b"Output":
- return nir.Output(output_type={"output": node["shape"][()]})
+ return nir.Output(
+ output_type={"output": node["shape"][()]}, **_read_metadata(node)
+ )
elif node["type"][()] == b"Scale":
- return nir.Scale(scale=node["scale"][()])
+ return nir.Scale(scale=node["scale"][()], **_read_metadata(node))
elif node["type"][()] == b"Threshold":
- return nir.Threshold(threshold=node["threshold"][()])
+ return nir.Threshold(threshold=node["threshold"][()], **_read_metadata(node))
else:
raise ValueError(f"Unknown unit type: {node['type'][()]}")
@@ -127,7 +149,7 @@ def read(filename: Union[str, pathlib.Path]) -> nir.NIRGraph:
"""Load a NIR from a HDF/conn5 file."""
with h5py.File(filename, "r") as f:
data_dict = hdf2dict(f["node"])
- return nir.ir.dict2NIRNode(data_dict)
+ return nir.dict2NIRNode(data_dict)
def read_version(filename: Union[str, pathlib.Path]) -> str:
@@ -137,12 +159,15 @@ def read_version(filename: Union[str, pathlib.Path]) -> str:
return f["version"][()].decode("utf8")
-def write(filename: Union[str, pathlib.Path], graph: nir.typing.NIRNode) -> None:
+def write(filename: Union[str, pathlib.Path, io.RawIOBase], graph: nir.NIRNode) -> None:
"""Write a NIR to a HDF5 file."""
def write_recursive(group: h5py.Group, node: dict) -> None:
for k, v in node.items():
- if isinstance(v, str):
+ if k == "metadata":
+ if not v == {}: # Skip metadata if empty
+ write_recursive(group.create_group(k), v)
+ elif isinstance(v, str):
group.create_dataset(k, data=v, dtype=h5py.string_dtype())
elif isinstance(v, np.ndarray):
group.create_dataset(k, data=v, dtype=v.dtype)
diff --git a/paper/01_lif/snntorch_debug.py b/paper/01_lif/snntorch_debug.py
index 7db469e..b53c088 100644
--- a/paper/01_lif/snntorch_debug.py
+++ b/paper/01_lif/snntorch_debug.py
@@ -5,15 +5,20 @@
from snntorch import spikeplot as splt
-V_THR = 2. # TODO: STRANGE BEHAVIOR FOR *V_THR = 2.5*
+V_THR = 2.0 # TODO: STRANGE BEHAVIOR FOR *V_THR = 2.5*
class TestNet(torch.nn.Module):
def __init__(self, reset_after=False) -> None:
super().__init__()
- self.lif = snn.Synaptic(alpha=0.1, beta=0.96, threshold=V_THR,
- reset_mechanism='zero', init_hidden=False,
- reset_after=reset_after)
+ self.lif = snn.Synaptic(
+ alpha=0.1,
+ beta=0.96,
+ threshold=V_THR,
+ reset_mechanism="zero",
+ init_hidden=False,
+ reset_after=reset_after,
+ )
def forward(self, x):
syn, mem = self.lif.init_synaptic()
@@ -23,7 +28,11 @@ def forward(self, x):
arr_syn.append(syn)
arr_mem.append(mem)
arr_spk.append(spk)
- return torch.stack(arr_spk, dim=0), torch.stack(arr_syn, dim=0), torch.stack(arr_mem, dim=0)
+ return (
+ torch.stack(arr_spk, dim=0),
+ torch.stack(arr_syn, dim=0),
+ torch.stack(arr_mem, dim=0),
+ )
isis = np.array([5, 4, 3, 2, 1, 0])
@@ -41,15 +50,17 @@ def forward(self, x):
fig, axs = plt.subplots(4, 1, sharex=True, figsize=(10, 5))
axs[0].eventplot(np.where(spk_times == 1)[0], linelengths=0.5)
-axs[0].set_ylabel('input')
+axs[0].set_ylabel("input")
axs[1].eventplot(np.where(spk.detach().numpy() == 1)[0])
-axs[1].eventplot(np.where(spk2.detach().numpy() == 1)[0], lineoffsets=-0.5, color='orange')
-axs[1].set_ylabel('spikes')
+axs[1].eventplot(
+ np.where(spk2.detach().numpy() == 1)[0], lineoffsets=-0.5, color="orange"
+)
+axs[1].set_ylabel("spikes")
axs[2].plot(syn.detach().numpy())
axs[2].plot(syn2.detach().numpy() - 0.03)
-axs[2].set_ylabel('current')
+axs[2].set_ylabel("current")
axs[3].plot(mem.detach().numpy())
axs[3].plot(mem2.detach().numpy() - 0.1)
-axs[3].hlines(V_THR, 0, mem.shape[0], color='r', ls='--')
-axs[3].set_ylabel('membrane')
+axs[3].hlines(V_THR, 0, mem.shape[0], color="r", ls="--")
+axs[3].set_ylabel("membrane")
plt.show()
diff --git a/paper/02_cnn/lava_cnn_reader.py b/paper/02_cnn/lava_cnn_reader.py
index d70157c..9ca8073 100644
--- a/paper/02_cnn/lava_cnn_reader.py
+++ b/paper/02_cnn/lava_cnn_reader.py
@@ -17,7 +17,7 @@ def forward(self, x):
end_dim -= 1
elif self.end_dim == len(x.shape):
end_dim = -2
- elif self.end_dim == len(x.shape)-1:
+ elif self.end_dim == len(x.shape) - 1:
end_dim = -2
# if end_dim != self.end_dim:
# print(f'FLATTEN: changed end_dim from {self.start_dim} to {end_dim}')
@@ -31,22 +31,30 @@ def convert_node_to_lava_dl_element(node, scale_v_thr=1.0):
debug_affine = False
if isinstance(node, nir.ir.Conv2d):
- assert np.abs(node.bias).sum() == 0.0, 'bias not supported in lava-dl'
+ assert np.abs(node.bias).sum() == 0.0, "bias not supported in lava-dl"
out_features = node.weight.shape[0]
in_features = node.weight.shape[1]
kernel_size = (node.weight.shape[2], node.weight.shape[3])
# stride = int(node.stride[0])
# assert node.stride[0] == node.stride[1], 'stride must be the same in both dimensions'
if debug_conv:
- print(f'Conv2d with weights of shape {node.weight.shape}:')
- print(f'\t{in_features} in, {out_features} out, kernel {kernel_size}')
- print(f'\tstride {node.stride}, padding {node.padding}, dilation {node.dilation}')
- print(f'\tgroups {node.groups}')
+ print(f"Conv2d with weights of shape {node.weight.shape}:")
+ print(f"\t{in_features} in, {out_features} out, kernel {kernel_size}")
+ print(
+ f"\tstride {node.stride}, padding {node.padding}, dilation {node.dilation}"
+ )
+ print(f"\tgroups {node.groups}")
conv_synapse_params = dict(
- in_features=in_features, out_features=out_features,
- kernel_size=kernel_size, stride=node.stride, padding=node.padding,
- dilation=node.dilation, groups=node.groups,
- weight_scale=1, weight_norm=False, pre_hook_fx=None
+ in_features=in_features,
+ out_features=out_features,
+ kernel_size=kernel_size,
+ stride=node.stride,
+ padding=node.padding,
+ dilation=node.dilation,
+ groups=node.groups,
+ weight_scale=1,
+ weight_norm=False,
+ pre_hook_fx=None,
)
conv = slayer.synapse.Conv(**conv_synapse_params)
conv.weight.data = torch.from_numpy(node.weight.reshape(conv.weight.shape))
@@ -54,21 +62,32 @@ def convert_node_to_lava_dl_element(node, scale_v_thr=1.0):
elif isinstance(node, nir.ir.SumPool2d):
if debug_pool:
- print(f'SumPool2d: kernel {node.kernel_size} pad {node.padding}, stride {node.stride}')
+ print(
+ f"SumPool2d: kernel {node.kernel_size} pad {node.padding}, stride {node.stride}"
+ )
pool_synapse_params = dict(
- kernel_size=node.kernel_size, stride=node.stride, padding=node.padding, dilation=1,
- weight_scale=1, weight_norm=False, pre_hook_fx=None
+ kernel_size=node.kernel_size,
+ stride=node.stride,
+ padding=node.padding,
+ dilation=1,
+ weight_scale=1,
+ weight_norm=False,
+ pre_hook_fx=None,
)
return slayer.synapse.Pool(**pool_synapse_params)
elif isinstance(node, nir.ir.IF):
- assert len(np.unique(node.v_threshold)) == 1, 'v_threshold must be the same for all neurons'
- assert len(np.unique(node.r)) == 1, 'resistance must be the same for all neurons'
+ assert (
+ len(np.unique(node.v_threshold)) == 1
+ ), "v_threshold must be the same for all neurons"
+ assert (
+ len(np.unique(node.r)) == 1
+ ), "resistance must be the same for all neurons"
v_thr = np.unique(node.v_threshold)[0]
resistance = np.unique(node.r)[0]
v_thr_eff = v_thr * resistance * scale_v_thr
if debug_if:
- print(f'IF with v_thr={v_thr}, R={resistance} -> eff. v_thr={v_thr_eff}')
+ print(f"IF with v_thr={v_thr}, R={resistance} -> eff. v_thr={v_thr_eff}")
cuba_neuron_params = dict(
threshold=v_thr_eff,
current_decay=1.0,
@@ -86,15 +105,18 @@ def convert_node_to_lava_dl_element(node, scale_v_thr=1.0):
# return slayer.neuron.alif.Neuron(**alif_neuron_params)
elif isinstance(node, nir.ir.Affine):
- assert np.abs(node.bias).sum() == 0.0, 'bias not supported in lava-dl'
+ assert np.abs(node.bias).sum() == 0.0, "bias not supported in lava-dl"
weight = node.weight
out_neurons = weight.shape[0]
in_neurons = weight.shape[1]
if debug_affine:
- print(f'Affine: weight shape: {weight.shape}')
+ print(f"Affine: weight shape: {weight.shape}")
dense = slayer.synapse.Dense(
- in_neurons=in_neurons, out_neurons=out_neurons,
- weight_scale=1, weight_norm=False, pre_hook_fx=None
+ in_neurons=in_neurons,
+ out_neurons=out_neurons,
+ weight_scale=1,
+ weight_norm=False,
+ pre_hook_fx=None,
)
dense.weight.data = torch.from_numpy(weight.reshape(dense.weight.shape))
return dense
@@ -108,7 +130,7 @@ def convert_node_to_lava_dl_element(node, scale_v_thr=1.0):
def get_next_node_key(node_key, edges):
possible_next_node_keys = [edge[1] for edge in edges if edge[0] == node_key]
- assert len(possible_next_node_keys) <= 1, 'branching networks are not supported'
+ assert len(possible_next_node_keys) <= 1, "branching networks are not supported"
if len(possible_next_node_keys) == 0:
return None
else:
@@ -132,25 +154,27 @@ def forward(self, spike):
if isinstance(block, torch.nn.Module):
spike = block(spike)
else:
- raise Exception('Unknown block type')
+ raise Exception("Unknown block type")
return spike
def nir_to_lava_dl(graph, scale_v_thr=1.0, debug=False):
- node_key = 'input'
+ node_key = "input"
visited_node_keys = [node_key]
module_list = []
while get_next_node_key(node_key, graph.edges) is not None:
node_key = get_next_node_key(node_key, graph.edges)
node = graph.nodes[node_key]
- assert node_key not in visited_node_keys, 'cyclic NIR graphs are not supported'
+ assert node_key not in visited_node_keys, "cyclic NIR graphs are not supported"
visited_node_keys.append(node_key)
if debug:
- print(f'node {node_key}: {type(node).__name__}')
- if node_key == 'output':
+ print(f"node {node_key}: {type(node).__name__}")
+ if node_key == "output":
continue
- module_list.append(convert_node_to_lava_dl_element(node, scale_v_thr=scale_v_thr))
+ module_list.append(
+ convert_node_to_lava_dl_element(node, scale_v_thr=scale_v_thr)
+ )
- assert len(visited_node_keys) == len(graph.nodes), 'not all nodes visited'
+ assert len(visited_node_keys) == len(graph.nodes), "not all nodes visited"
return NIR2LavaDLNetwork(module_list)
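
The loop above assumes a strictly linear NIR graph: starting from the "input" node, it repeatedly follows the single outgoing edge and converts each node it visits. A minimal, self-contained rendering of that traversal logic (node keys are invented for illustration, not taken from any real graph):

# Sketch of the linear-graph walk in nir_to_lava_dl (illustrative keys).
edges = [("input", "conv"), ("conv", "if"), ("if", "output")]

def get_next_node_key(node_key, edges):
    nxt = [dst for src, dst in edges if src == node_key]
    assert len(nxt) <= 1, "branching networks are not supported"
    return nxt[0] if nxt else None

key, order = "input", ["input"]
while (key := get_next_node_key(key, edges)) is not None:
    assert key not in order, "cyclic NIR graphs are not supported"
    order.append(key)

print(order)  # ['input', 'conv', 'if', 'output']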
diff --git a/paper/02_cnn/nir_to_nengo.py b/paper/02_cnn/nir_to_nengo.py
index f0e0954..40b571e 100644
--- a/paper/02_cnn/nir_to_nengo.py
+++ b/paper/02_cnn/nir_to_nengo.py
@@ -20,8 +20,11 @@ def nir_to_nengo(n, dt=0.001):
filters = {}
for name, obj in n.nodes.items():
if isinstance(obj, nir.Input):
- node = nengo.Node(None, size_in=np.product(obj.input_type['input']),
- label=f"Input {name} {obj.input_type['input']}")
+ node = nengo.Node(
+ None,
+ size_in=np.product(obj.input_type["input"]),
+ label=f"Input {name} {obj.input_type['input']}",
+ )
nengo_map[name] = node
pre_map[name] = node
elif isinstance(obj, nir.LIF):
@@ -32,9 +35,12 @@ def nir_to_nengo(n, dt=0.001):
n_neurons=N,
dimensions=1,
label=f"LIF {name}",
- neuron_type=nengo.LIF(tau_rc=obj.tau[0], tau_ref=0,
- initial_state={"voltage": nengo.dists.Choice([0])}),
- gain=np.ones(N)/obj.v_threshold,
+ neuron_type=nengo.LIF(
+ tau_rc=obj.tau[0],
+ tau_ref=0,
+ initial_state={"voltage": nengo.dists.Choice([0])},
+ ),
+ gain=np.ones(N) / obj.v_threshold,
bias=np.zeros(N),
)
nengo_map[name] = ens.neurons
@@ -47,9 +53,10 @@ def nir_to_nengo(n, dt=0.001):
n_neurons=N,
dimensions=1,
label=f"IF {name}",
- neuron_type=IF(initial_state={"voltage": nengo.dists.Choice([0])},
- amplitude=dt),
- gain=np.ones(N)/obj.v_threshold.flatten()/dt,
+ neuron_type=IF(
+ initial_state={"voltage": nengo.dists.Choice([0])}, amplitude=dt
+ ),
+ gain=np.ones(N) / obj.v_threshold.flatten() / dt,
bias=np.zeros(N),
)
nengo_map[name] = ens.neurons
@@ -76,26 +83,29 @@ def nir_to_nengo(n, dt=0.001):
pre_map[name] = w
post_map[name] = w
elif isinstance(obj, nir.Output):
- nengo_map[name] = nengo.Node(None, size_in=np.product(obj.output_type['output']),
- label=f"Output {name} {obj.input_type['input']}")
+ nengo_map[name] = nengo.Node(
+ None,
+ size_in=np.product(obj.output_type["output"]),
+ label=f"Output {name} {obj.input_type['input']}",
+ )
post_map[name] = nengo_map[name]
elif isinstance(obj, nir.Flatten):
- if name == '5':
- size_in = 16*8*8
- elif name == '8':
+ if name == "5":
+ size_in = 16 * 8 * 8
+ elif name == "8":
size_in = 128
else:
- 1/0
- node = nengo.Node(None, size_in=size_in, label=f'Flatten {name}')
+ 1 / 0
+ node = nengo.Node(None, size_in=size_in, label=f"Flatten {name}")
nengo_map[name] = node
pre_map[name] = node
post_map[name] = node
elif isinstance(obj, nir.Conv2d):
- conv = nengo.Network(label=f'Conv2d {name}')
+ conv = nengo.Network(label=f"Conv2d {name}")
- if name == '0':
+ if name == "0":
input_shape = (2, 34, 34)
- elif name == '2':
+ elif name == "2":
input_shape = (16, 16, 16)
else:
input_shape = (16, 8, 8)
@@ -106,69 +116,81 @@ def nir_to_nengo(n, dt=0.001):
[
(0, 0),
(obj.padding[0], obj.padding[0]),
- (obj.padding[1], obj.padding[1])
+ (obj.padding[1], obj.padding[1]),
],
- 'constant',
- constant_values=0)
+ "constant",
+ constant_values=0,
+ )
with conv:
ww = np.transpose(obj.weight, (2, 3, 1, 0))
- c = nengo.Convolution(n_filters=obj.weight.shape[0],
- input_shape=pad.shape,
- channels_last=False,
- init=ww,
- strides=obj.stride,
- padding='valid',
- kernel_size=(obj.weight.shape[2], obj.weight.shape[3])
- )
- conv.input = nengo.Node(None, size_in=np.product(input_shape),
- label=f'{name}.in')
+ c = nengo.Convolution(
+ n_filters=obj.weight.shape[0],
+ input_shape=pad.shape,
+ channels_last=False,
+ init=ww,
+ strides=obj.stride,
+ padding="valid",
+ kernel_size=(obj.weight.shape[2], obj.weight.shape[3]),
+ )
+ conv.input = nengo.Node(
+ None, size_in=np.product(input_shape), label=f"{name}.in"
+ )
conv.pad = nengo.Node(None, size_in=np.product(pad.shape))
- nengo.Connection(conv.input, conv.pad[np.where(pad.flatten() > 0)[0]],
- synapse=None)
- conv.output = nengo.Node(None, size_in=c.size_out, label=f'{name}.out')
+ nengo.Connection(
+ conv.input,
+ conv.pad[np.where(pad.flatten() > 0)[0]],
+ synapse=None,
+ )
+ conv.output = nengo.Node(
+ None, size_in=c.size_out, label=f"{name}.out"
+ )
- nengo.Connection(conv.pad, conv.output, synapse=None,
- transform=c)
+ nengo.Connection(conv.pad, conv.output, synapse=None, transform=c)
nengo_map[name] = conv
pre_map[name] = conv.output
post_map[name] = conv.input
elif isinstance(obj, nir.SumPool2d):
- pool = nengo.Network(label=f'SumPool2d {name}')
+ pool = nengo.Network(label=f"SumPool2d {name}")
with pool:
- if name == '4':
+ if name == "4":
input_shape = (16, 16, 16)
- elif name == '7':
+ elif name == "7":
input_shape = (8, 8, 8)
else:
- 1/0
+ 1 / 0
n_filters = input_shape[0]
pool_size = tuple(obj.kernel_size)
n_pool = np.product(pool_size)
kernel = np.reshape(
- [np.eye(n_filters)] * n_pool, pool_size + (n_filters, n_filters)
+ [np.eye(n_filters)] * n_pool, pool_size + (n_filters, n_filters)
)
- c = nengo.Convolution(n_filters=input_shape[0],
- input_shape=input_shape,
- channels_last=False,
- init=kernel,
- strides=obj.stride,
- padding='valid',
- kernel_size=pool_size,
- )
- pool.input = nengo.Node(None, size_in=np.product(input_shape),
- label=f'SumPool2d {name}.in')
- pool.output = nengo.Node(None, size_in=c.size_out,
- label=f'SumPool2d {name}.out')
+ c = nengo.Convolution(
+ n_filters=input_shape[0],
+ input_shape=input_shape,
+ channels_last=False,
+ init=kernel,
+ strides=obj.stride,
+ padding="valid",
+ kernel_size=pool_size,
+ )
+ pool.input = nengo.Node(
+ None,
+ size_in=np.product(input_shape),
+ label=f"SumPool2d {name}.in",
+ )
+ pool.output = nengo.Node(
+ None, size_in=c.size_out, label=f"SumPool2d {name}.out"
+ )
nengo.Connection(pool.input, pool.output, synapse=None, transform=c)
nengo_map[name] = pool
pre_map[name] = pool.output
post_map[name] = pool.input
else:
- raise Exception(f'Unknown object: {obj}')
+ raise Exception(f"Unknown object: {obj}")
for pre, post in n.edges:
synapse = filters.get(nengo_map[post], None)
@@ -176,7 +198,7 @@ def nir_to_nengo(n, dt=0.001):
print("Error")
print("pre", pre, pre_map[pre], pre_map[pre].size_out)
print("post", post, post_map[post], post_map[post].size_in)
- 1/0
+ 1 / 0
else:
nengo.Connection(pre_map[pre], post_map[post], synapse=synapse)
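
The SumPool2d branch above expresses sum-pooling as a nengo.Convolution whose kernel stacks one identity matrix per window position, so each output channel sums its own input channel over the window without mixing channels. A NumPy-only sketch of that kernel construction (sizes invented for illustration):

import numpy as np

n_filters = 16                    # channels in = channels out
pool_size = (2, 2)                # pooling window, as in obj.kernel_size
n_pool = int(np.prod(pool_size))  # taps in the window

# One identity matrix per window position: channel i maps only to channel i.
kernel = np.reshape([np.eye(n_filters)] * n_pool,
                    pool_size + (n_filters, n_filters))

print(kernel.shape)                               # (2, 2, 16, 16)
assert np.allclose(kernel[0, 0], np.eye(n_filters))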
diff --git a/paper/02_cnn/snntorch_cnn.py b/paper/02_cnn/snntorch_cnn.py
index 3adfde9..88c2357 100644
--- a/paper/02_cnn/snntorch_cnn.py
+++ b/paper/02_cnn/snntorch_cnn.py
@@ -14,7 +14,7 @@
print(net)
inp_data = torch.from_numpy(np.load("val_numbers.npy")).float()
-print('input data:', inp_data.shape)
+print("input data:", inp_data.shape)
modules = [e.elem for e in net.get_execution_order()]
# init all I&F neurons
@@ -49,9 +49,13 @@
bs = 128
collate = tonic.collation.PadTensors(batch_first=False)
-to_frame = tonic.transforms.ToFrame(sensor_size=tonic.datasets.NMNIST.sensor_size, time_window=1e3)
+to_frame = tonic.transforms.ToFrame(
+ sensor_size=tonic.datasets.NMNIST.sensor_size, time_window=1e3
+)
test_ds = tonic.datasets.NMNIST("./nmnist", transform=to_frame, train=False)
-test_dl = torch.utils.data.DataLoader(test_ds, shuffle=True, batch_size=bs, collate_fn=collate)
+test_dl = torch.utils.data.DataLoader(
+ test_ds, shuffle=True, batch_size=bs, collate_fn=collate
+)
accuracies = []
pbar = tqdm.tqdm(total=len(test_dl), desc="Processing", position=0, leave=True)
@@ -79,10 +83,10 @@
pred = out.mean(0).argmax(-1)
accuracy = (pred == y).sum() / x.shape[1]
accuracies.append(accuracy)
- pbar.set_postfix(accuracy="{:.2f}%".format(sum(accuracies)/len(accuracies)*100))
+ pbar.set_postfix(accuracy="{:.2f}%".format(sum(accuracies) / len(accuracies) * 100))
pbar.update(1)
pbar.close()
accuracies = np.array(accuracies)
-print(f'accuracy: {accuracies.mean():.2%} +/- {accuracies.std():.2%}')
-np.save('snntorch_accuracies.npy', accuracies)
-np.save('snntorch_accuracy.npy', accuracies.mean())
+print(f"accuracy: {accuracies.mean():.2%} +/- {accuracies.std():.2%}")
+np.save("snntorch_accuracies.npy", accuracies)
+np.save("snntorch_accuracy.npy", accuracies.mean())
diff --git a/paper/02_cnn/training.py b/paper/02_cnn/training.py
index 64aef63..6612c32 100644
--- a/paper/02_cnn/training.py
+++ b/paper/02_cnn/training.py
@@ -11,13 +11,18 @@
model = norse.torch.from_nir(g)
# %%
-to_frame = tonic.transforms.ToFrame(sensor_size=tonic.datasets.NMNIST.sensor_size,
- time_window = 1e3)
+to_frame = tonic.transforms.ToFrame(
+ sensor_size=tonic.datasets.NMNIST.sensor_size, time_window=1e3
+)
dataset = tonic.datasets.NMNIST(".", transform=to_frame)
# %%
-loader = torch.utils.data.DataLoader(dataset, shuffle=True, batch_size=10,
- collate_fn=tonic.collation.PadTensors(batch_first=False))
+loader = torch.utils.data.DataLoader(
+ dataset,
+ shuffle=True,
+ batch_size=10,
+ collate_fn=tonic.collation.PadTensors(batch_first=False),
+)
# %%
events, label = next(iter(loader))
@@ -32,10 +37,12 @@
# %%
+
class CNNModel(pl.LightningModule):
def __init__(self, model):
super().__init__()
self.model = model
+
def training_step(self, batch, batch_idx):
xs, label = batch
state = None
@@ -45,15 +52,14 @@ def training_step(self, batch, batch_idx):
loss = torch.nn.functional.cross_entropy(agg, label.float())
self.log("train_loss", loss)
return loss
+
def configure_optimizers(self):
return torch.optim.Adam(self.parameters(), lr=1e-4)
-
+
+
cnn = CNNModel(model)
logger = pl.loggers.TensorBoardLogger(".")
trainer = pl.Trainer(max_epochs=100, accelerator="gpu", logger=logger)
trainer.fit(model=cnn, train_dataloaders=loader)
# %%
-
-
-
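
The training_step in training.py sums the model's spike outputs over time and applies cross-entropy to the aggregate. A toy rendering of that loss computation, with invented shapes and integer class targets (the original passes label.float(), which newer PyTorch interprets as probability-style targets):

import torch

T, B, C = 20, 8, 10                        # time, batch, classes (assumed)
spk = (torch.rand(T, B, C) > 0.9).float()  # stand-in per-step outputs
label = torch.randint(0, C, (B,))          # integer class targets

agg = spk.sum(0)                           # spike count per class
loss = torch.nn.functional.cross_entropy(agg, label)
print(loss.item())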
diff --git a/paper/03_rnn/analyze_graph.py b/paper/03_rnn/analyze_graph.py
index 3064848..27fb41b 100644
--- a/paper/03_rnn/analyze_graph.py
+++ b/paper/03_rnn/analyze_graph.py
@@ -1,4 +1,5 @@
"""Analyze weight distribution of Braille graph."""
+
import matplotlib.pyplot as plt
import numpy as np
diff --git a/paper/03_rnn/flatten_braille_graph.py b/paper/03_rnn/flatten_braille_graph.py
index 0e5a40d..dc8311f 100644
--- a/paper/03_rnn/flatten_braille_graph.py
+++ b/paper/03_rnn/flatten_braille_graph.py
@@ -1,4 +1,5 @@
"""Flatten Braille graph for SpiNNaker2 and fix some shapes."""
+
import nir
nir_model = nir.read("braille.nir")
diff --git a/paper/03_rnn/generate_rnn_nir_graph.py b/paper/03_rnn/generate_rnn_nir_graph.py
index eeac982..5822c2b 100644
--- a/paper/03_rnn/generate_rnn_nir_graph.py
+++ b/paper/03_rnn/generate_rnn_nir_graph.py
@@ -7,8 +7,10 @@
import torch
import torch.nn as nn
from torch.utils.data import DataLoader, TensorDataset
+
# NIR stuff
import nir
+
# NOTE: this requires snntorch/nir (PR) and nirtorch/master (unreleased)
# from snntorch import export_nir
from snntorch import import_nirtorch, export_nirtorch
@@ -194,7 +196,9 @@ def val_test_loop_nirtorch(
):
with torch.no_grad():
net.eval()
- loader = DataLoader(dataset, batch_size=batch_size, shuffle=shuffle, drop_last=False)
+ loader = DataLoader(
+ dataset, batch_size=batch_size, shuffle=shuffle, drop_last=False
+ )
batch_loss = []
batch_acc = []
@@ -203,15 +207,19 @@ def val_test_loop_nirtorch(
data = data.to(device).swapaxes(1, 0)
labels = labels.to(device)
- print('data.shape', data.shape)
- print('labels.shape', labels.shape)
+ print("data.shape", data.shape)
+ print("labels.shape", labels.shape)
# TODO: implement the forward pass correctly (iterate over time)
# TODO: reset the state of the network
for node in net.graph.node_list:
if isinstance(node.elem, snn.RSynaptic):
- node.elem.spk, node.elem.syn, node.elem.mem = node.elem.init_rsynaptic()
+ (
+ node.elem.spk,
+ node.elem.syn,
+ node.elem.mem,
+ ) = node.elem.init_rsynaptic()
elif isinstance(node.elem, snn.Synaptic):
node.elem.syn, node.elem.mem = node.elem.init_synaptic()
elif isinstance(node.elem, snn.RLeaky):
@@ -228,7 +236,7 @@ def val_test_loop_nirtorch(
spk_out = torch.stack(spk_out_arr, dim=0)
- print('spk_out.shape', spk_out.shape)
+ print("spk_out.shape", spk_out.shape)
print()
# Validation loss
@@ -307,6 +315,7 @@ def val_test_loop_nirtorch(
##########################################################################################
##########################################################################################
+
def print_nir_graph(nir_graph: nir.NIRGraph):
print("nodes:")
for nodekey, node in nir_graph.nodes.items():
@@ -322,45 +331,45 @@ def print_nir_graph(nir_graph: nir.NIRGraph):
# nir.write("braille.nir", nir_graph)
nir_graph = export_nirtorch.to_nir(net, ds_test[0][0])
-print('\nRNN graph with NIRTorch\n')
+print("\nRNN graph with NIRTorch\n")
# print_nir_graph(nir_graph)
nir.write("braille_v2.nir", nir_graph)
net2 = import_nirtorch.from_nir(nir_graph)
# check that parameters are the same in both networks
-print('\ncheck parameter match\n')
-w1 = net._modules['fc1']._parameters['weight']
-w2 = net2._modules['fc1']._parameters['weight']
-print(f'input weights: {torch.allclose(w1, w2)}')
-b1 = net._modules['fc1']._parameters['bias']
-b2 = net2._modules['fc1']._parameters['bias']
-print(f'input bias: {torch.allclose(b1, b2)}')
-w1 = net._modules['fc2']._parameters['weight']
-w2 = net2._modules['fc2']._parameters['weight']
-print(f'output weights: {torch.allclose(w1, w2)}')
-b1 = net._modules['fc2']._parameters['bias']
-b2 = net2._modules['fc2']._parameters['bias']
-print(f'output bias: {torch.allclose(b1, b2)}')
-w1 = net._modules['lif1'].recurrent._parameters['weight']
-w2 = net2._modules['lif1'].recurrent._parameters['weight']
-print(f'recurrent weights: {torch.allclose(w1, w2)}')
-b1 = net._modules['lif1'].recurrent._parameters['bias']
-b2 = net2._modules['lif1'].recurrent._parameters['bias']
-print(f'recurrent bias: {torch.allclose(b1, b2)}')
-
-alpha1 = net._modules['lif1'].alpha
-alpha2 = net2._modules['lif1'].alpha
-print(f'lif1 alpha: {alpha1 == alpha2}')
-beta1 = net._modules['lif1'].beta
-beta2 = net2._modules['lif1'].beta
-print(f'lif1 beta: {beta1 == beta2}')
-alpha1 = net._modules['lif2'].alpha
-alpha2 = net2._modules['lif2'].alpha
-print(f'lif2 alpha: {alpha1 == alpha2}')
-beta1 = net._modules['lif2'].beta
-beta2 = net2._modules['lif2'].beta
-print(f'lif2 beta: {beta1 == beta2}')
+print("\ncheck parameter match\n")
+w1 = net._modules["fc1"]._parameters["weight"]
+w2 = net2._modules["fc1"]._parameters["weight"]
+print(f"input weights: {torch.allclose(w1, w2)}")
+b1 = net._modules["fc1"]._parameters["bias"]
+b2 = net2._modules["fc1"]._parameters["bias"]
+print(f"input bias: {torch.allclose(b1, b2)}")
+w1 = net._modules["fc2"]._parameters["weight"]
+w2 = net2._modules["fc2"]._parameters["weight"]
+print(f"output weights: {torch.allclose(w1, w2)}")
+b1 = net._modules["fc2"]._parameters["bias"]
+b2 = net2._modules["fc2"]._parameters["bias"]
+print(f"output bias: {torch.allclose(b1, b2)}")
+w1 = net._modules["lif1"].recurrent._parameters["weight"]
+w2 = net2._modules["lif1"].recurrent._parameters["weight"]
+print(f"recurrent weights: {torch.allclose(w1, w2)}")
+b1 = net._modules["lif1"].recurrent._parameters["bias"]
+b2 = net2._modules["lif1"].recurrent._parameters["bias"]
+print(f"recurrent bias: {torch.allclose(b1, b2)}")
+
+alpha1 = net._modules["lif1"].alpha
+alpha2 = net2._modules["lif1"].alpha
+print(f"lif1 alpha: {alpha1 == alpha2}")
+beta1 = net._modules["lif1"].beta
+beta2 = net2._modules["lif1"].beta
+print(f"lif1 beta: {beta1 == beta2}")
+alpha1 = net._modules["lif2"].alpha
+alpha2 = net2._modules["lif2"].alpha
+print(f"lif2 alpha: {alpha1 == alpha2}")
+beta1 = net._modules["lif2"].beta
+beta2 = net2._modules["lif2"].beta
+print(f"lif2 beta: {beta1 == beta2}")
loader = DataLoader(ds_test, batch_size=4, shuffle=True, drop_last=False)
data, labels = next(iter(loader))
@@ -373,8 +382,8 @@ def print_nir_graph(nir_graph: nir.NIRGraph):
node.elem.syn, node.elem.mem = node.elem.init_synaptic()
# ALSO RESET THE HIDDEN OF NET1
-spk1, syn1, mem1 = net._modules['lif1'].init_rsynaptic()
-syn2, mem2 = net._modules['lif2'].init_synaptic()
+spk1, syn1, mem1 = net._modules["lif1"].init_rsynaptic()
+syn2, mem2 = net._modules["lif2"].init_synaptic()
sout1_arr, hrec1_arr = [], []
sout2_arr, hrec2_arr = [], []
@@ -382,11 +391,11 @@ def print_nir_graph(nir_graph: nir.NIRGraph):
x = data[:, tstep, :]
# forward pass through network 1
- cur1 = net._modules['fc1'](x)
- spk1, syn1, mem1 = net._modules['lif1'](cur1, spk1, syn1, mem1)
+ cur1 = net._modules["fc1"](x)
+ spk1, syn1, mem1 = net._modules["lif1"](cur1, spk1, syn1, mem1)
# Output layer
- cur2 = net._modules['fc2'](spk1)
- spk2, syn2, mem2 = net._modules['lif2'](cur2, syn2, mem2)
+ cur2 = net._modules["fc2"](spk1)
+ spk2, syn2, mem2 = net._modules["lif2"](cur2, syn2, mem2)
sout1_arr.append(spk2)
# forward pass through network 2
@@ -400,7 +409,7 @@ def print_nir_graph(nir_graph: nir.NIRGraph):
# HACK: remove self-recurrence of lif1
# [e for e in net2.graph.node_list][-1].outgoing_nodes.pop({el.name: el for el in [e for e in net2.graph.node_list][-1].outgoing_nodes}['lif1'])
-print('\n test the re-imported torch network\n')
+print("\n test the re-imported torch network\n")
batch_size = 64
input_size = 12
num_steps = next(iter(ds_test))[0].shape[0]
@@ -417,7 +426,7 @@ def print_nir_graph(nir_graph: nir.NIRGraph):
net2 = import_nirtorch.from_nir(nir_graph) # reset the network
-print('\nback to NIR\n')
+print("\nback to NIR\n")
nir_graph2 = export_nirtorch.to_nir(net2, ds_test[0][0])
# print_nir_graph(nir_graph2)
nir.write("braille_v2.nir", nir_graph2) # same, but without recurrent edge
@@ -425,16 +434,26 @@ def print_nir_graph(nir_graph: nir.NIRGraph):
# important: reload original nir_graph bc it was modified
nir_graph = export_nirtorch.to_nir(net, ds_test[0][0])
-assert nir_graph.nodes.keys() == nir_graph2.nodes.keys(), 'node keys mismatch'
+assert nir_graph.nodes.keys() == nir_graph2.nodes.keys(), "node keys mismatch"
for nodekey in nir_graph.nodes:
- a = nir_graph.nodes[nodekey].__class__.__name__ if nodekey in nir_graph.nodes else None
- b = nir_graph2.nodes[nodekey].__class__.__name__ if nodekey in nir_graph2.nodes else None
- assert a == b, f'node type mismatch: {a} vs {b}'
- print(f'{nodekey}: {a}')
+ a = (
+ nir_graph.nodes[nodekey].__class__.__name__
+ if nodekey in nir_graph.nodes
+ else None
+ )
+ b = (
+ nir_graph2.nodes[nodekey].__class__.__name__
+ if nodekey in nir_graph2.nodes
+ else None
+ )
+ assert a == b, f"node type mismatch: {a} vs {b}"
+ print(f"{nodekey}: {a}")
for attr in nir_graph.nodes[nodekey].__dict__:
close = None
if isinstance(nir_graph.nodes[nodekey].__dict__[attr], np.ndarray):
- close = np.allclose(nir_graph.nodes[nodekey].__dict__[attr],
- nir_graph2.nodes[nodekey].__dict__[attr])
+ close = np.allclose(
+ nir_graph.nodes[nodekey].__dict__[attr],
+ nir_graph2.nodes[nodekey].__dict__[attr],
+ )
print(f'\t{attr:12}: {close} {"!!!" if close is False else ""}')
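
The attribute check at the end of generate_rnn_nir_graph.py compares every ndarray field of matching nodes between the exported and re-exported graphs. The same idea, reduced to a dependency-light helper with dict stand-ins for NIR nodes (names illustrative):

import numpy as np

def compare_nodes(node_a: dict, node_b: dict) -> dict:
    """True/False per ndarray attribute, None for everything else."""
    out = {}
    for attr, val in node_a.items():
        if isinstance(val, np.ndarray):
            out[attr] = bool(np.allclose(val, node_b[attr]))
        else:
            out[attr] = None
    return out

a = {"weight": np.ones((3, 3)), "name": "lif1"}
b = {"weight": np.ones((3, 3)), "name": "lif1"}
print(compare_nodes(a, b))  # {'weight': True, 'name': None}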
diff --git a/paper/03_rnn/lava__qat.py b/paper/03_rnn/lava__qat.py
index 3178bd1..bf1661f 100644
--- a/paper/03_rnn/lava__qat.py
+++ b/paper/03_rnn/lava__qat.py
@@ -9,6 +9,7 @@
import torch
import torch.nn as nn
from torch.utils.data import DataLoader, TensorDataset
+
# lava-dl
import nir
import nirtorch
@@ -39,7 +40,7 @@
letter_written = ["Space", "A", "E", "I", "O", "U", "Y"]
# load the lava-dl network
-nir_filename = 'braille_noDelay_bias_zero.nir'
+nir_filename = "braille_noDelay_bias_zero.nir"
nirgraph = nir.read(nir_filename)
net = from_nir(nirgraph).to(device)
@@ -47,7 +48,9 @@
train_loader = DataLoader(
ds_train, batch_size=batch_size, shuffle=True, drop_last=False
)
-error = slayer.loss.SpikeRate(true_rate=0.2, false_rate=0.03, reduction='sum').to(device)
+error = slayer.loss.SpikeRate(true_rate=0.2, false_rate=0.03, reduction="sum").to(
+ device
+)
optimizer = torch.optim.Adam(net.parameters(), lr=0.0001)
n_epochs = 300
@@ -57,7 +60,7 @@
for epoch in range(n_epochs):
n_samples = 0
n_correct = 0
- sum_loss = 0.
+ sum_loss = 0.0
for data, labels in train_loader:
data = data.to(device).swapaxes(1, 2)
labels = labels.to(device)
@@ -75,16 +78,21 @@
optimizer.step()
n_samples += data.shape[0]
sum_loss += loss.detach().cpu().data.item() * spk_rec.shape[0]
- n_correct += torch.sum(
- slayer.classifier.Rate.predict(spk_rec) == labels
- ).detach().cpu().data.item()
- batch_acc = n_correct/n_samples
- batch_loss = sum_loss/n_samples
+ n_correct += (
+ torch.sum(slayer.classifier.Rate.predict(spk_rec) == labels)
+ .detach()
+ .cpu()
+ .data.item()
+ )
+ batch_acc = n_correct / n_samples
+ batch_loss = sum_loss / n_samples
stats_acc.append(batch_acc)
stats_lss.append(batch_loss)
- print(f"Epoch [{epoch+1:3}/{n_epochs:3}] loss {batch_loss:.3f} accuracy {batch_acc:.2%}")
+ print(
+ f"Epoch [{epoch+1:3}/{n_epochs:3}] loss {batch_loss:.3f} accuracy {batch_acc:.2%}"
+ )
-input('done...')
+input("done...")
loader = DataLoader(ds_test, batch_size=batch_size, shuffle=False)
@@ -101,7 +109,9 @@
spk_out = spk_out.moveaxis(2, 0) # TCN
#####
act_total_out = torch.sum(spk_out, 0) # sum over time
- _, neuron_max_act_total_out = torch.max(act_total_out, 1) # argmax output > labels
+ _, neuron_max_act_total_out = torch.max(
+ act_total_out, 1
+ ) # argmax output > labels
pred.extend(neuron_max_act_total_out.detach().cpu().numpy())
act_out.extend(act_total_out.detach().cpu().numpy())
batch_acc.extend((neuron_max_act_total_out == labels).detach().cpu().numpy())
@@ -151,9 +161,7 @@ def training_loop(
_, neuron_max_act_total_out = torch.max(
act_total_out, 1
) # argmax over output units to compare to labels
- batch_acc.extend(
- (neuron_max_act_total_out == labels).detach().cpu().numpy()
- )
+ batch_acc.extend((neuron_max_act_total_out == labels).detach().cpu().numpy())
# the "old" one with mean per batch:
# batch_acc.append(np.mean((neuron_max_act_total_out == labels).detach().cpu().numpy()))
@@ -279,10 +287,12 @@ def val_test_loop(
validation_results.append([val_loss, val_acc])
if (ee == 0) | ((ee + 1) % 10 == 0):
- print(f"\tepoch {ee + 1}/{num_epochs} done \t --> \ttraining accuracy (loss): "
- f"{np.round(training_results[-1][1] * 100, 4)}% "
- f"({training_results[-1][0]}), \tvalidation accuracy (loss): "
- f"{np.round(validation_results[-1][1] * 100, 4)}% ({validation_results[-1][0]})")
+ print(
+ f"\tepoch {ee + 1}/{num_epochs} done \t --> \ttraining accuracy (loss): "
+ f"{np.round(training_results[-1][1] * 100, 4)}% "
+ f"({training_results[-1][0]}), \tvalidation accuracy (loss): "
+ f"{np.round(validation_results[-1][1] * 100, 4)}% ({validation_results[-1][0]})"
+ )
if val_acc >= np.max(np.array(validation_results)[:, 1]):
best_val_layers = copy.deepcopy(net.state_dict())
diff --git a/paper/03_rnn/lava_analysis_simple.py b/paper/03_rnn/lava_analysis_simple.py
index 01e4da0..4b0f442 100644
--- a/paper/03_rnn/lava_analysis_simple.py
+++ b/paper/03_rnn/lava_analysis_simple.py
@@ -1,14 +1,16 @@
-
import nir
+
# import nirtorch
import torch
+
# import numpy as np
import lava.lib.dl.slayer as slayer
import snntorch as snn
import numpy as np
import matplotlib.pyplot as plt
-plt.rcParams['figure.figsize'] = [10, 2]
-plt.rcParams['figure.dpi'] = 200
+
+plt.rcParams["figure.figsize"] = [10, 2]
+plt.rcParams["figure.dpi"] = 200
dt = 1e-4
@@ -35,7 +37,7 @@
seq_len = 200
# ut = np.random.random((lif_size, seq_len))
# np.save('ut.npy', ut)
-ut = np.load('ut.npy')
+ut = np.load("ut.npy")
ut = (ut > 0.95).astype(np.float32)
ut = torch.from_numpy(ut).reshape(1, lif_size, seq_len)
ut_scale = 0.6
@@ -46,7 +48,7 @@ def snntorch_net():
alpha=alpha,
beta=beta,
threshold=vthr,
- reset_mechanism='zero',
+ reset_mechanism="zero",
init_hidden=False,
all_to_all=True,
linear_features=lif_size,
@@ -74,7 +76,7 @@ def lava_net():
voltage_decay=v_decay,
shared_param=True,
scale=scale,
- )
+ ),
)
w_in_shape = rnn_lava.input_synapse.weight.data.shape
rnn_lava.input_synapse.weight.data = w_in.reshape(w_in_shape)
@@ -113,34 +115,36 @@ def lava_net():
# plot
fig, axs = plt.subplots(5, 1, figsize=(15, 6), dpi=200, sharex=True)
- # gridspec_kw={'height_ratios': [1, 2, 2]})
-axs[0].set_title('input spikes')
+# gridspec_kw={'height_ratios': [1, 2, 2]})
+axs[0].set_title("input spikes")
axs[0].set_xlim(0, seq_len)
for idx, ut_idx in enumerate(ut[0]):
- axs[0].eventplot(np.where(ut_idx == 1)[0], lineoffsets=idx, linelengths=0.8, color='red')
+ axs[0].eventplot(
+ np.where(ut_idx == 1)[0], lineoffsets=idx, linelengths=0.8, color="red"
+ )
-axs[1].set_title('lava-dl output spikes')
+axs[1].set_title("lava-dl output spikes")
axs[1].set_xlim(0, seq_len)
for idx, yt_idx in enumerate(spk_lava[0]):
axs[1].eventplot(np.where(yt_idx == 1)[0], lineoffsets=idx, linelengths=0.8)
-axs[2].set_title('lava-dl membrane')
+axs[2].set_title("lava-dl membrane")
axs[2].set_xlim(0, seq_len)
for idx, vt_idx in enumerate(mem_lava[0]):
axs[2].plot(vt_idx + idx)
- axs[2].hlines(idx+1, 0, seq_len, color='red', ls='--')
+ axs[2].hlines(idx + 1, 0, seq_len, color="red", ls="--")
-axs[3].set_title('snntorch output spikes')
+axs[3].set_title("snntorch output spikes")
axs[3].set_xlim(0, seq_len)
for idx, yt_idx in enumerate(out_snntorch[0]):
axs[3].eventplot(np.where(yt_idx == 1)[0], lineoffsets=idx, linelengths=0.8)
-axs[4].set_title('snnTorch membrane')
+axs[4].set_title("snnTorch membrane")
axs[4].set_xlim(0, seq_len)
for idx, vt_idx in enumerate(mem_snntorch[0]):
axs[4].plot(vt_idx + idx)
- axs[4].hlines(idx+1, 0, seq_len, color='red', ls='--')
+ axs[4].hlines(idx + 1, 0, seq_len, color="red", ls="--")
plt.tight_layout()
-plt.savefig('lava_analysis.png')
+plt.savefig("lava_analysis.png")
plt.close()
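
The figure code above draws spike rasters with eventplot, offsetting each neuron by one row. A standalone sketch of that raster idiom with synthetic spikes (all data invented):

import numpy as np
import matplotlib.pyplot as plt

seq_len, n_neurons = 200, 6
spikes = (np.random.random((n_neurons, seq_len)) > 0.95).astype(float)

fig, ax = plt.subplots(figsize=(10, 2), dpi=100)
for idx, train in enumerate(spikes):
    ax.eventplot(np.where(train == 1)[0], lineoffsets=idx, linelengths=0.8)
ax.set_xlim(0, seq_len)
ax.set_title("input spikes")
plt.tight_layout()
plt.savefig("raster_sketch.png")
plt.close()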
diff --git a/paper/03_rnn/lava_debug.py b/paper/03_rnn/lava_debug.py
index 9d7bb35..7ad2811 100644
--- a/paper/03_rnn/lava_debug.py
+++ b/paper/03_rnn/lava_debug.py
@@ -1,6 +1,7 @@
import nir
import torch
import numpy as np
+
# import os
# import torch.nn as nn
from torch.utils.data import DataLoader
@@ -18,10 +19,10 @@
torch.manual_seed(seed)
torch.use_deterministic_algorithms(True)
-nirgraph = nir.read('braille.nir')
+nirgraph = nir.read("braille.nir")
net = from_nir(nirgraph)
-nirgraph = nir.read('braille.nir')
+nirgraph = nir.read("braille.nir")
net_snn = import_nirtorch.from_nir(nirgraph)
test_data_path = "data/ds_test.pt"
@@ -45,9 +46,12 @@
snn_inputs.append(data.swapaxes(1, 0))
labels = labels
- h_state = GraphExecutorState(state={
- 'lif1': net_snn._modules['lif1'].init_rsynaptic(), # 3-tuple: spk, syn, mem
- 'lif2': net_snn._modules['lif2'].init_synaptic(), # 2-tuple: syn, mem
+ h_state = GraphExecutorState(
+ state={
+ "lif1": net_snn._modules[
+ "lif1"
+ ].init_rsynaptic(), # 3-tuple: spk, syn, mem
+ "lif2": net_snn._modules["lif2"].init_synaptic(), # 2-tuple: syn, mem
}
)
@@ -99,26 +103,28 @@
#########################
for i in range(len(snn_inputs)):
- print(f'input #{i} match: {torch.allclose(snn_inputs[i], ldl_inputs[i])}')
+ print(f"input #{i} match: {torch.allclose(snn_inputs[i], ldl_inputs[i])}")
-fc1_snn = torch.stack([h.cache['fc1'] for h in h_states], dim=-1)
-fc1_ldl = hid_rec.cache['fc1']
-print(f'fc1 match (atol=1e-6): {torch.allclose(fc1_snn, fc1_ldl, atol=1e-6)}')
-print(f'fc1 match (atol=1e-8): {torch.allclose(fc1_snn, fc1_ldl, atol=1e-8)}')
+fc1_snn = torch.stack([h.cache["fc1"] for h in h_states], dim=-1)
+fc1_ldl = hid_rec.cache["fc1"]
+print(f"fc1 match (atol=1e-6): {torch.allclose(fc1_snn, fc1_ldl, atol=1e-6)}")
+print(f"fc1 match (atol=1e-8): {torch.allclose(fc1_snn, fc1_ldl, atol=1e-8)}")
-lif1_snn = torch.stack([h.cache['lif1'] for h in h_states], dim=-1)
-lif1_ldl = hid_rec.cache['lif1']
+lif1_snn = torch.stack([h.cache["lif1"] for h in h_states], dim=-1)
+lif1_ldl = hid_rec.cache["lif1"]
# lif2_ldl = hid_rec.cache['lif2']
# net._modules['lif1'].neuron.current_decay
# net._modules['lif1'].neuron.voltage_decay
# check if lif1 synapse weights are identity matrix
-torch.allclose(net._modules['lif1'].input_synapse.weight.flatten(1).detach(), torch.eye(38))
+torch.allclose(
+ net._modules["lif1"].input_synapse.weight.flatten(1).detach(), torch.eye(38)
+)
# fc2_snn = torch.stack([h.cache['fc2'] for h in h_states], dim=-1)
# fc2_ldl = hid_rec.cache['fc2']
# print(f'fc2 match (atol=1e-2): {torch.allclose(fc2_snn, fc2_ldl, atol=1e-2)}')
# print(f'fc2 match (atol=1e-6): {torch.allclose(fc2_snn, fc2_ldl, atol=1e-6)}')
# print(f'fc2 match (atol=1e-8): {torch.allclose(fc2_snn, fc2_ldl, atol=1e-8)}')
-print('done')
+print("done")
diff --git a/paper/03_rnn/lava_inference.py b/paper/03_rnn/lava_inference.py
index d55552e..1a7b433 100644
--- a/paper/03_rnn/lava_inference.py
+++ b/paper/03_rnn/lava_inference.py
@@ -1,4 +1,3 @@
-
import nir
import nirtorch
from lava_rnn import from_nir
@@ -18,7 +17,7 @@
torch.manual_seed(seed)
torch.use_deterministic_algorithms(True)
-nir_filename = 'braille_noDelay_bias_zero.nir'
+nir_filename = "braille_noDelay_bias_zero.nir"
nirgraph = nir.read(nir_filename)
net = from_nir(nirgraph)
@@ -28,7 +27,7 @@
test_data_path = "data/ds_test.pt"
ds_test = torch.load(test_data_path)
-letter_written = ['Space', 'A', 'E', 'I', 'O', 'U', 'Y']
+letter_written = ["Space", "A", "E", "I", "O", "U", "Y"]
loss_fn = SF.ce_count_loss()
batch_size = 64
@@ -39,7 +38,9 @@
def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, shuffle=True):
with torch.no_grad():
net.eval()
- loader = DataLoader(dataset, batch_size=batch_size, shuffle=shuffle, drop_last=False)
+ loader = DataLoader(
+ dataset, batch_size=batch_size, shuffle=shuffle, drop_last=False
+ )
batch_loss = []
batch_acc = []
@@ -50,8 +51,10 @@ def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, shuffle=True):
h_state = nirtorch.from_nir.GraphExecutorState(
state={
- 'lif1': net._modules['lif1'].init_rsynaptic(), # 3-tuple: spk, syn, mem
- 'lif2': net._modules['lif2'].init_synaptic(), # 2-tuple: syn, mem
+ "lif1": net._modules[
+ "lif1"
+ ].init_rsynaptic(), # 3-tuple: spk, syn, mem
+ "lif2": net._modules["lif2"].init_synaptic(), # 2-tuple: syn, mem
}
)
@@ -66,19 +69,21 @@ def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, shuffle=True):
act_total_out = torch.sum(spk_out, 0) # sum over time
_, neuron_max_act_total_out = torch.max(act_total_out, 1)
- batch_acc.extend((neuron_max_act_total_out == labels).detach().cpu().numpy())
+ batch_acc.extend(
+ (neuron_max_act_total_out == labels).detach().cpu().numpy()
+ )
return [np.mean(batch_loss), np.mean(batch_acc)]
acc = val_test_loop_nirtorch(ds_test, batch_size, net_snn, loss_fn, shuffle=False)[1]
-print(f'snnTorch test accuracy: {acc:.2%}')
+print(f"snnTorch test accuracy: {acc:.2%}")
-wg = nirgraph.nodes['fc1'].weight
-wn = net._modules['fc1'].weight.detach().squeeze(-1).squeeze(-1).squeeze(-1).numpy()
-bg = nirgraph.nodes['fc1'].bias
-bn = net._modules['fc1'].bias.detach().numpy()
-print('weights close', np.allclose(wg, wn), np.allclose(bg, bn))
+wg = nirgraph.nodes["fc1"].weight
+wn = net._modules["fc1"].weight.detach().squeeze(-1).squeeze(-1).squeeze(-1).numpy()
+bg = nirgraph.nodes["fc1"].bias
+bn = net._modules["fc1"].bias.detach().numpy()
+print("weights close", np.allclose(wg, wn), np.allclose(bg, bn))
[e.elem for e in net.get_execution_order()]
@@ -109,19 +114,21 @@ def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, shuffle=True):
rec_hid = {}
for node in net.get_execution_order():
- if isinstance(node.elem, (slayer.block.cuba.Recurrent, slayer.block.cuba.Dense)):
+ if isinstance(
+ node.elem, (slayer.block.cuba.Recurrent, slayer.block.cuba.Dense)
+ ):
if not torch.equal(node.elem.neuron.current_state, torch.Tensor([0])):
- print('current_state not zero, resetting manually')
+ print("current_state not zero, resetting manually")
node.elem.neuron.current_state = torch.Tensor([0])
if not torch.equal(node.elem.neuron.voltage_state, torch.Tensor([0])):
- print('voltage_state not zero, resetting manually')
+ print("voltage_state not zero, resetting manually")
node.elem.neuron.voltage_state = torch.Tensor([0])
assert torch.equal(node.elem.neuron.current_state, torch.Tensor([0]))
assert torch.equal(node.elem.neuron.voltage_state, torch.Tensor([0]))
x = node.elem(x)
if isinstance(x, tuple):
x, v, c = x
- rec_hid[node.name] = {'v': v, 'c': c}
+ rec_hid[node.name] = {"v": v, "c": c}
int_lava[node.name] = x
# spk_out, hid_rec = net(data_ldl)
# spk_out = spk_out.moveaxis(2, 0) # TCN
@@ -130,8 +137,10 @@ def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, shuffle=True):
# snnTorch network
h_state = nirtorch.from_nir.GraphExecutorState(
state={
- 'lif1': net_snn._modules['lif1'].init_rsynaptic(), # 3-tuple: spk, syn, mem
- 'lif2': net_snn._modules['lif2'].init_synaptic(), # 2-tuple: syn, mem
+ "lif1": net_snn._modules[
+ "lif1"
+ ].init_rsynaptic(), # 3-tuple: spk, syn, mem
+ "lif2": net_snn._modules["lif2"].init_synaptic(), # 2-tuple: syn, mem
}
)
spk_out_arr = []
@@ -146,13 +155,23 @@ def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, shuffle=True):
for k in h_state.cache.keys()
}
snntorch_curr = {
- k: torch.stack([h_state.state[k][1 if len(h_state.state[k]) == 3 else 0]
- for h_state in h_state_arr], dim=-1)
+ k: torch.stack(
+ [
+ h_state.state[k][1 if len(h_state.state[k]) == 3 else 0]
+ for h_state in h_state_arr
+ ],
+ dim=-1,
+ )
for k in h_state.state.keys()
}
snntorch_mem = {
- k: torch.stack([h_state.state[k][2 if len(h_state.state[k]) == 3 else 1]
- for h_state in h_state_arr], dim=-1)
+ k: torch.stack(
+ [
+ h_state.state[k][2 if len(h_state.state[k]) == 3 else 1]
+ for h_state in h_state_arr
+ ],
+ dim=-1,
+ )
for k in h_state.state.keys()
}
@@ -163,60 +182,66 @@ def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, shuffle=True):
#####
# analyze
- fc1_lava = int_lava['fc1']
- fc1_snntorch = snntorch_cache['fc1']
- mem1_lava = rec_hid['lif1']['v']
- mem1_snntorch = snntorch_mem['lif1']
- cur1_lava = rec_hid['lif1']['c']
- cur1_snntorch = snntorch_curr['lif1']
- spk1_lava = int_lava['lif1']
- spk1_snntorch = snntorch_cache['lif1']
+ fc1_lava = int_lava["fc1"]
+ fc1_snntorch = snntorch_cache["fc1"]
+ mem1_lava = rec_hid["lif1"]["v"]
+ mem1_snntorch = snntorch_mem["lif1"]
+ cur1_lava = rec_hid["lif1"]["c"]
+ cur1_snntorch = snntorch_curr["lif1"]
+ spk1_lava = int_lava["lif1"]
+ spk1_snntorch = snntorch_cache["lif1"]
fig, axs = plt.subplots(4, 2, figsize=(24, 8), dpi=200, sharex=True)
- axs[0][0].set_title('fc1 output traces - lava-dl')
+ axs[0][0].set_title("fc1 output traces - lava-dl")
axs[0][0].set_xlim(0, data.shape[1])
axs[0][0].plot(fc1_lava[0].T)
- axs[1][0].set_title('fc1 output traces - snnTorch')
+ axs[1][0].set_title("fc1 output traces - snnTorch")
axs[1][0].set_xlim(0, data.shape[1])
axs[1][0].plot(fc1_snntorch[0].T)
- axs[2][0].set_title('lif1 current - lava-dl')
+ axs[2][0].set_title("lif1 current - lava-dl")
axs[2][0].set_xlim(0, data.shape[1])
axs[2][0].plot(cur1_lava[0].T)
- axs[3][0].set_title('lif1 current - snnTorch')
+ axs[3][0].set_title("lif1 current - snnTorch")
axs[3][0].set_xlim(0, data.shape[1])
axs[3][0].plot(cur1_snntorch[0].T)
- axs[0][1].set_title('lif1 membrane - lava-dl')
+ axs[0][1].set_title("lif1 membrane - lava-dl")
# axs[2].set_ylim(-1, 1.2)
axs[0][1].set_xlim(0, data.shape[1])
axs[0][1].plot(mem1_lava[0].T)
- axs[1][1].set_title('lif1 membrane - snnTorch')
+ axs[1][1].set_title("lif1 membrane - snnTorch")
# axs[3].set_ylim(-1, 1.2)
axs[1][1].set_xlim(0, data.shape[1])
axs[1][1].plot(mem1_snntorch[0].T)
- axs[2][1].set_title('lif1 spikes - lava-dl')
+ axs[2][1].set_title("lif1 spikes - lava-dl")
axs[2][1].set_xlim(0, data.shape[1])
for idx, yt_idx in enumerate(spk1_lava[0]):
- axs[2][1].eventplot(np.where(yt_idx == 1)[0], lineoffsets=idx, linelengths=0.8)
- axs[3][1].set_title('lif1 spikes - snnTorch')
+ axs[2][1].eventplot(
+ np.where(yt_idx == 1)[0], lineoffsets=idx, linelengths=0.8
+ )
+ axs[3][1].set_title("lif1 spikes - snnTorch")
axs[3][1].set_xlim(0, data.shape[1])
for idx, yt_idx in enumerate(spk1_snntorch[0]):
- axs[3][1].eventplot(np.where(yt_idx == 1)[0], lineoffsets=idx, linelengths=0.8)
+ axs[3][1].eventplot(
+ np.where(yt_idx == 1)[0], lineoffsets=idx, linelengths=0.8
+ )
plt.tight_layout()
- plt.savefig('lava_analysis_nir.png')
+ plt.savefig("lava_analysis_nir.png")
plt.close()
spk1_lava_over_snn.append((spk1_lava.sum() / spk1_snntorch.sum()).item())
print(spk1_lava_over_snn)
# lava-dl loss & accuracy
- spk_out = int_lava['lif2'].moveaxis(2, 0) # TBN
+ spk_out = int_lava["lif2"].moveaxis(2, 0) # TBN
loss_val = loss_fn(spk_out, labels)
batch_loss.append(loss_val.detach().cpu().item())
act_total_out = torch.sum(spk_out, 0) # sum over time
- _, neuron_max_act_total_out = torch.max(act_total_out, 1) # argmax output > labels
+ _, neuron_max_act_total_out = torch.max(
+ act_total_out, 1
+ ) # argmax output > labels
pred.extend(neuron_max_act_total_out.detach().cpu().numpy())
act_out.extend(act_total_out.detach().cpu().numpy())
batch_acc.extend((neuron_max_act_total_out == labels).detach().cpu().numpy())
@@ -225,11 +250,13 @@ def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, shuffle=True):
spk_out_snn = spk_out_arr
act_total_out_snn = torch.sum(spk_out_snn, 0) # sum over time
_, neuron_max_act_total_out_snn = torch.max(act_total_out_snn, 1)
- batch_acc_snn.extend((neuron_max_act_total_out_snn == labels).detach().cpu().numpy())
+ batch_acc_snn.extend(
+ (neuron_max_act_total_out_snn == labels).detach().cpu().numpy()
+ )
if batch_idx == 0:
- print('saving activity for first sample')
- fname = 'lava_activity_noDelay_bias_zero.npy'
+ print("saving activity for first sample")
+ fname = "lava_activity_noDelay_bias_zero.npy"
np.save(fname, spk1_lava[0].detach().numpy())
test_results = [np.mean(batch_loss), np.mean(batch_acc)]
@@ -237,5 +264,5 @@ def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, shuffle=True):
print(f"lava-dl test accuracy: {test_results[1]:.2%}")
print(f"snntorch test accuracy: {np.mean(batch_acc_snn):.2%}")
-fname = 'lava_accuracy_noDelay_bias_zero.npy'
+fname = "lava_accuracy_noDelay_bias_zero.npy"
np.save(fname, np.mean(batch_acc))
diff --git a/paper/03_rnn/lava_rnn.py b/paper/03_rnn/lava_rnn.py
index 5eb4a9c..fc0e955 100644
--- a/paper/03_rnn/lava_rnn.py
+++ b/paper/03_rnn/lava_rnn.py
@@ -2,6 +2,7 @@
Sharp edges:
- In lava-dl, the current and voltage state is not automatically reset; it must be reset manually after every forward pass.
"""
+
import nir
import nirtorch
import torch
@@ -14,17 +15,20 @@ def _create_rnn_subgraph(graph: nir.NIRGraph, lif_nk: str, w_nk: str) -> nir.NIR
which has the RNN subgraph replaced with a subgraph (i.e., a single NIRGraph node).
"""
# NOTE: assuming that the LIF and W_rec have keys of form xyz.abc
- sg_key = lif_nk.split('.')[0] # TODO: make this more general?
+ sg_key = lif_nk.split(".")[0] # TODO: make this more general?
# create subgraph for RNN
sg_edges = [
- (lif_nk, w_nk), (w_nk, lif_nk), (lif_nk, f'{sg_key}.output'), (f'{sg_key}.input', w_nk)
+ (lif_nk, w_nk),
+ (w_nk, lif_nk),
+ (lif_nk, f"{sg_key}.output"),
+ (f"{sg_key}.input", w_nk),
]
sg_nodes = {
lif_nk: graph.nodes[lif_nk],
w_nk: graph.nodes[w_nk],
- f'{sg_key}.input': nir.Input(graph.nodes[lif_nk].input_type),
- f'{sg_key}.output': nir.Output(graph.nodes[lif_nk].output_type),
+ f"{sg_key}.input": nir.Input(graph.nodes[lif_nk].input_type),
+ f"{sg_key}.output": nir.Output(graph.nodes[lif_nk].output_type),
}
sg = nir.NIRGraph(nodes=sg_nodes, edges=sg_edges)
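
To make _create_rnn_subgraph concrete: for a recurrence between a LIF node and its recurrent weights, the replacement subgraph contains exactly those two nodes plus fresh Input/Output nodes, wired as below. A data-only sketch of the resulting edge list (keys invented, following the xyz.abc convention noted above):

lif_nk, w_nk = "rnn.lif1", "rnn.w_rec"  # illustrative keys of form xyz.abc
sg_key = lif_nk.split(".")[0]           # -> "rnn"

sg_edges = [
    (lif_nk, w_nk),                # LIF spikes drive the recurrent weights
    (w_nk, lif_nk),                # ...which feed back into the LIF
    (lif_nk, f"{sg_key}.output"),  # LIF output leaves the subgraph
    (f"{sg_key}.input", w_nk),     # external input enters through W_rec
]
print(sg_edges)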
@@ -45,10 +49,10 @@ def _create_rnn_subgraph(graph: nir.NIRGraph, lif_nk: str, w_nk: str) -> nir.NIR
def _replace_rnn_subgraph_with_nirgraph(graph: nir.NIRGraph) -> nir.NIRGraph:
"""Take a NIRGraph and replace any RNN subgraphs with a single NIRGraph node."""
- print('replace rnn subgraph with nirgraph')
+ print("replace rnn subgraph with nirgraph")
if len(set(graph.edges)) != len(graph.edges):
- print('[WARNING] duplicate edges found, removing')
+ print("[WARNING] duplicate edges found, removing")
graph.edges = list(set(graph.edges))
# find cycle of LIF <> Dense nodes
@@ -85,34 +89,38 @@ def _parse_rnn_subgraph(graph: nir.NIRGraph) -> (nir.NIRNode, nir.NIRNode, int):
lif_size: int, number of neurons in the RNN
"""
sub_nodes = graph.nodes.values()
- assert len(sub_nodes) == 4, 'only 4-node RNN allowed in subgraph'
+ assert len(sub_nodes) == 4, "only 4-node RNN allowed in subgraph"
try:
input_node = [n for n in sub_nodes if isinstance(n, nir.Input)][0]
output_node = [n for n in sub_nodes if isinstance(n, nir.Output)][0]
lif_node = [n for n in sub_nodes if isinstance(n, (nir.LIF, nir.CubaLIF))][0]
wrec_node = [n for n in sub_nodes if isinstance(n, (nir.Affine, nir.Linear))][0]
except IndexError:
- raise ValueError('invalid RNN subgraph - could not find all required nodes')
- lif_size = int(list(input_node.input_type.values())[0][0]) # NOTE: needed for lava-dl
- assert lif_size == list(output_node.output_type.values())[0][0], 'output size mismatch'
- assert lif_size == lif_node.v_threshold.size, 'lif size mismatch (v_threshold)'
- assert lif_size == wrec_node.weight.shape[0], 'w_rec shape mismatch'
- assert lif_size == wrec_node.weight.shape[1], 'w_rec shape mismatch'
+ raise ValueError("invalid RNN subgraph - could not find all required nodes")
+ lif_size = int(
+ list(input_node.input_type.values())[0][0]
+ ) # NOTE: needed for lava-dl
+ assert (
+ lif_size == list(output_node.output_type.values())[0][0]
+ ), "output size mismatch"
+ assert lif_size == lif_node.v_threshold.size, "lif size mismatch (v_threshold)"
+ assert lif_size == wrec_node.weight.shape[0], "w_rec shape mismatch"
+ assert lif_size == wrec_node.weight.shape[1], "w_rec shape mismatch"
return lif_node, wrec_node, lif_size
def _nir_to_lavadl_module(
- node: nir.NIRNode,
- scale: int = 1 << 6,
- # hack_w_scale=True,
- dt=1e-4
+ node: nir.NIRNode,
+ scale: int = 1 << 6,
+ # hack_w_scale=True,
+ dt=1e-4,
) -> torch.nn.Module:
if isinstance(node, nir.Input) or isinstance(node, nir.Output):
return None
elif isinstance(node, nir.Affine):
- assert node.bias is not None, 'bias must be specified for Affine layer'
+ assert node.bias is not None, "bias must be specified for Affine layer"
mod = slayer.synapse.Dense(
in_neurons=node.weight.shape[1],
@@ -123,19 +131,19 @@ def _nir_to_lavadl_module(
)
weight = torch.from_numpy(node.weight.reshape(mod.weight.shape))
mod.weight = torch.nn.Parameter(data=weight, requires_grad=True)
- if not np.allclose(node.bias, 0.):
+ if not np.allclose(node.bias, 0.0):
bias = torch.from_numpy(node.bias.reshape((node.weight.shape[0])))
mod.bias = torch.nn.Parameter(data=bias, requires_grad=True)
return mod
elif isinstance(node, nir.Linear):
- print('[WARNING] Linear layer not supported, using Dense instead')
+ print("[WARNING] Linear layer not supported, using Dense instead")
mod = slayer.synapse.Dense(
in_neurons=node.weight.shape[1],
out_neurons=node.weight.shape[0],
weight_scale=1,
weight_norm=False,
- pre_hook_fx=None
+ pre_hook_fx=None,
)
weight = torch.from_numpy(node.weight.reshape(mod.weight.shape))
mod.weight = torch.nn.Parameter(data=weight, requires_grad=True)
@@ -143,17 +151,23 @@ def _nir_to_lavadl_module(
elif isinstance(node, nir.CubaLIF):
# bias = node.v_leak * dt / node.tau_mem
- assert np.allclose(node.v_leak, 0), 'v_leak not supported' # not yet in lava-dl?
- assert np.allclose(node.r, node.tau_mem / dt), 'r not supported in CubaLIF'
+ assert np.allclose(
+ node.v_leak, 0
+ ), "v_leak not supported" # not yet in lava-dl?
+ assert np.allclose(node.r, node.tau_mem / dt), "r not supported in CubaLIF"
cur_decay = dt / node.tau_syn
vol_decay = dt / node.tau_mem
w_scale = node.w_in * (dt / node.tau_syn)
vthr = node.v_threshold
- assert np.unique(cur_decay).size == 1, 'CubaLIF cur_decay must be same for all neurons'
- assert np.unique(vol_decay).size == 1, 'CubaLIF vol_decay must be same for all neurons'
- assert np.unique(vthr).size == 1, 'CubaLIF v_thr must be same for all neurons'
+ assert (
+ np.unique(cur_decay).size == 1
+ ), "CubaLIF cur_decay must be same for all neurons"
+ assert (
+ np.unique(vol_decay).size == 1
+ ), "CubaLIF vol_decay must be same for all neurons"
+ assert np.unique(vthr).size == 1, "CubaLIF v_thr must be same for all neurons"
n_neurons = 7 # HACK: hard-coded
@@ -170,15 +184,17 @@ def _nir_to_lavadl_module(
voltage_decay=np.unique(vol_decay)[0],
shared_param=True,
scale=scale,
- )
+ ),
)
# block.neuron.threshold_eps = 0.0
weight_pre = torch.eye(n_neurons).reshape(block.synapse.weight.shape)
- if not np.allclose(w_scale, 1.):
+ if not np.allclose(w_scale, 1.0):
# TODO: make sure that dims match up
- print(f'[warning] scaling weights according to w_in -> w_scale={w_scale[0]}')
+ print(
+ f"[warning] scaling weights according to w_in -> w_scale={w_scale[0]}"
+ )
weight_pre = weight_pre * w_scale
block.synapse.weight = torch.nn.Parameter(data=weight_pre, requires_grad=True)
return block
@@ -187,21 +203,31 @@ def _nir_to_lavadl_module(
lif_node, wrec_node, lif_size = _parse_rnn_subgraph(node)
if isinstance(lif_node, nir.LIF):
- raise NotImplementedError('LIF in subgraph not supported')
+ raise NotImplementedError("LIF in subgraph not supported")
elif isinstance(lif_node, nir.CubaLIF):
# bias = lif_node.v_leak * dt / lif_node.tau_mem
- assert np.allclose(lif_node.v_leak, 0), 'v_leak not supported' # not yet in lava-dl?
- assert np.allclose(lif_node.r, lif_node.tau_mem / dt), 'r not supported in CubaLIF'
+ assert np.allclose(
+ lif_node.v_leak, 0
+ ), "v_leak not supported" # not yet in lava-dl?
+ assert np.allclose(
+ lif_node.r, lif_node.tau_mem / dt
+ ), "r not supported in CubaLIF"
cur_decay = dt / lif_node.tau_syn
vol_decay = dt / lif_node.tau_mem
w_scale = lif_node.w_in * (dt / lif_node.tau_syn)
vthr = lif_node.v_threshold
- assert np.unique(cur_decay).size == 1, 'CubaLIF cur_decay must be same for all neurons'
- assert np.unique(vol_decay).size == 1, 'CubaLIF vol_decay must be same for all neurons'
- assert np.unique(vthr).size == 1, 'CubaLIF v_thr must be same for all neurons'
+ assert (
+ np.unique(cur_decay).size == 1
+ ), "CubaLIF cur_decay must be same for all neurons"
+ assert (
+ np.unique(vol_decay).size == 1
+ ), "CubaLIF vol_decay must be same for all neurons"
+ assert (
+ np.unique(vthr).size == 1
+ ), "CubaLIF v_thr must be same for all neurons"
rnn_block = slayer.block.cuba.Recurrent(
in_neurons=lif_size,
@@ -216,33 +242,39 @@ def _nir_to_lavadl_module(
voltage_decay=np.unique(vol_decay)[0],
shared_param=True,
scale=scale,
- )
+ ),
)
# rnn_block.neuron.threshold_eps = 0.0
w_pre = torch.eye(lif_size).reshape(rnn_block.input_synapse.weight.shape)
- if not np.allclose(w_scale, 1.):
+ if not np.allclose(w_scale, 1.0):
# TODO: make sure that dims match up
- print(f'[warning] scaling pre weights for w_in -> w_scale={w_scale[0]}')
+ print(f"[warning] scaling pre weights for w_in -> w_scale={w_scale[0]}")
w_pre = w_pre * w_scale
- rnn_block.input_synapse.weight = torch.nn.Parameter(data=w_pre, requires_grad=True)
+ rnn_block.input_synapse.weight = torch.nn.Parameter(
+ data=w_pre, requires_grad=True
+ )
wrec_shape = rnn_block.recurrent_synapse.weight.shape
wrec = torch.from_numpy(wrec_node.weight).reshape(wrec_shape)
- rnn_block.recurrent_synapse.weight = torch.nn.Parameter(data=wrec, requires_grad=True)
+ rnn_block.recurrent_synapse.weight = torch.nn.Parameter(
+ data=wrec, requires_grad=True
+ )
if isinstance(wrec_node, nir.Affine) and wrec_node.bias is not None:
bias = torch.from_numpy(wrec_node.bias).reshape((lif_size))
- rnn_block.recurrent_synapse.bias = torch.nn.Parameter(data=bias, requires_grad=True)
+ rnn_block.recurrent_synapse.bias = torch.nn.Parameter(
+ data=bias, requires_grad=True
+ )
return rnn_block
elif isinstance(node, nir.LIF):
- raise NotImplementedError('not implemented for lava-dl yet')
+ raise NotImplementedError("not implemented for lava-dl yet")
else:
- print('[WARNING] could not parse node of type:', node.__class__.__name__)
+ print("[WARNING] could not parse node of type:", node.__class__.__name__)
return None
@@ -254,8 +286,8 @@ def from_nir(graph: nir.NIRGraph) -> torch.nn.Module:
return nirtorch.load(graph, _nir_to_lavadl_module)
-if __name__ == '__main__':
- nirgraph = nir.read('braille_retrained_zero.nir')
+if __name__ == "__main__":
+ nirgraph = nir.read("braille_retrained_zero.nir")
net = from_nir(nirgraph)
test_data_path = "data/ds_test.pt"
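
Returning to the "sharp edges" note at the top of this file: lava-dl CUBA blocks carry current and voltage state across calls, so evaluation loops must zero it by hand. A minimal helper capturing the reset pattern used in lava_inference.py (attribute paths follow that usage; the block list is whatever the network exposes):

import torch

def reset_cuba_state(blocks):
    """Zero the current/voltage state of lava-dl CUBA blocks, in place."""
    for block in blocks:
        block.neuron.current_state = torch.Tensor([0])
        block.neuron.voltage_state = torch.Tensor([0])

Calling this before every forward pass mirrors the manual resets above and keeps repeated evaluations independent of each other.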
diff --git a/paper/03_rnn/nir_to_nengo.py b/paper/03_rnn/nir_to_nengo.py
index af80005..9bcc954 100644
--- a/paper/03_rnn/nir_to_nengo.py
+++ b/paper/03_rnn/nir_to_nengo.py
@@ -20,8 +20,11 @@ def nir_to_nengo(n, dt=0.001):
filters = {}
for name, obj in n.nodes.items():
if isinstance(obj, nir.Input):
- node = nengo.Node(None, size_in=np.product(obj.input_type['input']),
- label=f"Input {name} {obj.input_type['input']}")
+ node = nengo.Node(
+ None,
+ size_in=np.product(obj.input_type["input"]),
+ label=f"Input {name} {obj.input_type['input']}",
+ )
nengo_map[name] = node
pre_map[name] = node
elif isinstance(obj, nir.LIF):
@@ -32,9 +35,12 @@ def nir_to_nengo(n, dt=0.001):
n_neurons=N,
dimensions=1,
label=f"LIF {name}",
- neuron_type=nengo.LIF(tau_rc=obj.tau[0], tau_ref=0,
- initial_state={"voltage": nengo.dists.Choice([0])}),
- gain=np.ones(N)/obj.v_threshold,
+ neuron_type=nengo.LIF(
+ tau_rc=obj.tau[0],
+ tau_ref=0,
+ initial_state={"voltage": nengo.dists.Choice([0])},
+ ),
+ gain=np.ones(N) / obj.v_threshold,
bias=np.zeros(N),
)
nengo_map[name] = ens.neurons
@@ -47,29 +53,31 @@ def nir_to_nengo(n, dt=0.001):
assert np.all(obj.v_threshold == obj.v_threshold[0])
assert np.all(obj.tau_syn == obj.tau_syn[0])
assert np.all(obj.tau_mem == obj.tau_mem[0])
-
+
tau_mem = obj.tau_mem[0]
- #tau_mem = -dt/np.log(1-dt/tau_mem)
+ # tau_mem = -dt/np.log(1-dt/tau_mem)
tau_syn = obj.tau_syn[0]
- tau_syn = -dt/np.log(1-dt/tau_syn)
-
+ tau_syn = -dt / np.log(1 - dt / tau_syn)
+
N = obj.tau_mem.flatten().shape[0]
ens = nengo.Ensemble(
n_neurons=N,
dimensions=1,
label=f"CubaLIF {name}",
neuron_type=nengo.LIF(
- #neuron_type=nengo.RegularSpiking(nengo.LIFRate(
- tau_ref=0,tau_rc=tau_mem,
- amplitude=dt,
- initial_state={"voltage": nengo.dists.Choice([0])}),
- gain=obj.w_in*R*np.ones(N)/obj.v_threshold,
+ # neuron_type=nengo.RegularSpiking(nengo.LIFRate(
+ tau_ref=0,
+ tau_rc=tau_mem,
+ amplitude=dt,
+ initial_state={"voltage": nengo.dists.Choice([0])},
+ ),
+ gain=obj.w_in * R * np.ones(N) / obj.v_threshold,
bias=np.zeros(N),
)
nengo_map[name] = ens.neurons
pre_map[name] = ens.neurons
post_map[name] = ens.neurons
- filters[ens.neurons] = nengo.synapses.Lowpass(tau_syn)
+ filters[ens.neurons] = nengo.synapses.Lowpass(tau_syn)
elif isinstance(obj, nir.IF):
assert np.all(obj.r == 1)
N = obj.r.flatten().shape[0]
@@ -77,9 +85,10 @@ def nir_to_nengo(n, dt=0.001):
n_neurons=N,
dimensions=1,
label=f"IF {name}",
- neuron_type=IF(initial_state={"voltage": nengo.dists.Choice([0])},
- amplitude=dt),
- gain=np.ones(N)/obj.v_threshold.flatten()/dt,
+ neuron_type=IF(
+ initial_state={"voltage": nengo.dists.Choice([0])}, amplitude=dt
+ ),
+ gain=np.ones(N) / obj.v_threshold.flatten() / dt,
bias=np.zeros(N),
)
nengo_map[name] = ens.neurons
@@ -116,26 +125,29 @@ def nir_to_nengo(n, dt=0.001):
pre_map[name] = w
post_map[name] = w
elif isinstance(obj, nir.Output):
- nengo_map[name] = nengo.Node(None, size_in=np.product(obj.output_type['output']),
- label=f"Output {name} {obj.input_type['input']}")
+ nengo_map[name] = nengo.Node(
+ None,
+ size_in=np.product(obj.output_type["output"]),
+ label=f"Output {name} {obj.input_type['input']}",
+ )
post_map[name] = nengo_map[name]
elif isinstance(obj, nir.Flatten):
- if name == '5':
- size_in = 16*8*8
- elif name == '8':
+ if name == "5":
+ size_in = 16 * 8 * 8
+ elif name == "8":
size_in = 128
else:
- 1/0
- node = nengo.Node(None, size_in=size_in, label=f'Flatten {name}')
+ 1 / 0
+ node = nengo.Node(None, size_in=size_in, label=f"Flatten {name}")
nengo_map[name] = node
pre_map[name] = node
post_map[name] = node
elif isinstance(obj, nir.Conv2d):
- conv = nengo.Network(label=f'Conv2d {name}')
+ conv = nengo.Network(label=f"Conv2d {name}")
- if name == '0':
+ if name == "0":
input_shape = (2, 34, 34)
- elif name == '2':
+ elif name == "2":
input_shape = (16, 16, 16)
else:
input_shape = (16, 8, 8)
@@ -146,69 +158,81 @@ def nir_to_nengo(n, dt=0.001):
[
(0, 0),
(obj.padding[0], obj.padding[0]),
- (obj.padding[1], obj.padding[1])
+ (obj.padding[1], obj.padding[1]),
],
- 'constant',
- constant_values=0)
+ "constant",
+ constant_values=0,
+ )
with conv:
ww = np.transpose(obj.weight, (2, 3, 1, 0))
- c = nengo.Convolution(n_filters=obj.weight.shape[0],
- input_shape=pad.shape,
- channels_last=False,
- init=ww,
- strides=obj.stride,
- padding='valid',
- kernel_size=(obj.weight.shape[2], obj.weight.shape[3])
- )
- conv.input = nengo.Node(None, size_in=np.product(input_shape),
- label=f'{name}.in')
+ c = nengo.Convolution(
+ n_filters=obj.weight.shape[0],
+ input_shape=pad.shape,
+ channels_last=False,
+ init=ww,
+ strides=obj.stride,
+ padding="valid",
+ kernel_size=(obj.weight.shape[2], obj.weight.shape[3]),
+ )
+ conv.input = nengo.Node(
+ None, size_in=np.product(input_shape), label=f"{name}.in"
+ )
conv.pad = nengo.Node(None, size_in=np.product(pad.shape))
- nengo.Connection(conv.input, conv.pad[np.where(pad.flatten() > 0)[0]],
- synapse=None)
- conv.output = nengo.Node(None, size_in=c.size_out, label=f'{name}.out')
+ nengo.Connection(
+ conv.input,
+ conv.pad[np.where(pad.flatten() > 0)[0]],
+ synapse=None,
+ )
+ conv.output = nengo.Node(
+ None, size_in=c.size_out, label=f"{name}.out"
+ )
- nengo.Connection(conv.pad, conv.output, synapse=None,
- transform=c)
+ nengo.Connection(conv.pad, conv.output, synapse=None, transform=c)
nengo_map[name] = conv
pre_map[name] = conv.output
post_map[name] = conv.input
elif isinstance(obj, nir.SumPool2d):
- pool = nengo.Network(label=f'SumPool2d {name}')
+ pool = nengo.Network(label=f"SumPool2d {name}")
with pool:
- if name == '4':
+ if name == "4":
input_shape = (16, 16, 16)
- elif name == '7':
+ elif name == "7":
input_shape = (8, 8, 8)
else:
- 1/0
+ 1 / 0
n_filters = input_shape[0]
pool_size = tuple(obj.kernel_size)
n_pool = np.product(pool_size)
kernel = np.reshape(
- [np.eye(n_filters)] * n_pool, pool_size + (n_filters, n_filters)
+ [np.eye(n_filters)] * n_pool, pool_size + (n_filters, n_filters)
)
- c = nengo.Convolution(n_filters=input_shape[0],
- input_shape=input_shape,
- channels_last=False,
- init=kernel,
- strides=obj.stride,
- padding='valid',
- kernel_size=pool_size,
- )
- pool.input = nengo.Node(None, size_in=np.product(input_shape),
- label=f'SumPool2d {name}.in')
- pool.output = nengo.Node(None, size_in=c.size_out,
- label=f'SumPool2d {name}.out')
+ c = nengo.Convolution(
+ n_filters=input_shape[0],
+ input_shape=input_shape,
+ channels_last=False,
+ init=kernel,
+ strides=obj.stride,
+ padding="valid",
+ kernel_size=pool_size,
+ )
+ pool.input = nengo.Node(
+ None,
+ size_in=np.product(input_shape),
+ label=f"SumPool2d {name}.in",
+ )
+ pool.output = nengo.Node(
+ None, size_in=c.size_out, label=f"SumPool2d {name}.out"
+ )
nengo.Connection(pool.input, pool.output, synapse=None, transform=c)
nengo_map[name] = pool
pre_map[name] = pool.output
post_map[name] = pool.input
else:
- raise Exception(f'Unknown object: {obj}')
+ raise Exception(f"Unknown object: {obj}")
for pre, post in n.edges:
synapse = filters.get(nengo_map[post], None)
@@ -216,7 +240,7 @@ def nir_to_nengo(n, dt=0.001):
print("Error")
print("pre", pre, pre_map[pre], pre_map[pre].size_out)
print("post", post, post_map[post], post_map[post].size_in)
- 1/0
+ 1 / 0
else:
nengo.Connection(pre_map[pre], post_map[post], synapse=synapse)
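
One detail worth spelling out from the LIF branch above: Nengo's LIF fires at a normalized threshold of 1, so a per-neuron NIR threshold is absorbed by dividing the input gain by v_threshold. A stripped-down sketch of that mapping (threshold values and time constants invented):

import numpy as np
import nengo

v_threshold = np.full(5, 0.5)  # per-neuron NIR thresholds (invented)
N = v_threshold.size

with nengo.Network():
    ens = nengo.Ensemble(
        n_neurons=N,
        dimensions=1,
        neuron_type=nengo.LIF(tau_rc=0.02, tau_ref=0),
        gain=np.ones(N) / v_threshold,  # fold the threshold into the gain
        bias=np.zeros(N),
    )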
diff --git a/paper/03_rnn/snntorch_debug.py b/paper/03_rnn/snntorch_debug.py
index 7541ed5..5cd94ac 100644
--- a/paper/03_rnn/snntorch_debug.py
+++ b/paper/03_rnn/snntorch_debug.py
@@ -8,6 +8,7 @@
import torch.nn as nn
from torch.utils.data import DataLoader
import nir
+
# NOTE: this requires snntorch/nir (PR) and nirtorch/master (unreleased)
from snntorch import import_nirtorch, export_nirtorch
import nirtorch
@@ -40,56 +41,56 @@
def check_parameters(net, net2) -> bool:
ok = True
- w1 = net._modules['fc1']._parameters['weight']
- w2 = net2._modules['fc1']._parameters['weight']
+ w1 = net._modules["fc1"]._parameters["weight"]
+ w2 = net2._modules["fc1"]._parameters["weight"]
if not torch.allclose(w1, w2):
- print(f'input weights: {torch.allclose(w1, w2)}')
+ print(f"input weights: {torch.allclose(w1, w2)}")
ok = False
- b1 = net._modules['fc1']._parameters['bias']
- b2 = net2._modules['fc1']._parameters['bias']
+ b1 = net._modules["fc1"]._parameters["bias"]
+ b2 = net2._modules["fc1"]._parameters["bias"]
if not torch.allclose(b1, b2):
- print(f'input bias: {torch.allclose(b1, b2)}')
+ print(f"input bias: {torch.allclose(b1, b2)}")
ok = False
- w1 = net._modules['fc2']._parameters['weight']
- w2 = net2._modules['fc2']._parameters['weight']
+ w1 = net._modules["fc2"]._parameters["weight"]
+ w2 = net2._modules["fc2"]._parameters["weight"]
if not torch.allclose(w1, w2):
- print(f'output weights: {torch.allclose(w1, w2)}')
+ print(f"output weights: {torch.allclose(w1, w2)}")
ok = False
- b1 = net._modules['fc2']._parameters['bias']
- b2 = net2._modules['fc2']._parameters['bias']
+ b1 = net._modules["fc2"]._parameters["bias"]
+ b2 = net2._modules["fc2"]._parameters["bias"]
if not torch.allclose(b1, b2):
- print(f'output bias: {torch.allclose(b1, b2)}')
+ print(f"output bias: {torch.allclose(b1, b2)}")
ok = False
- w1 = net._modules['lif1'].recurrent._parameters['weight']
- w2 = net2._modules['lif1'].recurrent._parameters['weight']
+ w1 = net._modules["lif1"].recurrent._parameters["weight"]
+ w2 = net2._modules["lif1"].recurrent._parameters["weight"]
if not torch.allclose(w1, w2):
- print(f'recurrent weights: {torch.allclose(w1, w2)}')
+ print(f"recurrent weights: {torch.allclose(w1, w2)}")
ok = False
- b1 = net._modules['lif1'].recurrent._parameters['bias']
- b2 = net2._modules['lif1'].recurrent._parameters['bias']
+ b1 = net._modules["lif1"].recurrent._parameters["bias"]
+ b2 = net2._modules["lif1"].recurrent._parameters["bias"]
if not torch.allclose(b1, b2):
- print(f'recurrent bias: {torch.allclose(b1, b2)}')
+ print(f"recurrent bias: {torch.allclose(b1, b2)}")
ok = False
- alpha1 = net._modules['lif1'].alpha
- alpha2 = net2._modules['lif1'].alpha
+ alpha1 = net._modules["lif1"].alpha
+ alpha2 = net2._modules["lif1"].alpha
if not torch.allclose(alpha1, alpha2):
- print(f'lif1 alpha: {torch.allclose(alpha1, alpha2)}')
+ print(f"lif1 alpha: {torch.allclose(alpha1, alpha2)}")
ok = False
- beta1 = net._modules['lif1'].beta
- beta2 = net2._modules['lif1'].beta
+ beta1 = net._modules["lif1"].beta
+ beta2 = net2._modules["lif1"].beta
if not torch.allclose(beta1, beta2):
- print(f'lif1 beta: {torch.allclose(beta1, beta2)}')
+ print(f"lif1 beta: {torch.allclose(beta1, beta2)}")
ok = False
- alpha1 = net._modules['lif2'].alpha
- alpha2 = net2._modules['lif2'].alpha
+ alpha1 = net._modules["lif2"].alpha
+ alpha2 = net2._modules["lif2"].alpha
if not torch.allclose(alpha1, alpha2):
- print(f'lif2 alpha: {torch.allclose(alpha1, alpha2)}')
+ print(f"lif2 alpha: {torch.allclose(alpha1, alpha2)}")
ok = False
- beta1 = net._modules['lif2'].beta
- beta2 = net2._modules['lif2'].beta
+ beta1 = net._modules["lif2"].beta
+ beta2 = net2._modules["lif2"].beta
if not torch.allclose(beta1, beta2):
- print(f'lif2 beta: {torch.allclose(beta1, beta2)}')
+ print(f"lif2 beta: {torch.allclose(beta1, beta2)}")
ok = False
return ok
@@ -173,7 +174,9 @@ def val_test_loop(
act_total_out = torch.sum(spk_out, 0) # sum over time
_, neuron_max_act_total_out = torch.max(act_total_out, 1)
- batch_acc.extend((neuron_max_act_total_out == labels).detach().cpu().numpy())
+ batch_acc.extend(
+ (neuron_max_act_total_out == labels).detach().cpu().numpy()
+ )
return [np.mean(batch_loss), np.mean(batch_acc)]
@@ -181,7 +184,9 @@ def val_test_loop(
def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, device, shuffle=True):
with torch.no_grad():
net.eval()
- loader = DataLoader(dataset, batch_size=batch_size, shuffle=shuffle, drop_last=False)
+ loader = DataLoader(
+ dataset, batch_size=batch_size, shuffle=shuffle, drop_last=False
+ )
batch_loss = []
batch_acc = []
@@ -192,8 +197,10 @@ def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, device, shuffle=Tr
h_state = nirtorch.from_nir.GraphExecutorState(
state={
- 'lif1': net._modules['lif1'].init_rsynaptic(), # 3-tuple: spk, syn, mem
- 'lif2': net._modules['lif2'].init_synaptic(), # 2-tuple: syn, mem
+ "lif1": net._modules[
+ "lif1"
+ ].init_rsynaptic(), # 3-tuple: spk, syn, mem
+ "lif2": net._modules["lif2"].init_synaptic(), # 2-tuple: syn, mem
}
)
@@ -208,7 +215,9 @@ def val_test_loop_nirtorch(dataset, batch_size, net, loss_fn, device, shuffle=Tr
act_total_out = torch.sum(spk_out, 0) # sum over time
_, neuron_max_act_total_out = torch.max(act_total_out, 1)
- batch_acc.extend((neuron_max_act_total_out == labels).detach().cpu().numpy())
+ batch_acc.extend(
+ (neuron_max_act_total_out == labels).detach().cpu().numpy()
+ )
return [np.mean(batch_loss), np.mean(batch_acc)]
@@ -218,18 +227,18 @@ def __init__(self, nir_graph: nir.NIRGraph):
super().__init__()
self.graph = nir_graph
- node = nir_graph.nodes['fc1']
+ node = nir_graph.nodes["fc1"]
self.fc1 = torch.nn.Linear(node.weight.shape[1], node.weight.shape[0])
self.fc1.weight.data = torch.Tensor(node.weight)
self.fc1.bias.data = torch.Tensor(node.bias)
- nodelif = nir_graph.nodes['lif1.lif']
- nodewrec = nir_graph.nodes['lif1.w_rec']
+ nodelif = nir_graph.nodes["lif1.lif"]
+ nodewrec = nir_graph.nodes["lif1.w_rec"]
self.lif1 = snn.RSynaptic(
alpha=float(np.unique(1 - (1e-4 / nodelif.tau_syn))[0]),
beta=float(np.unique(1 - (1e-4 / nodelif.tau_mem))[0]),
threshold=float(np.unique(nodelif.v_threshold)[0]),
- reset_mechanism='zero',
+ reset_mechanism="zero",
all_to_all=True,
linear_features=nodewrec.weight.shape[0],
init_hidden=False,
@@ -237,17 +246,17 @@ def __init__(self, nir_graph: nir.NIRGraph):
self.lif1.recurrent.weight.data = torch.Tensor(nodewrec.weight)
self.lif1.recurrent.bias.data = torch.Tensor(nodewrec.bias)
- node = nir_graph.nodes['fc2']
+ node = nir_graph.nodes["fc2"]
self.fc2 = torch.nn.Linear(node.weight.shape[1], node.weight.shape[0])
self.fc2.weight.data = torch.Tensor(node.weight)
self.fc2.bias.data = torch.Tensor(node.bias)
- node = nir_graph.nodes['lif2']
+ node = nir_graph.nodes["lif2"]
self.lif2 = snn.Synaptic(
alpha=float(np.unique(1 - (1e-4 / node.tau_syn))[0]),
beta=float(np.unique(1 - (1e-4 / node.tau_mem))[0]),
threshold=float(np.unique(node.v_threshold)[0]),
- reset_mechanism='zero',
+ reset_mechanism="zero",
init_hidden=False,
)
@@ -272,7 +281,7 @@ def forward(self, x):
# build initial network
###########################
-print('\nload snnTorch module from checkpoint\n')
+print("\nload snnTorch module from checkpoint\n")
batch_size = 4
input_size = 12
@@ -280,14 +289,21 @@ def forward(self, x):
num_steps = next(iter(ds_test))[0].shape[0]
net = model_build(parameters, input_size, num_steps, device)
-test_results = val_test_loop(ds_test, batch_size, net, loss_fn, device,
- shuffle=SHUFFLE, saved_state_dict=best_val_layers)
+test_results = val_test_loop(
+ ds_test,
+ batch_size,
+ net,
+ loss_fn,
+ device,
+ shuffle=SHUFFLE,
+ saved_state_dict=best_val_layers,
+)
print("test accuracy: {}%".format(np.round(test_results[1] * 100, 2)))
# export to NIR
###########################
-print('\nexport to NIR graph\n')
+print("\nexport to NIR graph\n")
nir_graph = export_nirtorch.to_nir(net, ds_test[0][0], ignore_dims=[0])
nir.write("braille_v2.nir", nir_graph)
@@ -299,33 +315,35 @@ def forward(self, x):
# import from NIR - using nirtorch
###########################
-print('\nimport NIR graph (using nirtorch)\n')
+print("\nimport NIR graph (using nirtorch)\n")
-nir_graph2 = nir.read('braille_v2.nir')
+nir_graph2 = nir.read("braille_v2.nir")
assert sorted(nir_graph2.nodes.keys()) == sorted(nir_graph.nodes.keys())
assert sorted(nir_graph2.edges) == sorted(nir_graph.edges)
for k in nir_graph.nodes:
- assert nir_graph2.nodes[k].__class__.__name__ == nir_graph.nodes[k].__class__.__name__
+ assert (
+ nir_graph2.nodes[k].__class__.__name__ == nir_graph.nodes[k].__class__.__name__
+ )
for k2 in nir_graph.nodes[k].__dict__.keys():
a = nir_graph.nodes[k].__dict__[k2]
b = nir_graph2.nodes[k].__dict__[k2]
if isinstance(a, np.ndarray):
if not np.allclose(a, b):
- print('not close:', k, k2)
+ print("not close:", k, k2)
elif isinstance(a, dict):
for k3 in a:
if not np.allclose(a[k3], b[k3]):
- print('not close:', k, k2, k3)
+ print("not close:", k, k2, k3)
else:
- print('unknown type:', type(a), k, k2)
+ print("unknown type:", type(a), k, k2)
net2 = import_nirtorch.from_nir(nir_graph)
if check_parameters(net, net2):
- print('parameters match!')
+ print("parameters match!")
else:
- print('parameters do not match!')
+ print("parameters do not match!")
# forward pass through all networks in parallel
###########################
@@ -337,13 +355,13 @@ def forward(self, x):
spk1_0, syn1_0, mem1_0 = net0.lif1.init_rsynaptic()
syn2_0, mem2_0 = net0.lif2.init_synaptic()
# reset network 1 states
-spk1, syn1, mem1 = net._modules['lif1'].init_rsynaptic()
-syn2, mem2 = net._modules['lif2'].init_synaptic()
+spk1, syn1, mem1 = net._modules["lif1"].init_rsynaptic()
+syn2, mem2 = net._modules["lif2"].init_synaptic()
# reset network 2 states -- init_hidden=False
h2_state = nirtorch.from_nir.GraphExecutorState(
state={
- 'lif1': net2._modules['lif1'].init_rsynaptic(), # 3-tuple: spk, syn, mem
- 'lif2': net2._modules['lif2'].init_synaptic(), # 2-tuple: syn, mem
+ "lif1": net2._modules["lif1"].init_rsynaptic(), # 3-tuple: spk, syn, mem
+ "lif2": net2._modules["lif2"].init_synaptic(), # 2-tuple: syn, mem
}
)
@@ -360,76 +378,109 @@ def forward(self, x):
spk2_0, syn2_0, mem2_0 = net0.lif2(cur2_0, syn2_0, mem2_0)
sout0_arr.append(spk2_0)
# forward pass through network 1
- cur1 = net._modules['fc1'](x)
- spk1, syn1, mem1 = net._modules['lif1'](cur1, spk1, syn1, mem1)
- cur2 = net._modules['fc2'](spk1)
- spk2, syn2, mem2 = net._modules['lif2'](cur2, syn2, mem2)
+ cur1 = net._modules["fc1"](x)
+ spk1, syn1, mem1 = net._modules["lif1"](cur1, spk1, syn1, mem1)
+ cur2 = net._modules["fc2"](spk1)
+ spk2, syn2, mem2 = net._modules["lif2"](cur2, syn2, mem2)
sout1_arr.append(spk2)
# forward pass through network 2
spk_out, h2_state = net2(x, h2_state)
sout2_arr.append(spk_out)
- comparison = np.array([
- [
- cur1.sum().item(), syn1.sum().item(), mem1.sum().item(), spk1.sum().item(),
- cur2.sum().item(), syn2.sum().item(), mem2.sum().item(), spk2.sum().item(),
- ],
- [
- cur1_0.sum().item(), syn1_0.sum().item(), mem1_0.sum().item(), spk1_0.sum().item(),
- cur2_0.sum().item(), syn2_0.sum().item(), mem2_0.sum().item(), spk2_0.sum().item(),
- ],
+ comparison = np.array(
[
- h2_state.cache['fc1'].sum().item(), h2_state.state['lif1'][1].sum().item(),
- h2_state.state['lif1'][2].sum().item(), h2_state.cache['lif1'].sum().item(),
- h2_state.cache['fc2'].sum().item(), h2_state.state['lif2'][0].sum().item(),
- h2_state.state['lif2'][1].sum().item(), h2_state.cache['lif2'].sum().item(),
+ [
+ cur1.sum().item(),
+ syn1.sum().item(),
+ mem1.sum().item(),
+ spk1.sum().item(),
+ cur2.sum().item(),
+ syn2.sum().item(),
+ mem2.sum().item(),
+ spk2.sum().item(),
+ ],
+ [
+ cur1_0.sum().item(),
+ syn1_0.sum().item(),
+ mem1_0.sum().item(),
+ spk1_0.sum().item(),
+ cur2_0.sum().item(),
+ syn2_0.sum().item(),
+ mem2_0.sum().item(),
+ spk2_0.sum().item(),
+ ],
+ [
+ h2_state.cache["fc1"].sum().item(),
+ h2_state.state["lif1"][1].sum().item(),
+ h2_state.state["lif1"][2].sum().item(),
+ h2_state.cache["lif1"].sum().item(),
+ h2_state.cache["fc2"].sum().item(),
+ h2_state.state["lif2"][0].sum().item(),
+ h2_state.state["lif2"][1].sum().item(),
+ h2_state.cache["lif2"].sum().item(),
+ ],
]
- ])
-
- if not torch.equal(h2_state.cache['fc1'], cur1):
- print(tstep, 'fc1', h2_state.cache['fc1'].sum().item(), cur1.sum().item())
- if not torch.equal(h2_state.cache['lif1'], spk1):
- print(tstep, 'lif1', h2_state.cache['lif1'].sum().item(), spk1.sum().item())
- if not torch.equal(h2_state.cache['fc2'], cur2):
- print(tstep, 'fc2', h2_state.cache['fc2'].sum().item(), cur2.sum().item())
+ )
+
+ if not torch.equal(h2_state.cache["fc1"], cur1):
+ print(tstep, "fc1", h2_state.cache["fc1"].sum().item(), cur1.sum().item())
+ if not torch.equal(h2_state.cache["lif1"], spk1):
+ print(tstep, "lif1", h2_state.cache["lif1"].sum().item(), spk1.sum().item())
+ if not torch.equal(h2_state.cache["fc2"], cur2):
+ print(tstep, "fc2", h2_state.cache["fc2"].sum().item(), cur2.sum().item())
if not torch.equal(spk_out, spk2):
- print(tstep, 'lif2', spk_out.sum().item(), spk2.sum().item())
+ print(tstep, "lif2", spk_out.sum().item(), spk2.sum().item())
# print(tstep)
# print(comparison)
-print('\ntest the re-imported snnTorch network (using nirtorch)\n')
-test_results = val_test_loop_nirtorch(ds_test, batch_size, net2, loss_fn, device, shuffle=SHUFFLE)
+print("\ntest the re-imported snnTorch network (using nirtorch)\n")
+test_results = val_test_loop_nirtorch(
+ ds_test, batch_size, net2, loss_fn, device, shuffle=SHUFFLE
+)
print("test accuracy: {}%".format(np.round(test_results[1] * 100, 2)))
# back to NIR and test
###########################
net2 = import_nirtorch.from_nir(nir_graph) # reset the network
-print('\nexporting back to NIR\n')
+print("\nexporting back to NIR\n")
# HACK: initialize hidden state and pass it to the graph executor
model_fwd_args = [
nirtorch.from_nir.GraphExecutorState(
state={
- 'lif1': net2._modules['lif1'].init_rsynaptic(), # 3-tuple: spk, syn, mem
- 'lif2': net2._modules['lif2'].init_synaptic(), # 2-tuple: syn, mem
+ "lif1": net2._modules["lif1"].init_rsynaptic(), # 3-tuple: spk, syn, mem
+ "lif2": net2._modules["lif2"].init_synaptic(), # 2-tuple: syn, mem
}
)
]
-nir_graph2 = export_nirtorch.to_nir(net2, ds_test[0][0], model_fwd_args=model_fwd_args,
- ignore_dims=[0])
+nir_graph2 = export_nirtorch.to_nir(
+ net2, ds_test[0][0], model_fwd_args=model_fwd_args, ignore_dims=[0]
+)
nir_graph2.infer_types()
nir.write("braille_v2a.nir", nir_graph2)
-nir_graph = export_nirtorch.to_nir(net, ds_test[0][0], ignore_dims=[0]) # must reload (modified)
+nir_graph = export_nirtorch.to_nir(
+ net, ds_test[0][0], ignore_dims=[0]
+) # must reload (modified)
-assert nir_graph.nodes.keys() == nir_graph2.nodes.keys(), 'node keys mismatch'
+assert nir_graph.nodes.keys() == nir_graph2.nodes.keys(), "node keys mismatch"
for nodekey in nir_graph.nodes:
- a = nir_graph.nodes[nodekey].__class__.__name__ if nodekey in nir_graph.nodes else None
- b = nir_graph2.nodes[nodekey].__class__.__name__ if nodekey in nir_graph2.nodes else None
- assert a == b, f'node type mismatch: {a} vs {b}'
+ a = (
+ nir_graph.nodes[nodekey].__class__.__name__
+ if nodekey in nir_graph.nodes
+ else None
+ )
+ b = (
+ nir_graph2.nodes[nodekey].__class__.__name__
+ if nodekey in nir_graph2.nodes
+ else None
+ )
+ assert a == b, f"node type mismatch: {a} vs {b}"
for attr in nir_graph.nodes[nodekey].__dict__:
close = None
if isinstance(nir_graph.nodes[nodekey].__dict__[attr], np.ndarray):
- close = np.allclose(nir_graph.nodes[nodekey].__dict__[attr],
- nir_graph2.nodes[nodekey].__dict__[attr])
- assert close is not False, f'node attribute mismatch: {nodekey}.{attr}'
+ close = np.allclose(
+ nir_graph.nodes[nodekey].__dict__[attr],
+ nir_graph2.nodes[nodekey].__dict__[attr],
+ )
+ assert close is not False, f"node attribute mismatch: {nodekey}.{attr}"
diff --git a/paper/figures/figures.py b/paper/figures/figures.py
index 80466f4..93d3900 100644
--- a/paper/figures/figures.py
+++ b/paper/figures/figures.py
@@ -6,22 +6,26 @@
fig, ax = plt.subplots(figsize=(8, 6), dpi=300)
# Define Software (SW) and Hardware (HW) labels
-sw_labels = [f'SW {i}' for i in range(1, 7)]
-hw_labels = [f'HW {i}' for i in range(1, 6)]
+sw_labels = [f"SW {i}" for i in range(1, 7)]
+hw_labels = [f"HW {i}" for i in range(1, 6)]
# Draw the Software (SW) labels on the left
for i, label in enumerate(sw_labels):
- ax.text(0.1, 0.9 - i * 0.15, label, fontsize=12, ha='center', va='center')
+ ax.text(0.1, 0.9 - i * 0.15, label, fontsize=12, ha="center", va="center")
# Draw the Hardware (HW) labels on the right
for i, label in enumerate(hw_labels):
- ax.text(0.7, 0.9 - i * 0.15 - 0.075, label, fontsize=12, ha='center', va='center')
+ ax.text(0.7, 0.9 - i * 0.15 - 0.075, label, fontsize=12, ha="center", va="center")
# Draw connections between SW and HW without NIR
for i in range(len(sw_labels)):
for j in range(len(hw_labels)):
spacing = 0.05
- ax.plot([0.1+spacing, 0.7-spacing], [0.9 - i * 0.15, 0.9 - j * 0.15 - 0.075], 'k-')
+ ax.plot(
+ [0.1 + spacing, 0.7 - spacing],
+ [0.9 - i * 0.15, 0.9 - j * 0.15 - 0.075],
+ "k-",
+ )
# # Draw 'Without NIR' label at the top
# ax.text(0.4, 1, 'Without NIR', fontsize=14, ha='center', va='center')
@@ -32,10 +36,10 @@
# ax.text(0.4, 0.125-0.1, '30 HW->SW', fontsize=12, ha='center', va='center')
# Set aspect of the plot and hide axes
-ax.set_aspect('equal', 'box')
-ax.axis('off')
+ax.set_aspect("equal", "box")
+ax.axis("off")
plt.tight_layout()
-plt.savefig('compiler_withoutnir.pdf', dpi=300)
+plt.savefig("compiler_withoutnir.pdf", dpi=300)
plt.show()
#########################################################################################
@@ -43,37 +47,46 @@
fig, ax = plt.subplots(figsize=(8, 6), dpi=300)
# Define Software (SW) and Hardware (HW) labels
-sw_labels = [f'SW{i}' for i in range(1, 7)]
-hw_labels = [f'HW{i}' for i in range(1, 6)]
+sw_labels = [f"SW{i}" for i in range(1, 7)]
+hw_labels = [f"HW{i}" for i in range(1, 6)]
# Draw the Software (SW) labels on the left
for i, label in enumerate(sw_labels):
- ax.text(0.1, 0.9 - i * 0.15, label, fontsize=12, ha='center', va='center')
+ ax.text(0.1, 0.9 - i * 0.15, label, fontsize=12, ha="center", va="center")
# Draw the Hardware (HW) labels on the right
for i, label in enumerate(hw_labels):
- ax.text(0.7, 0.9 - i * 0.15 - 0.075, label, fontsize=12, ha='center', va='center')
+ ax.text(0.7, 0.9 - i * 0.15 - 0.075, label, fontsize=12, ha="center", va="center")
# Draw the centered NIR box
-rect_centery = 0.9 - (len(sw_labels) - 1) * 0.15 / 2 # Vertically center with respect to the labels
+rect_centery = (
+ 0.9 - (len(sw_labels) - 1) * 0.15 / 2
+) # Vertically center with respect to the labels
hrect = 0.15
xoff = -0.1
-rect = patches.Rectangle((0.4 + xoff, rect_centery - hrect/2), 0.2, hrect, linewidth=1, edgecolor='w', facecolor='lightgray')
+rect = patches.Rectangle(
+ (0.4 + xoff, rect_centery - hrect / 2),
+ 0.2,
+ hrect,
+ linewidth=1,
+ edgecolor="w",
+ facecolor="lightgray",
+)
ax.add_patch(rect)
-ax.text(0.5 + xoff, rect_centery, 'NIR', fontsize=12, ha='center', va='center')
+ax.text(0.5 + xoff, rect_centery, "NIR", fontsize=12, ha="center", va="center")
# Draw connections between SW and NIR
spacing = 0.05
for i in range(len(sw_labels)):
- ax.plot([0.1 + spacing, 0.4 + xoff], [0.9 - i * 0.15, rect_centery], 'k-')
+ ax.plot([0.1 + spacing, 0.4 + xoff], [0.9 - i * 0.15, rect_centery], "k-")
# Draw connections between HW and NIR
for i in range(len(hw_labels)):
- ax.plot([0.75 + xoff, 0.6 + xoff], [0.9 - i * 0.15 - 0.075, rect_centery], 'k-')
+ ax.plot([0.75 + xoff, 0.6 + xoff], [0.9 - i * 0.15 - 0.075, rect_centery], "k-")
# Set aspect of the plot and hide axes
-ax.set_aspect('equal', 'box')
-ax.axis('off')
+ax.set_aspect("equal", "box")
+ax.axis("off")
plt.tight_layout()
-plt.savefig('compiler_withnir.pdf', dpi=300)
+plt.savefig("compiler_withnir.pdf", dpi=300)
plt.show()
diff --git a/pyproject.toml b/pyproject.toml
index 90a0182..6b12931 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,6 +29,7 @@ classifiers = [
]
dependencies = [ "numpy", "h5py" ]
dynamic = ["version"] # Version number read from __init__.py
+requires-python = ">=3.9"
[project.urls]
homepage = "https://github.com/neuromorphs/nir"
| meta-data in NIR nodes
In our discussions, the idea came up to include metadata in NIR nodes that would help developers implement the nodes on their respective platforms.
As an example, @bvogginger pointed out in #63 that we should not include the reset mechanism in the NIR node definition itself, since the reset mechanism (subtractive, reset-to-zero, etc.) is specific to digital implementations and has no place in the ODE formulation of the node. However, to make sure that NIR graphs commute across platforms, developers need to know which reset mechanism was originally implemented for a given NIR graph. Hence, the idea is to include a "metadata" field in [every | some] NIR nodes that will give such relevant information.
Suggestions for metadata:
- LIF/CubaLIF/IF/Threshold nodes: reset mechanism used (e.g. reset-to-zero, subtractive-reset), output spike types used (e.g. graded or not graded - this also influences the reset mechanism), time step used for discretization (see the sketch below)
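For concreteness, here is a minimal sketch of what such a node-level metadata field could look like. This is an illustration of the proposal, not the current spec: the `metadata` attribute and its keys are assumed names, while the `nir.LIF` constructor parameters follow the Python reference implementation.

```python
import numpy as np
import nir

# A single LIF neuron described purely by its ODE parameters.
lif = nir.LIF(
    tau=np.array([0.01]),         # membrane time constant
    r=np.array([1.0]),            # resistance
    v_leak=np.array([0.0]),       # leak (resting) potential
    v_threshold=np.array([1.0]),  # firing threshold
)

# Hypothetical metadata carrying implementation hints that do not belong
# in the ODE itself (key names here are illustrative suggestions only).
lif.metadata = {
    "reset_mechanism": "reset-to-zero",  # or "subtractive-reset"
    "graded_spikes": False,              # binary vs. graded output spikes
    "dt": 1e-4,                          # time step used for discretization
}
```

A platform importing the graph could then read `lif.metadata` to reproduce the original discrete-time behaviour, while platforms that ignore it still consume a valid ODE description.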
| Thought: A big risk of adding this metadata is libraries becoming dependent on this data. This can quickly go downhill from there. | 2024-03-03T21:59:01 | 0.0 | [] | []
||
Rishit-dagli/TF-Watcher | Rishit-dagli__TF-Watcher-59 | 4a906c0d934afaf7077309bc16643ecb58174f2a | diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml
index 3dc58f9..5358af6 100644
--- a/.github/workflows/linter.yml
+++ b/.github/workflows/linter.yml
@@ -8,6 +8,11 @@ jobs:
runs-on: ubuntu-latest
steps:
+ - name: Free space in hosted agent
+ run: |
+ sudo rm -rf "/usr/local/share/boost"
+ sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+
- name: Checkout Code
uses: actions/checkout@v2
with:
diff --git a/.github/workflows/sphinx-ci.yml b/.github/workflows/sphinx-ci.yml
new file mode 100644
index 0000000..3c230fa
--- /dev/null
+++ b/.github/workflows/sphinx-ci.yml
@@ -0,0 +1,37 @@
+name: Build Docs
+
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+
+ - name: Free space in hosted agent
+ run: |
+ sudo rm -rf "/usr/local/share/boost"
+ sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+
+ - uses: actions/checkout@v2
+
+ - uses: ammaraskar/sphinx-action@master
+ with:
+ docs-folder: "docs/"
+
+ - uses: ammaraskar/sphinx-action@master
+ with:
+ pre-build-command: "apt-get update -y && apt-get install -y latexmk texlive-latex-recommended texlive-latex-extra texlive-fonts-recommended imagemagick"
+ build-command: "make latexpdf"
+ docs-folder: "docs/"
+
+ # Create an artifact of the html output.
+ - uses: actions/upload-artifact@v2
+ with:
+ name: DocumentationHTML
+ path: docs/build/html/
+
+ # Create an artifact out of the previously built pdf.
+ - uses: actions/upload-artifact@v2
+ with:
+ name: Documentation
+ path: docs/build/latex/tfwatcher.pdf
diff --git a/.gitignore b/.gitignore
index 9fe17bc..642c25b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -70,6 +70,7 @@ instance/
# Sphinx documentation
docs/_build/
+docs/source/_build
# PyBuilder
target/
@@ -126,4 +127,5 @@ venv.bak/
dmypy.json
# Pyre type checker
-.pyre/
\ No newline at end of file
+.pyre/
+
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..d0c3cbf
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = source
+BUILDDIR = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644
index 0000000..6247f7e
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=source
+set BUILDDIR=build
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.http://sphinx-doc.org/
+ exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/docs/requirements.txt b/docs/requirements.txt
new file mode 100644
index 0000000..4cda4aa
--- /dev/null
+++ b/docs/requirements.txt
@@ -0,0 +1,3 @@
+pyrebase4
+tensorflow
+sphinx_rtd_theme
\ No newline at end of file
diff --git a/docs/source/_static/tf-watcher.png b/docs/source/_static/tf-watcher.png
new file mode 100644
index 0000000..140b320
Binary files /dev/null and b/docs/source/_static/tf-watcher.png differ
diff --git a/docs/source/conf.py b/docs/source/conf.py
new file mode 100644
index 0000000..c13bd38
--- /dev/null
+++ b/docs/source/conf.py
@@ -0,0 +1,68 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# This file only contains a selection of the most common options. For a full
+# list see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath("../../"))
+
+# -- Project information -----------------------------------------------------
+
+project = "TF Watcher"
+copyright = "2021, Rishit Dagli"
+author = "Rishit Dagli"
+
+# The full version, including alpha/beta/rc tags
+release = "0.1.0"
+
+
+# -- General configuration ---------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.viewcode",
+ "sphinx.ext.coverage",
+ "sphinx.ext.imgconverter",
+]
+
+# Show doc coverage stats
+coverage_show_missing_items = True
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path.
+exclude_patterns = []
+
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = "sphinx_rtd_theme"
+
+html_logo = "_static/tf-watcher.png"
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# -- Options for LaTeX output -------------------------------------------------
+
+latex_elements = {"extraclassoptions": "openany,oneside"}
diff --git a/docs/source/index.rst b/docs/source/index.rst
new file mode 100644
index 0000000..cbab158
--- /dev/null
+++ b/docs/source/index.rst
@@ -0,0 +1,40 @@
+.. TF Watcher documentation master file, created by
+ sphinx-quickstart on Thu Aug 5 10:54:35 2021.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Welcome to TF Watcher's documentation!
+======================================
+
+.. image:: https://img.shields.io/github/stars/Rishit-dagli/TF-Watcher?style=social.svg
+ :target: https://github.com/Rishit-dagli/TF-Watcher
+
+.. image:: https://img.shields.io/twitter/url?style=social&url=https%3A%2F%2Fgithub.com%2FRishit-dagli%2FTF-Watcher.svg
+ :target: https://twitter.com/intent/tweet?text=Wow:&url=https%3A%2F%2Fgithub.com%2FRishit-dagli%2FTF-Watcher
+
+TF Watcher is a simple-to-use Python package and web app which allows you to monitor
+your model training or testing process on mobile devices, built specially for Google
+Colab, Azure ML and Kaggle.
+
+Quick Links
+------------
+
+* Source: `GitHub <https://github.com/Rishit-dagli/TF-Watcher>`_
+* `Web App <TODO>`_
+* `Issue Tracker <https://github.com/Rishit-dagli/TF-Watcher/issues>`_
+* `Mailing List <https://groups.google.com/g/tf-watcher>`_
+
+.. toctree::
+ :maxdepth: 4
+ :caption: Contents:
+
+ introduction
+ installation
+ tfwatcher
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/docs/source/installation.rst b/docs/source/installation.rst
new file mode 100644
index 0000000..290334a
--- /dev/null
+++ b/docs/source/installation.rst
@@ -0,0 +1,24 @@
+Installation
+============
+
+.. image:: https://img.shields.io/pypi/v/tf-watcher
+ :target: https://pypi.org/project/tf-watcher
+
+Run the following to install the package from PyPi:
+
+.. code-block:: bash
+
+ pip install tf-watcher
+
+Developing TF watcher
+---------------------
+
+To install ``tf-watcher``, along with tools you need to develop and test, run the following in your virtualenv:
+
+.. code-block:: bash
+
+ git clone https://github.com/Rishit-dagli/TF-Watcher.git
+ # or clone your own fork
+
+ cd TF-Watcher
+ pip install -e .[dev]
diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst
new file mode 100644
index 0000000..1b48e68
--- /dev/null
+++ b/docs/source/introduction.rst
@@ -0,0 +1,38 @@
+Introduction
+============
+
+TF Watcher is a simple-to-use Python package and web app which allows you to easily
+monitor your model training or testing process on mobile devices. We built this
+especially to support monitoring training or testing in Google Colab, Azure ML and
+Kaggle, though it can be used on pretty much any machine or remote server.
+
+.. seealso::
+
+   Check out this quickstart example which you can run directly on Google Colab to get
+ started with using this package: `Quickstart Example <TODO>`_
+
+To make this easy to use and to merge into your development workflow, we
+make use of
+`TensorFlow's Callbacks <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks>`_
+which allow us to call our code at certain points during model training. This
+package then accumulates the training data and sends it to the Firebase Realtime
+database, allowing you to monitor and share live logs from anywhere through the web app.
+
+Almost all the callbacks we made with this package are easily usable by simply
+specifying them as a ``callback`` while training or testing your model (see the
+documentation for more details):
+
+.. code-block:: python
+
+ import tfwatcher
+
+ monitor_callback = tfwatcher.callbacks.EpochEnd()
+ model.fit(..., callbacks=[monitor_callback])
+
+You can also use this project in your custom training loops; it also works in the same
+way in a non-eager TensorFlow graph (``@tf.function``).
+
+.. seealso::
+
+   Check out all the end-to-end examples of using this package (these can be run on Google
+ Colab): TODO
\ No newline at end of file
diff --git a/docs/source/modules.rst b/docs/source/modules.rst
new file mode 100644
index 0000000..4d061c9
--- /dev/null
+++ b/docs/source/modules.rst
@@ -0,0 +1,7 @@
+tfwatcher
+=========
+
+.. toctree::
+ :maxdepth: 4
+
+ tfwatcher
diff --git a/docs/source/tfwatcher.callbacks.rst b/docs/source/tfwatcher.callbacks.rst
new file mode 100644
index 0000000..66ba053
--- /dev/null
+++ b/docs/source/tfwatcher.callbacks.rst
@@ -0,0 +1,53 @@
+tfwatcher.callbacks package
+===========================
+
+Submodules
+----------
+
+tfwatcher.callbacks.epoch module
+--------------------------------
+
+.. automodule:: tfwatcher.callbacks.epoch
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+tfwatcher.callbacks.predict module
+----------------------------------
+
+.. automodule:: tfwatcher.callbacks.predict
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+tfwatcher.callbacks.predict\_batch module
+-----------------------------------------
+
+.. automodule:: tfwatcher.callbacks.predict_batch
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+tfwatcher.callbacks.test\_batch module
+--------------------------------------
+
+.. automodule:: tfwatcher.callbacks.test_batch
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+tfwatcher.callbacks.train\_batch module
+---------------------------------------
+
+.. automodule:: tfwatcher.callbacks.train_batch
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Module contents
+---------------
+
+.. automodule:: tfwatcher.callbacks
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/source/tfwatcher.rst b/docs/source/tfwatcher.rst
new file mode 100644
index 0000000..96125fb
--- /dev/null
+++ b/docs/source/tfwatcher.rst
@@ -0,0 +1,45 @@
+Documentation
+=================
+
+Subpackages
+-----------
+
+.. toctree::
+ :maxdepth: 4
+
+ tfwatcher.callbacks
+
+Submodules
+----------
+
+tfwatcher.firebase\_config module
+---------------------------------
+
+.. automodule:: tfwatcher.firebase_config
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+tfwatcher.firebase\_helpers module
+----------------------------------
+
+.. automodule:: tfwatcher.firebase_helpers
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+tfwatcher.version module
+------------------------
+
+.. automodule:: tfwatcher.version
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Module contents
+---------------
+
+.. automodule:: tfwatcher
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/setup.py b/setup.py
index 4c3b041..817f48e 100644
--- a/setup.py
+++ b/setup.py
@@ -1,13 +1,31 @@
+import os.path
+
from setuptools import setup
-exec(open("tfwatcher/version.py").read())
-with open("README.md", "r") as fh:
- long_description = fh.read()
+def read(rel_path: str) -> str:
+ here = os.path.abspath(os.path.dirname(__file__))
+ # intentionally *not* adding an encoding option to open
+ with open(os.path.join(here, rel_path)) as fp:
+ return fp.read()
+
+
+def get_version(rel_path: str) -> str:
+ for line in read(rel_path).splitlines():
+ if line.startswith("__version__"):
+ # __version__ = "0.9"
+ delim = '"' if '"' in line else "'"
+ return line.split(delim)[1]
+ raise RuntimeError("Unable to find version string.")
+
+
+this_directory = os.path.abspath(os.path.dirname(__file__))
+with open(os.path.join(this_directory, "README.md"), encoding="utf-8") as f:
+ long_description = f.read()
setup(
name="tf-watcher",
- version=__version__,
+ version=get_version("tfwatcher/version.py"),
description="Monitor your TensorFlow model training on mobile devices, especially for Google Colab",
packages=["tfwatcher"],
long_description=long_description,
@@ -33,9 +51,18 @@
author="Rishit Dagli",
author_email="[email protected]",
install_requires=[
- "tensorflow >= 2.2.0",
+ "tensorflow ~= 2.5.0",
+ "pyrebase4 ~= 4.5.0",
],
extras_require={
- "dev": ["check-manifest", "twine", "numpy", "black"],
+ "dev": [
+ "check-manifest",
+ "twine",
+ "numpy",
+ "black",
+ "isort",
+ "sphinx",
+ "sphinx-rtd-theme",
+ ],
},
)
diff --git a/tfwatcher/__init__.py b/tfwatcher/__init__.py
index 58f3ace..791e03c 100644
--- a/tfwatcher/__init__.py
+++ b/tfwatcher/__init__.py
@@ -1,1 +1,4 @@
+from .callbacks import epoch, predict, predict_batch, test_batch, train_batch
from .version import __version__
+
+__all__ = ["firebase_config", "firebase_helpers", "callbacks"]
diff --git a/tfwatcher/callbacks/__init__.py b/tfwatcher/callbacks/__init__.py
new file mode 100644
index 0000000..c7aa8e4
--- /dev/null
+++ b/tfwatcher/callbacks/__init__.py
@@ -0,0 +1,7 @@
+from .epoch import EpochEnd
+from .predict import PredictEnd
+from .predict_batch import PredictBatchEnd
+from .test_batch import TestBatchEnd
+from .train_batch import TrainBatchEnd
+
+__all__ = ["epoch", "predict", "predict_batch", "test_batch", "train_batch"]
diff --git a/tfwatcher/callbacks/epoch.py b/tfwatcher/callbacks/epoch.py
new file mode 100644
index 0000000..dd92a9b
--- /dev/null
+++ b/tfwatcher/callbacks/epoch.py
@@ -0,0 +1,148 @@
+from statistics import mean
+from typing import Union
+
+import tensorflow as tf
+
+from ..firebase_helpers import random_char, write_in_callback
+
+
+class EpochEnd(tf.keras.callbacks.Callback):
+ """This class is a subclass of the `tf.keras.callbacks.Callback <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback>`_
+ abstract base class and overrides the methods :func:`on_epoch_begin` and :func:`on_epoch_end`
+ allowing logging after epochs in training. This class also uses the
+ :mod:`..firebase_helpers` to send data to Firebase Realtime database and also
+ creates a 7 character unique string where the data is pushed on Firebase. Logging
+ to Firebase is also controllable by ``schedule`` argument, even providing a
+    to Firebase is also controllable by the ``schedule`` argument, even providing
+    granular control for each epoch.
+ Example:
+
+ .. code-block:: python
+ :caption: Logging data after every epoch
+ :emphasize-lines: 4,13
+ :linenos:
+
+ import tfwatcher
+
+ # here we specify schedule = 1 to log after every epoch
+ monitor_callback = tfwatcher.callbacks.EpochEnd(schedule=1)
+
+ model.compile(
+ optimizer=...,
+ loss=...,
+ # metrics which will be logged
+ metrics=[...],
+ )
+
+ model.fit(..., callbacks=[monitor_callback])
+
+
+    :param schedule: Use an integer value n to specify logging data every n epochs,
+ the first one being logged by default. Use a list of integers to control
+ logging with a greater granularity, logs on all epoch numbers specified in
+ the list taking the first epoch as epoch 1. Using a list will override
+        logging on the first epoch by default, defaults to 1
+ :type schedule: Union[int, list[int]], optional
+    :param round_time: This argument allows specifying whether the times shown
+        on the web-app are rounded; in most cases you would not use this, defaults to 2
+ :type round_time: int, optional
+ :param print_logs: This argument should only be used when trying to debug if
+ your logs do not appear in the web-app, if set to ``True`` this would print
+ out the dictionary which is being pushed to Firebase, defaults to False
+ :type print_logs: bool, optional
+    :raises ValueError: If the ``schedule`` is neither an integer nor a list.
+ :raises Exception: If all the values in ``schedule`` list are not convertible
+ to integer.
+ """
+
+ def __init__(
+ self,
+ schedule: Union[int, list] = 1,
+ round_time: int = 2,
+ print_logs: bool = False,
+ ):
+
+ super(EpochEnd, self).__init__()
+ self.schedule = schedule
+ self.start_time = None
+ self.end_time = None
+ self.times = list()
+ self.round_time = round_time
+ self.print_logs = print_logs
+
+ self.ref_id = random_char(7)
+ print(f"Use this ID to monitor training for this session: {self.ref_id}")
+
+ self.is_int = False
+ self.is_list = False
+ if isinstance(self.schedule, int):
+ self.is_int = True
+ elif isinstance(self.schedule, list):
+ self.is_list = True
+ else:
+ raise ValueError("schedule should either be an integer or a list")
+
+ if self.is_list:
+ try:
+ self.schedule = list(map(int, self.schedule))
+ except (ValueError, TypeError) as err:
+ raise Exception(
+ "All elements in the list should be convertible to int: {}".format(
+ err
+ )
+ )
+
+ def on_epoch_begin(self, epoch: int, logs: dict = None):
+ """Overrides the `tf.keras.callbacks.Callback.on_epoch_begin <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback#on_epoch_begin>`_
+ method which is called at the start of an epoch. This function should only be
+ called during TRAIN mode.
+
+ :param epoch: Index of epoch
+ :type epoch: int
+ :param logs: Currently no data is passed to this argument since there are no
+ logs during the start of an epoch, defaults to None
+ :type logs: dict, optional
+ """
+
+ self.start_time = tf.timestamp()
+
+ def on_epoch_end(self, epoch: int, logs: dict = None):
+ """Overrides the `tf.keras.callbacks.Callback.on_epoch_end <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback#on_epoch_end>`_
+ method which is called at the end of an epoch. This function should only be
+ called during TRAIN mode. This method adds the epoch number, the average time
+ taken and pushes it to Firebase using the :mod:`..firebase_helpers` module.
+
+ :param epoch: Index of epoch
+ :type epoch: int
+ :param logs: Metric results for this training epoch, and for the validation
+ epoch if validation is performed. Validation result keys are prefixed with
+ ``val_``. For training epoch, the values of the Model's metrics are
+ returned. Example : ``{'loss': 0.2, 'accuracy': 0.7}``, defaults to None
+ :type logs: dict, optional
+ """
+
+ self.end_time = tf.timestamp()
+
+        # Use Python built-in functions to allow use inside @tf.function; see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ time = float(self.end_time - self.start_time)
+ self.times.append(time)
+
+        # Use short-circuit evaluation of `and`: if the first operand is False,
+        # Python does not evaluate the second operand
+ if (
+ (self.is_int and ((epoch + 1) % self.schedule == 0))
+ or (self.is_list and ((epoch + 1) in self.schedule))
+ ) or (epoch == 0):
+ data = logs
+ data["epoch"] = epoch + 1
+ data["batch"] = False
+ data["avg_time"] = round(mean(self.times), self.round_time)
+
+ write_in_callback(data=data, ref_id=self.ref_id)
+
+ data["time"] = self.times
+ if self.print_logs:
+ print(data)
+
+ self.times = list()
diff --git a/tfwatcher/callbacks/predict.py b/tfwatcher/callbacks/predict.py
new file mode 100644
index 0000000..20491cf
--- /dev/null
+++ b/tfwatcher/callbacks/predict.py
@@ -0,0 +1,87 @@
+import tensorflow as tf
+
+from ..firebase_helpers import random_char, write_in_callback
+
+
+class PredictEnd(tf.keras.callbacks.Callback):
+ """This class is a subclass of the `tf.keras.callbacks.Callback <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback>`_
+ abstract base class and overrides the methods :func:`on_predict_begin` and :func:`on_predict_end`
+ allowing loging after ``predict`` method is run. This class also uses the
+    allowing logging after the ``predict`` method is run. This class also uses the
+ creates a 7 character unique string where the data is pushed on Firebase.
+
+ Example:
+
+ .. code-block:: python
+ :caption: Logging data after predict method
+ :emphasize-lines: 3,12
+ :linenos:
+
+ import tfwatcher
+
+ monitor_callback = tfwatcher.callbacks.PredictEnd()
+
+ model.compile(
+ optimizer=...,
+ loss=...,
+ # metrics which will be logged
+ metrics=[...],
+ )
+
+        model.predict(..., callbacks=[monitor_callback])
+
+    :param round_time: This argument allows specifying whether the times shown
+        on the web-app are rounded; in most cases you would not use this, defaults to 2
+ :type round_time: int, optional
+ :param print_logs: This argument should only be used when trying to debug if
+ your logs do not appear in the web-app, if set to ``True`` this would print
+ out the dictionary which is being pushed to Firebase, defaults to False
+ :type print_logs: bool, optional
+ """
+
+ def __init__(self, round_time: int = 2, print_logs: bool = False):
+ super(PredictEnd, self).__init__()
+ self.round_time = round_time
+ self.start_time = None
+ self.end_time = None
+ self.time = None
+ self.print_logs = print_logs
+
+ self.ref_id = random_char(7)
+ print(f"Use this ID to monitor training for this session: {self.ref_id}")
+
+ def on_predict_begin(self, logs: dict = None):
+ """Overrides the `tf.keras.callbacks.Callback.on_predict_begin <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback#on_predict_begin>`_
+ method which is called at the start of prediction.
+
+ :param logs: Currently no data is passed to this argument since there are no
+ logs during the start of an epoch, defaults to None
+ :type logs: dict, optional
+ """
+
+ self.start_time = tf.timestamp()
+
+ def on_predict_end(self, logs: dict = None):
+ """Overrides the `tf.keras.callbacks.Callback.on_predict_end <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback#on_predict_end>`_
+ method which is called at the end of prediction.
+
+ :param logs: Currently no data is passed to this argument since there are no
+ logs during the start of an epoch, defaults to None
+ :type logs: dict, optional
+ """
+
+ self.end_time = tf.timestamp()
+
+        # Use Python built-in functions to allow use inside @tf.function; see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ self.time = float(self.end_time - self.start_time)
+
+        data = {"epoch": False, "batch": False, "avg_time": round(self.time, self.round_time)}
+
+ write_in_callback(data=data, ref_id=self.ref_id)
+
+ if self.print_logs:
+ print(data)
diff --git a/tfwatcher/callbacks/predict_batch.py b/tfwatcher/callbacks/predict_batch.py
new file mode 100644
index 0000000..f0ec56b
--- /dev/null
+++ b/tfwatcher/callbacks/predict_batch.py
@@ -0,0 +1,148 @@
+from statistics import mean
+from typing import Union
+
+import tensorflow as tf
+
+from ..firebase_helpers import random_char, write_in_callback
+
+
+class PredictBatchEnd(tf.keras.callbacks.Callback):
+ """This class is a subclass of the `tf.keras.callbacks.Callback <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback>`_
+ abstract base class and overrides the methods :func:`on_predict_batch_begin` and :func:`on_predict_batch_end`
+    allowing logging after batches in the ``predict`` method. This class also uses the
+ :mod:`..firebase_helpers` to send data to Firebase Realtime database and also
+ creates a 7 character unique string where the data is pushed on Firebase. Logging
+    to Firebase is also controllable by the ``schedule`` argument, even providing
+    granular control for each batch in ``predict`` methods.
+
+ Example:
+
+ .. code-block:: python
+ :caption: Logging data after every batch in predict methods
+ :emphasize-lines: 4,13
+ :linenos:
+
+ import tfwatcher
+
+ # here we specify schedule = 1 to log after every batch
+ monitor_callback = tfwatcher.callbacks.PredictBatchEnd(schedule=1)
+
+ model.compile(
+ optimizer=...,
+ loss=...,
+ # metrics which will be logged
+ metrics=[...],
+ )
+
+        model.predict(..., callbacks=[monitor_callback])
+
+ .. warning::
+ If the ``steps_per_execution`` argument to compile in ``tf.keras.Model`` is set
+ to N, the logging code will only be called every N batches.
+
+    :param schedule: Use an integer value n to specify logging data every n batches,
+ the first one being logged by default. Use a list of integers to control
+ logging with a greater granularity, logs on all batch numbers specified in
+ the list taking the first batch as batch 1. Using a list will override
+        logging on the first batch by default, defaults to 1
+ :type schedule: Union[int, list[int]], optional
+    :param round_time: This argument allows specifying whether the times shown
+        on the web-app are rounded; in most cases you would not use this, defaults to 2
+ :type round_time: int, optional
+ :param print_logs: This argument should only be used when trying to debug if
+ your logs do not appear in the web-app, if set to ``True`` this would print
+ out the dictionary which is being pushed to Firebase, defaults to False
+ :type print_logs: bool, optional
+    :raises ValueError: If the ``schedule`` is neither an integer nor a list.
+ :raises Exception: If all the values in ``schedule`` list are not convertible
+ to integer.
+ """
+
+ def __init__(
+ self,
+ schedule: Union[int, list] = 1,
+ round_time: int = 2,
+ print_logs: bool = False,
+ ):
+ super(PredictBatchEnd, self).__init__()
+ self.schedule = schedule
+ self.start_time = None
+ self.end_time = None
+ self.times = list()
+ self.round_time = round_time
+ self.print_logs = print_logs
+
+ self.ref_id = random_char(7)
+ print(f"Use this ID to monitor training for this session: {self.ref_id}")
+
+ self.is_int = False
+ self.is_list = False
+ if isinstance(self.schedule, int):
+ self.is_int = True
+ elif isinstance(self.schedule, list):
+ self.is_list = True
+ else:
+ raise ValueError("schedule should either be an integer or a list")
+
+ if self.is_list:
+ try:
+ self.schedule = list(map(int, self.schedule))
+ except (ValueError, TypeError) as err:
+ raise Exception(
+ "All elements in the list should be convertible to int: {}".format(
+ err
+ )
+ )
+
+ def on_predict_batch_begin(self, batch: int, logs: dict = None):
+ """Overrides the `tf.keras.callbacks.Callback.on_predict_batch_begin <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback#on_predict_batch_begin>`_
+        method which is called at the beginning of a batch in predict methods.
+
+ :param batch: Index of batch within the current epoch
+ :type batch: int
+ :param logs: contains the return value of ``model.predict_step``, it typically
+ returns a dict with a key 'outputs' containing the model's outputs
+ :type logs: dict, optional
+ """
+
+ self.start_time = tf.timestamp()
+
+ def on_predict_batch_end(self, batch: int, logs: dict = None):
+ """Overrides the `tf.keras.callbacks.Callback.on_predict_batch_end <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback#on_predict_batch_end>`_
+        method which is called at the end of a batch in predict methods. This
+ method adds the batch number, the average time taken and pushes it to Firebase
+ using the :mod:`..firebase_helpers` module.
+
+        :param batch: Index of batch within the current epoch
+        :type batch: int
+ :param logs: Aggregated metric results up until this batch, defaults to None
+ :type logs: dict, optional
+ """
+
+ self.end_time = tf.timestamp()
+
+        # Use Python built-in functions to allow use inside @tf.function; see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ time = float(self.end_time - self.start_time)
+ self.times.append(time)
+
+        # Use short-circuit evaluation of `and`: if the first operand is False,
+        # Python does not evaluate the second operand
+ if (
+ (self.is_int and ((batch + 1) % self.schedule == 0))
+ or (self.is_list and ((batch + 1) in self.schedule))
+ ) or (batch == 0):
+
+ data = {
+ "batch": batch + 1,
+ "epoch": False,
+ "avg_time": round(mean(self.times), self.round_time),
+ }
+
+ write_in_callback(data=data, ref_id=self.ref_id)
+
+ data["time"] = self.times
+ if self.print_logs:
+ print(data)
+
+ self.times = list()
diff --git a/tfwatcher/callbacks/train_batch.py b/tfwatcher/callbacks/train_batch.py
new file mode 100644
index 0000000..50ea9c9
--- /dev/null
+++ b/tfwatcher/callbacks/train_batch.py
@@ -0,0 +1,148 @@
+from statistics import mean
+from typing import Union
+
+import tensorflow as tf
+
+from ..firebase_helpers import random_char, write_in_callback
+
+
+class TrainBatchEnd(tf.keras.callbacks.Callback):
+ """This class is a subclass of the `tf.keras.callbacks.Callback <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback>`_
+    abstract base class and overrides the methods :func:`on_train_batch_begin` and :func:`on_train_batch_end`
+    allowing logging after a training batch in fit methods. This class
+ also uses the :mod:`..firebase_helpers` to send data to Firebase Realtime database
+ and also creates a 7 character unique string where the data is pushed on Firebase.
+    Logging to Firebase is also controllable by the ``schedule`` argument, even
+    providing granular control for each batch in fit methods.
+
+ Example:
+
+ .. code-block:: python
+ :caption: Logging data after every batch in fit methods
+ :emphasize-lines: 4,13
+ :linenos:
+
+ import tfwatcher
+
+ # here we specify schedule = 1 to log after every batch
+ monitor_callback = tfwatcher.callbacks.TrainBatchEnd(schedule=1)
+
+ model.compile(
+ optimizer=...,
+ loss=...,
+ # metrics which will be logged
+ metrics=[...],
+ )
+
+ model.fit(..., callbacks=[monitor_callback])
+
+ .. warning::
+ If the ``steps_per_execution`` argument to compile in ``tf.keras.Model`` is set
+ to N, the logging code will only be called every N batches.
+
+    :param schedule: Use an integer value n to specify logging data every n batches,
+ the first one being logged by default. Use a list of integers to control
+ logging with a greater granularity, logs on all batch numbers specified in
+ the list taking the first batch as batch 1. Using a list will override
+        logging on the first batch by default, defaults to 1
+ :type schedule: Union[int, list[int]], optional
+    :param round_time: This argument allows specifying whether the times shown
+        on the web-app are rounded; in most cases you would not use this, defaults to 2
+ :type round_time: int, optional
+ :param print_logs: This argument should only be used when trying to debug if
+ your logs do not appear in the web-app, if set to ``True`` this would print
+ out the dictionary which is being pushed to Firebase, defaults to False
+ :type print_logs: bool, optional
+    :raises ValueError: If the ``schedule`` is neither an integer nor a list.
+ :raises Exception: If all the values in ``schedule`` list are not convertible
+ to integer.
+ """
+
+ def __init__(
+ self,
+ schedule: Union[int, list] = 1,
+ round_time: int = 2,
+ print_logs: bool = False,
+ ):
+ super(TrainBatchEnd, self).__init__()
+ self.schedule = schedule
+ self.start_time = None
+ self.end_time = None
+ self.times = list()
+ self.round_time = round_time
+ self.print_logs = print_logs
+
+ self.ref_id = random_char(7)
+ print(f"Use this ID to monitor training for this session: {self.ref_id}")
+
+ self.is_int = False
+ self.is_list = False
+ if isinstance(self.schedule, int):
+ self.is_int = True
+ elif isinstance(self.schedule, list):
+ self.is_list = True
+ else:
+ raise ValueError("schedule should either be an integer or a list")
+
+ if self.is_list:
+ try:
+ self.schedule = list(map(int, self.schedule))
+ except (ValueError, TypeError) as err:
+ raise Exception(
+ "All elements in the list should be convertible to int: {}".format(
+ err
+ )
+ )
+
+ def on_train_batch_begin(self, batch: int, logs: dict = None):
+ """Overrides the `tf.keras.callbacks.Callback.on_train_batch_begin <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback#on_train_batch_begin>`_
+        method which is called at the beginning of a training batch in fit methods.
+
+ :param batch: Index of batch within the current epoch
+ :type batch: int
+ :param logs: Contains the return value of model.train_step. Typically, the
+ values of the Model's metrics are returned.
+ Example: ``{'loss': 0.2, 'accuracy': 0.7}``, defaults to None
+ :type logs: dict, optional
+ """
+
+ self.start_time = tf.timestamp()
+
+ def on_train_batch_end(self, batch: int, logs: dict = None):
+ """Overrides the `tf.keras.callbacks.Callback.on_train_batch_end <https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/Callback#on_train_batch_end>`_
+        method which is called at the end of a training batch in
+ fit methods. This method adds the batch number, the average time taken and
+ pushes it to Firebase using the :mod:`..firebase_helpers` module.
+
+        :param batch: Index of batch within the current epoch
+        :type batch: int
+ :param logs: Aggregated metric results up until this batch, defaults to None
+ :type logs: dict, optional
+ """
+
+ self.end_time = tf.timestamp()
+
+        # Use Python built-in functions to allow use inside @tf.function; see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ time = float(self.end_time - self.start_time)
+ self.times.append(time)
+
+        # Use short-circuit evaluation of `and`: if the first operand is False,
+        # Python does not evaluate the second operand
+ if (
+ (self.is_int and ((batch + 1) % self.schedule == 0))
+ or (self.is_list and ((batch + 1) in self.schedule))
+ ) or (batch == 0):
+
+ data = logs
+ data["batch"] = batch + 1
+ data["epoch"] = False
+ data["avg_time"] = round(mean(self.times), self.round_time)
+
+ write_in_callback(data=data, ref_id=self.ref_id)
+
+ data["time"] = self.times
+ if self.print_logs:
+ print(data)
+
+ self.times = list()
diff --git a/tfwatcher/firebase_config.py b/tfwatcher/firebase_config.py
new file mode 100644
index 0000000..17c6f17
--- /dev/null
+++ b/tfwatcher/firebase_config.py
@@ -0,0 +1,17 @@
+def get_firebase_config() -> dict:
+    """Returns a dictionary to initialize Firebase, containing the registered app's
+    Firebase project configuration. It is safe to expose the Firebase apiKey
+    publicly; see `this Stack Overflow answer <https://stackoverflow.com/a/37484053/11878567>`_.
+
+ :return: A dictionary of Firebase project configuration
+ :rtype: dict
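+
+    Example (a minimal sketch mirroring how :mod:`.firebase_helpers` consumes
+    this dictionary)::
+
+        import pyrebase
+
+        firebase = pyrebase.initialize_app(get_firebase_config())
+        log_db = firebase.database()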
+ """
+
+ # It is better using a dict literal, read: https://stackoverflow.com/a/6610783/11878567
+ return {
+ "apiKey": "AIzaSyAfCOOzFtKxTa-_pS3lO6URRGR8sVjK7sk",
+ "authDomain": "tf-watcher.firebaseapp.com",
+ "databaseURL": "https://tf-watcher-default-rtdb.firebaseio.com",
+ "projectId": "tf-watcher",
+ "storageBucket": "tf-watcher.appspot.com",
+ }
diff --git a/tfwatcher/firebase_helpers.py b/tfwatcher/firebase_helpers.py
new file mode 100644
index 0000000..5d45ef0
--- /dev/null
+++ b/tfwatcher/firebase_helpers.py
@@ -0,0 +1,77 @@
+import random
+import string
+
+import pyrebase
+
+from .firebase_config import get_firebase_config
+
+
+def write_to_firebase(data: dict, ref_id: str, level: str):
+    """Writes data to Firebase Realtime Database using
+    `Pyrebase <https://github.com/thisbejim/Pyrebase>`_, a simple Python wrapper
+    around the Firebase API. This automatically fetches the Firebase config from
+    :func:`firebase_config.get_firebase_config`.
+
+ :param data: A dictionary of the logging metrics, epoch number and average time
+ which are to be logged to Firebase
+ :type data: dict
+ :param ref_id: A unique ID where the data would be pushed to on Firebase
+ :type ref_id: str
+ :param level: This should be either ``epoch``, ``batch`` or ``prediction``
+ corresponding to the level where the logs are collected. For ``prediction``,
+ the data would be pushed without the epoch or batch number it was collected on.
+ :type level: str
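+
+    Example (a hypothetical call; the metric values and the reference ID are
+    placeholders, not values produced by this module)::
+
+        write_to_firebase(
+            data={"loss": 0.2, "accuracy": 0.7, "epoch": 1, "avg_time": 0.52},
+            ref_id="AbCdEfG",
+            level="epoch",
+        )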
+ """
+
+ # level can be epoch, batch, prediction
+ firebase = pyrebase.initialize_app(get_firebase_config())
+ log_db = firebase.database()
+
+ if level == "prediction":
+ log_db.child(ref_id).child(1).push(data)
+ else:
+ log_db.child(ref_id).child(data[level]).push(data)
+
+
+def write_in_callback(data: dict, ref_id: str):
+    """A wrapper around :func:`write_to_firebase` that simply takes the
+    ``data`` and a unique ID and writes to the Firebase Realtime Database. It
+    automatically figures out the level at which the logs were collected and calls
+    :func:`write_to_firebase`. This function is also used to
+    write data to Firebase from within callbacks (e.g. the :class:`EpochEnd` class).
+
+ .. note::
+        This function is specially made for direct use in :obj:`callbacks` and does not
+        require a ``level`` argument, which is calculated automatically for callback
+        classes.
+
+ :param data: A dictionary of the logging metrics, epoch number and average time
+ which are to be logged to Firebase
+ :type data: dict
+ :param ref_id: A unique ID where the data would be pushed to on Firebase
+ :type ref_id: str
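+
+    Example (a hypothetical call; because ``data["epoch"]`` is set here, the
+    ``"epoch"`` level is inferred automatically)::
+
+        write_in_callback(
+            data={"loss": 0.2, "accuracy": 0.7, "epoch": 1, "avg_time": 0.52},
+            ref_id="AbCdEfG",
+        )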
+ """
+
+    # Compare with == rather than `is`: identity checks against int literals are
+    # unreliable and raise a SyntaxWarning on recent Python versions. `.get()`
+    # avoids a KeyError for prediction-level data, which carries neither key.
+    if data.get("epoch") or (data.get("epoch") == 0):
+        level = "epoch"
+    elif data.get("batch") or (data.get("batch") == 0):
+        level = "batch"
+ else:
+ level = "prediction"
+
+ write_to_firebase(data=data, ref_id=ref_id, level=level)
+
+
+def random_char(y: int) -> str:
+    """A very simple function to generate an arbitrary-length string of pseudo random
+    letters to serve as a unique ID specific to the class through which metrics are
+    being logged. This is also the child under which the metrics are logged in the
+    Firebase Realtime Database.
+
+ :param y: The length of the unique ID to be created
+ :type y: int
+    :return: A string of ``y`` pseudo random upper and lower case letters
+ :rtype: str
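+
+    Example (the output is pseudo random, so the exact string will differ)::
+
+        >>> random_char(7)  # doctest: +SKIP
+        'KvTpXzQ'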
+ """
+
+ return "".join(random.choice(string.ascii_letters) for _ in range(y))
diff --git a/webapp/.eslintrc.json b/webapp/.eslintrc.json
new file mode 100644
index 0000000..69dec1f
--- /dev/null
+++ b/webapp/.eslintrc.json
@@ -0,0 +1,32 @@
+{
+ "env": {
+ "browser": true,
+ "es2021": true
+ },
+ "extends": [
+ "plugin:react/recommended",
+ "airbnb"
+ ],
+ "parserOptions": {
+ "ecmaFeatures": {
+ "jsx": true
+ },
+ "ecmaVersion": 12,
+ "sourceType": "module"
+ },
+ "plugins": [
+ "react"
+ ],
+ "rules": {
+ "react/jsx-filename-extension": [
+ 1,
+ {
+ "extensions": [
+ ".js",
+ ".jsx"
+ ]
+ }
+ ],
+ "linebreak-style": 0
+ }
+}
diff --git a/webapp/.gitignore b/webapp/.gitignore
new file mode 100644
index 0000000..4d29575
--- /dev/null
+++ b/webapp/.gitignore
@@ -0,0 +1,23 @@
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/.pnp
+.pnp.js
+
+# testing
+/coverage
+
+# production
+/build
+
+# misc
+.DS_Store
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
diff --git a/webapp/package.json b/webapp/package.json
new file mode 100644
index 0000000..0f94b59
--- /dev/null
+++ b/webapp/package.json
@@ -0,0 +1,53 @@
+{
+ "name": "webapp",
+ "version": "0.1.0",
+ "private": true,
+ "dependencies": {
+ "@chakra-ui/react": "^1.6.5",
+ "@emotion/react": "11",
+ "@emotion/styled": "11",
+ "firebase": "^8.8.1",
+ "framer-motion": "4",
+ "react": "^17.0.2",
+ "react-dom": "^17.0.2",
+ "react-router-dom": "^5.2.0",
+ "react-scripts": "4.0.3",
+ "recharts": "^2.0.10",
+ "workbox-core": "^5.1.3",
+ "workbox-expiration": "^5.1.3",
+ "workbox-precaching": "^5.1.3",
+ "workbox-routing": "^5.1.3",
+ "workbox-strategies": "^5.1.3"
+ },
+ "scripts": {
+ "start": "react-scripts start",
+ "build": "react-scripts build",
+ "test": "react-scripts test",
+ "eject": "react-scripts eject"
+ },
+ "eslintConfig": {
+ "extends": [
+ "react-app"
+ ]
+ },
+ "browserslist": {
+ "production": [
+ ">0.2%",
+ "not dead",
+ "not op_mini all"
+ ],
+ "development": [
+ "last 1 chrome version",
+ "last 1 firefox version",
+ "last 1 safari version"
+ ]
+ },
+ "devDependencies": {
+ "eslint": "^7.32.0",
+ "eslint-config-airbnb": "^18.2.1",
+ "eslint-plugin-import": "^2.23.4",
+ "eslint-plugin-jsx-a11y": "^6.4.1",
+ "eslint-plugin-react": "^7.24.0",
+ "eslint-plugin-react-hooks": "^4.2.0"
+ }
+}
diff --git a/webapp/public/favicon.ico b/webapp/public/favicon.ico
new file mode 100644
index 0000000..a11777c
Binary files /dev/null and b/webapp/public/favicon.ico differ
diff --git a/webapp/public/index.html b/webapp/public/index.html
new file mode 100644
index 0000000..9dd6fe7
--- /dev/null
+++ b/webapp/public/index.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="utf-8" />
+ <link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
+ <meta name="viewport" content="width=device-width, initial-scale=1" />
+ <meta name="theme-color" content="#000000" />
+ <meta name="description" content="Web site created using create-react-app" />
+ <link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
+ <link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
+ <title>TF Watcher</title>
+ </head>
+ <body>
+ <noscript>You need to enable JavaScript to run this app.</noscript>
+ <div id="root"></div>
+ </body>
+</html>
diff --git a/webapp/public/logo192.png b/webapp/public/logo192.png
new file mode 100644
index 0000000..fc44b0a
Binary files /dev/null and b/webapp/public/logo192.png differ
diff --git a/webapp/public/logo512.png b/webapp/public/logo512.png
new file mode 100644
index 0000000..a4e47a6
Binary files /dev/null and b/webapp/public/logo512.png differ
diff --git a/webapp/public/manifest.json b/webapp/public/manifest.json
new file mode 100644
index 0000000..962676a
--- /dev/null
+++ b/webapp/public/manifest.json
@@ -0,0 +1,25 @@
+{
+ "short_name": "TF Watcher",
+ "name": "TF Watcher",
+ "icons": [
+ {
+ "src": "favicon.ico",
+ "sizes": "64x64 32x32 24x24 16x16",
+ "type": "image/x-icon"
+ },
+ {
+ "src": "logo192.png",
+ "type": "image/png",
+ "sizes": "192x192"
+ },
+ {
+ "src": "logo512.png",
+ "type": "image/png",
+ "sizes": "512x512"
+ }
+ ],
+ "start_url": ".",
+ "display": "standalone",
+ "theme_color": "#000000",
+ "background_color": "#ffffff"
+}
diff --git a/webapp/public/robots.txt b/webapp/public/robots.txt
new file mode 100644
index 0000000..e9e57dc
--- /dev/null
+++ b/webapp/public/robots.txt
@@ -0,0 +1,3 @@
+# https://www.robotstxt.org/robotstxt.html
+User-agent: *
+Disallow:
diff --git a/webapp/src/App.js b/webapp/src/App.js
new file mode 100644
index 0000000..8a0dcff
--- /dev/null
+++ b/webapp/src/App.js
@@ -0,0 +1,25 @@
+import React from 'react';
+import {
+ BrowserRouter, Switch, Route, Redirect,
+} from 'react-router-dom';
+import Navbar from './components/Navbar';
+import UserProvider from './providers/AuthProvider';
+import ChartScreen from './screens/ChartScreen';
+import HomeScreen from './screens/HomeScreen';
+import PageNotFound from './screens/404Screen';
+
+const App = () => (
+ <UserProvider>
+ <BrowserRouter>
+ <Navbar />
+ <Switch>
+ <Route path="/logs/:id" component={ChartScreen} />
+ <Route exact path="/" component={HomeScreen} />
+ <Route path="/404" component={PageNotFound} />
+ <Redirect from="*" to="/404" />
+ </Switch>
+ </BrowserRouter>
+ </UserProvider>
+);
+
+export default App;
diff --git a/webapp/src/components/AreaChartComponent.js b/webapp/src/components/AreaChartComponent.js
new file mode 100644
index 0000000..3442053
--- /dev/null
+++ b/webapp/src/components/AreaChartComponent.js
@@ -0,0 +1,92 @@
+import React from 'react';
+import {
+ ResponsiveContainer,
+ XAxis,
+ YAxis,
+ CartesianGrid,
+ Tooltip,
+ AreaChart,
+ Legend,
+ Area,
+} from 'recharts';
+import { Container } from '@chakra-ui/react';
+
+const AreaChartComponent = (data) => {
+ const {
+ xaxis, lineA, lineB, logs,
+ } = data;
+ return (
+ <Container maxWidth="container.lg">
+ <div style={{
+ width: '100%', height: 380, marginBottom: 40, marginTop: 10,
+ }}
+ >
+ <ResponsiveContainer>
+ <AreaChart
+ data={logs}
+ margin={{
+ top: 5,
+ right: 30,
+ left: 30,
+ bottom: 15,
+ }}
+ >
+ <defs>
+ <linearGradient id="green" x1="0" y1="0" x2="0" y2="1">
+ <stop offset="5%" stopColor="#5ac49c" stopOpacity={1} />
+ <stop offset="95%" stopColor="#5ac49c" stopOpacity={0.2} />
+ </linearGradient>
+ <linearGradient id="purple" x1="0" y1="0" x2="0" y2="1">
+ <stop offset="5%" stopColor="#8884d8" stopOpacity={1} />
+ <stop offset="95%" stopColor="#8884d8" stopOpacity={0.2} />
+ </linearGradient>
+ </defs>
+ <CartesianGrid strokeDasharray="3 3" />
+ <XAxis
+ dataKey={xaxis}
+ label={{
+ value: xaxis.toUpperCase(), position: 'insideBottom', offset: -12, fill: '#0987A0', fontSize: 15,
+ }}
+ />
+ <YAxis label={{
+ value: `${lineA.toUpperCase()} VALUE`, angle: -90, position: 'insideLeft', fill: '#0987A0', fontSize: 15,
+ }}
+ />
+ <Tooltip />
+ <Legend
+ verticalAlign="middle"
+ layout="vertical"
+ align="right"
+ wrapperStyle={{
+ paddingLeft: '20px',
+ width: '10rem',
+ }}
+ iconType="diamond"
+ iconSize={14}
+ />
+ <Area
+ type="monotone"
+ dataKey={lineA}
+ stackId="1"
+ stroke="#44b88b"
+ fill="url(#green)"
+ />
+ {lineB
+ ? (
+ <Area
+ type="monotone"
+ dataKey={lineB}
+ stackId="1"
+ stroke="#8884d8"
+ fill="url(#purple)"
+ />
+ )
+ : null}
+ </AreaChart>
+ </ResponsiveContainer>
+ </div>
+ </Container>
+ );
+};
+
+export default AreaChartComponent;
diff --git a/webapp/src/components/BarChartComponent.js b/webapp/src/components/BarChartComponent.js
new file mode 100644
index 0000000..4b10dbe
--- /dev/null
+++ b/webapp/src/components/BarChartComponent.js
@@ -0,0 +1,37 @@
+import React from 'react';
+import { Container } from '@chakra-ui/react';
+import {
+ BarChart, CartesianGrid, XAxis, YAxis, Tooltip, Bar, ResponsiveContainer,
+} from 'recharts';
+
+const BarChartComponent = (data) => {
+ const { logs, xaxis } = data;
+ return (
+ <Container maxWidth="container.lg" marginTop="20">
+ <div style={{
+ width: '80%', height: 280, marginBottom: 40, paddingLeft: 40,
+ }}
+ >
+ <ResponsiveContainer>
+ <BarChart data={logs} margin={{ bottom: 20 }}>
+ <CartesianGrid strokeDasharray="3 3" />
+ <XAxis
+ dataKey={xaxis}
+ label={{
+ value: xaxis.toUpperCase(), position: 'insideBottom', offset: -12, fill: '#0987A0', fontSize: 15,
+ }}
+ />
+ <YAxis label={{
+ value: 'AVG TIME', angle: -90, position: 'insideLeft', fill: '#0987A0', fontSize: 15,
+ }}
+ />
+ <Tooltip />
+ <Bar dataKey="avg_time" fill="#b794f4" />
+ </BarChart>
+ </ResponsiveContainer>
+ </div>
+ </Container>
+ );
+};
+
+export default BarChartComponent;
diff --git a/webapp/src/components/ChartsContainer.js b/webapp/src/components/ChartsContainer.js
new file mode 100644
index 0000000..1bd9e42
--- /dev/null
+++ b/webapp/src/components/ChartsContainer.js
@@ -0,0 +1,33 @@
+import React from 'react';
+import { Redirect } from 'react-router-dom';
+import getAllParams from '../helpers/getChartsDataFormat';
+import AreaChartComponent from './AreaChartComponent';
+import BarChartComponent from './BarChartComponent';
+
+const ChartsContainer = (params) => {
+ const { data } = params;
+ let baseParam;
+ let chartsParams;
+
+ if (data[0]) {
+ const temp = getAllParams(data);
+ baseParam = temp.baseParam;
+ chartsParams = temp.chartsParams;
+ } else {
+ return <Redirect to="/404" />;
+ }
+
+ return (
+ <>
+ {baseParam === 'epoch'
+ ? chartsParams.map(
+ (item) => <AreaChartComponent xaxis={baseParam} lineA={item} lineB={`val_${item}`} logs={data} key={item} />,
+ ) : chartsParams.map(
+ (item) => <AreaChartComponent xaxis={baseParam} lineA={item} logs={data} key={item} />,
+ )}
+ <BarChartComponent logs={data} xaxis={baseParam} />
+ </>
+ );
+};
+
+export default ChartsContainer;
diff --git a/webapp/src/components/LoadingSpinner.js b/webapp/src/components/LoadingSpinner.js
new file mode 100644
index 0000000..f7d98c7
--- /dev/null
+++ b/webapp/src/components/LoadingSpinner.js
@@ -0,0 +1,16 @@
+import React from 'react';
+import { VStack, Spinner } from '@chakra-ui/react';
+
+const LoadingSpinner = () => (
+ <VStack marginTop="10">
+ <Spinner
+ thickness="4px"
+ speed="0.65s"
+ emptyColor="gray.200"
+ color="teal.400"
+ size="xl"
+ />
+ </VStack>
+);
+
+export default LoadingSpinner;
diff --git a/webapp/src/components/Navbar.js b/webapp/src/components/Navbar.js
new file mode 100644
index 0000000..adf71bb
--- /dev/null
+++ b/webapp/src/components/Navbar.js
@@ -0,0 +1,61 @@
+import React, { useEffect, useContext, useState } from 'react';
+import { useHistory } from 'react-router-dom';
+import {
+ Text, Button, Flex, useToast,
+} from '@chakra-ui/react';
+
+import { logOut, signInWithGoogle } from '../firebase/Firebase';
+import { UserContext } from '../providers/AuthProvider';
+
+const Navbar = () => {
+ const history = useHistory();
+ const { user } = useContext(UserContext);
+ const [loginStatus, setLoginStatus] = useState(false);
+ const [err, setErr] = useState(false);
+ const toast = useToast();
+
+ const login = async () => {
+ const success = await signInWithGoogle();
+ setErr(!success);
+ };
+
+ const logout = async () => {
+ const success = await logOut();
+ if (success) history.go(0);
+ else setErr(!success);
+ };
+
+ useEffect(() => {
+ if (user) setLoginStatus(true);
+ }, [user]);
+
+ useEffect(() => {
+ if (err) {
+ toast({
+ title: 'Error',
+ description: 'Something went wrong! Please try again',
+ status: 'error',
+ duration: 8000,
+ isClosable: true,
+ });
+ }
+ }, [err]);
+
+ return (
+ <Flex bgColor="teal.50" alignItems="center" paddingY="4" paddingX={{ base: '8', md: '20', lg: '28' }} marginBottom="5" boxShadow="lg">
+ <Text flexGrow="1" fontSize="large" fontWeight="bold">TF Watcher</Text>
+ <Button
+ size="sm"
+ colorScheme="teal"
+ fontWeight="bold"
+ paddingX="5"
+ variant="link"
+ onClick={loginStatus ? logout : login}
+ >
+ {loginStatus ? 'Logout' : 'Login'}
+ </Button>
+ </Flex>
+ );
+};
+
+export default Navbar;
diff --git a/webapp/src/firebase/Firebase.js b/webapp/src/firebase/Firebase.js
new file mode 100644
index 0000000..0ce9452
--- /dev/null
+++ b/webapp/src/firebase/Firebase.js
@@ -0,0 +1,33 @@
+import firebase from 'firebase/app';
+import 'firebase/auth';
+import 'firebase/database';
+
+firebase.initializeApp({
+ apiKey: process.env.REACT_APP_FIREBASE_API_KEY,
+ authDomain: process.env.REACT_APP_FIREBASE_AUTH_DOMAIN,
+ databaseURL: process.env.REACT_APP_FIREBASE_DATABASE_URL,
+ projectId: process.env.REACT_APP_FIREBASE_PROJECT_ID,
+});
+
+export const auth = firebase.auth();
+export const db = firebase.database();
+
+const googleProvider = new firebase.auth.GoogleAuthProvider();
+
+export const signInWithGoogle = async () => {
+ try {
+ await auth.signInWithPopup(googleProvider);
+ } catch {
+ return false;
+ }
+ return true;
+};
+
+export const logOut = async () => {
+ try {
+ await auth.signOut();
+ } catch {
+ return false;
+ }
+ return true;
+};
diff --git a/webapp/src/helpers/getChartsDataFormat.js b/webapp/src/helpers/getChartsDataFormat.js
new file mode 100644
index 0000000..c532ff4
--- /dev/null
+++ b/webapp/src/helpers/getChartsDataFormat.js
@@ -0,0 +1,29 @@
+const ignoreParams = ['epoch', 'batch', 'avg_time'];
+
+const getAllParams = (data) => {
+  // Keep these local so results do not leak between successive calls.
+  let baseParam = 'epoch';
+  let chartsParams = [];
+
+  if (data[0].epoch === false) baseParam = 'batch';
+
+ if (baseParam === 'epoch') {
+ const charts = [];
+ Object.keys(data[0]).map((key) => {
+ if (!ignoreParams.includes(key)) {
+ if (key.split('val_').length === 1) charts.push(key);
+ }
+ return null;
+ });
+ chartsParams = charts;
+ } else if (baseParam === 'batch') {
+ const charts = [];
+ Object.keys(data[0]).map((key) => {
+ if (!ignoreParams.includes(key)) charts.push(key);
+ return null;
+ });
+ chartsParams = charts;
+ }
+
+ return { baseParam, chartsParams };
+};
+
+export default getAllParams;
diff --git a/webapp/src/index.js b/webapp/src/index.js
new file mode 100644
index 0000000..220cc0e
--- /dev/null
+++ b/webapp/src/index.js
@@ -0,0 +1,16 @@
+import React from 'react';
+import ReactDOM from 'react-dom';
+import { ChakraProvider } from '@chakra-ui/react';
+import App from './App';
+import * as serviceWorkerRegistration from './service/serviceWorkerRegistration';
+
+ReactDOM.render(
+ <React.StrictMode>
+ <ChakraProvider>
+ <App />
+ </ChakraProvider>
+ </React.StrictMode>,
+ document.getElementById('root'),
+);
+
+serviceWorkerRegistration.unregister();
diff --git a/webapp/src/providers/AuthProvider.js b/webapp/src/providers/AuthProvider.js
new file mode 100644
index 0000000..63648d1
--- /dev/null
+++ b/webapp/src/providers/AuthProvider.js
@@ -0,0 +1,23 @@
+/* eslint-disable react/prop-types */
+/* eslint-disable no-undef */
+import React, { useState, useEffect, createContext } from 'react';
+import { auth } from '../firebase/Firebase';
+
+export const UserContext = createContext();
+
+export default (props) => {
+ const [user, setUser] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const { children } = props;
+
+ useEffect(() => {
+ auth.onAuthStateChanged((currentUser) => {
+ if (currentUser) setUser(currentUser);
+ setLoading(false);
+ });
+ }, []);
+
+ return (
+ <UserContext.Provider value={{ user, loading }}>{children}</UserContext.Provider>
+ );
+};
diff --git a/webapp/src/screens/404Screen.js b/webapp/src/screens/404Screen.js
new file mode 100644
index 0000000..c3ac319
--- /dev/null
+++ b/webapp/src/screens/404Screen.js
@@ -0,0 +1,13 @@
+import React from 'react';
+import { Link } from 'react-router-dom';
+import { VStack, Text, Button } from '@chakra-ui/react';
+
+const PageNotFound = () => (
+ <VStack paddingX="4" textAlign="center">
+ <Text fontSize="3xl" color="teal.600" fontWeight="bold">Oops!</Text>
+ <Text fontSize="lg" fontWeight="medium">The page you are looking for does not exist</Text>
+ <Link to="/"><Button colorScheme="teal" marginTop="6">Go back home</Button></Link>
+ </VStack>
+);
+
+export default PageNotFound;
diff --git a/webapp/src/screens/ChartScreen.js b/webapp/src/screens/ChartScreen.js
new file mode 100644
index 0000000..8852410
--- /dev/null
+++ b/webapp/src/screens/ChartScreen.js
@@ -0,0 +1,70 @@
+import React, {
+ useContext, useState, useEffect,
+} from 'react';
+import { Redirect, useHistory, useParams } from 'react-router-dom';
+import { VStack, Text } from '@chakra-ui/react';
+
+import { UserContext } from '../providers/AuthProvider';
+import { db } from '../firebase/Firebase';
+
+import LoadingSpinner from '../components/LoadingSpinner';
+import ChartsContainer from '../components/ChartsContainer';
+
+const ChartScreen = () => {
+ const { id } = useParams();
+ const history = useHistory();
+ const { user, loading } = useContext(UserContext);
+ const [logs, setLogs] = useState([]);
+ const [pageLoading, setPageLoading] = useState(true);
+
+ useEffect(() => {
+ const rootRef = db.ref();
+ let flag = 0;
+ rootRef.on('value', (snapshot) => {
+ snapshot.forEach((child) => {
+ if (child.key === id) {
+ flag = 1;
+ return;
+ }
+ setPageLoading(false);
+ });
+ if (!flag) history.push('/404');
+ });
+ }, []);
+
+ useEffect(() => {
+ db.ref(id).on('value', (snapshot) => {
+ const getAllData = [];
+ snapshot.forEach((snap) => {
+ getAllData.push(snap.val());
+ });
+ const newLogs = [];
+ getAllData.forEach((element) => {
+ Object.keys(element).map((x) => {
+ newLogs.push(element[x]);
+ return null;
+ });
+ });
+ setLogs(newLogs);
+ });
+ }, []);
+
+ if (pageLoading) {
+ return <LoadingSpinner />;
+ }
+
+ return (
+ <>
+ { !loading && !user ? (
+ <Redirect to="/" />
+ ) : (
+ <VStack>
+ <Text fontSize="2xl" marginTop="4" fontWeight="semibold" color="gray.600">Real-time logs</Text>
+ <ChartsContainer data={logs} />
+ </VStack>
+ )}
+ </>
+ );
+};
+
+export default ChartScreen;
diff --git a/webapp/src/screens/HomeScreen.js b/webapp/src/screens/HomeScreen.js
new file mode 100644
index 0000000..1cb6598
--- /dev/null
+++ b/webapp/src/screens/HomeScreen.js
@@ -0,0 +1,92 @@
+import React, { useState, useContext, useEffect } from 'react';
+import {
+ Button, VStack, Text, Input, HStack, useToast,
+} from '@chakra-ui/react';
+import { useHistory } from 'react-router-dom';
+import { UserContext } from '../providers/AuthProvider';
+import { db } from '../firebase/Firebase';
+import LoadingSpinner from '../components/LoadingSpinner';
+
+const HomeScreen = () => {
+ const history = useHistory();
+ const { user, loading } = useContext(UserContext);
+ const [pageLoading, setPageLoading] = useState(true);
+ const [loginStatus, setLoginStatus] = useState(false);
+ const [key, setKey] = useState('');
+ const [searchLoading, setSearchLoading] = useState(false);
+ const [err, setErr] = useState(false);
+ const toast = useToast();
+
+ useEffect(() => {
+ if (user) setLoginStatus(true);
+ }, [user]);
+
+  useEffect(() => {
+    if (!loading) setPageLoading(false);
+  }, [loading]);
+
+ useEffect(() => {
+ if (err) {
+ toast({
+ title: 'Error',
+        description: 'Could not find an active process with that key!',
+ status: 'error',
+ duration: 2000,
+ isClosable: true,
+ });
+ setErr(false);
+ }
+ }, [err]);
+
+ const searchId = () => {
+ setSearchLoading(true);
+ const rootRef = db.ref();
+ let flag = 0;
+ rootRef.on('value', (snapshot) => {
+ snapshot.forEach((child) => {
+ if (child.key === key) {
+ flag = 1;
+ return;
+ }
+ setSearchLoading(false);
+ });
+ if (!flag) setErr(true);
+ else { history.push(`/logs/${key}`); }
+ });
+ };
+
+ if (pageLoading) {
+ return <LoadingSpinner />;
+ }
+
+ return (
+ <VStack>
+ { !loginStatus
+ ? <Text fontSize="2xl">Login to get started!</Text>
+ : (
+ <HStack>
+ <Input
+ placeholder="Enter key"
+ onChange={(e) => setKey(e.target.value.trim())}
+ maxW="md"
+ variant="outline"
+ boxShadow="md"
+ onKeyPress={(e) => (e.key === 'Enter' ? searchId() : null)}
+ disabled={searchLoading}
+ />
+ <Button
+ onClick={searchId}
+ colorScheme="teal"
+ paddingX="10"
+ boxShadow="lg"
+ isLoading={searchLoading}
+ >
+ View logs
+ </Button>
+ </HStack>
+ )}
+ </VStack>
+ );
+};
+
+export default HomeScreen;
diff --git a/webapp/src/service/service-worker.js b/webapp/src/service/service-worker.js
new file mode 100644
index 0000000..77daa7b
--- /dev/null
+++ b/webapp/src/service/service-worker.js
@@ -0,0 +1,73 @@
+/* eslint-disable no-restricted-globals */
+
+// This service worker can be customized!
+// See https://developers.google.com/web/tools/workbox/modules
+// for the list of available Workbox modules, or add any other
+// code you'd like.
+// You can also remove this file if you'd prefer not to use a
+// service worker, and the Workbox build step will be skipped.
+
+import { clientsClaim } from 'workbox-core';
+import { ExpirationPlugin } from 'workbox-expiration';
+import { precacheAndRoute, createHandlerBoundToURL } from 'workbox-precaching';
+import { registerRoute } from 'workbox-routing';
+import { StaleWhileRevalidate } from 'workbox-strategies';
+
+clientsClaim();
+
+// Precache all of the assets generated by your build process.
+// Their URLs are injected into the manifest variable below.
+// This variable must be present somewhere in your service worker file,
+// even if you decide not to use precaching. See https://cra.link/PWA
+// eslint-disable-next-line no-underscore-dangle
+precacheAndRoute(self.__WB_MANIFEST);
+
+// Set up App Shell-style routing, so that all navigation requests
+// are fulfilled with your index.html shell. Learn more at
+// https://developers.google.com/web/fundamentals/architecture/app-shell
+const fileExtensionRegexp = new RegExp('/[^/?]+\\.[^/]+$');
+registerRoute(
+ // Return false to exempt requests from being fulfilled by index.html.
+ ({ request, url }) => {
+ // If this isn't a navigation, skip.
+ if (request.mode !== 'navigate') {
+ return false;
+ } // If this is a URL that starts with /_, skip.
+
+ if (url.pathname.startsWith('/_')) {
+ return false;
+    } // If this looks like a URL for a resource, because it contains a file extension, skip.
+
+ if (url.pathname.match(fileExtensionRegexp)) {
+ return false;
+ } // Return true to signal that we want to use the handler.
+
+ return true;
+ },
+ createHandlerBoundToURL(`${process.env.PUBLIC_URL}/index.html`),
+);
+
+// An example runtime caching route for requests that aren't handled by the
+// precache, in this case same-origin .png requests like those from in public/
+registerRoute(
+ // Add in any other file extensions or routing criteria as needed.
+ ({ url }) => url.origin === self.location.origin && url.pathname.endsWith('.png'), // Customize this strategy as needed, e.g., by changing to CacheFirst.
+ new StaleWhileRevalidate({
+ cacheName: 'images',
+ plugins: [
+ // Ensure that once this runtime cache reaches a maximum size the
+ // least-recently used images are removed.
+ new ExpirationPlugin({ maxEntries: 50 }),
+ ],
+ }),
+);
+
+// This allows the web app to trigger skipWaiting via
+// registration.waiting.postMessage({type: 'SKIP_WAITING'})
+self.addEventListener('message', (event) => {
+ if (event.data && event.data.type === 'SKIP_WAITING') {
+ self.skipWaiting();
+ }
+});
+
+// Any other custom service worker logic can go here.
diff --git a/webapp/src/service/serviceWorkerRegistration.js b/webapp/src/service/serviceWorkerRegistration.js
new file mode 100644
index 0000000..deabc45
--- /dev/null
+++ b/webapp/src/service/serviceWorkerRegistration.js
@@ -0,0 +1,140 @@
+/* eslint-disable no-console */
+/* eslint-disable no-use-before-define */
+// This optional code is used to register a service worker.
+// register() is not called by default.
+
+// This lets the app load faster on subsequent visits in production, and gives
+// it offline capabilities. However, it also means that developers (and users)
+// will only see deployed updates on subsequent visits to a page, after all the
+// existing tabs open on the page have been closed, since previously cached
+// resources are updated in the background.
+
+// To learn more about the benefits of this model and instructions on how to
+// opt-in, read https://cra.link/PWA
+
+const isLocalhost = Boolean(
+ window.location.hostname === 'localhost'
+ // [::1] is the IPv6 localhost address.
+ || window.location.hostname === '[::1]'
+ // 127.0.0.0/8 are considered localhost for IPv4.
+ || window.location.hostname.match(/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/),
+);
+
+export function register(config) {
+ if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
+ // The URL constructor is available in all browsers that support SW.
+ const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);
+ if (publicUrl.origin !== window.location.origin) {
+ // Our service worker won't work if PUBLIC_URL is on a different origin
+ // from what our page is served on. This might happen if a CDN is used to
+ // serve assets; see https://github.com/facebook/create-react-app/issues/2374
+ return;
+ }
+
+ window.addEventListener('load', () => {
+ const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
+
+ if (isLocalhost) {
+ // This is running on localhost. Let's check if a service worker still exists or not.
+ checkValidServiceWorker(swUrl, config);
+
+ // Add some additional logging to localhost, pointing developers to the
+ // service worker/PWA documentation.
+ navigator.serviceWorker.ready.then(() => {
+ console.log(
+ 'This web app is being served cache-first by a service '
+ + 'worker. To learn more, visit https://cra.link/PWA',
+ );
+ });
+ } else {
+ // Is not localhost. Just register service worker
+ registerValidSW(swUrl, config);
+ }
+ });
+ }
+}
+
+function registerValidSW(swUrl, config) {
+ navigator.serviceWorker
+ .register(swUrl)
+ .then((registration) => {
+ // eslint-disable-next-line no-param-reassign
+ registration.onupdatefound = () => {
+ const installingWorker = registration.installing;
+ if (installingWorker == null) {
+ return;
+ }
+ installingWorker.onstatechange = () => {
+ if (installingWorker.state === 'installed') {
+ if (navigator.serviceWorker.controller) {
+ // At this point, the updated precached content has been fetched,
+ // but the previous service worker will still serve the older
+ // content until all client tabs are closed.
+ console.log(
+ 'New content is available and will be used when all '
+ + 'tabs for this page are closed. See https://cra.link/PWA.',
+ );
+
+ // Execute callback
+ if (config && config.onUpdate) {
+ config.onUpdate(registration);
+ }
+ } else {
+ // At this point, everything has been precached.
+ // It's the perfect time to display a
+ // "Content is cached for offline use." message.
+ console.log('Content is cached for offline use.');
+
+ // Execute callback
+ if (config && config.onSuccess) {
+ config.onSuccess(registration);
+ }
+ }
+ }
+ };
+ };
+ })
+ .catch((error) => {
+ console.error('Error during service worker registration:', error);
+ });
+}
+
+function checkValidServiceWorker(swUrl, config) {
+  // Check if the service worker can be found. If it can't, reload the page.
+ fetch(swUrl, {
+ headers: { 'Service-Worker': 'script' },
+ })
+ .then((response) => {
+ // Ensure service worker exists, and that we really are getting a JS file.
+ const contentType = response.headers.get('content-type');
+ if (
+ response.status === 404
+ || (contentType != null && contentType.indexOf('javascript') === -1)
+ ) {
+ // No service worker found. Probably a different app. Reload the page.
+ navigator.serviceWorker.ready.then((registration) => {
+ registration.unregister().then(() => {
+ window.location.reload();
+ });
+ });
+ } else {
+ // Service worker found. Proceed as normal.
+ registerValidSW(swUrl, config);
+ }
+ })
+ .catch(() => {
+ console.log('No internet connection found. App is running in offline mode.');
+ });
+}
+
+export function unregister() {
+ if ('serviceWorker' in navigator) {
+ navigator.serviceWorker.ready
+ .then((registration) => {
+ registration.unregister();
+ })
+ .catch((error) => {
+ console.error(error.message);
+ });
+ }
+}
diff --git a/webapp/yarn.lock b/webapp/yarn.lock
new file mode 100644
index 0000000..5b38c15
--- /dev/null
+++ b/webapp/yarn.lock
@@ -0,0 +1,13329 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+"@babel/[email protected]":
+ version "7.10.4"
+ resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.4.tgz#168da1a36e90da68ae8d49c0f1b48c7c6249213a"
+ integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==
+ dependencies:
+ "@babel/highlight" "^7.10.4"
+
+"@babel/[email protected]":
+ version "7.12.11"
+ resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f"
+ integrity sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==
+ dependencies:
+ "@babel/highlight" "^7.10.4"
+
+"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.5.5":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.13.tgz#dcfc826beef65e75c50e21d3837d7d95798dd658"
+ integrity sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g==
+ dependencies:
+ "@babel/highlight" "^7.12.13"
+
+"@babel/compat-data@^7.12.1", "@babel/compat-data@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.12.13.tgz#27e19e0ed3726ccf54067ced4109501765e7e2e8"
+ integrity sha512-U/hshG5R+SIoW7HVWIdmy1cB7s3ki+r3FpyEZiCgpi4tFgPnX/vynY80ZGSASOIrUM6O7VxOgCZgdt7h97bUGg==
+
+"@babel/[email protected]":
+ version "7.12.3"
+ resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.12.3.tgz#1b436884e1e3bff6fb1328dc02b208759de92ad8"
+ integrity sha512-0qXcZYKZp3/6N2jKYVxZv0aNCsxTSVCiK72DTiTYZAu7sjg73W0/aynWjMbiGd87EQL4WyA8reiJVh92AVla9g==
+ dependencies:
+ "@babel/code-frame" "^7.10.4"
+ "@babel/generator" "^7.12.1"
+ "@babel/helper-module-transforms" "^7.12.1"
+ "@babel/helpers" "^7.12.1"
+ "@babel/parser" "^7.12.3"
+ "@babel/template" "^7.10.4"
+ "@babel/traverse" "^7.12.1"
+ "@babel/types" "^7.12.1"
+ convert-source-map "^1.7.0"
+ debug "^4.1.0"
+ gensync "^1.0.0-beta.1"
+ json5 "^2.1.2"
+ lodash "^4.17.19"
+ resolve "^1.3.2"
+ semver "^5.4.1"
+ source-map "^0.5.0"
+
+"@babel/core@^7.1.0", "@babel/core@^7.12.3", "@babel/core@^7.7.5", "@babel/core@^7.8.4":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.12.17.tgz#993c5e893333107a2815d8e0d73a2c3755e280b2"
+ integrity sha512-V3CuX1aBywbJvV2yzJScRxeiiw0v2KZZYYE3giywxzFJL13RiyPjaaDwhDnxmgFTTS7FgvM2ijr4QmKNIu0AtQ==
+ dependencies:
+ "@babel/code-frame" "^7.12.13"
+ "@babel/generator" "^7.12.17"
+ "@babel/helper-module-transforms" "^7.12.17"
+ "@babel/helpers" "^7.12.17"
+ "@babel/parser" "^7.12.17"
+ "@babel/template" "^7.12.13"
+ "@babel/traverse" "^7.12.17"
+ "@babel/types" "^7.12.17"
+ convert-source-map "^1.7.0"
+ debug "^4.1.0"
+ gensync "^1.0.0-beta.1"
+ json5 "^2.1.2"
+ lodash "^4.17.19"
+ semver "^5.4.1"
+ source-map "^0.5.0"
+
+"@babel/generator@^7.12.1", "@babel/generator@^7.12.17":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.12.17.tgz#9ef1dd792d778b32284411df63f4f668a9957287"
+ integrity sha512-DSA7ruZrY4WI8VxuS1jWSRezFnghEoYEFrZcw9BizQRmOZiUsiHl59+qEARGPqPikwA/GPTyRCi7isuCK/oyqg==
+ dependencies:
+ "@babel/types" "^7.12.17"
+ jsesc "^2.5.1"
+ source-map "^0.5.0"
+
+"@babel/helper-annotate-as-pure@^7.10.4", "@babel/helper-annotate-as-pure@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.12.13.tgz#0f58e86dfc4bb3b1fcd7db806570e177d439b6ab"
+ integrity sha512-7YXfX5wQ5aYM/BOlbSccHDbuXXFPxeoUmfWtz8le2yTkTZc+BxsiEnENFoi2SlmA8ewDkG2LgIMIVzzn2h8kfw==
+ dependencies:
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-builder-binary-assignment-operator-visitor@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.12.13.tgz#6bc20361c88b0a74d05137a65cac8d3cbf6f61fc"
+ integrity sha512-CZOv9tGphhDRlVjVkAgm8Nhklm9RzSmWpX2my+t7Ua/KT616pEzXsQCjinzvkRvHWJ9itO4f296efroX23XCMA==
+ dependencies:
+ "@babel/helper-explode-assignable-expression" "^7.12.13"
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-compilation-targets@^7.12.1", "@babel/helper-compilation-targets@^7.12.17":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.12.17.tgz#91d83fae61ef390d39c3f0507cb83979bab837c7"
+ integrity sha512-5EkibqLVYOuZ89BSg2lv+GG8feywLuvMXNYgf0Im4MssE0mFWPztSpJbildNnUgw0bLI2EsIN4MpSHC2iUJkQA==
+ dependencies:
+ "@babel/compat-data" "^7.12.13"
+ "@babel/helper-validator-option" "^7.12.17"
+ browserslist "^4.14.5"
+ semver "^5.5.0"
+
+"@babel/helper-create-class-features-plugin@^7.12.1", "@babel/helper-create-class-features-plugin@^7.12.13", "@babel/helper-create-class-features-plugin@^7.12.17":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.12.17.tgz#704b69c8a78d03fb1c5fcc2e7b593f8a65628944"
+ integrity sha512-I/nurmTxIxHV0M+rIpfQBF1oN342+yvl2kwZUrQuOClMamHF1w5tknfZubgNOLRoA73SzBFAdFcpb4M9HwOeWQ==
+ dependencies:
+ "@babel/helper-function-name" "^7.12.13"
+ "@babel/helper-member-expression-to-functions" "^7.12.17"
+ "@babel/helper-optimise-call-expression" "^7.12.13"
+ "@babel/helper-replace-supers" "^7.12.13"
+ "@babel/helper-split-export-declaration" "^7.12.13"
+
+"@babel/helper-create-regexp-features-plugin@^7.12.13":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.12.17.tgz#a2ac87e9e319269ac655b8d4415e94d38d663cb7"
+ integrity sha512-p2VGmBu9oefLZ2nQpgnEnG0ZlRPvL8gAGvPUMQwUdaE8k49rOMuZpOwdQoy5qJf6K8jL3bcAMhVUlHAjIgJHUg==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.12.13"
+ regexpu-core "^4.7.1"
+
+"@babel/helper-explode-assignable-expression@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.12.13.tgz#0e46990da9e271502f77507efa4c9918d3d8634a"
+ integrity sha512-5loeRNvMo9mx1dA/d6yNi+YiKziJZFylZnCo1nmFF4qPU4yJ14abhWESuSMQSlQxWdxdOFzxXjk/PpfudTtYyw==
+ dependencies:
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-function-name@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.12.13.tgz#93ad656db3c3c2232559fd7b2c3dbdcbe0eb377a"
+ integrity sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA==
+ dependencies:
+ "@babel/helper-get-function-arity" "^7.12.13"
+ "@babel/template" "^7.12.13"
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-get-function-arity@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.12.13.tgz#bc63451d403a3b3082b97e1d8b3fe5bd4091e583"
+ integrity sha512-DjEVzQNz5LICkzN0REdpD5prGoidvbdYk1BVgRUOINaWJP2t6avB27X1guXK1kXNrX0WMfsrm1A/ZBthYuIMQg==
+ dependencies:
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-hoist-variables@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.12.13.tgz#13aba58b7480b502362316ea02f52cca0e9796cd"
+ integrity sha512-KSC5XSj5HreRhYQtZ3cnSnQwDzgnbdUDEFsxkN0m6Q3WrCRt72xrnZ8+h+pX7YxM7hr87zIO3a/v5p/H3TrnVw==
+ dependencies:
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-member-expression-to-functions@^7.12.13", "@babel/helper-member-expression-to-functions@^7.12.17":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.17.tgz#f82838eb06e1235307b6d71457b6670ff71ee5ac"
+ integrity sha512-Bzv4p3ODgS/qpBE0DiJ9qf5WxSmrQ8gVTe8ClMfwwsY2x/rhykxxy3bXzG7AGTnPB2ij37zGJ/Q/6FruxHxsxg==
+ dependencies:
+ "@babel/types" "^7.12.17"
+
+"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.12.1", "@babel/helper-module-imports@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.12.13.tgz#ec67e4404f41750463e455cc3203f6a32e93fcb0"
+ integrity sha512-NGmfvRp9Rqxy0uHSSVP+SRIW1q31a7Ji10cLBcqSDUngGentY4FRiHOFZFE1CLU5eiL0oE8reH7Tg1y99TDM/g==
+ dependencies:
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-module-transforms@^7.12.1", "@babel/helper-module-transforms@^7.12.13", "@babel/helper-module-transforms@^7.12.17":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.12.17.tgz#7c75b987d6dfd5b48e575648f81eaac891539509"
+ integrity sha512-sFL+p6zOCQMm9vilo06M4VHuTxUAwa6IxgL56Tq1DVtA0ziAGTH1ThmJq7xwPqdQlgAbKX3fb0oZNbtRIyA5KQ==
+ dependencies:
+ "@babel/helper-module-imports" "^7.12.13"
+ "@babel/helper-replace-supers" "^7.12.13"
+ "@babel/helper-simple-access" "^7.12.13"
+ "@babel/helper-split-export-declaration" "^7.12.13"
+ "@babel/helper-validator-identifier" "^7.12.11"
+ "@babel/template" "^7.12.13"
+ "@babel/traverse" "^7.12.17"
+ "@babel/types" "^7.12.17"
+ lodash "^4.17.19"
+
+"@babel/helper-optimise-call-expression@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.13.tgz#5c02d171b4c8615b1e7163f888c1c81c30a2aaea"
+ integrity sha512-BdWQhoVJkp6nVjB7nkFWcn43dkprYauqtk++Py2eaf/GRDFm5BxRqEIZCiHlZUGAVmtwKcsVL1dC68WmzeFmiA==
+ dependencies:
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.12.13.tgz#174254d0f2424d8aefb4dd48057511247b0a9eeb"
+ integrity sha512-C+10MXCXJLiR6IeG9+Wiejt9jmtFpxUc3MQqCmPY8hfCjyUGl9kT+B2okzEZrtykiwrc4dbCPdDoz0A/HQbDaA==
+
+"@babel/helper-remap-async-to-generator@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.12.13.tgz#170365f4140e2d20e5c88f8ba23c24468c296878"
+ integrity sha512-Qa6PU9vNcj1NZacZZI1Mvwt+gXDH6CTfgAkSjeRMLE8HxtDK76+YDId6NQR+z7Rgd5arhD2cIbS74r0SxD6PDA==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.12.13"
+ "@babel/helper-wrap-function" "^7.12.13"
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-replace-supers@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.12.13.tgz#00ec4fb6862546bd3d0aff9aac56074277173121"
+ integrity sha512-pctAOIAMVStI2TMLhozPKbf5yTEXc0OJa0eENheb4w09SrgOWEs+P4nTOZYJQCqs8JlErGLDPDJTiGIp3ygbLg==
+ dependencies:
+ "@babel/helper-member-expression-to-functions" "^7.12.13"
+ "@babel/helper-optimise-call-expression" "^7.12.13"
+ "@babel/traverse" "^7.12.13"
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-simple-access@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.12.13.tgz#8478bcc5cacf6aa1672b251c1d2dde5ccd61a6c4"
+ integrity sha512-0ski5dyYIHEfwpWGx5GPWhH35j342JaflmCeQmsPWcrOQDtCN6C1zKAVRFVbK53lPW2c9TsuLLSUDf0tIGJ5hA==
+ dependencies:
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-skip-transparent-expression-wrappers@^7.12.1":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.12.1.tgz#462dc63a7e435ade8468385c63d2b84cce4b3cbf"
+ integrity sha512-Mf5AUuhG1/OCChOJ/HcADmvcHM42WJockombn8ATJG3OnyiSxBK/Mm5x78BQWvmtXZKHgbjdGL2kin/HOLlZGA==
+ dependencies:
+ "@babel/types" "^7.12.1"
+
+"@babel/helper-split-export-declaration@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.12.13.tgz#e9430be00baf3e88b0e13e6f9d4eaf2136372b05"
+ integrity sha512-tCJDltF83htUtXx5NLcaDqRmknv652ZWCHyoTETf1CXYJdPC7nohZohjUgieXhv0hTJdRf2FjDueFehdNucpzg==
+ dependencies:
+ "@babel/types" "^7.12.13"
+
+"@babel/helper-validator-identifier@^7.12.11":
+ version "7.12.11"
+ resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed"
+ integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==
+
+"@babel/helper-validator-option@^7.12.1", "@babel/helper-validator-option@^7.12.17":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.12.17.tgz#d1fbf012e1a79b7eebbfdc6d270baaf8d9eb9831"
+ integrity sha512-TopkMDmLzq8ngChwRlyjR6raKD6gMSae4JdYDB8bByKreQgG0RBTuKe9LRxW3wFtUnjxOPRKBDwEH6Mg5KeDfw==
+
+"@babel/helper-wrap-function@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.12.13.tgz#e3ea8cb3ee0a16911f9c1b50d9e99fe8fe30f9ff"
+ integrity sha512-t0aZFEmBJ1LojdtJnhOaQEVejnzYhyjWHSsNSNo8vOYRbAJNh6r6GQF7pd36SqG7OKGbn+AewVQ/0IfYfIuGdw==
+ dependencies:
+ "@babel/helper-function-name" "^7.12.13"
+ "@babel/template" "^7.12.13"
+ "@babel/traverse" "^7.12.13"
+ "@babel/types" "^7.12.13"
+
+"@babel/helpers@^7.12.1", "@babel/helpers@^7.12.17":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.12.17.tgz#71e03d2981a6b5ee16899964f4101dc8471d60bc"
+ integrity sha512-tEpjqSBGt/SFEsFikKds1sLNChKKGGR17flIgQKXH4fG6m9gTgl3gnOC1giHNyaBCSKuTfxaSzHi7UnvqiVKxg==
+ dependencies:
+ "@babel/template" "^7.12.13"
+ "@babel/traverse" "^7.12.17"
+ "@babel/types" "^7.12.17"
+
+"@babel/highlight@^7.10.4", "@babel/highlight@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.12.13.tgz#8ab538393e00370b26271b01fa08f7f27f2e795c"
+ integrity sha512-kocDQvIbgMKlWxXe9fof3TQ+gkIPOUSEYhJjqUjvKMez3krV7vbzYCDq39Oj11UAVK7JqPVGQPlgE85dPNlQww==
+ dependencies:
+ "@babel/helper-validator-identifier" "^7.12.11"
+ chalk "^2.0.0"
+ js-tokens "^4.0.0"
+
+"@babel/parser@^7.1.0", "@babel/parser@^7.12.13", "@babel/parser@^7.12.17", "@babel/parser@^7.12.3", "@babel/parser@^7.7.0":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.12.17.tgz#bc85d2d47db38094e5bb268fc761716e7d693848"
+ integrity sha512-r1yKkiUTYMQ8LiEI0UcQx5ETw5dpTLn9wijn9hk6KkTtOK95FndDN10M+8/s6k/Ymlbivw0Av9q4SlgF80PtHg==
+
+"@babel/plugin-proposal-async-generator-functions@^7.12.1", "@babel/plugin-proposal-async-generator-functions@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.12.13.tgz#d1c6d841802ffb88c64a2413e311f7345b9e66b5"
+ integrity sha512-1KH46Hx4WqP77f978+5Ye/VUbuwQld2hph70yaw2hXS2v7ER2f3nlpNMu909HO2rbvP0NKLlMVDPh9KXklVMhA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/helper-remap-async-to-generator" "^7.12.13"
+ "@babel/plugin-syntax-async-generators" "^7.8.0"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.12.1.tgz#a082ff541f2a29a4821065b8add9346c0c16e5de"
+ integrity sha512-cKp3dlQsFsEs5CWKnN7BnSHOd0EOW8EKpEjkoz1pO2E5KzIDNV9Ros1b0CnmbVgAGXJubOYVBOGCT1OmJwOI7w==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.12.1"
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-proposal-class-properties@^7.12.1", "@babel/plugin-proposal-class-properties@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.12.13.tgz#3d2ce350367058033c93c098e348161d6dc0d8c8"
+ integrity sha512-8SCJ0Ddrpwv4T7Gwb33EmW1V9PY5lggTO+A8WjyIwxrSHDUyBw4MtF96ifn1n8H806YlxbVCoKXbbmzD6RD+cA==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.12.1.tgz#59271439fed4145456c41067450543aee332d15f"
+ integrity sha512-knNIuusychgYN8fGJHONL0RbFxLGawhXOJNLBk75TniTsZZeA+wdkDuv6wp4lGwzQEKjZi6/WYtnb3udNPmQmQ==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.12.1"
+ "@babel/helper-plugin-utils" "^7.10.4"
+ "@babel/plugin-syntax-decorators" "^7.12.1"
+
+"@babel/plugin-proposal-dynamic-import@^7.12.1", "@babel/plugin-proposal-dynamic-import@^7.12.17":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.12.17.tgz#e0ebd8db65acc37eac518fa17bead2174e224512"
+ integrity sha512-ZNGoFZqrnuy9H2izB2jLlnNDAfVPlGl5NhFEiFe4D84ix9GQGygF+CWMGHKuE+bpyS/AOuDQCnkiRNqW2IzS1Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.0"
+
+"@babel/plugin-proposal-export-namespace-from@^7.12.1", "@babel/plugin-proposal-export-namespace-from@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.12.13.tgz#393be47a4acd03fa2af6e3cde9b06e33de1b446d"
+ integrity sha512-INAgtFo4OnLN3Y/j0VwAgw3HDXcDtX+C/erMvWzuV9v71r7urb6iyMXu7eM9IgLr1ElLlOkaHjJ0SbCmdOQ3Iw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/plugin-syntax-export-namespace-from" "^7.8.3"
+
+"@babel/plugin-proposal-json-strings@^7.12.1", "@babel/plugin-proposal-json-strings@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.12.13.tgz#ced7888a2db92a3d520a2e35eb421fdb7fcc9b5d"
+ integrity sha512-v9eEi4GiORDg8x+Dmi5r8ibOe0VXoKDeNPYcTTxdGN4eOWikrJfDJCJrr1l5gKGvsNyGJbrfMftC2dTL6oz7pg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/plugin-syntax-json-strings" "^7.8.0"
+
+"@babel/plugin-proposal-logical-assignment-operators@^7.12.1", "@babel/plugin-proposal-logical-assignment-operators@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.12.13.tgz#575b5d9a08d8299eeb4db6430da6e16e5cf14350"
+ integrity sha512-fqmiD3Lz7jVdK6kabeSr1PZlWSUVqSitmHEe3Z00dtGTKieWnX9beafvavc32kjORa5Bai4QNHgFDwWJP+WtSQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.12.1.tgz#3ed4fff31c015e7f3f1467f190dbe545cd7b046c"
+ integrity sha512-nZY0ESiaQDI1y96+jk6VxMOaL4LPo/QDHBqL+SF3/vl6dHkTwHlOI8L4ZwuRBHgakRBw5zsVylel7QPbbGuYgg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0"
+
+"@babel/plugin-proposal-nullish-coalescing-operator@^7.12.1", "@babel/plugin-proposal-nullish-coalescing-operator@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.12.13.tgz#24867307285cee4e1031170efd8a7ac807deefde"
+ integrity sha512-Qoxpy+OxhDBI5kRqliJFAl4uWXk3Bn24WeFstPH0iLymFehSAUR8MHpqU7njyXv/qbo7oN6yTy5bfCmXdKpo1Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.12.1.tgz#0e2c6774c4ce48be412119b4d693ac777f7685a6"
+ integrity sha512-MR7Ok+Af3OhNTCxYVjJZHS0t97ydnJZt/DbR4WISO39iDnhiD8XHrY12xuSJ90FFEGjir0Fzyyn7g/zY6hxbxA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+ "@babel/plugin-syntax-numeric-separator" "^7.10.4"
+
+"@babel/plugin-proposal-numeric-separator@^7.12.1", "@babel/plugin-proposal-numeric-separator@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.12.13.tgz#bd9da3188e787b5120b4f9d465a8261ce67ed1db"
+ integrity sha512-O1jFia9R8BUCl3ZGB7eitaAPu62TXJRHn7rh+ojNERCFyqRwJMTmhz+tJ+k0CwI6CLjX/ee4qW74FSqlq9I35w==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/plugin-syntax-numeric-separator" "^7.10.4"
+
+"@babel/plugin-proposal-object-rest-spread@^7.12.1", "@babel/plugin-proposal-object-rest-spread@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.13.tgz#f93f3116381ff94bc676fdcb29d71045cd1ec011"
+ integrity sha512-WvA1okB/0OS/N3Ldb3sziSrXg6sRphsBgqiccfcQq7woEn5wQLNX82Oc4PlaFcdwcWHuQXAtb8ftbS8Fbsg/sg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.0"
+ "@babel/plugin-transform-parameters" "^7.12.13"
+
+"@babel/plugin-proposal-optional-catch-binding@^7.12.1", "@babel/plugin-proposal-optional-catch-binding@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.12.13.tgz#4640520afe57728af14b4d1574ba844f263bcae5"
+ integrity sha512-9+MIm6msl9sHWg58NvqpNpLtuFbmpFYk37x8kgnGzAHvX35E1FyAwSUt5hIkSoWJFSAH+iwU8bJ4fcD1zKXOzg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.0"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.12.1.tgz#cce122203fc8a32794296fc377c6dedaf4363797"
+ integrity sha512-c2uRpY6WzaVDzynVY9liyykS+kVU+WRZPMPYpkelXH8KBt1oXoI89kPbZKKG/jDT5UK92FTW2fZkZaJhdiBabw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.12.1"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.0"
+
+"@babel/plugin-proposal-optional-chaining@^7.12.1", "@babel/plugin-proposal-optional-chaining@^7.12.17":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.12.17.tgz#e382becadc2cb16b7913b6c672d92e4b33385b5c"
+ integrity sha512-TvxwI80pWftrGPKHNfkvX/HnoeSTR7gC4ezWnAL39PuktYUe6r8kEpOLTYnkBTsaoeazXm2jHJ22EQ81sdgfcA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.12.1"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.0"
+
+"@babel/plugin-proposal-private-methods@^7.12.1", "@babel/plugin-proposal-private-methods@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.12.13.tgz#ea78a12554d784ecf7fc55950b752d469d9c4a71"
+ integrity sha512-sV0V57uUwpauixvR7s2o75LmwJI6JECwm5oPUY5beZB1nBl2i37hc7CJGqB5G+58fur5Y6ugvl3LRONk5x34rg==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-proposal-unicode-property-regex@^7.12.1", "@babel/plugin-proposal-unicode-property-regex@^7.12.13", "@babel/plugin-proposal-unicode-property-regex@^7.4.4":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.12.13.tgz#bebde51339be829c17aaaaced18641deb62b39ba"
+ integrity sha512-XyJmZidNfofEkqFV5VC/bLabGmO5QzenPO/YOfGuEbgU+2sSwMmio3YLb4WtBgcmmdwZHyVyv8on77IUjQ5Gvg==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-syntax-async-generators@^7.8.0", "@babel/plugin-syntax-async-generators@^7.8.4":
+ version "7.8.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d"
+ integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-bigint@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea"
+ integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-class-properties@^7.12.1", "@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10"
+ integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-syntax-decorators@^7.12.1":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.12.13.tgz#fac829bf3c7ef4a1bc916257b403e58c6bdaf648"
+ integrity sha512-Rw6aIXGuqDLr6/LoBBYE57nKOzQpz/aDkKlMqEwH+Vp0MXbG6H/TfRjaY343LKxzAKAMXIHsQ8JzaZKuDZ9MwA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-syntax-dynamic-import@^7.8.0":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3"
+ integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-export-namespace-from@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a"
+ integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.3"
+
+"@babel/plugin-syntax-flow@^7.12.1":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.12.13.tgz#5df9962503c0a9c918381c929d51d4d6949e7e86"
+ integrity sha512-J/RYxnlSLXZLVR7wTRsozxKT8qbsx1mNKJzXEEjQ0Kjx1ZACcyHgbanNWNCFtc36IzuWhYWPpvJFFoexoOWFmA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-syntax-import-meta@^7.8.3":
+ version "7.10.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51"
+ integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-json-strings@^7.8.0", "@babel/plugin-syntax-json-strings@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a"
+ integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-jsx@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.13.tgz#044fb81ebad6698fe62c478875575bcbb9b70f15"
+ integrity sha512-d4HM23Q1K7oq/SLNmG6mRt85l2csmQ0cHRaxRXjKW0YFdEXqlZ5kzFQKH5Uc3rDJECgu+yCRgPkG04Mm98R/1g==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3":
+ version "7.10.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699"
+ integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.0", "@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9"
+ integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3":
+ version "7.10.4"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97"
+ integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-syntax-object-rest-spread@^7.8.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871"
+ integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-optional-catch-binding@^7.8.0", "@babel/plugin-syntax-optional-catch-binding@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1"
+ integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-optional-chaining@^7.8.0", "@babel/plugin-syntax-optional-chaining@^7.8.3":
+ version "7.8.3"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a"
+ integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.8.0"
+
+"@babel/plugin-syntax-top-level-await@^7.12.1", "@babel/plugin-syntax-top-level-await@^7.12.13", "@babel/plugin-syntax-top-level-await@^7.8.3":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.12.13.tgz#c5f0fa6e249f5b739727f923540cf7a806130178"
+ integrity sha512-A81F9pDwyS7yM//KwbCSDqy3Uj4NMIurtplxphWxoYtNPov7cJsDkAFNNyVlIZ3jwGycVsurZ+LtOA8gZ376iQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-syntax-typescript@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.12.13.tgz#9dff111ca64154cef0f4dc52cf843d9f12ce4474"
+ integrity sha512-cHP3u1JiUiG2LFDKbXnwVad81GvfyIOmCD6HIEId6ojrY0Drfy2q1jw7BwN7dE84+kTnBjLkXoL3IEy/3JPu2w==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-arrow-functions@^7.12.1", "@babel/plugin-transform-arrow-functions@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.12.13.tgz#eda5670b282952100c229f8a3bd49e0f6a72e9fe"
+ integrity sha512-tBtuN6qtCTd+iHzVZVOMNp+L04iIJBpqkdY42tWbmjIT5wvR2kx7gxMBsyhQtFzHwBbyGi9h8J8r9HgnOpQHxg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-async-to-generator@^7.12.1", "@babel/plugin-transform-async-to-generator@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.12.13.tgz#fed8c69eebf187a535bfa4ee97a614009b24f7ae"
+ integrity sha512-psM9QHcHaDr+HZpRuJcE1PXESuGWSCcbiGFFhhwfzdbTxaGDVzuVtdNYliAwcRo3GFg0Bc8MmI+AvIGYIJG04A==
+ dependencies:
+ "@babel/helper-module-imports" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/helper-remap-async-to-generator" "^7.12.13"
+
+"@babel/plugin-transform-block-scoped-functions@^7.12.1", "@babel/plugin-transform-block-scoped-functions@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.12.13.tgz#a9bf1836f2a39b4eb6cf09967739de29ea4bf4c4"
+ integrity sha512-zNyFqbc3kI/fVpqwfqkg6RvBgFpC4J18aKKMmv7KdQ/1GgREapSJAykLMVNwfRGO3BtHj3YQZl8kxCXPcVMVeg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-block-scoping@^7.12.1", "@babel/plugin-transform-block-scoping@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.12.13.tgz#f36e55076d06f41dfd78557ea039c1b581642e61"
+ integrity sha512-Pxwe0iqWJX4fOOM2kEZeUuAxHMWb9nK+9oh5d11bsLoB0xMg+mkDpt0eYuDZB7ETrY9bbcVlKUGTOGWy7BHsMQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-classes@^7.12.1", "@babel/plugin-transform-classes@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.12.13.tgz#9728edc1838b5d62fc93ad830bd523b1fcb0e1f6"
+ integrity sha512-cqZlMlhCC1rVnxE5ZGMtIb896ijL90xppMiuWXcwcOAuFczynpd3KYemb91XFFPi3wJSe/OcrX9lXoowatkkxA==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.12.13"
+ "@babel/helper-function-name" "^7.12.13"
+ "@babel/helper-optimise-call-expression" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/helper-replace-supers" "^7.12.13"
+ "@babel/helper-split-export-declaration" "^7.12.13"
+ globals "^11.1.0"
+
+"@babel/plugin-transform-computed-properties@^7.12.1", "@babel/plugin-transform-computed-properties@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.12.13.tgz#6a210647a3d67f21f699cfd2a01333803b27339d"
+ integrity sha512-dDfuROUPGK1mTtLKyDPUavmj2b6kFu82SmgpztBFEO974KMjJT+Ytj3/oWsTUMBmgPcp9J5Pc1SlcAYRpJ2hRA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-destructuring@^7.12.1", "@babel/plugin-transform-destructuring@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.12.13.tgz#fc56c5176940c5b41735c677124d1d20cecc9aeb"
+ integrity sha512-Dn83KykIFzjhA3FDPA1z4N+yfF3btDGhjnJwxIj0T43tP0flCujnU8fKgEkf0C1biIpSv9NZegPBQ1J6jYkwvQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-dotall-regex@^7.12.1", "@babel/plugin-transform-dotall-regex@^7.12.13", "@babel/plugin-transform-dotall-regex@^7.4.4":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.12.13.tgz#3f1601cc29905bfcb67f53910f197aeafebb25ad"
+ integrity sha512-foDrozE65ZFdUC2OfgeOCrEPTxdB3yjqxpXh8CH+ipd9CHd4s/iq81kcUpyH8ACGNEPdFqbtzfgzbT/ZGlbDeQ==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-duplicate-keys@^7.12.1", "@babel/plugin-transform-duplicate-keys@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.12.13.tgz#6f06b87a8b803fd928e54b81c258f0a0033904de"
+ integrity sha512-NfADJiiHdhLBW3pulJlJI2NB0t4cci4WTZ8FtdIuNc2+8pslXdPtRRAEWqUY+m9kNOk2eRYbTAOipAxlrOcwwQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-exponentiation-operator@^7.12.1", "@babel/plugin-transform-exponentiation-operator@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.12.13.tgz#4d52390b9a273e651e4aba6aee49ef40e80cd0a1"
+ integrity sha512-fbUelkM1apvqez/yYx1/oICVnGo2KM5s63mhGylrmXUxK/IAXSIf87QIxVfZldWf4QsOafY6vV3bX8aMHSvNrA==
+ dependencies:
+ "@babel/helper-builder-binary-assignment-operator-visitor" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.12.1.tgz#8430decfa7eb2aea5414ed4a3fa6e1652b7d77c4"
+ integrity sha512-8hAtkmsQb36yMmEtk2JZ9JnVyDSnDOdlB+0nEGzIDLuK4yR3JcEjfuFPYkdEPSh8Id+rAMeBEn+X0iVEyho6Hg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+ "@babel/plugin-syntax-flow" "^7.12.1"
+
+"@babel/plugin-transform-for-of@^7.12.1", "@babel/plugin-transform-for-of@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.12.13.tgz#561ff6d74d9e1c8879cb12dbaf4a14cd29d15cf6"
+ integrity sha512-xCbdgSzXYmHGyVX3+BsQjcd4hv4vA/FDy7Kc8eOpzKmBBPEOTurt0w5fCRQaGl+GSBORKgJdstQ1rHl4jbNseQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-function-name@^7.12.1", "@babel/plugin-transform-function-name@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.12.13.tgz#bb024452f9aaed861d374c8e7a24252ce3a50051"
+ integrity sha512-6K7gZycG0cmIwwF7uMK/ZqeCikCGVBdyP2J5SKNCXO5EOHcqi+z7Jwf8AmyDNcBgxET8DrEtCt/mPKPyAzXyqQ==
+ dependencies:
+ "@babel/helper-function-name" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-literals@^7.12.1", "@babel/plugin-transform-literals@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.12.13.tgz#2ca45bafe4a820197cf315794a4d26560fe4bdb9"
+ integrity sha512-FW+WPjSR7hiUxMcKqyNjP05tQ2kmBCdpEpZHY1ARm96tGQCCBvXKnpjILtDplUnJ/eHZ0lALLM+d2lMFSpYJrQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-member-expression-literals@^7.12.1", "@babel/plugin-transform-member-expression-literals@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.12.13.tgz#5ffa66cd59b9e191314c9f1f803b938e8c081e40"
+ integrity sha512-kxLkOsg8yir4YeEPHLuO2tXP9R/gTjpuTOjshqSpELUN3ZAg2jfDnKUvzzJxObun38sw3wm4Uu69sX/zA7iRvg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-modules-amd@^7.12.1", "@babel/plugin-transform-modules-amd@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.12.13.tgz#43db16249b274ee2e551e2422090aa1c47692d56"
+ integrity sha512-JHLOU0o81m5UqG0Ulz/fPC68/v+UTuGTWaZBUwpEk1fYQ1D9LfKV6MPn4ttJKqRo5Lm460fkzjLTL4EHvCprvA==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+ babel-plugin-dynamic-import-node "^2.3.3"
+
+"@babel/plugin-transform-modules-commonjs@^7.12.1", "@babel/plugin-transform-modules-commonjs@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.12.13.tgz#5043b870a784a8421fa1fd9136a24f294da13e50"
+ integrity sha512-OGQoeVXVi1259HjuoDnsQMlMkT9UkZT9TpXAsqWplS/M0N1g3TJAn/ByOCeQu7mfjc5WpSsRU+jV1Hd89ts0kQ==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/helper-simple-access" "^7.12.13"
+ babel-plugin-dynamic-import-node "^2.3.3"
+
+"@babel/plugin-transform-modules-systemjs@^7.12.1", "@babel/plugin-transform-modules-systemjs@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.12.13.tgz#351937f392c7f07493fc79b2118201d50404a3c5"
+ integrity sha512-aHfVjhZ8QekaNF/5aNdStCGzwTbU7SI5hUybBKlMzqIMC7w7Ho8hx5a4R/DkTHfRfLwHGGxSpFt9BfxKCoXKoA==
+ dependencies:
+ "@babel/helper-hoist-variables" "^7.12.13"
+ "@babel/helper-module-transforms" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/helper-validator-identifier" "^7.12.11"
+ babel-plugin-dynamic-import-node "^2.3.3"
+
+"@babel/plugin-transform-modules-umd@^7.12.1", "@babel/plugin-transform-modules-umd@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.12.13.tgz#26c66f161d3456674e344b4b1255de4d530cfb37"
+ integrity sha512-BgZndyABRML4z6ibpi7Z98m4EVLFI9tVsZDADC14AElFaNHHBcJIovflJ6wtCqFxwy2YJ1tJhGRsr0yLPKoN+w==
+ dependencies:
+ "@babel/helper-module-transforms" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-named-capturing-groups-regex@^7.12.1", "@babel/plugin-transform-named-capturing-groups-regex@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.12.13.tgz#2213725a5f5bbbe364b50c3ba5998c9599c5c9d9"
+ integrity sha512-Xsm8P2hr5hAxyYblrfACXpQKdQbx4m2df9/ZZSQ8MAhsadw06+jW7s9zsSw6he+mJZXRlVMyEnVktJo4zjk1WA==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.12.13"
+
+"@babel/plugin-transform-new-target@^7.12.1", "@babel/plugin-transform-new-target@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.12.13.tgz#e22d8c3af24b150dd528cbd6e685e799bf1c351c"
+ integrity sha512-/KY2hbLxrG5GTQ9zzZSc3xWiOy379pIETEhbtzwZcw9rvuaVV4Fqy7BYGYOWZnaoXIQYbbJ0ziXLa/sKcGCYEQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-object-super@^7.12.1", "@babel/plugin-transform-object-super@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.12.13.tgz#b4416a2d63b8f7be314f3d349bd55a9c1b5171f7"
+ integrity sha512-JzYIcj3XtYspZDV8j9ulnoMPZZnF/Cj0LUxPOjR89BdBVx+zYJI9MdMIlUZjbXDX+6YVeS6I3e8op+qQ3BYBoQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/helper-replace-supers" "^7.12.13"
+
+"@babel/plugin-transform-parameters@^7.12.1", "@babel/plugin-transform-parameters@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.12.13.tgz#461e76dfb63c2dfd327b8a008a9e802818ce9853"
+ integrity sha512-e7QqwZalNiBRHCpJg/P8s/VJeSRYgmtWySs1JwvfwPqhBbiWfOcHDKdeAi6oAyIimoKWBlwc8oTgbZHdhCoVZA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-property-literals@^7.12.1", "@babel/plugin-transform-property-literals@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.12.13.tgz#4e6a9e37864d8f1b3bc0e2dce7bf8857db8b1a81"
+ integrity sha512-nqVigwVan+lR+g8Fj8Exl0UQX2kymtjcWfMOYM1vTYEKujeyv2SkMgazf2qNcK7l4SDiKyTA/nHCPqL4e2zo1A==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-react-constant-elements@^7.12.1":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.12.13.tgz#f8ee56888545d53d80f766b3cc1563ab2c241f92"
+ integrity sha512-qmzKVTn46Upvtxv8LQoQ8mTCdUC83AOVQIQm57e9oekLT5cmK9GOMOfcWhe8jMNx4UJXn/UDhVZ/7lGofVNeDQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.12.1.tgz#1cbcd0c3b1d6648c55374a22fc9b6b7e5341c00d"
+ integrity sha512-cAzB+UzBIrekfYxyLlFqf/OagTvHLcVBb5vpouzkYkBclRPraiygVnafvAoipErZLI8ANv8Ecn6E/m5qPXD26w==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-transform-react-display-name@^7.12.1", "@babel/plugin-transform-react-display-name@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.12.13.tgz#c28effd771b276f4647411c9733dbb2d2da954bd"
+ integrity sha512-MprESJzI9O5VnJZrL7gg1MpdqmiFcUv41Jc7SahxYsNP2kDkFqClxxTZq+1Qv4AFCamm+GXMRDQINNn+qrxmiA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-react-jsx-development@^7.12.1", "@babel/plugin-transform-react-jsx-development@^7.12.12":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.12.17.tgz#f510c0fa7cd7234153539f9a362ced41a5ca1447"
+ integrity sha512-BPjYV86SVuOaudFhsJR1zjgxxOhJDt6JHNoD48DxWEIxUCAMjV1ys6DYw4SDYZh0b1QsS2vfIA9t/ZsQGsDOUQ==
+ dependencies:
+ "@babel/plugin-transform-react-jsx" "^7.12.17"
+
+"@babel/plugin-transform-react-jsx-self@^7.12.1":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.12.13.tgz#422d99d122d592acab9c35ea22a6cfd9bf189f60"
+ integrity sha512-FXYw98TTJ125GVCCkFLZXlZ1qGcsYqNQhVBQcZjyrwf8FEUtVfKIoidnO8S0q+KBQpDYNTmiGo1gn67Vti04lQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-react-jsx-source@^7.12.1":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.12.13.tgz#051d76126bee5c9a6aa3ba37be2f6c1698856bcb"
+ integrity sha512-O5JJi6fyfih0WfDgIJXksSPhGP/G0fQpfxYy87sDc+1sFmsCS6wr3aAn+whbzkhbjtq4VMqLRaSzR6IsshIC0Q==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-react-jsx@^7.12.1", "@babel/plugin-transform-react-jsx@^7.12.13", "@babel/plugin-transform-react-jsx@^7.12.17":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.12.17.tgz#dd2c1299f5e26de584939892de3cfc1807a38f24"
+ integrity sha512-mwaVNcXV+l6qJOuRhpdTEj8sT/Z0owAVWf9QujTZ0d2ye9X/K+MTOTSizcgKOj18PGnTc/7g1I4+cIUjsKhBcw==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.12.13"
+ "@babel/helper-module-imports" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/plugin-syntax-jsx" "^7.12.13"
+ "@babel/types" "^7.12.17"
+
+"@babel/plugin-transform-react-pure-annotations@^7.12.1":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.12.1.tgz#05d46f0ab4d1339ac59adf20a1462c91b37a1a42"
+ integrity sha512-RqeaHiwZtphSIUZ5I85PEH19LOSzxfuEazoY7/pWASCAIBuATQzpSVD+eT6MebeeZT2F4eSL0u4vw6n4Nm0Mjg==
+ dependencies:
+ "@babel/helper-annotate-as-pure" "^7.10.4"
+ "@babel/helper-plugin-utils" "^7.10.4"
+
+"@babel/plugin-transform-regenerator@^7.12.1", "@babel/plugin-transform-regenerator@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.12.13.tgz#b628bcc9c85260ac1aeb05b45bde25210194a2f5"
+ integrity sha512-lxb2ZAvSLyJ2PEe47hoGWPmW22v7CtSl9jW8mingV4H2sEX/JOcrAj2nPuGWi56ERUm2bUpjKzONAuT6HCn2EA==
+ dependencies:
+ regenerator-transform "^0.14.2"
+
+"@babel/plugin-transform-reserved-words@^7.12.1", "@babel/plugin-transform-reserved-words@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.12.13.tgz#7d9988d4f06e0fe697ea1d9803188aa18b472695"
+ integrity sha512-xhUPzDXxZN1QfiOy/I5tyye+TRz6lA7z6xaT4CLOjPRMVg1ldRf0LHw0TDBpYL4vG78556WuHdyO9oi5UmzZBg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.12.1.tgz#04b792057eb460389ff6a4198e377614ea1e7ba5"
+ integrity sha512-Ac/H6G9FEIkS2tXsZjL4RAdS3L3WHxci0usAnz7laPWUmFiGtj7tIASChqKZMHTSQTQY6xDbOq+V1/vIq3QrWg==
+ dependencies:
+ "@babel/helper-module-imports" "^7.12.1"
+ "@babel/helper-plugin-utils" "^7.10.4"
+ resolve "^1.8.1"
+ semver "^5.5.1"
+
+"@babel/plugin-transform-shorthand-properties@^7.12.1", "@babel/plugin-transform-shorthand-properties@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.12.13.tgz#db755732b70c539d504c6390d9ce90fe64aff7ad"
+ integrity sha512-xpL49pqPnLtf0tVluuqvzWIgLEhuPpZzvs2yabUHSKRNlN7ScYU7aMlmavOeyXJZKgZKQRBlh8rHbKiJDraTSw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-spread@^7.12.1", "@babel/plugin-transform-spread@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.12.13.tgz#ca0d5645abbd560719c354451b849f14df4a7949"
+ integrity sha512-dUCrqPIowjqk5pXsx1zPftSq4sT0aCeZVAxhdgs3AMgyaDmoUT0G+5h3Dzja27t76aUEIJWlFgPJqJ/d4dbTtg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/helper-skip-transparent-expression-wrappers" "^7.12.1"
+
+"@babel/plugin-transform-sticky-regex@^7.12.1", "@babel/plugin-transform-sticky-regex@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.12.13.tgz#760ffd936face73f860ae646fb86ee82f3d06d1f"
+ integrity sha512-Jc3JSaaWT8+fr7GRvQP02fKDsYk4K/lYwWq38r/UGfaxo89ajud321NH28KRQ7xy1Ybc0VUE5Pz8psjNNDUglg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-template-literals@^7.12.1", "@babel/plugin-transform-template-literals@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.12.13.tgz#655037b07ebbddaf3b7752f55d15c2fd6f5aa865"
+ integrity sha512-arIKlWYUgmNsF28EyfmiQHJLJFlAJNYkuQO10jL46ggjBpeb2re1P9K9YGxNJB45BqTbaslVysXDYm/g3sN/Qg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-typeof-symbol@^7.12.1", "@babel/plugin-transform-typeof-symbol@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.12.13.tgz#785dd67a1f2ea579d9c2be722de8c84cb85f5a7f"
+ integrity sha512-eKv/LmUJpMnu4npgfvs3LiHhJua5fo/CysENxa45YCQXZwKnGCQKAg87bvoqSW1fFT+HA32l03Qxsm8ouTY3ZQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-typescript@^7.12.1":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.12.17.tgz#4aa6a5041888dd2e5d316ec39212b0cf855211bb"
+ integrity sha512-1bIYwnhRoetxkFonuZRtDZPFEjl1l5r+3ITkxLC3mlMaFja+GQFo94b/WHEPjqWLU9Bc+W4oFZbvCGe9eYMu1g==
+ dependencies:
+ "@babel/helper-create-class-features-plugin" "^7.12.17"
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/plugin-syntax-typescript" "^7.12.13"
+
+"@babel/plugin-transform-unicode-escapes@^7.12.1", "@babel/plugin-transform-unicode-escapes@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.12.13.tgz#840ced3b816d3b5127dd1d12dcedc5dead1a5e74"
+ integrity sha512-0bHEkdwJ/sN/ikBHfSmOXPypN/beiGqjo+o4/5K+vxEFNPRPdImhviPakMKG4x96l85emoa0Z6cDflsdBusZbw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/plugin-transform-unicode-regex@^7.12.1", "@babel/plugin-transform-unicode-regex@^7.12.13":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.12.13.tgz#b52521685804e155b1202e83fc188d34bb70f5ac"
+ integrity sha512-mDRzSNY7/zopwisPZ5kM9XKCfhchqIYwAKRERtEnhYscZB79VRekuRSoYbN0+KVe3y8+q1h6A4svXtP7N+UoCA==
+ dependencies:
+ "@babel/helper-create-regexp-features-plugin" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.12.1.tgz#9c7e5ca82a19efc865384bb4989148d2ee5d7ac2"
+ integrity sha512-H8kxXmtPaAGT7TyBvSSkoSTUK6RHh61So05SyEbpmr0MCZrsNYn7mGMzzeYoOUCdHzww61k8XBft2TaES+xPLg==
+ dependencies:
+ "@babel/compat-data" "^7.12.1"
+ "@babel/helper-compilation-targets" "^7.12.1"
+ "@babel/helper-module-imports" "^7.12.1"
+ "@babel/helper-plugin-utils" "^7.10.4"
+ "@babel/helper-validator-option" "^7.12.1"
+ "@babel/plugin-proposal-async-generator-functions" "^7.12.1"
+ "@babel/plugin-proposal-class-properties" "^7.12.1"
+ "@babel/plugin-proposal-dynamic-import" "^7.12.1"
+ "@babel/plugin-proposal-export-namespace-from" "^7.12.1"
+ "@babel/plugin-proposal-json-strings" "^7.12.1"
+ "@babel/plugin-proposal-logical-assignment-operators" "^7.12.1"
+ "@babel/plugin-proposal-nullish-coalescing-operator" "^7.12.1"
+ "@babel/plugin-proposal-numeric-separator" "^7.12.1"
+ "@babel/plugin-proposal-object-rest-spread" "^7.12.1"
+ "@babel/plugin-proposal-optional-catch-binding" "^7.12.1"
+ "@babel/plugin-proposal-optional-chaining" "^7.12.1"
+ "@babel/plugin-proposal-private-methods" "^7.12.1"
+ "@babel/plugin-proposal-unicode-property-regex" "^7.12.1"
+ "@babel/plugin-syntax-async-generators" "^7.8.0"
+ "@babel/plugin-syntax-class-properties" "^7.12.1"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.0"
+ "@babel/plugin-syntax-export-namespace-from" "^7.8.3"
+ "@babel/plugin-syntax-json-strings" "^7.8.0"
+ "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0"
+ "@babel/plugin-syntax-numeric-separator" "^7.10.4"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.0"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.0"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.0"
+ "@babel/plugin-syntax-top-level-await" "^7.12.1"
+ "@babel/plugin-transform-arrow-functions" "^7.12.1"
+ "@babel/plugin-transform-async-to-generator" "^7.12.1"
+ "@babel/plugin-transform-block-scoped-functions" "^7.12.1"
+ "@babel/plugin-transform-block-scoping" "^7.12.1"
+ "@babel/plugin-transform-classes" "^7.12.1"
+ "@babel/plugin-transform-computed-properties" "^7.12.1"
+ "@babel/plugin-transform-destructuring" "^7.12.1"
+ "@babel/plugin-transform-dotall-regex" "^7.12.1"
+ "@babel/plugin-transform-duplicate-keys" "^7.12.1"
+ "@babel/plugin-transform-exponentiation-operator" "^7.12.1"
+ "@babel/plugin-transform-for-of" "^7.12.1"
+ "@babel/plugin-transform-function-name" "^7.12.1"
+ "@babel/plugin-transform-literals" "^7.12.1"
+ "@babel/plugin-transform-member-expression-literals" "^7.12.1"
+ "@babel/plugin-transform-modules-amd" "^7.12.1"
+ "@babel/plugin-transform-modules-commonjs" "^7.12.1"
+ "@babel/plugin-transform-modules-systemjs" "^7.12.1"
+ "@babel/plugin-transform-modules-umd" "^7.12.1"
+ "@babel/plugin-transform-named-capturing-groups-regex" "^7.12.1"
+ "@babel/plugin-transform-new-target" "^7.12.1"
+ "@babel/plugin-transform-object-super" "^7.12.1"
+ "@babel/plugin-transform-parameters" "^7.12.1"
+ "@babel/plugin-transform-property-literals" "^7.12.1"
+ "@babel/plugin-transform-regenerator" "^7.12.1"
+ "@babel/plugin-transform-reserved-words" "^7.12.1"
+ "@babel/plugin-transform-shorthand-properties" "^7.12.1"
+ "@babel/plugin-transform-spread" "^7.12.1"
+ "@babel/plugin-transform-sticky-regex" "^7.12.1"
+ "@babel/plugin-transform-template-literals" "^7.12.1"
+ "@babel/plugin-transform-typeof-symbol" "^7.12.1"
+ "@babel/plugin-transform-unicode-escapes" "^7.12.1"
+ "@babel/plugin-transform-unicode-regex" "^7.12.1"
+ "@babel/preset-modules" "^0.1.3"
+ "@babel/types" "^7.12.1"
+ core-js-compat "^3.6.2"
+ semver "^5.5.0"
+
+"@babel/preset-env@^7.12.1", "@babel/preset-env@^7.8.4":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.12.17.tgz#94a3793ff089c32ee74d76a3c03a7597693ebaaa"
+ integrity sha512-9PMijx8zFbCwTHrd2P4PJR5nWGH3zWebx2OcpTjqQrHhCiL2ssSR2Sc9ko2BsI2VmVBfoaQmPrlMTCui4LmXQg==
+ dependencies:
+ "@babel/compat-data" "^7.12.13"
+ "@babel/helper-compilation-targets" "^7.12.17"
+ "@babel/helper-module-imports" "^7.12.13"
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/helper-validator-option" "^7.12.17"
+ "@babel/plugin-proposal-async-generator-functions" "^7.12.13"
+ "@babel/plugin-proposal-class-properties" "^7.12.13"
+ "@babel/plugin-proposal-dynamic-import" "^7.12.17"
+ "@babel/plugin-proposal-export-namespace-from" "^7.12.13"
+ "@babel/plugin-proposal-json-strings" "^7.12.13"
+ "@babel/plugin-proposal-logical-assignment-operators" "^7.12.13"
+ "@babel/plugin-proposal-nullish-coalescing-operator" "^7.12.13"
+ "@babel/plugin-proposal-numeric-separator" "^7.12.13"
+ "@babel/plugin-proposal-object-rest-spread" "^7.12.13"
+ "@babel/plugin-proposal-optional-catch-binding" "^7.12.13"
+ "@babel/plugin-proposal-optional-chaining" "^7.12.17"
+ "@babel/plugin-proposal-private-methods" "^7.12.13"
+ "@babel/plugin-proposal-unicode-property-regex" "^7.12.13"
+ "@babel/plugin-syntax-async-generators" "^7.8.0"
+ "@babel/plugin-syntax-class-properties" "^7.12.13"
+ "@babel/plugin-syntax-dynamic-import" "^7.8.0"
+ "@babel/plugin-syntax-export-namespace-from" "^7.8.3"
+ "@babel/plugin-syntax-json-strings" "^7.8.0"
+ "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0"
+ "@babel/plugin-syntax-numeric-separator" "^7.10.4"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.0"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.0"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.0"
+ "@babel/plugin-syntax-top-level-await" "^7.12.13"
+ "@babel/plugin-transform-arrow-functions" "^7.12.13"
+ "@babel/plugin-transform-async-to-generator" "^7.12.13"
+ "@babel/plugin-transform-block-scoped-functions" "^7.12.13"
+ "@babel/plugin-transform-block-scoping" "^7.12.13"
+ "@babel/plugin-transform-classes" "^7.12.13"
+ "@babel/plugin-transform-computed-properties" "^7.12.13"
+ "@babel/plugin-transform-destructuring" "^7.12.13"
+ "@babel/plugin-transform-dotall-regex" "^7.12.13"
+ "@babel/plugin-transform-duplicate-keys" "^7.12.13"
+ "@babel/plugin-transform-exponentiation-operator" "^7.12.13"
+ "@babel/plugin-transform-for-of" "^7.12.13"
+ "@babel/plugin-transform-function-name" "^7.12.13"
+ "@babel/plugin-transform-literals" "^7.12.13"
+ "@babel/plugin-transform-member-expression-literals" "^7.12.13"
+ "@babel/plugin-transform-modules-amd" "^7.12.13"
+ "@babel/plugin-transform-modules-commonjs" "^7.12.13"
+ "@babel/plugin-transform-modules-systemjs" "^7.12.13"
+ "@babel/plugin-transform-modules-umd" "^7.12.13"
+ "@babel/plugin-transform-named-capturing-groups-regex" "^7.12.13"
+ "@babel/plugin-transform-new-target" "^7.12.13"
+ "@babel/plugin-transform-object-super" "^7.12.13"
+ "@babel/plugin-transform-parameters" "^7.12.13"
+ "@babel/plugin-transform-property-literals" "^7.12.13"
+ "@babel/plugin-transform-regenerator" "^7.12.13"
+ "@babel/plugin-transform-reserved-words" "^7.12.13"
+ "@babel/plugin-transform-shorthand-properties" "^7.12.13"
+ "@babel/plugin-transform-spread" "^7.12.13"
+ "@babel/plugin-transform-sticky-regex" "^7.12.13"
+ "@babel/plugin-transform-template-literals" "^7.12.13"
+ "@babel/plugin-transform-typeof-symbol" "^7.12.13"
+ "@babel/plugin-transform-unicode-escapes" "^7.12.13"
+ "@babel/plugin-transform-unicode-regex" "^7.12.13"
+ "@babel/preset-modules" "^0.1.3"
+ "@babel/types" "^7.12.17"
+ core-js-compat "^3.8.0"
+ semver "^5.5.0"
+
+"@babel/preset-modules@^0.1.3":
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.4.tgz#362f2b68c662842970fdb5e254ffc8fc1c2e415e"
+ integrity sha512-J36NhwnfdzpmH41M1DrnkkgAqhZaqr/NBdPfQ677mLzlaXo+oDiv1deyCDtgAhz8p328otdob0Du7+xgHGZbKg==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.0.0"
+ "@babel/plugin-proposal-unicode-property-regex" "^7.4.4"
+ "@babel/plugin-transform-dotall-regex" "^7.4.4"
+ "@babel/types" "^7.4.4"
+ esutils "^2.0.2"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.12.1.tgz#7f022b13f55b6dd82f00f16d1c599ae62985358c"
+ integrity sha512-euCExymHCi0qB9u5fKw7rvlw7AZSjw/NaB9h7EkdTt5+yHRrXdiRTh7fkG3uBPpJg82CqLfp1LHLqWGSCrab+g==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+ "@babel/plugin-transform-react-display-name" "^7.12.1"
+ "@babel/plugin-transform-react-jsx" "^7.12.1"
+ "@babel/plugin-transform-react-jsx-development" "^7.12.1"
+ "@babel/plugin-transform-react-jsx-self" "^7.12.1"
+ "@babel/plugin-transform-react-jsx-source" "^7.12.1"
+ "@babel/plugin-transform-react-pure-annotations" "^7.12.1"
+
+"@babel/preset-react@^7.12.5":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.12.13.tgz#5f911b2eb24277fa686820d5bd81cad9a0602a0a"
+ integrity sha512-TYM0V9z6Abb6dj1K7i5NrEhA13oS5ujUYQYDfqIBXYHOc2c2VkFgc+q9kyssIyUfy4/hEwqrgSlJ/Qgv8zJLsA==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.12.13"
+ "@babel/plugin-transform-react-display-name" "^7.12.13"
+ "@babel/plugin-transform-react-jsx" "^7.12.13"
+ "@babel/plugin-transform-react-jsx-development" "^7.12.12"
+ "@babel/plugin-transform-react-pure-annotations" "^7.12.1"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.12.1.tgz#86480b483bb97f75036e8864fe404cc782cc311b"
+ integrity sha512-hNK/DhmoJPsksdHuI/RVrcEws7GN5eamhi28JkO52MqIxU8Z0QpmiSOQxZHWOHV7I3P4UjHV97ay4TcamMA6Kw==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.10.4"
+ "@babel/plugin-transform-typescript" "^7.12.1"
+
+"@babel/runtime-corejs3@^7.10.2":
+ version "7.12.18"
+ resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.12.18.tgz#e5663237e5658e4c09586995d2dd6d2c8cfd6fc0"
+ integrity sha512-ngR7yhNTjDxxe1VYmhqQqqXZWujGb6g0IoA4qeG6MxNGRnIw2Zo8ImY8HfaQ7l3T6GklWhdNfyhWk0C0iocdVA==
+ dependencies:
+ core-js-pure "^3.0.0"
+ regenerator-runtime "^0.13.4"
+
+"@babel/[email protected]":
+ version "7.12.1"
+ resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.12.1.tgz#b4116a6b6711d010b2dad3b7b6e43bf1b9954740"
+ integrity sha512-J5AIf3vPj3UwXaAzb5j1xM4WAQDX3EMgemF8rjCP3SoW09LfRKAXQKt6CoVYl230P6iWdRcBbnLDDdnqWxZSCA==
+ dependencies:
+ regenerator-runtime "^0.13.4"
+
+"@babel/runtime@^7.0.0", "@babel/runtime@^7.1.2", "@babel/runtime@^7.12.1", "@babel/runtime@^7.12.13", "@babel/runtime@^7.13.10":
+ version "7.14.8"
+ resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.14.8.tgz#7119a56f421018852694290b9f9148097391b446"
+ integrity sha512-twj3L8Og5SaCRCErB4x4ajbvBIVV77CGeFglHpeg5WC5FF8TZzBWXtTJ4MqaD9QszLYTtr+IsaAL2rEUevb+eg==
+ dependencies:
+ regenerator-runtime "^0.13.4"
+
+"@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4":
+ version "7.12.18"
+ resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.12.18.tgz#af137bd7e7d9705a412b3caaf991fe6aaa97831b"
+ integrity sha512-BogPQ7ciE6SYAUPtlm9tWbgI9+2AgqSam6QivMgXgAT+fKbgppaj4ZX15MHeLC1PVF5sNk70huBu20XxWOs8Cg==
+ dependencies:
+ regenerator-runtime "^0.13.4"
+
+"@babel/template@^7.10.4", "@babel/template@^7.12.13", "@babel/template@^7.3.3":
+ version "7.12.13"
+ resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.12.13.tgz#530265be8a2589dbb37523844c5bcb55947fb327"
+ integrity sha512-/7xxiGA57xMo/P2GVvdEumr8ONhFOhfgq2ihK3h1e6THqzTAkHbkXgB0xI9yeTfIUoH3+oAeHhqm/I43OTbbjA==
+ dependencies:
+ "@babel/code-frame" "^7.12.13"
+ "@babel/parser" "^7.12.13"
+ "@babel/types" "^7.12.13"
+
+"@babel/traverse@^7.1.0", "@babel/traverse@^7.12.1", "@babel/traverse@^7.12.13", "@babel/traverse@^7.12.17", "@babel/traverse@^7.7.0":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.12.17.tgz#40ec8c7ffb502c4e54c7f95492dc11b88d718619"
+ integrity sha512-LGkTqDqdiwC6Q7fWSwQoas/oyiEYw6Hqjve5KOSykXkmFJFqzvGMb9niaUEag3Rlve492Mkye3gLw9FTv94fdQ==
+ dependencies:
+ "@babel/code-frame" "^7.12.13"
+ "@babel/generator" "^7.12.17"
+ "@babel/helper-function-name" "^7.12.13"
+ "@babel/helper-split-export-declaration" "^7.12.13"
+ "@babel/parser" "^7.12.17"
+ "@babel/types" "^7.12.17"
+ debug "^4.1.0"
+ globals "^11.1.0"
+ lodash "^4.17.19"
+
+"@babel/types@^7.0.0", "@babel/types@^7.12.1", "@babel/types@^7.12.13", "@babel/types@^7.12.17", "@babel/types@^7.12.6", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4", "@babel/types@^7.7.0":
+ version "7.12.17"
+ resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.12.17.tgz#9d711eb807e0934c90b8b1ca0eb1f7230d150963"
+ integrity sha512-tNMDjcv/4DIcHxErTgwB9q2ZcYyN0sUfgGKUK/mm1FJK7Wz+KstoEekxrl/tBiNDgLK1HGi+sppj1An/1DR4fQ==
+ dependencies:
+ "@babel/helper-validator-identifier" "^7.12.11"
+ lodash "^4.17.19"
+ to-fast-properties "^2.0.0"
+
+"@bcoe/v8-coverage@^0.2.3":
+ version "0.2.3"
+ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
+ integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
+
+"@chakra-ui/[email protected]":
+ version "1.3.4"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/accordion/-/accordion-1.3.4.tgz#cb5d279dd5fa8086d8b90ab4a94c915c4ac7107a"
+ integrity sha512-X+o68wcMkm07yWGjZz69rRke6W0zsD1eEG8uBs7iFy+q0sc1n5LiHNO/1L6s6CyBo6omI31RS/fbLD9OXJVD1g==
+ dependencies:
+ "@chakra-ui/descendant" "2.0.1"
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/icon" "1.1.10"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/transition" "1.3.3"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.2.6"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/alert/-/alert-1.2.6.tgz#24201f691d77d77b9ebda638f33b5a71bece3a0a"
+ integrity sha512-aq2hVHQFe3sFHYWDj+3HRVTKOqWlWwpm/FFihPVNoYteLKje8f71n3VN3rhDaFY15tFDXq9Uv3qTdMK55KXGlg==
+ dependencies:
+ "@chakra-ui/icon" "1.1.10"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/avatar/-/avatar-1.2.7.tgz#dcb85effd1617f39afc31dc233e0e3dddc835129"
+ integrity sha512-WwtHDEmnSglBKOkxQHRu8tUtRTKu+vn35JlO6QVP+Mb5SPX0vFns3F38dohVr2s1wGUiMVMq/bt0JNCG5fFzhQ==
+ dependencies:
+ "@chakra-ui/image" "1.0.17"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/breadcrumb/-/breadcrumb-1.2.7.tgz#19d047662dcb4da44281a6059979bbd7f35e0577"
+ integrity sha512-gJVigaLRIkRCNBgH8B36fOFCgGIKErZOutchhIOCiycWnIStaGiZ7XpQIbuXCWHcLtWG3+YRL4pupx7mOPoc3w==
+ dependencies:
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/button/-/button-1.4.1.tgz#57012faab52303eb14dda37a79455a3deb7e17a7"
+ integrity sha512-KnxG0buRMdM5KM1p00UozZ9KmZ22RKWUHvJrqtfi2Qxcj6FaEgS3nTXInLRpMIQ5xc83O07mio+pZ1j4zoRrbw==
+ dependencies:
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/spinner" "1.1.11"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.5.4"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/checkbox/-/checkbox-1.5.4.tgz#c8cabe59d767c57325f42e37aefd07539d1e1ac2"
+ integrity sha512-exEfDZZK2IQjT4DpTYynC7wdUGWxBTo+iYfTmA/DOvcTW9RqETgYSJteRUTZdFgA3AptH1XN/PuAj/ucIsQ9VA==
+ dependencies:
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+ "@chakra-ui/visually-hidden" "1.0.13"
+
+"@chakra-ui/[email protected]":
+ version "1.1.6"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/clickable/-/clickable-1.1.6.tgz#49db49093080da9a4fa8741ce33bd4bc1c27aa39"
+ integrity sha512-wCA/QKXwJaB6t6DRfIk8tKRBkHMmgG3aqXD9/KusXb+3OGDExuxrcO/nBkpTwZJ0+y0FPADpOduLupnrHQ4KNw==
+ dependencies:
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.10"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/close-button/-/close-button-1.1.10.tgz#ad4d026d70ff0ed523a3e098110983072584e7c7"
+ integrity sha512-DgjPZlqt2lixmLfnWaeqUQwGzRW3Ld1UNncjMzVUhTFxyfgSOCRLTQP4Hj4NWXilK3SuiPtxrtxAzm1sdYRfLg==
+ dependencies:
+ "@chakra-ui/icon" "1.1.10"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.10"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/color-mode/-/color-mode-1.1.10.tgz#46f4ee277aa7fb509a37cd6a9fde452a88e9e34b"
+ integrity sha512-fMI4yeaWjlDwM9gsGpD4G23j/7aVL7UQcZmPnyTsyPXWM7Y51CO7VF8Nr7WCeq2l0axjhVqMs+HveL4biM+kGw==
+ dependencies:
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.0.14"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/control-box/-/control-box-1.0.14.tgz#47000dc8049e1fbc495b001b36636812e3086094"
+ integrity sha512-BJJQnOy0C6gDH1sbQTRYflaWdc0h3IafcGAD0d2WGYVscMicAiNd/+6qGfqivrCESpghz4pfDcNE96UIFUYvHg==
+ dependencies:
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.7"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/counter/-/counter-1.1.7.tgz#8db41962fc3f99f44b2231633326520df529930f"
+ integrity sha512-RrlbFg8u3UNcqPm7SLyJGLeqPnFuRqccXXL98Udy5wLhEe1maI6mUPu0bZHTm0VJ1AEdiVzbql0qH8HLneMiGg==
+ dependencies:
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/css-reset/-/css-reset-1.0.0.tgz#8395921b35ef27bee0579a4d730c5ab7f7b39734"
+ integrity sha512-UaPsImGHvCgFO3ayp6Ugafu2/3/EG8wlW/8Y9Ihfk1UFv8cpV+3BfWKmuZ7IcmxcBL9dkP6E8p3/M1T0FB92hg==
+
+"@chakra-ui/[email protected]":
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/descendant/-/descendant-2.0.1.tgz#fc3bc9081aa01503035b2c9296bc4b9f87ceaae0"
+ integrity sha512-TeYp94iOhu5Gs2oVzewJaep0qft/JKMKfmcf4PGgzJF+h6TWZm6NGohk6Jq7JOh+y0rExa1ulknIgnMzFx5xaA==
+ dependencies:
+ "@chakra-ui/react-utils" "^1.1.2"
+
+"@chakra-ui/[email protected]":
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/editable/-/editable-1.2.7.tgz#397857c6901f8a469c02eaf981e462ab0f441c10"
+ integrity sha512-wmS5eGNw4ACX+kMEPxV97B6DEMJhGmvsUpdJAA8HDbDdcZNZk93Zkuog10X1cvXaddNCpDkFaa+TBOkqjeluNA==
+ dependencies:
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.9"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/focus-lock/-/focus-lock-1.1.9.tgz#c642804344c5eac9aec2890d3194e496a0e81a8f"
+ integrity sha512-C6nQqn5PNOiwp6Ovd9xzJ2V6P3d3ZdfykTl+Fc4YdTC47LTrJzJmv61++nhDAzYeEseojmmgXIE1DlZfGjZpZQ==
+ dependencies:
+ "@chakra-ui/utils" "1.8.1"
+ react-focus-lock "2.5.0"
+
+"@chakra-ui/[email protected]":
+ version "1.3.8"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/form-control/-/form-control-1.3.8.tgz#4369c58ecdac4f7c5c23eb0a568f6b6618e8ca64"
+ integrity sha512-S4zHu9ktuUeiqFC/ZM95UQ8CrnJvuXKfFRG+HsQrO5JjvaiYl0YjDE79Bi6+oj5WHjz0Zo7t+px+LAjxn7my3Q==
+ dependencies:
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/icon" "1.1.10"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.5.4"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/hooks/-/hooks-1.5.4.tgz#b32536f13ac88e61675ea83c0cf7d3b494903db5"
+ integrity sha512-xAFj2Feu+ZWD1oxbQQ2UHDI7zbx/zZXjlS6ogdpXZoMrGYJhbdbV0JNGx4eK1Q1AEChNLdnZQIq8An1gYKgE8g==
+ dependencies:
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+ compute-scroll-into-view "1.0.14"
+ copy-to-clipboard "3.3.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.10"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/icon/-/icon-1.1.10.tgz#fe7bb96f3b8162d90e85e347b3e2f8203733f44a"
+ integrity sha512-AZ2dKCHKT6dI4K9NXizHsNZSwPuBP0i1BZ4ZPoXGMOfNt7bD3yKBLoZfyO+NmAubMHanVASztikSNAmy2Rvczg==
+ dependencies:
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.0.17"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/image/-/image-1.0.17.tgz#b5c7fd564af1d23303eafe1d443c7dbecfd4cf31"
+ integrity sha512-M6OGT2Qs9Gy8Ba21XTWFDKe97fALSOSAcpQ38seSQt2hBjYdf8Pa3nKN6OO4O5zpTe612A/Sawuwxhf+6fSCeQ==
+ dependencies:
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.2.8"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/input/-/input-1.2.8.tgz#cab3d4f022199b7a7d3e889379f771ccb7613afe"
+ integrity sha512-WGvkcjJH9XpOlpKI9POn7UDA8qnHf22mBKY771U3IfW2QxcZH/rPFwDE7YIMLr9M4g+rL4NLSWmXYvO92rzc6A==
+ dependencies:
+ "@chakra-ui/form-control" "1.3.8"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.4.7"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/layout/-/layout-1.4.7.tgz#64a80ade52cbe70de5451a6081810d49df84c66e"
+ integrity sha512-wu1IBz/zg8rj4N88w4MtjS2kC5w+FXEvbxt0r2DqxLtPUFtE/fFmCa8OKsz+jMrDcZ1dRh48YNYrrWdAGEOQ8w==
+ dependencies:
+ "@chakra-ui/icon" "1.1.10"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.0.13"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/live-region/-/live-region-1.0.13.tgz#fe93eb7e75ce7c9ae6b76664ed0632ddbc5ba9b7"
+ integrity sha512-bzgi8jIYxVaqSVmUynnGFDjBOKf1LuKY1qMljuwIa7rK6iJZiMxTAdPbxX5Km4xTdgUz5AtZrmqDvKKLPDA1fg==
+ dependencies:
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/media-query/-/media-query-1.1.1.tgz#3950e223e28a7a6e3c29b3529698e15389facd5e"
+ integrity sha512-KHsY4NzMl77yMyqpw3nleh1xM3zqAhCmSRBzQIh5fU/kT7r2tCwGl53djY5O2pl9VPMb4LhqPwkNd6vsscfCxQ==
+ dependencies:
+ "@chakra-ui/react-env" "1.0.5"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.7.1"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/menu/-/menu-1.7.1.tgz#227f337fd2cd9483175088c65500df12422d75ca"
+ integrity sha512-a9+iyw+cUBtxC/+mKAhPS92a0Nlq94wXpz8haswWTNSOLE5U/zXNDbiG8BsXQ+pS8ngPUjZRE35EFSge+efV8Q==
+ dependencies:
+ "@chakra-ui/clickable" "1.1.6"
+ "@chakra-ui/descendant" "2.0.1"
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/popper" "2.2.1"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/transition" "1.3.3"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.8.9"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/modal/-/modal-1.8.9.tgz#12c1c52d5c2ee7ed565272fe34111d3f0e4da09e"
+ integrity sha512-fguU4zpE/4JWKY0yHyi/PoM0QzcBokgcT3KZnZj3KGOc1C15ZkR6GvD5UBubGMWQzlKT9hCwYaLc+VeoHnN6XA==
+ dependencies:
+ "@chakra-ui/close-button" "1.1.10"
+ "@chakra-ui/focus-lock" "1.1.9"
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/portal" "1.2.7"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/transition" "1.3.3"
+ "@chakra-ui/utils" "1.8.1"
+ aria-hidden "^1.1.1"
+ react-remove-scroll "2.4.1"
+
+"@chakra-ui/[email protected]":
+ version "1.2.8"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/number-input/-/number-input-1.2.8.tgz#22c6db54b60c143ff5711563859a646db99b5f86"
+ integrity sha512-f8mQrPJu7O5qX4auNu24N6TtzaAE/q+eld1K+vwVdFUeFCOxuSsEoMT3xOEPrkEKYtikFDt0Dy3+pYrTcgBrvA==
+ dependencies:
+ "@chakra-ui/counter" "1.1.7"
+ "@chakra-ui/form-control" "1.3.8"
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/icon" "1.1.10"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.6.3"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/pin-input/-/pin-input-1.6.3.tgz#d996babf9007708c1f4fb226ab2bc11d1f34bc26"
+ integrity sha512-BZYNUpcwagjfAr8olmkZe5aQ3e45q4rwoIwWvHVb39KVvPP3L7jzLFlxzoncoxVfBh9hOEztg/GeIeN0arLtLw==
+ dependencies:
+ "@chakra-ui/descendant" "2.0.1"
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.8.1"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/popover/-/popover-1.8.1.tgz#d7f6efb899026d2021b14b2f442a4177ec618d38"
+ integrity sha512-fEYcEV6rO4H9ewj+8nom5flHZfh8+BwxNfuzVZFnJbzuSzP9NKk5VMp+nbBow2CKlI/ct3Y8dpaLbsYrm/X6AA==
+ dependencies:
+ "@chakra-ui/close-button" "1.1.10"
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/popper" "2.2.1"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/popper/-/popper-2.2.1.tgz#51d49933ee837b396d78d9daaab1d9809afea982"
+ integrity sha512-W0hMTBp2X62UooF3qPNmsEW0IJfz72gr2DN8nsCvHQrMiARB9s2jECEss6qEsB97tnmIG8k2TNee8IzTGLmMyA==
+ dependencies:
+ "@chakra-ui/react-utils" "1.1.2"
+ "@popperjs/core" "2.4.4"
+
+"@chakra-ui/[email protected]":
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/portal/-/portal-1.2.7.tgz#1db2d14aa6baa7267b1e51162749bb046cdb9263"
+ integrity sha512-s5iFEhjZ1r5cyIH3i5R6UOW5FwmM3JDFkLw3Y7wumlYV4CscV2/UwoKIbscR93COMGP+HPvfVDUZOB1woftQRA==
+ dependencies:
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.11"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/progress/-/progress-1.1.11.tgz#3d02c58c95b037719ac94513a2d0531ae00336da"
+ integrity sha512-8cPvHI/TxQSP1DPs7nC1qnLPFFd2lzMs7GDk0AcORW+Be8BS0cJC5NV9wZJM4N8RUP4sK4nhkMfyq4GbrNzoLg==
+ dependencies:
+ "@chakra-ui/theme-tools" "1.1.8"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.3.8"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/radio/-/radio-1.3.8.tgz#b04e71e5975e9969f1cef81024c764ba4cc07249"
+ integrity sha512-3HWS7OVrdtqZYR/FBtIQhVvVLU0hiWZWWdiG+W1g6V3YhTq1PtwDA8uYDDe5KxaA/DjXfUhg1mQjjozgB1jZ/g==
+ dependencies:
+ "@chakra-ui/form-control" "1.3.8"
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+ "@chakra-ui/visually-hidden" "1.0.13"
+
+"@chakra-ui/[email protected]":
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/react-env/-/react-env-1.0.5.tgz#2333c26a749bf6ad85a0a7865844471ff9d05d7a"
+ integrity sha512-qAWslmm27q7DyHv5XvIoW6ihmilQK6K/LNc0bUlPrKaxzLtk9m16N767spl+xue9JyPb7ZE3gAPwdUEUD7XKhQ==
+ dependencies:
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]", "@chakra-ui/react-utils@^1.1.2":
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/react-utils/-/react-utils-1.1.2.tgz#7ea80b6ae25bd7b182095cc9ffaad23c464408b5"
+ integrity sha512-S8jPVKGZH2qF7ZGxl/0DF/dXXI2AxDNGf4Ahi2LGHqajMvqBB7vtYIRRmIA7+jAnErhzO8WUi3i4Z7oScp6xSA==
+ dependencies:
+ "@chakra-ui/utils" "^1.7.0"
+
+"@chakra-ui/react@^1.6.5":
+ version "1.6.5"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/react/-/react-1.6.5.tgz#f8d05b2bb7f98d73c9606b6abe02cec9598541e6"
+ integrity sha512-kvBNX3gkg2CCbdaj585I8m7Wd+PGMLTpEM15WbII3t6E26lhKWwD5OXMomhWhsnBMCM9uSQ790dunhffcruUUg==
+ dependencies:
+ "@chakra-ui/accordion" "1.3.4"
+ "@chakra-ui/alert" "1.2.6"
+ "@chakra-ui/avatar" "1.2.7"
+ "@chakra-ui/breadcrumb" "1.2.7"
+ "@chakra-ui/button" "1.4.1"
+ "@chakra-ui/checkbox" "1.5.4"
+ "@chakra-ui/close-button" "1.1.10"
+ "@chakra-ui/control-box" "1.0.14"
+ "@chakra-ui/counter" "1.1.7"
+ "@chakra-ui/css-reset" "1.0.0"
+ "@chakra-ui/editable" "1.2.7"
+ "@chakra-ui/form-control" "1.3.8"
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/icon" "1.1.10"
+ "@chakra-ui/image" "1.0.17"
+ "@chakra-ui/input" "1.2.8"
+ "@chakra-ui/layout" "1.4.7"
+ "@chakra-ui/live-region" "1.0.13"
+ "@chakra-ui/media-query" "1.1.1"
+ "@chakra-ui/menu" "1.7.1"
+ "@chakra-ui/modal" "1.8.9"
+ "@chakra-ui/number-input" "1.2.8"
+ "@chakra-ui/pin-input" "1.6.3"
+ "@chakra-ui/popover" "1.8.1"
+ "@chakra-ui/popper" "2.2.1"
+ "@chakra-ui/portal" "1.2.7"
+ "@chakra-ui/progress" "1.1.11"
+ "@chakra-ui/radio" "1.3.8"
+ "@chakra-ui/react-env" "1.0.5"
+ "@chakra-ui/select" "1.1.12"
+ "@chakra-ui/skeleton" "1.1.16"
+ "@chakra-ui/slider" "1.2.7"
+ "@chakra-ui/spinner" "1.1.11"
+ "@chakra-ui/stat" "1.1.11"
+ "@chakra-ui/switch" "1.2.7"
+ "@chakra-ui/system" "1.7.1"
+ "@chakra-ui/table" "1.2.5"
+ "@chakra-ui/tabs" "1.5.3"
+ "@chakra-ui/tag" "1.1.11"
+ "@chakra-ui/textarea" "1.1.12"
+ "@chakra-ui/theme" "1.9.2"
+ "@chakra-ui/toast" "1.2.9"
+ "@chakra-ui/tooltip" "1.3.8"
+ "@chakra-ui/transition" "1.3.3"
+ "@chakra-ui/utils" "1.8.1"
+ "@chakra-ui/visually-hidden" "1.0.13"
+
+"@chakra-ui/[email protected]":
+ version "1.1.12"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/select/-/select-1.1.12.tgz#a1531e142f96b46c7bfe1fc05ac8d6229a58ebe8"
+ integrity sha512-oOCLLCONoGgnJ/RvWEvdl+ggecDGIlxYHOsTjPu2vZs6PPIer69Xf9/S36Zp4kkuYWxz2ssK3YMoiU0PpPz7GQ==
+ dependencies:
+ "@chakra-ui/form-control" "1.3.8"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.16"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/skeleton/-/skeleton-1.1.16.tgz#3f6126da7dde50f959227ce0048b149d313202aa"
+ integrity sha512-pzqa2PYg21ktFrdIcMvx+BEG4u+tTNuHDHqQeFD7bV7tYbNkMlQhY7I7kTBWMo0mROmnrerVBTJd92CbG/c5lA==
+ dependencies:
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/media-query" "1.1.1"
+ "@chakra-ui/system" "1.7.1"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/slider/-/slider-1.2.7.tgz#39a56b266f3e363aefeac1c2eb15f422657e44a5"
+ integrity sha512-fp5ef8MEbXq89U4TpSeEa6NUwvtSyHbM6VSdZCgsHG546BWpRkcCEvagtKXmviX4NthtOyig0YCqmET8HKduVA==
+ dependencies:
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.11"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/spinner/-/spinner-1.1.11.tgz#ed5645cd0007fe584dcb191c276bf81ead6539a0"
+ integrity sha512-gkh44jZ8msfHQgswVvflbWz/6Egv5FeSu6a7BJWX/XQJw9IxPy0B75xy0d06LgQCOFk17x2xhB+mwZI6i55T8Q==
+ dependencies:
+ "@chakra-ui/utils" "1.8.1"
+ "@chakra-ui/visually-hidden" "1.0.13"
+
+"@chakra-ui/[email protected]":
+ version "1.1.11"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/stat/-/stat-1.1.11.tgz#1146738abc89c51de0c9de409fd4c80e47b68fe1"
+ integrity sha512-47aHxoAReUmQ0bU6q7qY2N9RryKtZWTheK/xepFppGI5Q0hWSoOESkJ8BNZ/LuQW6NLCmv2jOxyhW4XIDEJ+fA==
+ dependencies:
+ "@chakra-ui/icon" "1.1.10"
+ "@chakra-ui/utils" "1.8.1"
+ "@chakra-ui/visually-hidden" "1.0.13"
+
+"@chakra-ui/[email protected]":
+ version "1.12.1"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/styled-system/-/styled-system-1.12.1.tgz#eb20cb4605f12f27a187141d70aa8fa15e243767"
+ integrity sha512-/92egMOe6/6xerCmoos1/HhZBJdeRwIRa2BR+wwkHJ4ehqxi4IBtU9oXc2g4P70GGh6UqKIgR/oURrvVY8vjow==
+ dependencies:
+ "@chakra-ui/utils" "1.8.1"
+ csstype "^3.0.6"
+
+"@chakra-ui/[email protected]":
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/switch/-/switch-1.2.7.tgz#cc7483cd4d42f182f4f6ac511535af3d553e6c15"
+ integrity sha512-zHI6lg+NuDUw9vxEDSOkH4j2lRntIpwysuIEYUKFPkH2zmZpo6c1zLA9L+rfMbqFRoewm+YIqh8tOgQmNbIGPg==
+ dependencies:
+ "@chakra-ui/checkbox" "1.5.4"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.7.1"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/system/-/system-1.7.1.tgz#b1f311a75963d5ac1f86873d17ba2ee5db59027d"
+ integrity sha512-1G7+mAPbkGqtowZ4Bt9JwCB2wTJt701vj/vPLRW2KDYqlES5Xp2RomG8LdrGQcVWfiwO2wzpCYUZj2YLY4kbVA==
+ dependencies:
+ "@chakra-ui/color-mode" "1.1.10"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/styled-system" "1.12.1"
+ "@chakra-ui/utils" "1.8.1"
+ react-fast-compare "3.2.0"
+
+"@chakra-ui/[email protected]":
+ version "1.2.5"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/table/-/table-1.2.5.tgz#af31be7d20bff46f58f9e0ff572b0284b2784fa4"
+ integrity sha512-iYSDv4oTKZ8bLJo9OHjAPCi7cxDXXVXIYupwP2oXcBsM8Hx6FrmlPlO8vdBCTD2ySaazFOZgW2/EPOKsXlAnlQ==
+ dependencies:
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.5.3"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/tabs/-/tabs-1.5.3.tgz#cb608667b6272e4f172e3bcfe4c6cd7293111981"
+ integrity sha512-Nn/+gSZRigODwPK597U6DYwaPiOZAFNsozE5RYSZootr/tMIwqTh3opxwzW9zbPx4lQ2+3uvS4QHN5Tn+YxW8Q==
+ dependencies:
+ "@chakra-ui/clickable" "1.1.6"
+ "@chakra-ui/descendant" "2.0.1"
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.11"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/tag/-/tag-1.1.11.tgz#6dc231f6c18b5faa81ec1e95d59055b9100c13f7"
+ integrity sha512-XLKafTuK5lsRLk+zAXCQZ1368GOTf59ghtpYofLg0ieGAbOOuNmw1/lLKdnrnHj8ueatKPr86bDa4DQ31J3Lxg==
+ dependencies:
+ "@chakra-ui/icon" "1.1.10"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.12"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/textarea/-/textarea-1.1.12.tgz#3ffa33cb49c1653c869478a4ea7516fa303f496c"
+ integrity sha512-Qmc98ePiSdjCJ/AVCQ6mgX7Ez/cEoBTPkP/t4eqbjpfBSWYAExfYn/w/Tkcx1C5dd9cfk+EPzxM2r3KVpWuQGA==
+ dependencies:
+ "@chakra-ui/form-control" "1.3.8"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.1.8"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/theme-tools/-/theme-tools-1.1.8.tgz#cc04947aaf5f553db84a32ba6c742f1fbc726f9b"
+ integrity sha512-FQqHNfuvl2O1m7o6YY3ozqxnz74TWAhVzzfKrh7/eXcyA2IkF+MuKMUnyWXjOq1bcLt9rAGq0FQALisTd4YPWQ==
+ dependencies:
+ "@chakra-ui/utils" "1.8.1"
+ "@types/tinycolor2" "1.4.2"
+ tinycolor2 "1.4.2"
+
+"@chakra-ui/[email protected]":
+ version "1.9.2"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/theme/-/theme-1.9.2.tgz#b7f044f96fdedfbeff181dd2e9788e139ecaf513"
+ integrity sha512-bSKcVGTi83sjdQNJULLAul0mL3Hljs+KEZ+oWEl0FogPumCeBOBW4rPCnddW3YWkQUrHwoNz4hag29klTs/IsQ==
+ dependencies:
+ "@chakra-ui/theme-tools" "1.1.8"
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]":
+ version "1.2.9"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/toast/-/toast-1.2.9.tgz#ebc78ab404f524c396ed84f433f546b18110cdf2"
+ integrity sha512-fVE5UD27WykiPS817Wlee4LAT01SysWFxCFikflBj1nK8UJXhRKV/UavNf5aJbxvzx5QCwkD0pjFmDO9uxOSPA==
+ dependencies:
+ "@chakra-ui/alert" "1.2.6"
+ "@chakra-ui/close-button" "1.1.10"
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/theme" "1.9.2"
+ "@chakra-ui/transition" "1.3.3"
+ "@chakra-ui/utils" "1.8.1"
+ "@reach/alert" "0.13.2"
+
+"@chakra-ui/[email protected]":
+ version "1.3.8"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/tooltip/-/tooltip-1.3.8.tgz#27f1106b7660dc612fa3f3b60d127ade1ea8e9ac"
+ integrity sha512-7rqAhcd04ZnnJZ2DmGvVPNyi/+Fy4bzQocYn83rWR3LC/8/LM+czG6pmz4FKjYR5iU6Ttf6Ckp8NfFKhyHAp/g==
+ dependencies:
+ "@chakra-ui/hooks" "1.5.4"
+ "@chakra-ui/popper" "2.2.1"
+ "@chakra-ui/portal" "1.2.7"
+ "@chakra-ui/react-utils" "1.1.2"
+ "@chakra-ui/utils" "1.8.1"
+ "@chakra-ui/visually-hidden" "1.0.13"
+
+"@chakra-ui/[email protected]":
+ version "1.3.3"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/transition/-/transition-1.3.3.tgz#0315060600d160be2687231ca19062709e363d8c"
+ integrity sha512-p9ZRaHNdSGQKS3trL7jSxh47fQDDEZfgYHMx7L/mDy6vxMNsO6YhnURULePk90hvtCAp6Z4urNTM6VYaywioQQ==
+ dependencies:
+ "@chakra-ui/utils" "1.8.1"
+
+"@chakra-ui/[email protected]", "@chakra-ui/utils@^1.7.0":
+ version "1.8.1"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/utils/-/utils-1.8.1.tgz#9778e4e8776da0ed4dc0051755d79002d2f21ec2"
+ integrity sha512-v0xL9U2ozDbHCl2kQTdJNOjUGT7ZjyFwEYuMW02ZaLkmLPj2w3G592iOsJ9Z9sBemQgoOrZGyTWqdxm6rhxJug==
+ dependencies:
+ "@types/lodash.mergewith" "4.6.6"
+ css-box-model "1.2.1"
+ framesync "5.3.0"
+ lodash.mergewith "4.6.2"
+
+"@chakra-ui/[email protected]":
+ version "1.0.13"
+ resolved "https://registry.yarnpkg.com/@chakra-ui/visually-hidden/-/visually-hidden-1.0.13.tgz#1cfca8e46137583afdd14a974f7d77815a2ff516"
+ integrity sha512-wFFXdejxwOT7r7AbD/IFl6Ve+n6VIOl2Drjcrn3JXmfwzL9NKB3xrtcdMXe8G/zW9jRXh+E6DUkTyEUjdUZErg==
+ dependencies:
+ "@chakra-ui/utils" "1.8.1"
+
+"@cnakazawa/watch@^1.0.3":
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/@cnakazawa/watch/-/watch-1.0.4.tgz#f864ae85004d0fcab6f50be9141c4da368d1656a"
+ integrity sha512-v9kIhKwjeZThiWrLmj0y17CWoyddASLj9O2yvbZkbvw/N3rWOYy9zkV66ursAoVr0mV15bL8g0c4QZUE6cdDoQ==
+ dependencies:
+ exec-sh "^0.3.2"
+ minimist "^1.2.0"
+
+"@csstools/convert-colors@^1.4.0":
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/@csstools/convert-colors/-/convert-colors-1.4.0.tgz#ad495dc41b12e75d588c6db8b9834f08fa131eb7"
+ integrity sha512-5a6wqoJV/xEdbRNKVo6I4hO3VjyDq//8q2f9I6PBAvMesJHFauXDorcNCsr9RzvsZnaWi5NYCcfyqP1QeFHFbw==
+
+"@csstools/normalize.css@^10.1.0":
+ version "10.1.0"
+ resolved "https://registry.yarnpkg.com/@csstools/normalize.css/-/normalize.css-10.1.0.tgz#f0950bba18819512d42f7197e56c518aa491cf18"
+ integrity sha512-ij4wRiunFfaJxjB0BdrYHIH8FxBJpOwNPhhAcunlmPdXudL1WQV1qoP9un6JsEBAgQH+7UXyyjh0g7jTxXK6tg==
+
+"@emotion/babel-plugin@^11.3.0":
+ version "11.3.0"
+ resolved "https://registry.yarnpkg.com/@emotion/babel-plugin/-/babel-plugin-11.3.0.tgz#3a16850ba04d8d9651f07f3fb674b3436a4fb9d7"
+ integrity sha512-UZKwBV2rADuhRp+ZOGgNWg2eYgbzKzQXfQPtJbu/PLy8onurxlNCLvxMQEvlr1/GudguPI5IU9qIY1+2z1M5bA==
+ dependencies:
+ "@babel/helper-module-imports" "^7.12.13"
+ "@babel/plugin-syntax-jsx" "^7.12.13"
+ "@babel/runtime" "^7.13.10"
+ "@emotion/hash" "^0.8.0"
+ "@emotion/memoize" "^0.7.5"
+ "@emotion/serialize" "^1.0.2"
+ babel-plugin-macros "^2.6.1"
+ convert-source-map "^1.5.0"
+ escape-string-regexp "^4.0.0"
+ find-root "^1.1.0"
+ source-map "^0.5.7"
+ stylis "^4.0.3"
+
+"@emotion/cache@^11.4.0":
+ version "11.4.0"
+ resolved "https://registry.yarnpkg.com/@emotion/cache/-/cache-11.4.0.tgz#293fc9d9a7a38b9aad8e9337e5014366c3b09ac0"
+ integrity sha512-Zx70bjE7LErRO9OaZrhf22Qye1y4F7iDl+ITjet0J+i+B88PrAOBkKvaAWhxsZf72tDLajwCgfCjJ2dvH77C3g==
+ dependencies:
+ "@emotion/memoize" "^0.7.4"
+ "@emotion/sheet" "^1.0.0"
+ "@emotion/utils" "^1.0.0"
+ "@emotion/weak-memoize" "^0.2.5"
+ stylis "^4.0.3"
+
+"@emotion/hash@^0.8.0":
+ version "0.8.0"
+ resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.8.0.tgz#bbbff68978fefdbe68ccb533bc8cbe1d1afb5413"
+ integrity sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow==
+
+"@emotion/is-prop-valid@^0.8.2":
+ version "0.8.8"
+ resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz#db28b1c4368a259b60a97311d6a952d4fd01ac1a"
+ integrity sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==
+ dependencies:
+ "@emotion/memoize" "0.7.4"
+
+"@emotion/is-prop-valid@^1.1.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-1.1.0.tgz#29ef6be1e946fb4739f9707def860f316f668cde"
+ integrity sha512-9RkilvXAufQHsSsjQ3PIzSns+pxuX4EW8EbGeSPjZMHuMx6z/MOzb9LpqNieQX4F3mre3NWS2+X3JNRHTQztUQ==
+ dependencies:
+ "@emotion/memoize" "^0.7.4"
+
+"@emotion/[email protected]":
+ version "0.7.4"
+ resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.7.4.tgz#19bf0f5af19149111c40d98bb0cf82119f5d9eeb"
+ integrity sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==
+
+"@emotion/memoize@^0.7.4", "@emotion/memoize@^0.7.5":
+ version "0.7.5"
+ resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.7.5.tgz#2c40f81449a4e554e9fc6396910ed4843ec2be50"
+ integrity sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ==
+
+"@emotion/react@11":
+ version "11.4.0"
+ resolved "https://registry.yarnpkg.com/@emotion/react/-/react-11.4.0.tgz#2465ad7b073a691409b88dfd96dc17097ddad9b7"
+ integrity sha512-4XklWsl9BdtatLoJpSjusXhpKv9YVteYKh9hPKP1Sxl+mswEFoUe0WtmtWjxEjkA51DQ2QRMCNOvKcSlCQ7ivg==
+ dependencies:
+ "@babel/runtime" "^7.13.10"
+ "@emotion/cache" "^11.4.0"
+ "@emotion/serialize" "^1.0.2"
+ "@emotion/sheet" "^1.0.1"
+ "@emotion/utils" "^1.0.0"
+ "@emotion/weak-memoize" "^0.2.5"
+ hoist-non-react-statics "^3.3.1"
+
+"@emotion/serialize@^1.0.2":
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-1.0.2.tgz#77cb21a0571c9f68eb66087754a65fa97bfcd965"
+ integrity sha512-95MgNJ9+/ajxU7QIAruiOAdYNjxZX7G2mhgrtDWswA21VviYIRP1R5QilZ/bDY42xiKsaktP4egJb3QdYQZi1A==
+ dependencies:
+ "@emotion/hash" "^0.8.0"
+ "@emotion/memoize" "^0.7.4"
+ "@emotion/unitless" "^0.7.5"
+ "@emotion/utils" "^1.0.0"
+ csstype "^3.0.2"
+
+"@emotion/sheet@^1.0.0", "@emotion/sheet@^1.0.1":
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-1.0.1.tgz#245f54abb02dfd82326e28689f34c27aa9b2a698"
+ integrity sha512-GbIvVMe4U+Zc+929N1V7nW6YYJtidj31lidSmdYcWozwoBIObXBnaJkKNDjZrLm9Nc0BR+ZyHNaRZxqNZbof5g==
+
+"@emotion/styled@11":
+ version "11.3.0"
+ resolved "https://registry.yarnpkg.com/@emotion/styled/-/styled-11.3.0.tgz#d63ee00537dfb6ff612e31b0e915c5cf9925a207"
+ integrity sha512-fUoLcN3BfMiLlRhJ8CuPUMEyKkLEoM+n+UyAbnqGEsCd5IzKQ7VQFLtzpJOaCD2/VR2+1hXQTnSZXVJeiTNltA==
+ dependencies:
+ "@babel/runtime" "^7.13.10"
+ "@emotion/babel-plugin" "^11.3.0"
+ "@emotion/is-prop-valid" "^1.1.0"
+ "@emotion/serialize" "^1.0.2"
+ "@emotion/utils" "^1.0.0"
+
+"@emotion/unitless@^0.7.5":
+ version "0.7.5"
+ resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.7.5.tgz#77211291c1900a700b8a78cfafda3160d76949ed"
+ integrity sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==
+
+"@emotion/utils@^1.0.0":
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-1.0.0.tgz#abe06a83160b10570816c913990245813a2fd6af"
+ integrity sha512-mQC2b3XLDs6QCW+pDQDiyO/EdGZYOygE8s5N5rrzjSI4M3IejPE/JPndCBwRT9z982aqQNi6beWs1UeayrQxxA==
+
+"@emotion/weak-memoize@^0.2.5":
+ version "0.2.5"
+ resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz#8eed982e2ee6f7f4e44c253e12962980791efd46"
+ integrity sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA==
+
+"@eslint/eslintrc@^0.3.0":
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.3.0.tgz#d736d6963d7003b6514e6324bec9c602ac340318"
+ integrity sha512-1JTKgrOKAHVivSvOYw+sJOunkBjUOvjqWk1DPja7ZFhIS2mX/4EgTT8M7eTK9jrKhL/FvXXEbQwIs3pg1xp3dg==
+ dependencies:
+ ajv "^6.12.4"
+ debug "^4.1.1"
+ espree "^7.3.0"
+ globals "^12.1.0"
+ ignore "^4.0.6"
+ import-fresh "^3.2.1"
+ js-yaml "^3.13.1"
+ lodash "^4.17.20"
+ minimatch "^3.0.4"
+ strip-json-comments "^3.1.1"
+
+"@eslint/eslintrc@^0.4.3":
+ version "0.4.3"
+ resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.3.tgz#9e42981ef035beb3dd49add17acb96e8ff6f394c"
+ integrity sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==
+ dependencies:
+ ajv "^6.12.4"
+ debug "^4.1.1"
+ espree "^7.3.0"
+ globals "^13.9.0"
+ ignore "^4.0.6"
+ import-fresh "^3.2.1"
+ js-yaml "^3.13.1"
+ minimatch "^3.0.4"
+ strip-json-comments "^3.1.1"
+
+"@firebase/[email protected]":
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/@firebase/analytics-types/-/analytics-types-0.5.0.tgz#cfa1dc34034fc478eca360f5faa4b4d0466892ce"
+ integrity sha512-VTV5Xtq5gVabbL/4n6pBtMJWcQBgOUDE2XbEHl8EOuwRaU9weyGUS7ofbisDkpl1RlFU1aewnc33pbLcYbi0iQ==
+
+"@firebase/[email protected]":
+ version "0.6.16"
+ resolved "https://registry.yarnpkg.com/@firebase/analytics/-/analytics-0.6.16.tgz#27bb3220ae70f83bb5c2ebde1e78087abfbf17f5"
+ integrity sha512-eBYWKf7S7xmDFi3cWLs7Z6x4Hn1AG1oy2Xp/RvfyamhqI2X8GbgyCif/+q7orh+MWnNwipblVT93YajhhXpQcQ==
+ dependencies:
+ "@firebase/analytics-types" "0.5.0"
+ "@firebase/component" "0.5.5"
+ "@firebase/installations" "0.4.31"
+ "@firebase/logger" "0.2.6"
+ "@firebase/util" "1.2.0"
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/@firebase/app-check-interop-types/-/app-check-interop-types-0.1.0.tgz#83afd9d41f99166c2bdb2d824e5032e9edd8fe53"
+ integrity sha512-uZfn9s4uuRsaX5Lwx+gFP3B6YsyOKUE+Rqa6z9ojT4VSRAsZFko9FRn6OxQUA1z5t5d08fY4pf+/+Dkd5wbdbA==
+
+"@firebase/[email protected]":
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/@firebase/app-check-types/-/app-check-types-0.2.0.tgz#b938e03914b8139796a8923bb20a9004114d5409"
+ integrity sha512-CfZhWtChLK9uNmrxbJyTg1BPtROiwc/VJGu3f39KjS0F5ZvZjHmyRFMrDiSoXDoybM4B6X0pQhJYi9rifT2wpQ==
+
+"@firebase/[email protected]":
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/@firebase/app-check/-/app-check-0.2.1.tgz#3dd87dfa1832ee9a662489cce28261eaff23ef5d"
+ integrity sha512-Qswn+qHiAyi3P0O/W9BffDFX4MmptSod49zhWQt8vV42JyKSZexaXQpeNlfKgdE5jX8wUw8Vkk8My4PfIrPkww==
+ dependencies:
+ "@firebase/app-check-interop-types" "0.1.0"
+ "@firebase/app-check-types" "0.2.0"
+ "@firebase/component" "0.5.5"
+ "@firebase/logger" "0.2.6"
+ "@firebase/util" "1.2.0"
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "0.6.3"
+ resolved "https://registry.yarnpkg.com/@firebase/app-types/-/app-types-0.6.3.tgz#3f10514786aad846d74cd63cb693556309918f4b"
+ integrity sha512-/M13DPPati7FQHEQ9Minjk1HGLm/4K4gs9bR4rzLCWJg64yGtVC0zNg9gDpkw9yc2cvol/mNFxqTtd4geGrwdw==
+
+"@firebase/[email protected]":
+ version "0.6.29"
+ resolved "https://registry.yarnpkg.com/@firebase/app/-/app-0.6.29.tgz#e2f88274b39917ab766f9fe73da48c353eaed557"
+ integrity sha512-duCzk9/BSVVsb5Y9b0rnvGSuD5zQA/JghiQsccRl+lA4xiUYjFudTU4cVFftkw+0zzeYBHn4KiVxchsva1O9dA==
+ dependencies:
+ "@firebase/app-types" "0.6.3"
+ "@firebase/component" "0.5.5"
+ "@firebase/logger" "0.2.6"
+ "@firebase/util" "1.2.0"
+ dom-storage "2.1.0"
+ tslib "^2.1.0"
+ xmlhttprequest "1.8.0"
+
+"@firebase/[email protected]":
+ version "0.1.6"
+ resolved "https://registry.yarnpkg.com/@firebase/auth-interop-types/-/auth-interop-types-0.1.6.tgz#5ce13fc1c527ad36f1bb1322c4492680a6cf4964"
+ integrity sha512-etIi92fW3CctsmR9e3sYM3Uqnoq861M0Id9mdOPF6PWIg38BXL5k4upCNBggGUpLIS0H1grMOvy/wn1xymwe2g==
+
+"@firebase/[email protected]":
+ version "0.10.3"
+ resolved "https://registry.yarnpkg.com/@firebase/auth-types/-/auth-types-0.10.3.tgz#2be7dd93959c8f5304c63e09e98718e103464d8c"
+ integrity sha512-zExrThRqyqGUbXOFrH/sowuh2rRtfKHp9SBVY2vOqKWdCX1Ztn682n9WLtlUDsiYVIbBcwautYWk2HyCGFv0OA==
+
+"@firebase/[email protected]":
+ version "0.16.8"
+ resolved "https://registry.yarnpkg.com/@firebase/auth/-/auth-0.16.8.tgz#4edd44673d3711e94cfa1e6b03883214ae1f2255"
+ integrity sha512-mR0UXG4LirWIfOiCWxVmvz1o23BuKGxeItQ2cCUgXLTjNtWJXdcky/356iTUsd7ZV5A78s2NHeN5tIDDG6H4rg==
+ dependencies:
+ "@firebase/auth-types" "0.10.3"
+
+"@firebase/[email protected]":
+ version "0.5.5"
+ resolved "https://registry.yarnpkg.com/@firebase/component/-/component-0.5.5.tgz#849ccf7cbf0398a43058f274ffcd43620ae9521f"
+ integrity sha512-L41SdS/4a164jx2iGfakJgaBUPPBI3DI+RrUlmh3oHSUljTeCwfj/Nhcv3S7e2lyXsGFJtAyepfPUx4IQ05crw==
+ dependencies:
+ "@firebase/util" "1.2.0"
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "0.7.3"
+ resolved "https://registry.yarnpkg.com/@firebase/database-types/-/database-types-0.7.3.tgz#819f16dd4c767c864b460004458620f265a3f735"
+ integrity sha512-dSOJmhKQ0nL8O4EQMRNGpSExWCXeHtH57gGg0BfNAdWcKhC8/4Y+qfKLfWXzyHvrSecpLmO0SmAi/iK2D5fp5A==
+ dependencies:
+ "@firebase/app-types" "0.6.3"
+
+"@firebase/[email protected]":
+ version "0.10.9"
+ resolved "https://registry.yarnpkg.com/@firebase/database/-/database-0.10.9.tgz#79f7b03cbe8a127dddfb7ea7748a3e923990f046"
+ integrity sha512-Jxi9SiE4cNOftO9YKlG71ccyWFw4kSM9AG/xYu6vWXUGBr39Uw1TvYougANOcU21Q0TP4J08VPGnOnpXk/FGbQ==
+ dependencies:
+ "@firebase/auth-interop-types" "0.1.6"
+ "@firebase/component" "0.5.5"
+ "@firebase/database-types" "0.7.3"
+ "@firebase/logger" "0.2.6"
+ "@firebase/util" "1.2.0"
+ faye-websocket "0.11.3"
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/@firebase/firestore-types/-/firestore-types-2.3.0.tgz#baf5c9470ba8be96bf0d76b83b413f03104cf565"
+ integrity sha512-QTW7NP7nDL0pgT/X53lyj+mIMh4nRQBBTBlRNQBt7eSyeqBf3ag3bxdQhCg358+5KbjYTC2/O6QtX9DlJZmh1A==
+
+"@firebase/[email protected]":
+ version "2.3.10"
+ resolved "https://registry.yarnpkg.com/@firebase/firestore/-/firestore-2.3.10.tgz#76d5137e5c37d33ccf3c5d77a9261c73493494b2"
+ integrity sha512-O+XpaZVhDIBK2fMwBUBR2BuhaXF6zTmz+afAuXAx18DK+2rFfLefbALZLaUYw0Aabe9pryy0c7OenzRbHA8n4Q==
+ dependencies:
+ "@firebase/component" "0.5.5"
+ "@firebase/firestore-types" "2.3.0"
+ "@firebase/logger" "0.2.6"
+ "@firebase/util" "1.2.0"
+ "@firebase/webchannel-wrapper" "0.5.1"
+ "@grpc/grpc-js" "^1.3.2"
+ "@grpc/proto-loader" "^0.6.0"
+ node-fetch "2.6.1"
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/@firebase/functions-types/-/functions-types-0.4.0.tgz#0b789f4fe9a9c0b987606c4da10139345b40f6b9"
+ integrity sha512-3KElyO3887HNxtxNF1ytGFrNmqD+hheqjwmT3sI09FaDCuaxGbOnsXAXH2eQ049XRXw9YQpHMgYws/aUNgXVyQ==
+
+"@firebase/[email protected]":
+ version "0.6.14"
+ resolved "https://registry.yarnpkg.com/@firebase/functions/-/functions-0.6.14.tgz#f6b452a53dc15299595bd079dd6ed4afb59e1a8c"
+ integrity sha512-Gthru/wHPQqkn651MenVM+qKVFFqIyFcNT3qfJUacibqrKlvDtYtaCMjFGAkChuGnYzNVnXJIaNrIHkEIII4Hg==
+ dependencies:
+ "@firebase/component" "0.5.5"
+ "@firebase/functions-types" "0.4.0"
+ "@firebase/messaging-types" "0.5.0"
+ node-fetch "2.6.1"
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "0.3.4"
+ resolved "https://registry.yarnpkg.com/@firebase/installations-types/-/installations-types-0.3.4.tgz#589a941d713f4f64bf9f4feb7f463505bab1afa2"
+ integrity sha512-RfePJFovmdIXb6rYwtngyxuEcWnOrzdZd9m7xAW0gRxDIjBT20n3BOhjpmgRWXo/DAxRmS7bRjWAyTHY9cqN7Q==
+
+"@firebase/[email protected]":
+ version "0.4.31"
+ resolved "https://registry.yarnpkg.com/@firebase/installations/-/installations-0.4.31.tgz#dbde30c0542fb4343b075f0574d4e0d0f4b49aa7"
+ integrity sha512-qWolhAgMHvD3avsNCl+K8+untzoDDFQIRR8At8kyWMKKosy0vttdWTWzjvDoZbyKU6r0RNlxDUWAgV88Q8EudQ==
+ dependencies:
+ "@firebase/component" "0.5.5"
+ "@firebase/installations-types" "0.3.4"
+ "@firebase/util" "1.2.0"
+ idb "3.0.2"
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "0.2.6"
+ resolved "https://registry.yarnpkg.com/@firebase/logger/-/logger-0.2.6.tgz#3aa2ca4fe10327cabf7808bd3994e88db26d7989"
+ integrity sha512-KIxcUvW/cRGWlzK9Vd2KB864HlUnCfdTH0taHE0sXW5Xl7+W68suaeau1oKNEqmc3l45azkd4NzXTCWZRZdXrw==
+
+"@firebase/[email protected]":
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/@firebase/messaging-types/-/messaging-types-0.5.0.tgz#c5d0ef309ced1758fda93ef3ac70a786de2e73c4"
+ integrity sha512-QaaBswrU6umJYb/ZYvjR5JDSslCGOH6D9P136PhabFAHLTR4TWjsaACvbBXuvwrfCXu10DtcjMxqfhdNIB1Xfg==
+
+"@firebase/[email protected]":
+ version "0.7.15"
+ resolved "https://registry.yarnpkg.com/@firebase/messaging/-/messaging-0.7.15.tgz#d3b9a053331238480860c71385819babda2076f3"
+ integrity sha512-81t6iJtqMBJF5LHTjDhlHUpbPZOV6dKhW0TueAoON4omc0SaDXgf4nnk6JkvZRfdcuOaP8848Cv53tvZPFFAYQ==
+ dependencies:
+ "@firebase/component" "0.5.5"
+ "@firebase/installations" "0.4.31"
+ "@firebase/messaging-types" "0.5.0"
+ "@firebase/util" "1.2.0"
+ idb "3.0.2"
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "0.0.13"
+ resolved "https://registry.yarnpkg.com/@firebase/performance-types/-/performance-types-0.0.13.tgz#58ce5453f57e34b18186f74ef11550dfc558ede6"
+ integrity sha512-6fZfIGjQpwo9S5OzMpPyqgYAUZcFzZxHFqOyNtorDIgNXq33nlldTL/vtaUZA8iT9TT5cJlCrF/jthKU7X21EA==
+
+"@firebase/[email protected]":
+ version "0.4.17"
+ resolved "https://registry.yarnpkg.com/@firebase/performance/-/performance-0.4.17.tgz#b160a4352f682c1039b49ec9d24d6c473a31b3c3"
+ integrity sha512-uhDs9rhdMrGraYHcd3CTRkGtcNap4hp6rAHTwJNIX56Z3RzQ1VW2ea9vvesl7EjFtEIPU0jfdrS32wV+qer5DQ==
+ dependencies:
+ "@firebase/component" "0.5.5"
+ "@firebase/installations" "0.4.31"
+ "@firebase/logger" "0.2.6"
+ "@firebase/performance-types" "0.0.13"
+ "@firebase/util" "1.2.0"
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "0.3.36"
+ resolved "https://registry.yarnpkg.com/@firebase/polyfill/-/polyfill-0.3.36.tgz#c057cce6748170f36966b555749472b25efdb145"
+ integrity sha512-zMM9oSJgY6cT2jx3Ce9LYqb0eIpDE52meIzd/oe/y70F+v9u1LDqk5kUF5mf16zovGBWMNFmgzlsh6Wj0OsFtg==
+ dependencies:
+ core-js "3.6.5"
+ promise-polyfill "8.1.3"
+ whatwg-fetch "2.0.4"
+
+"@firebase/[email protected]":
+ version "0.1.9"
+ resolved "https://registry.yarnpkg.com/@firebase/remote-config-types/-/remote-config-types-0.1.9.tgz#fe6bbe4d08f3b6e92fce30e4b7a9f4d6a96d6965"
+ integrity sha512-G96qnF3RYGbZsTRut7NBX0sxyczxt1uyCgXQuH/eAfUCngxjEGcZQnBdy6mvSdqdJh5mC31rWPO4v9/s7HwtzA==
+
+"@firebase/[email protected]":
+ version "0.1.42"
+ resolved "https://registry.yarnpkg.com/@firebase/remote-config/-/remote-config-0.1.42.tgz#84573ac2f1ee49cb9d4327a25c5625f2e274695d"
+ integrity sha512-hWwtAZmYLB274bxjV2cdMYhyBCUUqbYErihGx3rMyab76D+VbIxOuKJb2z0DS67jQG+SA3pr9/MtWsTPHV/l9g==
+ dependencies:
+ "@firebase/component" "0.5.5"
+ "@firebase/installations" "0.4.31"
+ "@firebase/logger" "0.2.6"
+ "@firebase/remote-config-types" "0.1.9"
+ "@firebase/util" "1.2.0"
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/@firebase/storage-types/-/storage-types-0.4.1.tgz#da6582ae217e3db485c90075dc71100ca5064cc6"
+ integrity sha512-IM4cRzAnQ6QZoaxVZ5MatBzqXVcp47hOlE28jd9xXw1M9V7gfjhmW0PALGFQx58tPVmuUwIKyoEbHZjV4qRJwQ==
+
+"@firebase/[email protected]":
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/@firebase/storage/-/storage-0.6.1.tgz#29d1568e78c9234af72a609409a36403346a60b8"
+ integrity sha512-00WEdmmKoKUHBsufUIUDgBS5ghAe8tCp1QbHQnnlf3aekAgFf8UKjfR6QMaHoEIzuZPhWPStQ5KrrIcWA/MMQg==
+ dependencies:
+ "@firebase/component" "0.5.5"
+ "@firebase/storage-types" "0.4.1"
+ "@firebase/util" "1.2.0"
+ node-fetch "2.6.1"
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/@firebase/util/-/util-1.2.0.tgz#4d4e419bf8c9bc1bc51308d1953dc2e4353c0770"
+ integrity sha512-8W9TTGImXr9cu+oyjBJ7yjoEd/IVAv0pBZA4c1uIuKrpGZi2ee38m+8xlZOBRmsAaOU/tR9DXz1WF/oeM6Fb7Q==
+ dependencies:
+ tslib "^2.1.0"
+
+"@firebase/[email protected]":
+ version "0.5.1"
+ resolved "https://registry.yarnpkg.com/@firebase/webchannel-wrapper/-/webchannel-wrapper-0.5.1.tgz#a64d1af3c62e3bb89576ec58af880980a562bf4e"
+ integrity sha512-dZMzN0uAjwJXWYYAcnxIwXqRTZw3o14hGe7O6uhwjD1ZQWPVYA5lASgnNskEBra0knVBsOXB4KXg+HnlKewN/A==
+
+"@grpc/grpc-js@^1.3.2":
+ version "1.3.6"
+ resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.3.6.tgz#6e2d17610c2c8df0f6ceab0e1968f563df74b173"
+ integrity sha512-v7+LQFbqZKmd/Tvf5/j1Xlbq6jXL/4d+gUtm2TNX4QiEC3ELWADmGr2dGlUyLl6aKTuYfsN72vAsO5zmavYkEg==
+ dependencies:
+ "@types/node" ">=12.12.47"
+
+"@grpc/proto-loader@^0.6.0":
+ version "0.6.4"
+ resolved "https://registry.yarnpkg.com/@grpc/proto-loader/-/proto-loader-0.6.4.tgz#5438c0d771e92274e77e631babdc14456441cbdc"
+ integrity sha512-7xvDvW/vJEcmLUltCUGOgWRPM8Oofv0eCFSVMuKqaqWJaXSzmB+m9hiyqe34QofAl4WAzIKUZZlinIF9FOHyTQ==
+ dependencies:
+ "@types/long" "^4.0.1"
+ lodash.camelcase "^4.3.0"
+ long "^4.0.0"
+ protobufjs "^6.10.0"
+ yargs "^16.1.1"
+
+"@hapi/[email protected]":
+ version "2.1.4"
+ resolved "https://registry.yarnpkg.com/@hapi/address/-/address-2.1.4.tgz#5d67ed43f3fd41a69d4b9ff7b56e7c0d1d0a81e5"
+ integrity sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ==
+
+"@hapi/[email protected]":
+ version "1.3.2"
+ resolved "https://registry.yarnpkg.com/@hapi/bourne/-/bourne-1.3.2.tgz#0a7095adea067243ce3283e1b56b8a8f453b242a"
+ integrity sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA==
+
+"@hapi/[email protected]", "@hapi/hoek@^8.3.0":
+ version "8.5.1"
+ resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-8.5.1.tgz#fde96064ca446dec8c55a8c2f130957b070c6e06"
+ integrity sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow==
+
+"@hapi/joi@^15.1.0":
+ version "15.1.1"
+ resolved "https://registry.yarnpkg.com/@hapi/joi/-/joi-15.1.1.tgz#c675b8a71296f02833f8d6d243b34c57b8ce19d7"
+ integrity sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ==
+ dependencies:
+ "@hapi/address" "2.x.x"
+ "@hapi/bourne" "1.x.x"
+ "@hapi/hoek" "8.x.x"
+ "@hapi/topo" "3.x.x"
+
+"@hapi/[email protected]":
+ version "3.1.6"
+ resolved "https://registry.yarnpkg.com/@hapi/topo/-/topo-3.1.6.tgz#68d935fa3eae7fdd5ab0d7f953f3205d8b2bfc29"
+ integrity sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ==
+ dependencies:
+ "@hapi/hoek" "^8.3.0"
+
+"@humanwhocodes/config-array@^0.5.0":
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.5.0.tgz#1407967d4c6eecd7388f83acf1eaf4d0c6e58ef9"
+ integrity sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==
+ dependencies:
+ "@humanwhocodes/object-schema" "^1.2.0"
+ debug "^4.1.1"
+ minimatch "^3.0.4"
+
+"@humanwhocodes/object-schema@^1.2.0":
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz#87de7af9c231826fdd68ac7258f77c429e0e5fcf"
+ integrity sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==
+
+"@istanbuljs/load-nyc-config@^1.0.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced"
+ integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==
+ dependencies:
+ camelcase "^5.3.1"
+ find-up "^4.1.0"
+ get-package-type "^0.1.0"
+ js-yaml "^3.13.1"
+ resolve-from "^5.0.0"
+
+"@istanbuljs/schema@^0.1.2":
+ version "0.1.3"
+ resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98"
+ integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==
+
+"@jest/console@^26.6.2":
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/@jest/console/-/console-26.6.2.tgz#4e04bc464014358b03ab4937805ee36a0aeb98f2"
+ integrity sha512-IY1R2i2aLsLr7Id3S6p2BA82GNWryt4oSvEXLAKc+L2zdi89dSkE8xC1C+0kpATG4JhBJREnQOH7/zmccM2B0g==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ jest-message-util "^26.6.2"
+ jest-util "^26.6.2"
+ slash "^3.0.0"
+
+"@jest/core@^26.6.0", "@jest/core@^26.6.3":
+ version "26.6.3"
+ resolved "https://registry.yarnpkg.com/@jest/core/-/core-26.6.3.tgz#7639fcb3833d748a4656ada54bde193051e45fad"
+ integrity sha512-xvV1kKbhfUqFVuZ8Cyo+JPpipAHHAV3kcDBftiduK8EICXmTFddryy3P7NfZt8Pv37rA9nEJBKCCkglCPt/Xjw==
+ dependencies:
+ "@jest/console" "^26.6.2"
+ "@jest/reporters" "^26.6.2"
+ "@jest/test-result" "^26.6.2"
+ "@jest/transform" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ "@types/node" "*"
+ ansi-escapes "^4.2.1"
+ chalk "^4.0.0"
+ exit "^0.1.2"
+ graceful-fs "^4.2.4"
+ jest-changed-files "^26.6.2"
+ jest-config "^26.6.3"
+ jest-haste-map "^26.6.2"
+ jest-message-util "^26.6.2"
+ jest-regex-util "^26.0.0"
+ jest-resolve "^26.6.2"
+ jest-resolve-dependencies "^26.6.3"
+ jest-runner "^26.6.3"
+ jest-runtime "^26.6.3"
+ jest-snapshot "^26.6.2"
+ jest-util "^26.6.2"
+ jest-validate "^26.6.2"
+ jest-watcher "^26.6.2"
+ micromatch "^4.0.2"
+ p-each-series "^2.1.0"
+ rimraf "^3.0.0"
+ slash "^3.0.0"
+ strip-ansi "^6.0.0"
+
+"@jest/environment@^26.6.0", "@jest/environment@^26.6.2":
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-26.6.2.tgz#ba364cc72e221e79cc8f0a99555bf5d7577cf92c"
+ integrity sha512-nFy+fHl28zUrRsCeMB61VDThV1pVTtlEokBRgqPrcT1JNq4yRNIyTHfyht6PqtUvY9IsuLGTrbG8kPXjSZIZwA==
+ dependencies:
+ "@jest/fake-timers" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ "@types/node" "*"
+ jest-mock "^26.6.2"
+
+"@jest/fake-timers@^26.6.2":
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-26.6.2.tgz#459c329bcf70cee4af4d7e3f3e67848123535aad"
+ integrity sha512-14Uleatt7jdzefLPYM3KLcnUl1ZNikaKq34enpb5XG9i81JpppDb5muZvonvKyrl7ftEHkKS5L5/eB/kxJ+bvA==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ "@sinonjs/fake-timers" "^6.0.1"
+ "@types/node" "*"
+ jest-message-util "^26.6.2"
+ jest-mock "^26.6.2"
+ jest-util "^26.6.2"
+
+"@jest/globals@^26.6.2":
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-26.6.2.tgz#5b613b78a1aa2655ae908eba638cc96a20df720a"
+ integrity sha512-85Ltnm7HlB/KesBUuALwQ68YTU72w9H2xW9FjZ1eL1U3lhtefjjl5c2MiUbpXt/i6LaPRvoOFJ22yCBSfQ0JIA==
+ dependencies:
+ "@jest/environment" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ expect "^26.6.2"
+
+"@jest/reporters@^26.6.2":
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-26.6.2.tgz#1f518b99637a5f18307bd3ecf9275f6882a667f6"
+ integrity sha512-h2bW53APG4HvkOnVMo8q3QXa6pcaNt1HkwVsOPMBV6LD/q9oSpxNSYZQYkAnjdMjrJ86UuYeLo+aEZClV6opnw==
+ dependencies:
+ "@bcoe/v8-coverage" "^0.2.3"
+ "@jest/console" "^26.6.2"
+ "@jest/test-result" "^26.6.2"
+ "@jest/transform" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ chalk "^4.0.0"
+ collect-v8-coverage "^1.0.0"
+ exit "^0.1.2"
+ glob "^7.1.2"
+ graceful-fs "^4.2.4"
+ istanbul-lib-coverage "^3.0.0"
+ istanbul-lib-instrument "^4.0.3"
+ istanbul-lib-report "^3.0.0"
+ istanbul-lib-source-maps "^4.0.0"
+ istanbul-reports "^3.0.2"
+ jest-haste-map "^26.6.2"
+ jest-resolve "^26.6.2"
+ jest-util "^26.6.2"
+ jest-worker "^26.6.2"
+ slash "^3.0.0"
+ source-map "^0.6.0"
+ string-length "^4.0.1"
+ terminal-link "^2.0.0"
+ v8-to-istanbul "^7.0.0"
+ optionalDependencies:
+ node-notifier "^8.0.0"
+
+"@jest/source-map@^26.6.2":
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-26.6.2.tgz#29af5e1e2e324cafccc936f218309f54ab69d535"
+ integrity sha512-YwYcCwAnNmOVsZ8mr3GfnzdXDAl4LaenZP5z+G0c8bzC9/dugL8zRmxZzdoTl4IaS3CryS1uWnROLPFmb6lVvA==
+ dependencies:
+ callsites "^3.0.0"
+ graceful-fs "^4.2.4"
+ source-map "^0.6.0"
+
+"@jest/test-result@^26.6.0", "@jest/test-result@^26.6.2":
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-26.6.2.tgz#55da58b62df134576cc95476efa5f7949e3f5f18"
+ integrity sha512-5O7H5c/7YlojphYNrK02LlDIV2GNPYisKwHm2QTKjNZeEzezCbwYs9swJySv2UfPMyZ0VdsmMv7jIlD/IKYQpQ==
+ dependencies:
+ "@jest/console" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ "@types/istanbul-lib-coverage" "^2.0.0"
+ collect-v8-coverage "^1.0.0"
+
+"@jest/test-sequencer@^26.6.3":
+ version "26.6.3"
+ resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-26.6.3.tgz#98e8a45100863886d074205e8ffdc5a7eb582b17"
+ integrity sha512-YHlVIjP5nfEyjlrSr8t/YdNfU/1XEt7c5b4OxcXCjyRhjzLYu/rO69/WHPuYcbCWkz8kAeZVZp2N2+IOLLEPGw==
+ dependencies:
+ "@jest/test-result" "^26.6.2"
+ graceful-fs "^4.2.4"
+ jest-haste-map "^26.6.2"
+ jest-runner "^26.6.3"
+ jest-runtime "^26.6.3"
+
+"@jest/transform@^26.6.2":
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-26.6.2.tgz#5ac57c5fa1ad17b2aae83e73e45813894dcf2e4b"
+ integrity sha512-E9JjhUgNzvuQ+vVAL21vlyfy12gP0GhazGgJC4h6qUt1jSdUXGWJ1wfu/X7Sd8etSgxV4ovT1pb9v5D6QW4XgA==
+ dependencies:
+ "@babel/core" "^7.1.0"
+ "@jest/types" "^26.6.2"
+ babel-plugin-istanbul "^6.0.0"
+ chalk "^4.0.0"
+ convert-source-map "^1.4.0"
+ fast-json-stable-stringify "^2.0.0"
+ graceful-fs "^4.2.4"
+ jest-haste-map "^26.6.2"
+ jest-regex-util "^26.0.0"
+ jest-util "^26.6.2"
+ micromatch "^4.0.2"
+ pirates "^4.0.1"
+ slash "^3.0.0"
+ source-map "^0.6.1"
+ write-file-atomic "^3.0.0"
+
+"@jest/types@^26.6.0", "@jest/types@^26.6.2":
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/@jest/types/-/types-26.6.2.tgz#bef5a532030e1d88a2f5a6d933f84e97226ed48e"
+ integrity sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ==
+ dependencies:
+ "@types/istanbul-lib-coverage" "^2.0.0"
+ "@types/istanbul-reports" "^3.0.0"
+ "@types/node" "*"
+ "@types/yargs" "^15.0.0"
+ chalk "^4.0.0"
+
+"@nodelib/[email protected]":
+ version "2.1.4"
+ resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz#d4b3549a5db5de2683e0c1071ab4f140904bbf69"
+ integrity sha512-33g3pMJk3bg5nXbL/+CY6I2eJDzZAni49PfJnL5fghPTggPvBd/pFNSgJsdAgWptuFu7qq/ERvOYFlhvsLTCKA==
+ dependencies:
+ "@nodelib/fs.stat" "2.0.4"
+ run-parallel "^1.1.9"
+
+"@nodelib/[email protected]", "@nodelib/fs.stat@^2.0.2":
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.4.tgz#a3f2dd61bab43b8db8fa108a121cfffe4c676655"
+ integrity sha512-IYlHJA0clt2+Vg7bccq+TzRdJvv19c2INqBSsoOLp1je7xjtr7J26+WXR72MCdvU9q1qTzIWDfhMf+DRvQJK4Q==
+
+"@nodelib/fs.walk@^1.2.3":
+ version "1.2.6"
+ resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.6.tgz#cce9396b30aa5afe9e3756608f5831adcb53d063"
+ integrity sha512-8Broas6vTtW4GIXTAHDoE32hnN2M5ykgCpWGbuXHQ15vEMqr23pB76e/GZcYsZCHALv50ktd24qhEyKr6wBtow==
+ dependencies:
+ "@nodelib/fs.scandir" "2.1.4"
+ fastq "^1.6.0"
+
+"@npmcli/move-file@^1.0.1":
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/@npmcli/move-file/-/move-file-1.1.2.tgz#1a82c3e372f7cae9253eb66d72543d6b8685c674"
+ integrity sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==
+ dependencies:
+ mkdirp "^1.0.4"
+ rimraf "^3.0.2"
+
+"@pmmmwh/[email protected]":
+ version "0.4.3"
+ resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.4.3.tgz#1eec460596d200c0236bf195b078a5d1df89b766"
+ integrity sha512-br5Qwvh8D2OQqSXpd1g/xqXKnK0r+Jz6qVKBbWmpUcrbGOxUrf39V5oZ1876084CGn18uMdR5uvPqBv9UqtBjQ==
+ dependencies:
+ ansi-html "^0.0.7"
+ error-stack-parser "^2.0.6"
+ html-entities "^1.2.1"
+ native-url "^0.2.6"
+ schema-utils "^2.6.5"
+ source-map "^0.7.3"
+
+"@popperjs/[email protected]":
+ version "2.4.4"
+ resolved "https://registry.yarnpkg.com/@popperjs/core/-/core-2.4.4.tgz#11d5db19bd178936ec89cd84519c4de439574398"
+ integrity sha512-1oO6+dN5kdIA3sKPZhRGJTfGVP4SWV6KqlMOwry4J3HfyD68sl/3KmG7DeYUzvN+RbhXDnv/D8vNNB8168tAMg==
+
+"@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2":
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/@protobufjs/aspromise/-/aspromise-1.1.2.tgz#9b8b0cc663d669a7d8f6f5d0893a14d348f30fbf"
+ integrity sha1-m4sMxmPWaafY9vXQiToU00jzD78=
+
+"@protobufjs/base64@^1.1.2":
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/@protobufjs/base64/-/base64-1.1.2.tgz#4c85730e59b9a1f1f349047dbf24296034bb2735"
+ integrity sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==
+
+"@protobufjs/codegen@^2.0.4":
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/@protobufjs/codegen/-/codegen-2.0.4.tgz#7ef37f0d010fb028ad1ad59722e506d9262815cb"
+ integrity sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==
+
+"@protobufjs/eventemitter@^1.1.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz#355cbc98bafad5978f9ed095f397621f1d066b70"
+ integrity sha1-NVy8mLr61ZePntCV85diHx0Ga3A=
+
+"@protobufjs/fetch@^1.1.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@protobufjs/fetch/-/fetch-1.1.0.tgz#ba99fb598614af65700c1619ff06d454b0d84c45"
+ integrity sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=
+ dependencies:
+ "@protobufjs/aspromise" "^1.1.1"
+ "@protobufjs/inquire" "^1.1.0"
+
+"@protobufjs/float@^1.0.2":
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/@protobufjs/float/-/float-1.0.2.tgz#5e9e1abdcb73fc0a7cb8b291df78c8cbd97b87d1"
+ integrity sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=
+
+"@protobufjs/inquire@^1.1.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@protobufjs/inquire/-/inquire-1.1.0.tgz#ff200e3e7cf2429e2dcafc1140828e8cc638f089"
+ integrity sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=
+
+"@protobufjs/path@^1.1.2":
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/@protobufjs/path/-/path-1.1.2.tgz#6cc2b20c5c9ad6ad0dccfd21ca7673d8d7fbf68d"
+ integrity sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=
+
+"@protobufjs/pool@^1.1.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@protobufjs/pool/-/pool-1.1.0.tgz#09fd15f2d6d3abfa9b65bc366506d6ad7846ff54"
+ integrity sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=
+
+"@protobufjs/utf8@^1.1.0":
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570"
+ integrity sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=
+
+"@reach/[email protected]":
+ version "0.13.2"
+ resolved "https://registry.yarnpkg.com/@reach/alert/-/alert-0.13.2.tgz#71c4a848d51341f1d6d9eaae060975391c224870"
+ integrity sha512-LDz83AXCrClyq/MWe+0vaZfHp1Ytqn+kgL5VxG7rirUvmluWaj/snxzfNPWn0Ma4K2YENmXXRC/iHt5X95SqIg==
+ dependencies:
+ "@reach/utils" "0.13.2"
+ "@reach/visually-hidden" "0.13.2"
+ prop-types "^15.7.2"
+ tslib "^2.1.0"
+
+"@reach/[email protected]":
+ version "0.13.2"
+ resolved "https://registry.yarnpkg.com/@reach/utils/-/utils-0.13.2.tgz#87e8fef8ebfe583fa48250238a1a3ed03189fcc8"
+ integrity sha512-3ir6cN60zvUrwjOJu7C6jec/samqAeyAB12ZADK+qjnmQPdzSYldrFWwDVV5H0WkhbYXR3uh+eImu13hCetNPQ==
+ dependencies:
+ "@types/warning" "^3.0.0"
+ tslib "^2.1.0"
+ warning "^4.0.3"
+
+"@reach/[email protected]":
+ version "0.13.2"
+ resolved "https://registry.yarnpkg.com/@reach/visually-hidden/-/visually-hidden-0.13.2.tgz#ee21de376a7e57e60dc92d95a671073796caa17e"
+ integrity sha512-sPZwNS0/duOuG0mYwE5DmgEAzW9VhgU3aIt1+mrfT/xiT9Cdncqke+kRBQgU708q/Ttm9tWsoHni03nn/SuPTQ==
+ dependencies:
+ prop-types "^15.7.2"
+ tslib "^2.1.0"
+
+"@rollup/plugin-node-resolve@^7.1.1":
+ version "7.1.3"
+ resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-7.1.3.tgz#80de384edfbd7bfc9101164910f86078151a3eca"
+ integrity sha512-RxtSL3XmdTAE2byxekYLnx+98kEUOrPHF/KRVjLH+DEIHy6kjIw7YINQzn+NXiH/NTrQLAwYs0GWB+csWygA9Q==
+ dependencies:
+ "@rollup/pluginutils" "^3.0.8"
+ "@types/resolve" "0.0.8"
+ builtin-modules "^3.1.0"
+ is-module "^1.0.0"
+ resolve "^1.14.2"
+
+"@rollup/plugin-replace@^2.3.1":
+ version "2.4.1"
+ resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-2.4.1.tgz#c411b5ab72809fb1bfc8b487d8d02eef661460d3"
+ integrity sha512-XwC1oK5rrtRJ0tn1ioLHS6OV5JTluJF7QE1J/q1hN3bquwjnVxjtMyY9iCnoyH9DQbf92CxajB3o98wZbP3oAQ==
+ dependencies:
+ "@rollup/pluginutils" "^3.1.0"
+ magic-string "^0.25.7"
+
+"@rollup/pluginutils@^3.0.8", "@rollup/pluginutils@^3.1.0":
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b"
+ integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg==
+ dependencies:
+ "@types/estree" "0.0.39"
+ estree-walker "^1.0.1"
+ picomatch "^2.2.2"
+
+"@sinonjs/commons@^1.7.0":
+ version "1.8.2"
+ resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.2.tgz#858f5c4b48d80778fde4b9d541f27edc0d56488b"
+ integrity sha512-sruwd86RJHdsVf/AtBoijDmUqJp3B6hF/DGC23C+JaegnDHaZyewCjoVGTdg3J0uz3Zs7NnIT05OBOmML72lQw==
+ dependencies:
+ type-detect "4.0.8"
+
+"@sinonjs/fake-timers@^6.0.1":
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz#293674fccb3262ac782c7aadfdeca86b10c75c40"
+ integrity sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA==
+ dependencies:
+ "@sinonjs/commons" "^1.7.0"
+
+"@surma/rollup-plugin-off-main-thread@^1.1.1":
+ version "1.4.2"
+ resolved "https://registry.yarnpkg.com/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-1.4.2.tgz#e6786b6af5799f82f7ab3a82e53f6182d2b91a58"
+ integrity sha512-yBMPqmd1yEJo/280PAMkychuaALyQ9Lkb5q1ck3mjJrFuEobIfhnQ4J3mbvBoISmR3SWMWV+cGB/I0lCQee79A==
+ dependencies:
+ ejs "^2.6.1"
+ magic-string "^0.25.0"
+
+"@svgr/babel-plugin-add-jsx-attribute@^5.4.0":
+ version "5.4.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906"
+ integrity sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg==
+
+"@svgr/babel-plugin-remove-jsx-attribute@^5.4.0":
+ version "5.4.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz#6b2c770c95c874654fd5e1d5ef475b78a0a962ef"
+ integrity sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg==
+
+"@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1":
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz#25621a8915ed7ad70da6cea3d0a6dbc2ea933efd"
+ integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA==
+
+"@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1":
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz#0b221fc57f9fcd10e91fe219e2cd0dd03145a897"
+ integrity sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ==
+
+"@svgr/babel-plugin-svg-dynamic-title@^5.4.0":
+ version "5.4.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz#139b546dd0c3186b6e5db4fefc26cb0baea729d7"
+ integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg==
+
+"@svgr/babel-plugin-svg-em-dimensions@^5.4.0":
+ version "5.4.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz#6543f69526632a133ce5cabab965deeaea2234a0"
+ integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw==
+
+"@svgr/babel-plugin-transform-react-native-svg@^5.4.0":
+ version "5.4.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz#00bf9a7a73f1cad3948cdab1f8dfb774750f8c80"
+ integrity sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q==
+
+"@svgr/babel-plugin-transform-svg-component@^5.5.0":
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz#583a5e2a193e214da2f3afeb0b9e8d3250126b4a"
+ integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ==
+
+"@svgr/babel-preset@^5.5.0":
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/@svgr/babel-preset/-/babel-preset-5.5.0.tgz#8af54f3e0a8add7b1e2b0fcd5a882c55393df327"
+ integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig==
+ dependencies:
+ "@svgr/babel-plugin-add-jsx-attribute" "^5.4.0"
+ "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0"
+ "@svgr/babel-plugin-remove-jsx-empty-expression" "^5.0.1"
+ "@svgr/babel-plugin-replace-jsx-attribute-value" "^5.0.1"
+ "@svgr/babel-plugin-svg-dynamic-title" "^5.4.0"
+ "@svgr/babel-plugin-svg-em-dimensions" "^5.4.0"
+ "@svgr/babel-plugin-transform-react-native-svg" "^5.4.0"
+ "@svgr/babel-plugin-transform-svg-component" "^5.5.0"
+
+"@svgr/core@^5.5.0":
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/@svgr/core/-/core-5.5.0.tgz#82e826b8715d71083120fe8f2492ec7d7874a579"
+ integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ==
+ dependencies:
+ "@svgr/plugin-jsx" "^5.5.0"
+ camelcase "^6.2.0"
+ cosmiconfig "^7.0.0"
+
+"@svgr/hast-util-to-babel-ast@^5.5.0":
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz#5ee52a9c2533f73e63f8f22b779f93cd432a5461"
+ integrity sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ==
+ dependencies:
+ "@babel/types" "^7.12.6"
+
+"@svgr/plugin-jsx@^5.5.0":
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/@svgr/plugin-jsx/-/plugin-jsx-5.5.0.tgz#1aa8cd798a1db7173ac043466d7b52236b369000"
+ integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA==
+ dependencies:
+ "@babel/core" "^7.12.3"
+ "@svgr/babel-preset" "^5.5.0"
+ "@svgr/hast-util-to-babel-ast" "^5.5.0"
+ svg-parser "^2.0.2"
+
+"@svgr/plugin-svgo@^5.5.0":
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/@svgr/plugin-svgo/-/plugin-svgo-5.5.0.tgz#02da55d85320549324e201c7b2e53bf431fcc246"
+ integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ==
+ dependencies:
+ cosmiconfig "^7.0.0"
+ deepmerge "^4.2.2"
+ svgo "^1.2.2"
+
+"@svgr/[email protected]":
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/@svgr/webpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640"
+ integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g==
+ dependencies:
+ "@babel/core" "^7.12.3"
+ "@babel/plugin-transform-react-constant-elements" "^7.12.1"
+ "@babel/preset-env" "^7.12.1"
+ "@babel/preset-react" "^7.12.5"
+ "@svgr/core" "^5.5.0"
+ "@svgr/plugin-jsx" "^5.5.0"
+ "@svgr/plugin-svgo" "^5.5.0"
+ loader-utils "^2.0.0"
+
+"@types/anymatch@*":
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/@types/anymatch/-/anymatch-1.3.1.tgz#336badc1beecb9dacc38bea2cf32adf627a8421a"
+ integrity sha512-/+CRPXpBDpo2RK9C68N3b2cOvO0Cf5B9aPijHsoDQTHivnGSObdOF2BRQOYjojWTDy6nQvMjmqRXIxH55VjxxA==
+
+"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.7":
+ version "7.1.12"
+ resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.12.tgz#4d8e9e51eb265552a7e4f1ff2219ab6133bdfb2d"
+ integrity sha512-wMTHiiTiBAAPebqaPiPDLFA4LYPKr6Ph0Xq/6rq1Ur3v66HXyG+clfR9CNETkD7MQS8ZHvpQOtA53DLws5WAEQ==
+ dependencies:
+ "@babel/parser" "^7.1.0"
+ "@babel/types" "^7.0.0"
+ "@types/babel__generator" "*"
+ "@types/babel__template" "*"
+ "@types/babel__traverse" "*"
+
+"@types/babel__generator@*":
+ version "7.6.2"
+ resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.2.tgz#f3d71178e187858f7c45e30380f8f1b7415a12d8"
+ integrity sha512-MdSJnBjl+bdwkLskZ3NGFp9YcXGx5ggLpQQPqtgakVhsWK0hTtNYhjpZLlWQTviGTvF8at+Bvli3jV7faPdgeQ==
+ dependencies:
+ "@babel/types" "^7.0.0"
+
+"@types/babel__template@*":
+ version "7.4.0"
+ resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.0.tgz#0c888dd70b3ee9eebb6e4f200e809da0076262be"
+ integrity sha512-NTPErx4/FiPCGScH7foPyr+/1Dkzkni+rHiYHHoTjvwou7AQzJkNeD60A9CXRy+ZEN2B1bggmkTMCDb+Mv5k+A==
+ dependencies:
+ "@babel/parser" "^7.1.0"
+ "@babel/types" "^7.0.0"
+
+"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6":
+ version "7.11.0"
+ resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.11.0.tgz#b9a1efa635201ba9bc850323a8793ee2d36c04a0"
+ integrity sha512-kSjgDMZONiIfSH1Nxcr5JIRMwUetDki63FSQfpTCz8ogF3Ulqm8+mr5f78dUYs6vMiB6gBusQqfQmBvHZj/lwg==
+ dependencies:
+ "@babel/types" "^7.3.0"
+
+"@types/d3-path@^2":
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/@types/d3-path/-/d3-path-2.0.1.tgz#ca03dfa8b94d8add97ad0cd97e96e2006b4763cb"
+ integrity sha512-6K8LaFlztlhZO7mwsZg7ClRsdLg3FJRzIIi6SZXDWmmSJc2x8dd2VkESbLXdk3p8cuvz71f36S0y8Zv2AxqvQw==
+
+"@types/d3-scale@^3.0.0":
+ version "3.3.2"
+ resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-3.3.2.tgz#18c94e90f4f1c6b1ee14a70f14bfca2bd1c61d06"
+ integrity sha512-gGqr7x1ost9px3FvIfUMi5XA/F/yAf4UkUDtdQhpH92XCT0Oa7zkkRzY61gPVJq+DxpHn/btouw5ohWkbBsCzQ==
+ dependencies:
+ "@types/d3-time" "^2"
+
+"@types/d3-shape@^2.0.0":
+ version "2.1.3"
+ resolved "https://registry.yarnpkg.com/@types/d3-shape/-/d3-shape-2.1.3.tgz#35d397b9e687abaa0de82343b250b9897b8cacf3"
+ integrity sha512-HAhCel3wP93kh4/rq+7atLdybcESZ5bRHDEZUojClyZWsRuEMo3A52NGYJSh48SxfxEU6RZIVbZL2YFZ2OAlzQ==
+ dependencies:
+ "@types/d3-path" "^2"
+
+"@types/d3-time@^2":
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-2.1.1.tgz#743fdc821c81f86537cbfece07093ac39b4bc342"
+ integrity sha512-9MVYlmIgmRR31C5b4FVSWtuMmBHh2mOWQYfl7XAYOa8dsnb7iEmUmRSWSFgXFtkjxO65d7hTUHQC+RhR/9IWFg==
+
+"@types/eslint@^7.2.6":
+ version "7.2.6"
+ resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-7.2.6.tgz#5e9aff555a975596c03a98b59ecd103decc70c3c"
+ integrity sha512-I+1sYH+NPQ3/tVqCeUSBwTE/0heyvtXqpIopUUArlBm0Kpocb8FbMa3AZ/ASKIFpN3rnEx932TTXDbt9OXsNDw==
+ dependencies:
+ "@types/estree" "*"
+ "@types/json-schema" "*"
+
+"@types/estree@*":
+ version "0.0.46"
+ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.46.tgz#0fb6bfbbeabd7a30880504993369c4bf1deab1fe"
+ integrity sha512-laIjwTQaD+5DukBZaygQ79K1Z0jb1bPEMRrkXSLjtCcZm+abyp5YbrqpSLzD42FwWW6gK/aS4NYpJ804nG2brg==
+
+"@types/[email protected]":
+ version "0.0.39"
+ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f"
+ integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==
+
+"@types/glob@^7.1.1":
+ version "7.1.3"
+ resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.3.tgz#e6ba80f36b7daad2c685acd9266382e68985c183"
+ integrity sha512-SEYeGAIQIQX8NN6LDKprLjbrd5dARM5EXsd8GI/A5l0apYI1fGMWgPHSe4ZKL4eozlAyI+doUE9XbYS4xCkQ1w==
+ dependencies:
+ "@types/minimatch" "*"
+ "@types/node" "*"
+
+"@types/graceful-fs@^4.1.2":
+ version "4.1.5"
+ resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15"
+ integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==
+ dependencies:
+ "@types/node" "*"
+
+"@types/html-minifier-terser@^5.0.0":
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-5.1.1.tgz#3c9ee980f1a10d6021ae6632ca3e79ca2ec4fb50"
+ integrity sha512-giAlZwstKbmvMk1OO7WXSj4OZ0keXAcl2TQq4LWHiiPH2ByaH7WeUzng+Qej8UPxxv+8lRTuouo0iaNDBuzIBA==
+
+"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1":
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz#4ba8ddb720221f432e443bd5f9117fd22cfd4762"
+ integrity sha512-sz7iLqvVUg1gIedBOvlkxPlc8/uVzyS5OwGz1cKjXzkl3FpL3al0crU8YGU1WoHkxn0Wxbw5tyi6hvzJKNzFsw==
+
+"@types/istanbul-lib-report@*":
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686"
+ integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==
+ dependencies:
+ "@types/istanbul-lib-coverage" "*"
+
+"@types/istanbul-reports@^3.0.0":
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.0.tgz#508b13aa344fa4976234e75dddcc34925737d821"
+ integrity sha512-nwKNbvnwJ2/mndE9ItP/zc2TCzw6uuodnF4EHYWD+gCQDVBuRQL5UzbZD0/ezy1iKsFU2ZQiDqg4M9dN4+wZgA==
+ dependencies:
+ "@types/istanbul-lib-report" "*"
+
+"@types/json-schema@*", "@types/json-schema@^7.0.3", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.6":
+ version "7.0.7"
+ resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.7.tgz#98a993516c859eb0d5c4c8f098317a9ea68db9ad"
+ integrity sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA==
+
+"@types/json5@^0.0.29":
+ version "0.0.29"
+ resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee"
+ integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4=
+
+"@types/[email protected]":
+ version "4.6.6"
+ resolved "https://registry.yarnpkg.com/@types/lodash.mergewith/-/lodash.mergewith-4.6.6.tgz#c4698f5b214a433ff35cb2c75ee6ec7f99d79f10"
+ integrity sha512-RY/8IaVENjG19rxTZu9Nukqh0W2UrYgmBj5sdns4hWRZaV8PqR7wIKHFKzvOTjo4zVRV7sVI+yFhAJql12Kfqg==
+ dependencies:
+ "@types/lodash" "*"
+
+"@types/lodash@*":
+ version "4.14.172"
+ resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.172.tgz#aad774c28e7bfd7a67de25408e03ee5a8c3d028a"
+ integrity sha512-/BHF5HAx3em7/KkzVKm3LrsD6HZAXuXO1AJZQ3cRRBZj4oHZDviWPYu0aEplAqDFNHZPW6d3G7KN+ONcCCC7pw==
+
+"@types/long@^4.0.1":
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.1.tgz#459c65fa1867dafe6a8f322c4c51695663cc55e9"
+ integrity sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==
+
+"@types/minimatch@*":
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d"
+ integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==
+
+"@types/node@*":
+ version "14.14.31"
+ resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.31.tgz#72286bd33d137aa0d152d47ec7c1762563d34055"
+ integrity sha512-vFHy/ezP5qI0rFgJ7aQnjDXwAMrG0KqqIH7tQG5PPv3BWBayOPIQNBjVc/P6hhdZfMx51REc6tfDNXHUio893g==
+
+"@types/node@>=12.12.47", "@types/node@>=13.7.0":
+ version "16.4.10"
+ resolved "https://registry.yarnpkg.com/@types/node/-/node-16.4.10.tgz#e57e2a54fc6da58da94b3571b1cb456d39f88597"
+ integrity sha512-TmVHsm43br64js9BqHWqiDZA+xMtbUpI1MBIA0EyiBmoV9pcEYFOSdj5fr6enZNfh4fChh+AGOLIzGwJnkshyQ==
+
+"@types/normalize-package-data@^2.4.0":
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e"
+ integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA==
+
+"@types/parse-json@^4.0.0":
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0"
+ integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==
+
+"@types/prettier@^2.0.0":
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.2.1.tgz#374e31645d58cb18a07b3ecd8e9dede4deb2cccd"
+ integrity sha512-DxZZbyMAM9GWEzXL+BMZROWz9oo6A9EilwwOMET2UVu2uZTqMWS5S69KVtuVKaRjCUpcrOXRalet86/OpG4kqw==
+
+"@types/q@^1.5.1":
+ version "1.5.4"
+ resolved "https://registry.yarnpkg.com/@types/q/-/q-1.5.4.tgz#15925414e0ad2cd765bfef58842f7e26a7accb24"
+ integrity sha512-1HcDas8SEj4z1Wc696tH56G8OlRaH/sqZOynNNB+HF0WOeXPaxTtbYzJY2oEfiUxjSKjhCKr+MvR7dCHcEelug==
+
+"@types/resize-observer-browser@^0.1.5":
+ version "0.1.6"
+ resolved "https://registry.yarnpkg.com/@types/resize-observer-browser/-/resize-observer-browser-0.1.6.tgz#d8e6c2f830e2650dc06fe74464472ff64b54a302"
+ integrity sha512-61IfTac0s9jvNtBCpyo86QeaN8qqpMGHdK0uGKCCIy2dt5/Yk84VduHIdWAcmkC5QvdkPL0p5eWYgUZtHKKUVg==
+
+"@types/[email protected]":
+ version "0.0.8"
+ resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-0.0.8.tgz#f26074d238e02659e323ce1a13d041eee280e194"
+ integrity sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==
+ dependencies:
+ "@types/node" "*"
+
+"@types/source-list-map@*":
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/@types/source-list-map/-/source-list-map-0.1.2.tgz#0078836063ffaf17412349bba364087e0ac02ec9"
+ integrity sha512-K5K+yml8LTo9bWJI/rECfIPrGgxdpeNbj+d53lwN4QjW1MCwlkhUms+gtdzigTeUyBr09+u8BwOIY3MXvHdcsA==
+
+"@types/stack-utils@^2.0.0":
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.0.tgz#7036640b4e21cc2f259ae826ce843d277dad8cff"
+ integrity sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw==
+
+"@types/tapable@*", "@types/tapable@^1.0.5":
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/@types/tapable/-/tapable-1.0.6.tgz#a9ca4b70a18b270ccb2bc0aaafefd1d486b7ea74"
+ integrity sha512-W+bw9ds02rAQaMvaLYxAbJ6cvguW/iJXNT6lTssS1ps6QdrMKttqEAMEG/b5CR8TZl3/L7/lH0ZV5nNR1LXikA==
+
+"@types/[email protected]":
+ version "1.4.2"
+ resolved "https://registry.yarnpkg.com/@types/tinycolor2/-/tinycolor2-1.4.2.tgz#721ca5c5d1a2988b4a886e35c2ffc5735b6afbdf"
+ integrity sha512-PeHg/AtdW6aaIO2a+98Xj7rWY4KC1E6yOy7AFknJQ7VXUGNrMlyxDFxJo7HqLtjQms/ZhhQX52mLVW/EX3JGOw==
+
+"@types/uglify-js@*":
+ version "3.12.0"
+ resolved "https://registry.yarnpkg.com/@types/uglify-js/-/uglify-js-3.12.0.tgz#2bb061c269441620d46b946350c8f16d52ef37c5"
+ integrity sha512-sYAF+CF9XZ5cvEBkI7RtrG9g2GtMBkviTnBxYYyq+8BWvO4QtXfwwR6a2LFwCi4evMKZfpv6U43ViYvv17Wz3Q==
+ dependencies:
+ source-map "^0.6.1"
+
+"@types/warning@^3.0.0":
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/@types/warning/-/warning-3.0.0.tgz#0d2501268ad8f9962b740d387c4654f5f8e23e52"
+ integrity sha1-DSUBJorY+ZYrdA04fEZU9fjiPlI=
+
+"@types/webpack-sources@*":
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/@types/webpack-sources/-/webpack-sources-2.1.0.tgz#8882b0bd62d1e0ce62f183d0d01b72e6e82e8c10"
+ integrity sha512-LXn/oYIpBeucgP1EIJbKQ2/4ZmpvRl+dlrFdX7+94SKRUV3Evy3FsfMZY318vGhkWUS5MPhtOM3w1/hCOAOXcg==
+ dependencies:
+ "@types/node" "*"
+ "@types/source-list-map" "*"
+ source-map "^0.7.3"
+
+"@types/webpack@^4.41.8":
+ version "4.41.26"
+ resolved "https://registry.yarnpkg.com/@types/webpack/-/webpack-4.41.26.tgz#27a30d7d531e16489f9c7607c747be6bc1a459ef"
+ integrity sha512-7ZyTfxjCRwexh+EJFwRUM+CDB2XvgHl4vfuqf1ZKrgGvcS5BrNvPQqJh3tsZ0P6h6Aa1qClVHaJZszLPzpqHeA==
+ dependencies:
+ "@types/anymatch" "*"
+ "@types/node" "*"
+ "@types/tapable" "*"
+ "@types/uglify-js" "*"
+ "@types/webpack-sources" "*"
+ source-map "^0.6.0"
+
+"@types/yargs-parser@*":
+ version "20.2.0"
+ resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-20.2.0.tgz#dd3e6699ba3237f0348cd085e4698780204842f9"
+ integrity sha512-37RSHht+gzzgYeobbG+KWryeAW8J33Nhr69cjTqSYymXVZEN9NbRYWoYlRtDhHKPVT1FyNKwaTPC1NynKZpzRA==
+
+"@types/yargs@^15.0.0":
+ version "15.0.13"
+ resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-15.0.13.tgz#34f7fec8b389d7f3c1fd08026a5763e072d3c6dc"
+ integrity sha512-kQ5JNTrbDv3Rp5X2n/iUu37IJBDU2gsZ5R/g1/KHOOEc5IKfUFjXT6DENPGduh08I/pamwtEq4oul7gUqKTQDQ==
+ dependencies:
+ "@types/yargs-parser" "*"
+
+"@typescript-eslint/eslint-plugin@^4.5.0":
+ version "4.15.2"
+ resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.15.2.tgz#981b26b4076c62a5a55873fbef3fe98f83360c61"
+ integrity sha512-uiQQeu9tWl3f1+oK0yoAv9lt/KXO24iafxgQTkIYO/kitruILGx3uH+QtIAHqxFV+yIsdnJH+alel9KuE3J15Q==
+ dependencies:
+ "@typescript-eslint/experimental-utils" "4.15.2"
+ "@typescript-eslint/scope-manager" "4.15.2"
+ debug "^4.1.1"
+ functional-red-black-tree "^1.0.1"
+ lodash "^4.17.15"
+ regexpp "^3.0.0"
+ semver "^7.3.2"
+ tsutils "^3.17.1"
+
+"@typescript-eslint/[email protected]", "@typescript-eslint/experimental-utils@^4.0.1":
+ version "4.15.2"
+ resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-4.15.2.tgz#5efd12355bd5b535e1831282e6cf465b9a71cf36"
+ integrity sha512-Fxoshw8+R5X3/Vmqwsjc8nRO/7iTysRtDqx6rlfLZ7HbT8TZhPeQqbPjTyk2RheH3L8afumecTQnUc9EeXxohQ==
+ dependencies:
+ "@types/json-schema" "^7.0.3"
+ "@typescript-eslint/scope-manager" "4.15.2"
+ "@typescript-eslint/types" "4.15.2"
+ "@typescript-eslint/typescript-estree" "4.15.2"
+ eslint-scope "^5.0.0"
+ eslint-utils "^2.0.0"
+
+"@typescript-eslint/experimental-utils@^3.10.1":
+ version "3.10.1"
+ resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-3.10.1.tgz#e179ffc81a80ebcae2ea04e0332f8b251345a686"
+ integrity sha512-DewqIgscDzmAfd5nOGe4zm6Bl7PKtMG2Ad0KG8CUZAHlXfAKTF9Ol5PXhiMh39yRL2ChRH1cuuUGOcVyyrhQIw==
+ dependencies:
+ "@types/json-schema" "^7.0.3"
+ "@typescript-eslint/types" "3.10.1"
+ "@typescript-eslint/typescript-estree" "3.10.1"
+ eslint-scope "^5.0.0"
+ eslint-utils "^2.0.0"
+
+"@typescript-eslint/parser@^4.5.0":
+ version "4.15.2"
+ resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.15.2.tgz#c804474321ef76a3955aec03664808f0d6e7872e"
+ integrity sha512-SHeF8xbsC6z2FKXsaTb1tBCf0QZsjJ94H6Bo51Y1aVEZ4XAefaw5ZAilMoDPlGghe+qtq7XdTiDlGfVTOmvA+Q==
+ dependencies:
+ "@typescript-eslint/scope-manager" "4.15.2"
+ "@typescript-eslint/types" "4.15.2"
+ "@typescript-eslint/typescript-estree" "4.15.2"
+ debug "^4.1.1"
+
+"@typescript-eslint/[email protected]":
+ version "4.15.2"
+ resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.15.2.tgz#5725bda656995960ae1d004bfd1cd70320f37f4f"
+ integrity sha512-Zm0tf/MSKuX6aeJmuXexgdVyxT9/oJJhaCkijv0DvJVT3ui4zY6XYd6iwIo/8GEZGy43cd7w1rFMiCLHbRzAPQ==
+ dependencies:
+ "@typescript-eslint/types" "4.15.2"
+ "@typescript-eslint/visitor-keys" "4.15.2"
+
+"@typescript-eslint/[email protected]":
+ version "3.10.1"
+ resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-3.10.1.tgz#1d7463fa7c32d8a23ab508a803ca2fe26e758727"
+ integrity sha512-+3+FCUJIahE9q0lDi1WleYzjCwJs5hIsbugIgnbB+dSCYUxl8L6PwmsyOPFZde2hc1DlTo/xnkOgiTLSyAbHiQ==
+
+"@typescript-eslint/[email protected]":
+ version "4.15.2"
+ resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.15.2.tgz#04acf3a2dc8001a88985291744241e732ef22c60"
+ integrity sha512-r7lW7HFkAarfUylJ2tKndyO9njwSyoy6cpfDKWPX6/ctZA+QyaYscAHXVAfJqtnY6aaTwDYrOhp+ginlbc7HfQ==
+
+"@typescript-eslint/[email protected]":
+ version "3.10.1"
+ resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-3.10.1.tgz#fd0061cc38add4fad45136d654408569f365b853"
+ integrity sha512-QbcXOuq6WYvnB3XPsZpIwztBoquEYLXh2MtwVU+kO8jgYCiv4G5xrSP/1wg4tkvrEE+esZVquIPX/dxPlePk1w==
+ dependencies:
+ "@typescript-eslint/types" "3.10.1"
+ "@typescript-eslint/visitor-keys" "3.10.1"
+ debug "^4.1.1"
+ glob "^7.1.6"
+ is-glob "^4.0.1"
+ lodash "^4.17.15"
+ semver "^7.3.2"
+ tsutils "^3.17.1"
+
+"@typescript-eslint/[email protected]":
+ version "4.15.2"
+ resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.15.2.tgz#c2f7a1e94f3428d229d5ecff3ead6581ee9b62fa"
+ integrity sha512-cGR8C2g5SPtHTQvAymEODeqx90pJHadWsgTtx6GbnTWKqsg7yp6Eaya9nFzUd4KrKhxdYTTFBiYeTPQaz/l8bw==
+ dependencies:
+ "@typescript-eslint/types" "4.15.2"
+ "@typescript-eslint/visitor-keys" "4.15.2"
+ debug "^4.1.1"
+ globby "^11.0.1"
+ is-glob "^4.0.1"
+ semver "^7.3.2"
+ tsutils "^3.17.1"
+
+"@typescript-eslint/[email protected]":
+ version "3.10.1"
+ resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-3.10.1.tgz#cd4274773e3eb63b2e870ac602274487ecd1e931"
+ integrity sha512-9JgC82AaQeglebjZMgYR5wgmfUdUc+EitGUUMW8u2nDckaeimzW+VsoLV6FoimPv2id3VQzfjwBxEMVz08ameQ==
+ dependencies:
+ eslint-visitor-keys "^1.1.0"
+
+"@typescript-eslint/[email protected]":
+ version "4.15.2"
+ resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.15.2.tgz#3d1c7979ce75bf6acf9691109bd0d6b5706192b9"
+ integrity sha512-TME1VgSb7wTwgENN5KVj4Nqg25hP8DisXxNBojM4Nn31rYaNDIocNm5cmjOFfh42n7NVERxWrDFoETO/76ePyg==
+ dependencies:
+ "@typescript-eslint/types" "4.15.2"
+ eslint-visitor-keys "^2.0.0"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.9.0.tgz#bd850604b4042459a5a41cd7d338cbed695ed964"
+ integrity sha512-C6wW5L+b7ogSDVqymbkkvuW9kruN//YisMED04xzeBBqjHa2FYnmvOlS6Xj68xWQRgWvI9cIglsjFowH/RJyEA==
+ dependencies:
+ "@webassemblyjs/helper-module-context" "1.9.0"
+ "@webassemblyjs/helper-wasm-bytecode" "1.9.0"
+ "@webassemblyjs/wast-parser" "1.9.0"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.9.0.tgz#3c3d3b271bddfc84deb00f71344438311d52ffb4"
+ integrity sha512-TG5qcFsS8QB4g4MhrxK5TqfdNe7Ey/7YL/xN+36rRjl/BlGE/NcBvJcqsRgCP6Z92mRE+7N50pRIi8SmKUbcQA==
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.9.0.tgz#203f676e333b96c9da2eeab3ccef33c45928b6a2"
+ integrity sha512-NcMLjoFMXpsASZFxJ5h2HZRcEhDkvnNFOAKneP5RbKRzaWJN36NC4jqQHKwStIhGXu5mUWlUUk7ygdtrO8lbmw==
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.9.0.tgz#a1442d269c5feb23fcbc9ef759dac3547f29de00"
+ integrity sha512-qZol43oqhq6yBPx7YM3m9Bv7WMV9Eevj6kMi6InKOuZxhw+q9hOkvq5e/PpKSiLfyetpaBnogSbNCfBwyB00CA==
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.9.0.tgz#647f8892cd2043a82ac0c8c5e75c36f1d9159f27"
+ integrity sha512-ERCYdJBkD9Vu4vtjUYe8LZruWuNIToYq/ME22igL+2vj2dQ2OOujIZr3MEFvfEaqKoVqpsFKAGsRdBSBjrIvZA==
+ dependencies:
+ "@webassemblyjs/wast-printer" "1.9.0"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-fsm/-/helper-fsm-1.9.0.tgz#c05256b71244214671f4b08ec108ad63b70eddb8"
+ integrity sha512-OPRowhGbshCb5PxJ8LocpdX9Kl0uB4XsAjl6jH/dWKlk/mzsANvhwbiULsaiqT5GZGT9qinTICdj6PLuM5gslw==
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-module-context/-/helper-module-context-1.9.0.tgz#25d8884b76839871a08a6c6f806c3979ef712f07"
+ integrity sha512-MJCW8iGC08tMk2enck1aPW+BE5Cw8/7ph/VGZxwyvGbJwjktKkDK7vy7gAmMDx88D7mhDTCNKAW5tED+gZ0W8g==
+ dependencies:
+ "@webassemblyjs/ast" "1.9.0"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.9.0.tgz#4fed8beac9b8c14f8c58b70d124d549dd1fe5790"
+ integrity sha512-R7FStIzyNcd7xKxCZH5lE0Bqy+hGTwS3LJjuv1ZVxd9O7eHCedSdrId/hMOd20I+v8wDXEn+bjfKDLzTepoaUw==
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.9.0.tgz#5a4138d5a6292ba18b04c5ae49717e4167965346"
+ integrity sha512-XnMB8l3ek4tvrKUUku+IVaXNHz2YsJyOOmz+MMkZvh8h1uSJpSen6vYnw3IoQ7WwEuAhL8Efjms1ZWjqh2agvw==
+ dependencies:
+ "@webassemblyjs/ast" "1.9.0"
+ "@webassemblyjs/helper-buffer" "1.9.0"
+ "@webassemblyjs/helper-wasm-bytecode" "1.9.0"
+ "@webassemblyjs/wasm-gen" "1.9.0"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.9.0.tgz#15c7a0fbaae83fb26143bbacf6d6df1702ad39e4"
+ integrity sha512-dcX8JuYU/gvymzIHc9DgxTzUUTLexWwt8uCTWP3otys596io0L5aW02Gb1RjYpx2+0Jus1h4ZFqjla7umFniTg==
+ dependencies:
+ "@xtuc/ieee754" "^1.2.0"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.9.0.tgz#f19ca0b76a6dc55623a09cffa769e838fa1e1c95"
+ integrity sha512-ENVzM5VwV1ojs9jam6vPys97B/S65YQtv/aanqnU7D8aSoHFX8GyhGg0CMfyKNIHBuAVjy3tlzd5QMMINa7wpw==
+ dependencies:
+ "@xtuc/long" "4.2.2"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.9.0.tgz#04d33b636f78e6a6813227e82402f7637b6229ab"
+ integrity sha512-GZbQlWtopBTP0u7cHrEx+73yZKrQoBMpwkGEIqlacljhXCkVM1kMQge/Mf+csMJAjEdSwhOyLAS0AoR3AG5P8w==
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.9.0.tgz#3fe6d79d3f0f922183aa86002c42dd256cfee9cf"
+ integrity sha512-FgHzBm80uwz5M8WKnMTn6j/sVbqilPdQXTWraSjBwFXSYGirpkSWE2R9Qvz9tNiTKQvoKILpCuTjBKzOIm0nxw==
+ dependencies:
+ "@webassemblyjs/ast" "1.9.0"
+ "@webassemblyjs/helper-buffer" "1.9.0"
+ "@webassemblyjs/helper-wasm-bytecode" "1.9.0"
+ "@webassemblyjs/helper-wasm-section" "1.9.0"
+ "@webassemblyjs/wasm-gen" "1.9.0"
+ "@webassemblyjs/wasm-opt" "1.9.0"
+ "@webassemblyjs/wasm-parser" "1.9.0"
+ "@webassemblyjs/wast-printer" "1.9.0"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.9.0.tgz#50bc70ec68ded8e2763b01a1418bf43491a7a49c"
+ integrity sha512-cPE3o44YzOOHvlsb4+E9qSqjc9Qf9Na1OO/BHFy4OI91XDE14MjFN4lTMezzaIWdPqHnsTodGGNP+iRSYfGkjA==
+ dependencies:
+ "@webassemblyjs/ast" "1.9.0"
+ "@webassemblyjs/helper-wasm-bytecode" "1.9.0"
+ "@webassemblyjs/ieee754" "1.9.0"
+ "@webassemblyjs/leb128" "1.9.0"
+ "@webassemblyjs/utf8" "1.9.0"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.9.0.tgz#2211181e5b31326443cc8112eb9f0b9028721a61"
+ integrity sha512-Qkjgm6Anhm+OMbIL0iokO7meajkzQD71ioelnfPEj6r4eOFuqm4YC3VBPqXjFyyNwowzbMD+hizmprP/Fwkl2A==
+ dependencies:
+ "@webassemblyjs/ast" "1.9.0"
+ "@webassemblyjs/helper-buffer" "1.9.0"
+ "@webassemblyjs/wasm-gen" "1.9.0"
+ "@webassemblyjs/wasm-parser" "1.9.0"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.9.0.tgz#9d48e44826df4a6598294aa6c87469d642fff65e"
+ integrity sha512-9+wkMowR2AmdSWQzsPEjFU7njh8HTO5MqO8vjwEHuM+AMHioNqSBONRdr0NQQ3dVQrzp0s8lTcYqzUdb7YgELA==
+ dependencies:
+ "@webassemblyjs/ast" "1.9.0"
+ "@webassemblyjs/helper-api-error" "1.9.0"
+ "@webassemblyjs/helper-wasm-bytecode" "1.9.0"
+ "@webassemblyjs/ieee754" "1.9.0"
+ "@webassemblyjs/leb128" "1.9.0"
+ "@webassemblyjs/utf8" "1.9.0"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-parser/-/wast-parser-1.9.0.tgz#3031115d79ac5bd261556cecc3fa90a3ef451914"
+ integrity sha512-qsqSAP3QQ3LyZjNC/0jBJ/ToSxfYJ8kYyuiGvtn/8MK89VrNEfwj7BPQzJVHi0jGTRK2dGdJ5PRqhtjzoww+bw==
+ dependencies:
+ "@webassemblyjs/ast" "1.9.0"
+ "@webassemblyjs/floating-point-hex-parser" "1.9.0"
+ "@webassemblyjs/helper-api-error" "1.9.0"
+ "@webassemblyjs/helper-code-frame" "1.9.0"
+ "@webassemblyjs/helper-fsm" "1.9.0"
+ "@xtuc/long" "4.2.2"
+
+"@webassemblyjs/[email protected]":
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.9.0.tgz#4935d54c85fef637b00ce9f52377451d00d47899"
+ integrity sha512-2J0nE95rHXHyQ24cWjMKJ1tqB/ds8z/cyeOZxJhcb+rW+SQASVjuznUSmdz5GpVJTzU8JkhYut0D3siFDD6wsA==
+ dependencies:
+ "@webassemblyjs/ast" "1.9.0"
+ "@webassemblyjs/wast-parser" "1.9.0"
+ "@xtuc/long" "4.2.2"
+
+"@xtuc/ieee754@^1.2.0":
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790"
+ integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==
+
+"@xtuc/[email protected]":
+ version "4.2.2"
+ resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d"
+ integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==
+
+abab@^2.0.3:
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a"
+ integrity sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q==
+
+accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7:
+ version "1.3.7"
+ resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd"
+ integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==
+ dependencies:
+ mime-types "~2.1.24"
+ negotiator "0.6.2"
+
+acorn-globals@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45"
+ integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg==
+ dependencies:
+ acorn "^7.1.1"
+ acorn-walk "^7.1.1"
+
+acorn-jsx@^5.3.1:
+ version "5.3.1"
+ resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.1.tgz#fc8661e11b7ac1539c47dbfea2e72b3af34d267b"
+ integrity sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng==
+
+acorn-walk@^7.1.1:
+ version "7.2.0"
+ resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc"
+ integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==
+
+acorn@^6.4.1:
+ version "6.4.2"
+ resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6"
+ integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ==
+
+acorn@^7.1.0, acorn@^7.1.1, acorn@^7.4.0:
+ version "7.4.1"
+ resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"
+ integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==
+
[email protected], address@^1.0.1:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/address/-/address-1.1.2.tgz#bf1116c9c758c51b7a933d296b72c221ed9428b6"
+ integrity sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA==
+
[email protected]:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/adjust-sourcemap-loader/-/adjust-sourcemap-loader-3.0.0.tgz#5ae12fb5b7b1c585e80bbb5a63ec163a1a45e61e"
+ integrity sha512-YBrGyT2/uVQ/c6Rr+t6ZJXniY03YtHGMJQYal368burRGYKqhx9qGTWqcBU5s1CwYY9E/ri63RYyG1IacMZtqw==
+ dependencies:
+ loader-utils "^2.0.0"
+ regex-parser "^2.2.11"
+
+aggregate-error@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a"
+ integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==
+ dependencies:
+ clean-stack "^2.0.0"
+ indent-string "^4.0.0"
+
+ajv-errors@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d"
+ integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ==
+
+ajv-keywords@^3.1.0, ajv-keywords@^3.4.1, ajv-keywords@^3.5.2:
+ version "3.5.2"
+ resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d"
+ integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==
+
+ajv@^6.1.0, ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5:
+ version "6.12.6"
+ resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
+ integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==
+ dependencies:
+ fast-deep-equal "^3.1.1"
+ fast-json-stable-stringify "^2.0.0"
+ json-schema-traverse "^0.4.1"
+ uri-js "^4.2.2"
+
+ajv@^7.0.2:
+ version "7.1.1"
+ resolved "https://registry.yarnpkg.com/ajv/-/ajv-7.1.1.tgz#1e6b37a454021fa9941713f38b952fc1c8d32a84"
+ integrity sha512-ga/aqDYnUy/o7vbsRTFhhTsNeXiYb5JWDIcRIeZfwRNCefwjNTVYCGdGSUrEmiu3yDK3vFvNbgJxvrQW4JXrYQ==
+ dependencies:
+ fast-deep-equal "^3.1.1"
+ json-schema-traverse "^1.0.0"
+ require-from-string "^2.0.2"
+ uri-js "^4.2.2"
+
+ajv@^8.0.1:
+ version "8.6.2"
+ resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.6.2.tgz#2fb45e0e5fcbc0813326c1c3da535d1881bb0571"
+ integrity sha512-9807RlWAgT564wT+DjeyU5OFMPjmzxVobvDFmNAhY+5zD6A2ly3jDp6sgnfyDtlIQ+7H97oc/DGCzzfu9rjw9w==
+ dependencies:
+ fast-deep-equal "^3.1.1"
+ json-schema-traverse "^1.0.0"
+ require-from-string "^2.0.2"
+ uri-js "^4.2.2"
+
+alphanum-sort@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/alphanum-sort/-/alphanum-sort-1.0.2.tgz#97a1119649b211ad33691d9f9f486a8ec9fbe0a3"
+ integrity sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=
+
+ansi-colors@^3.0.0:
+ version "3.2.4"
+ resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf"
+ integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==
+
+ansi-colors@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348"
+ integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==
+
+ansi-escapes@^4.2.1, ansi-escapes@^4.3.1:
+ version "4.3.1"
+ resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.1.tgz#a5c47cc43181f1f38ffd7076837700d395522a61"
+ integrity sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==
+ dependencies:
+ type-fest "^0.11.0"
+
[email protected], ansi-html@^0.0.7:
+ version "0.0.7"
+ resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e"
+ integrity sha1-gTWEAhliqenm/QOflA0S9WynhZ4=
+
+ansi-regex@^2.0.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df"
+ integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8=
+
+ansi-regex@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997"
+ integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==
+
+ansi-regex@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75"
+ integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==
+
+ansi-styles@^3.2.0, ansi-styles@^3.2.1:
+ version "3.2.1"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
+ integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
+ dependencies:
+ color-convert "^1.9.0"
+
+ansi-styles@^4.0.0, ansi-styles@^4.1.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937"
+ integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==
+ dependencies:
+ color-convert "^2.0.1"
+
+anymatch@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb"
+ integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==
+ dependencies:
+ micromatch "^3.1.4"
+ normalize-path "^2.1.1"
+
+anymatch@^3.0.3, anymatch@~3.1.1:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142"
+ integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==
+ dependencies:
+ normalize-path "^3.0.0"
+ picomatch "^2.0.4"
+
+aproba@^1.1.1:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a"
+ integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==
+
+argparse@^1.0.7:
+ version "1.0.10"
+ resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
+ integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
+ dependencies:
+ sprintf-js "~1.0.2"
+
+aria-hidden@^1.1.1:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/aria-hidden/-/aria-hidden-1.1.3.tgz#bb48de18dc84787a3c6eee113709c473c64ec254"
+ integrity sha512-RhVWFtKH5BiGMycI72q2RAFMLQi8JP9bLuQXgR5a8Znp7P5KOIADSJeyfI8PCVxLEp067B2HbP5JIiI/PXIZeA==
+ dependencies:
+ tslib "^1.0.0"
+
+aria-query@^4.2.2:
+ version "4.2.2"
+ resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b"
+ integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==
+ dependencies:
+ "@babel/runtime" "^7.10.2"
+ "@babel/runtime-corejs3" "^7.10.2"
+
+arity-n@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/arity-n/-/arity-n-1.0.4.tgz#d9e76b11733e08569c0847ae7b39b2860b30b745"
+ integrity sha1-2edrEXM+CFacCEeuezmyhgswt0U=
+
+arr-diff@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520"
+ integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=
+
+arr-flatten@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1"
+ integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==
+
+arr-union@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4"
+ integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=
+
[email protected]:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
+ integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=
+
+array-flatten@^2.1.0:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099"
+ integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==
+
+array-includes@^3.1.1, array-includes@^3.1.2, array-includes@^3.1.3:
+ version "3.1.3"
+ resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.3.tgz#c7f619b382ad2afaf5326cddfdc0afc61af7690a"
+ integrity sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.18.0-next.2"
+ get-intrinsic "^1.1.1"
+ is-string "^1.0.5"
+
+array-union@^1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39"
+ integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=
+ dependencies:
+ array-uniq "^1.0.1"
+
+array-union@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d"
+ integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==
+
+array-uniq@^1.0.1:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6"
+ integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=
+
+array-unique@^0.3.2:
+ version "0.3.2"
+ resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428"
+ integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=
+
+array.prototype.flat@^1.2.3, array.prototype.flat@^1.2.4:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz#6ef638b43312bd401b4c6199fdec7e2dc9e9a123"
+ integrity sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg==
+ dependencies:
+ call-bind "^1.0.0"
+ define-properties "^1.1.3"
+ es-abstract "^1.18.0-next.1"
+
+array.prototype.flatmap@^1.2.3, array.prototype.flatmap@^1.2.4:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.2.4.tgz#94cfd47cc1556ec0747d97f7c7738c58122004c9"
+ integrity sha512-r9Z0zYoxqHz60vvQbWEdXIEtCwHF0yxaWfno9qzXeNHvfyl3BZqygmGzb84dsubyaXLH4husF+NFgMSdpZhk2Q==
+ dependencies:
+ call-bind "^1.0.0"
+ define-properties "^1.1.3"
+ es-abstract "^1.18.0-next.1"
+ function-bind "^1.1.1"
+
+arrify@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa"
+ integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==
+
+asap@~2.0.6:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46"
+ integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=
+
+asn1.js@^5.2.0:
+ version "5.4.1"
+ resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-5.4.1.tgz#11a980b84ebb91781ce35b0fdc2ee294e3783f07"
+ integrity sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==
+ dependencies:
+ bn.js "^4.0.0"
+ inherits "^2.0.1"
+ minimalistic-assert "^1.0.0"
+ safer-buffer "^2.1.0"
+
+asn1@~0.2.3:
+ version "0.2.4"
+ resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136"
+ integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==
+ dependencies:
+ safer-buffer "~2.1.0"
+
[email protected], assert-plus@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525"
+ integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=
+
+assert@^1.1.1:
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb"
+ integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA==
+ dependencies:
+ object-assign "^4.1.1"
+ util "0.10.3"
+
+assign-symbols@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367"
+ integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=
+
+ast-types-flow@^0.0.7:
+ version "0.0.7"
+ resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad"
+ integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0=
+
+astral-regex@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31"
+ integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==
+
+async-each@^1.0.1:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf"
+ integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ==
+
+async-limiter@~1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd"
+ integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==
+
+async@^2.6.2:
+ version "2.6.3"
+ resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff"
+ integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==
+ dependencies:
+ lodash "^4.17.14"
+
+asynckit@^0.4.0:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
+ integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=
+
+at-least-node@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2"
+ integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==
+
+atob@^2.1.2:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9"
+ integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==
+
+autoprefixer@^9.6.1:
+ version "9.8.6"
+ resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.8.6.tgz#3b73594ca1bf9266320c5acf1588d74dea74210f"
+ integrity sha512-XrvP4VVHdRBCdX1S3WXVD8+RyG9qeb1D5Sn1DeLiG2xfSpzellk5k54xbUERJ3M5DggQxes39UGOTP8CFrEGbg==
+ dependencies:
+ browserslist "^4.12.0"
+ caniuse-lite "^1.0.30001109"
+ colorette "^1.2.1"
+ normalize-range "^0.1.2"
+ num2fraction "^1.2.2"
+ postcss "^7.0.32"
+ postcss-value-parser "^4.1.0"
+
+aws-sign2@~0.7.0:
+ version "0.7.0"
+ resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8"
+ integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=
+
+aws4@^1.8.0:
+ version "1.11.0"
+ resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59"
+ integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==
+
+axe-core@^4.0.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.1.2.tgz#7cf783331320098bfbef620df3b3c770147bc224"
+ integrity sha512-V+Nq70NxKhYt89ArVcaNL9FDryB3vQOd+BFXZIfO3RP6rwtj+2yqqqdHEkacutglPaZLkJeuXKCjCJDMGPtPqg==
+
+axobject-query@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be"
+ integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA==
+
+babel-eslint@^10.1.0:
+ version "10.1.0"
+ resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-10.1.0.tgz#6968e568a910b78fb3779cdd8b6ac2f479943232"
+ integrity sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg==
+ dependencies:
+ "@babel/code-frame" "^7.0.0"
+ "@babel/parser" "^7.7.0"
+ "@babel/traverse" "^7.7.0"
+ "@babel/types" "^7.7.0"
+ eslint-visitor-keys "^1.0.0"
+ resolve "^1.12.0"
+
+babel-extract-comments@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/babel-extract-comments/-/babel-extract-comments-1.0.0.tgz#0a2aedf81417ed391b85e18b4614e693a0351a21"
+ integrity sha512-qWWzi4TlddohA91bFwgt6zO/J0X+io7Qp184Fw0m2JYRSTZnJbFR8+07KmzudHCZgOiKRCrjhylwv9Xd8gfhVQ==
+ dependencies:
+ babylon "^6.18.0"
+
+babel-jest@^26.6.0, babel-jest@^26.6.3:
+ version "26.6.3"
+ resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-26.6.3.tgz#d87d25cb0037577a0c89f82e5755c5d293c01056"
+ integrity sha512-pl4Q+GAVOHwvjrck6jKjvmGhnO3jHX/xuB9d27f+EJZ/6k+6nMuPjorrYp7s++bKKdANwzElBWnLWaObvTnaZA==
+ dependencies:
+ "@jest/transform" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ "@types/babel__core" "^7.1.7"
+ babel-plugin-istanbul "^6.0.0"
+ babel-preset-jest "^26.6.2"
+ chalk "^4.0.0"
+ graceful-fs "^4.2.4"
+ slash "^3.0.0"
+
[email protected]:
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.1.0.tgz#c611d5112bd5209abe8b9fa84c3e4da25275f1c3"
+ integrity sha512-7q7nC1tYOrqvUrN3LQK4GwSk/TQorZSOlO9C+RZDZpODgyN4ZlCqE5q9cDsyWOliN+aU9B4JX01xK9eJXowJLw==
+ dependencies:
+ find-cache-dir "^2.1.0"
+ loader-utils "^1.4.0"
+ mkdirp "^0.5.3"
+ pify "^4.0.1"
+ schema-utils "^2.6.5"
+
+babel-plugin-dynamic-import-node@^2.3.3:
+ version "2.3.3"
+ resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3"
+ integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==
+ dependencies:
+ object.assign "^4.1.0"
+
+babel-plugin-istanbul@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.0.0.tgz#e159ccdc9af95e0b570c75b4573b7c34d671d765"
+ integrity sha512-AF55rZXpe7trmEylbaE1Gv54wn6rwU03aptvRoVIGP8YykoSxqdVLV1TfwflBCE/QtHmqtP8SWlTENqbK8GCSQ==
+ dependencies:
+ "@babel/helper-plugin-utils" "^7.0.0"
+ "@istanbuljs/load-nyc-config" "^1.0.0"
+ "@istanbuljs/schema" "^0.1.2"
+ istanbul-lib-instrument "^4.0.0"
+ test-exclude "^6.0.0"
+
+babel-plugin-jest-hoist@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-26.6.2.tgz#8185bd030348d254c6d7dd974355e6a28b21e62d"
+ integrity sha512-PO9t0697lNTmcEHH69mdtYiOIkkOlj9fySqfO3K1eCcdISevLAE0xY59VLLUj0SoiPiTX/JU2CYFpILydUa5Lw==
+ dependencies:
+ "@babel/template" "^7.3.3"
+ "@babel/types" "^7.3.3"
+ "@types/babel__core" "^7.0.0"
+ "@types/babel__traverse" "^7.0.6"
+
[email protected], babel-plugin-macros@^2.6.1:
+ version "2.8.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-2.8.0.tgz#0f958a7cc6556b1e65344465d99111a1e5e10138"
+ integrity sha512-SEP5kJpfGYqYKpBrj5XU3ahw5p5GOHJ0U5ssOSQ/WBVdwkD2Dzlce95exQTs3jOVWPPKLBN2rlEWkCK7dSmLvg==
+ dependencies:
+ "@babel/runtime" "^7.7.2"
+ cosmiconfig "^6.0.0"
+ resolve "^1.12.0"
+
+babel-plugin-named-asset-import@^0.3.7:
+ version "0.3.7"
+ resolved "https://registry.yarnpkg.com/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.7.tgz#156cd55d3f1228a5765774340937afc8398067dd"
+ integrity sha512-squySRkf+6JGnvjoUtDEjSREJEBirnXi9NqP6rjSYsylxQxqBTz+pkmf395i9E2zsvmYUaI40BHo6SqZUdydlw==
+
+babel-plugin-syntax-object-rest-spread@^6.8.0:
+ version "6.13.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz#fd6536f2bce13836ffa3a5458c4903a597bb3bf5"
+ integrity sha1-/WU28rzhODb/o6VFjEkDpZe7O/U=
+
+babel-plugin-transform-object-rest-spread@^6.26.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.26.0.tgz#0f36692d50fef6b7e2d4b3ac1478137a963b7b06"
+ integrity sha1-DzZpLVD+9rfi1LOsFHgTepY7ewY=
+ dependencies:
+ babel-plugin-syntax-object-rest-spread "^6.8.0"
+ babel-runtime "^6.26.0"
+
[email protected]:
+ version "0.4.24"
+ resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a"
+ integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA==
+
+babel-preset-current-node-syntax@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b"
+ integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==
+ dependencies:
+ "@babel/plugin-syntax-async-generators" "^7.8.4"
+ "@babel/plugin-syntax-bigint" "^7.8.3"
+ "@babel/plugin-syntax-class-properties" "^7.8.3"
+ "@babel/plugin-syntax-import-meta" "^7.8.3"
+ "@babel/plugin-syntax-json-strings" "^7.8.3"
+ "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3"
+ "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3"
+ "@babel/plugin-syntax-numeric-separator" "^7.8.3"
+ "@babel/plugin-syntax-object-rest-spread" "^7.8.3"
+ "@babel/plugin-syntax-optional-catch-binding" "^7.8.3"
+ "@babel/plugin-syntax-optional-chaining" "^7.8.3"
+ "@babel/plugin-syntax-top-level-await" "^7.8.3"
+
+babel-preset-jest@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-26.6.2.tgz#747872b1171df032252426586881d62d31798fee"
+ integrity sha512-YvdtlVm9t3k777c5NPQIv6cxFFFapys25HiUmuSgHwIZhfifweR5c5Sf5nwE3MAbfu327CYSvps8Yx6ANLyleQ==
+ dependencies:
+ babel-plugin-jest-hoist "^26.6.2"
+ babel-preset-current-node-syntax "^1.0.0"
+
+babel-preset-react-app@^10.0.0:
+ version "10.0.0"
+ resolved "https://registry.yarnpkg.com/babel-preset-react-app/-/babel-preset-react-app-10.0.0.tgz#689b60edc705f8a70ce87f47ab0e560a317d7045"
+ integrity sha512-itL2z8v16khpuKutx5IH8UdCdSTuzrOhRFTEdIhveZ2i1iBKDrVE0ATa4sFVy+02GLucZNVBWtoarXBy0Msdpg==
+ dependencies:
+ "@babel/core" "7.12.3"
+ "@babel/plugin-proposal-class-properties" "7.12.1"
+ "@babel/plugin-proposal-decorators" "7.12.1"
+ "@babel/plugin-proposal-nullish-coalescing-operator" "7.12.1"
+ "@babel/plugin-proposal-numeric-separator" "7.12.1"
+ "@babel/plugin-proposal-optional-chaining" "7.12.1"
+ "@babel/plugin-transform-flow-strip-types" "7.12.1"
+ "@babel/plugin-transform-react-display-name" "7.12.1"
+ "@babel/plugin-transform-runtime" "7.12.1"
+ "@babel/preset-env" "7.12.1"
+ "@babel/preset-react" "7.12.1"
+ "@babel/preset-typescript" "7.12.1"
+ "@babel/runtime" "7.12.1"
+ babel-plugin-macros "2.8.0"
+ babel-plugin-transform-react-remove-prop-types "0.4.24"
+
+babel-runtime@^6.26.0:
+ version "6.26.0"
+ resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe"
+ integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4=
+ dependencies:
+ core-js "^2.4.0"
+ regenerator-runtime "^0.11.0"
+
+babylon@^6.18.0:
+ version "6.18.0"
+ resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3"
+ integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==
+
+balanced-match@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
+ integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c=
+
+base64-js@^1.0.2:
+ version "1.5.1"
+ resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
+ integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
+
+base@^0.11.1:
+ version "0.11.2"
+ resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f"
+ integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==
+ dependencies:
+ cache-base "^1.0.1"
+ class-utils "^0.3.5"
+ component-emitter "^1.2.1"
+ define-property "^1.0.0"
+ isobject "^3.0.1"
+ mixin-deep "^1.2.0"
+ pascalcase "^0.1.1"
+
[email protected]:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16"
+ integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY=
+
+bcrypt-pbkdf@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e"
+ integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=
+ dependencies:
+ tweetnacl "^0.14.3"
+
+bfj@^7.0.2:
+ version "7.0.2"
+ resolved "https://registry.yarnpkg.com/bfj/-/bfj-7.0.2.tgz#1988ce76f3add9ac2913fd8ba47aad9e651bfbb2"
+ integrity sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw==
+ dependencies:
+ bluebird "^3.5.5"
+ check-types "^11.1.1"
+ hoopy "^0.1.4"
+ tryer "^1.0.1"
+
+big.js@^5.2.2:
+ version "5.2.2"
+ resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328"
+ integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==
+
+binary-extensions@^1.0.0:
+ version "1.13.1"
+ resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65"
+ integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==
+
+binary-extensions@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
+ integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==
+
+bindings@^1.5.0:
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df"
+ integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==
+ dependencies:
+ file-uri-to-path "1.0.0"
+
+bluebird@^3.5.5:
+ version "3.7.2"
+ resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f"
+ integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==
+
+bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.9:
+ version "4.11.9"
+ resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.9.tgz#26d556829458f9d1e81fc48952493d0ba3507828"
+ integrity sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==
+
+bn.js@^5.0.0, bn.js@^5.1.1:
+ version "5.1.3"
+ resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.1.3.tgz#beca005408f642ebebea80b042b4d18d2ac0ee6b"
+ integrity sha512-GkTiFpjFtUzU9CbMeJ5iazkCzGL3jrhzerzZIuqLABjbwRaFt33I9tUdSNryIptM+RxDet6OKm2WnLXzW51KsQ==
+
[email protected]:
+ version "1.19.0"
+ resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a"
+ integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==
+ dependencies:
+ bytes "3.1.0"
+ content-type "~1.0.4"
+ debug "2.6.9"
+ depd "~1.1.2"
+ http-errors "1.7.2"
+ iconv-lite "0.4.24"
+ on-finished "~2.3.0"
+ qs "6.7.0"
+ raw-body "2.4.0"
+ type-is "~1.6.17"
+
+bonjour@^3.5.0:
+ version "3.5.0"
+ resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5"
+ integrity sha1-jokKGD2O6aI5OzhExpGkK897yfU=
+ dependencies:
+ array-flatten "^2.1.0"
+ deep-equal "^1.0.1"
+ dns-equal "^1.0.0"
+ dns-txt "^2.0.2"
+ multicast-dns "^6.0.1"
+ multicast-dns-service-types "^1.1.0"
+
+boolbase@^1.0.0, boolbase@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e"
+ integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24=
+
+brace-expansion@^1.1.7:
+ version "1.1.11"
+ resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
+ integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
+ dependencies:
+ balanced-match "^1.0.0"
+ concat-map "0.0.1"
+
+braces@^2.3.1, braces@^2.3.2:
+ version "2.3.2"
+ resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729"
+ integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==
+ dependencies:
+ arr-flatten "^1.1.0"
+ array-unique "^0.3.2"
+ extend-shallow "^2.0.1"
+ fill-range "^4.0.0"
+ isobject "^3.0.1"
+ repeat-element "^1.1.2"
+ snapdragon "^0.8.1"
+ snapdragon-node "^2.0.1"
+ split-string "^3.0.2"
+ to-regex "^3.0.1"
+
+braces@^3.0.1, braces@~3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
+ integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
+ dependencies:
+ fill-range "^7.0.1"
+
+brorand@^1.0.1, brorand@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f"
+ integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8=
+
+browser-process-hrtime@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626"
+ integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==
+
+browserify-aes@^1.0.0, browserify-aes@^1.0.4:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48"
+ integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==
+ dependencies:
+ buffer-xor "^1.0.3"
+ cipher-base "^1.0.0"
+ create-hash "^1.1.0"
+ evp_bytestokey "^1.0.3"
+ inherits "^2.0.1"
+ safe-buffer "^5.0.1"
+
+browserify-cipher@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0"
+ integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==
+ dependencies:
+ browserify-aes "^1.0.4"
+ browserify-des "^1.0.0"
+ evp_bytestokey "^1.0.0"
+
+browserify-des@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c"
+ integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==
+ dependencies:
+ cipher-base "^1.0.1"
+ des.js "^1.0.0"
+ inherits "^2.0.1"
+ safe-buffer "^5.1.2"
+
+browserify-rsa@^4.0.0, browserify-rsa@^4.0.1:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.1.0.tgz#b2fd06b5b75ae297f7ce2dc651f918f5be158c8d"
+ integrity sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==
+ dependencies:
+ bn.js "^5.0.0"
+ randombytes "^2.0.1"
+
+browserify-sign@^4.0.0:
+ version "4.2.1"
+ resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.2.1.tgz#eaf4add46dd54be3bb3b36c0cf15abbeba7956c3"
+ integrity sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg==
+ dependencies:
+ bn.js "^5.1.1"
+ browserify-rsa "^4.0.1"
+ create-hash "^1.2.0"
+ create-hmac "^1.1.7"
+ elliptic "^6.5.3"
+ inherits "^2.0.4"
+ parse-asn1 "^5.1.5"
+ readable-stream "^3.6.0"
+ safe-buffer "^5.2.0"
+
+browserify-zlib@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f"
+ integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==
+ dependencies:
+ pako "~1.0.5"
+
[email protected]:
+ version "4.14.2"
+ resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.14.2.tgz#1b3cec458a1ba87588cc5e9be62f19b6d48813ce"
+ integrity sha512-HI4lPveGKUR0x2StIz+2FXfDk9SfVMrxn6PLh1JeGUwcuoDkdKZebWiyLRJ68iIPDpMI4JLVDf7S7XzslgWOhw==
+ dependencies:
+ caniuse-lite "^1.0.30001125"
+ electron-to-chromium "^1.3.564"
+ escalade "^3.0.2"
+ node-releases "^1.1.61"
+
+browserslist@^4.0.0, browserslist@^4.12.0, browserslist@^4.14.5, browserslist@^4.16.3, browserslist@^4.6.2, browserslist@^4.6.4:
+ version "4.16.3"
+ resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.3.tgz#340aa46940d7db878748567c5dea24a48ddf3717"
+ integrity sha512-vIyhWmIkULaq04Gt93txdh+j02yX/JzlyhLYbV3YQCn/zvES3JnY7TifHHvvr1w5hTDluNKMkV05cs4vy8Q7sw==
+ dependencies:
+ caniuse-lite "^1.0.30001181"
+ colorette "^1.2.1"
+ electron-to-chromium "^1.3.649"
+ escalade "^3.1.1"
+ node-releases "^1.1.70"
+
[email protected]:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05"
+ integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==
+ dependencies:
+ node-int64 "^0.4.0"
+
+buffer-from@^1.0.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
+ integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
+
+buffer-indexof@^1.0.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c"
+ integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g==
+
+buffer-xor@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9"
+ integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=
+
+buffer@^4.3.0:
+ version "4.9.2"
+ resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8"
+ integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==
+ dependencies:
+ base64-js "^1.0.2"
+ ieee754 "^1.1.4"
+ isarray "^1.0.0"
+
+builtin-modules@^3.1.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887"
+ integrity sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==
+
+builtin-status-codes@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8"
+ integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug=
+
[email protected]:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048"
+ integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=
+
[email protected]:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6"
+ integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==
+
+cacache@^12.0.2:
+ version "12.0.4"
+ resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.4.tgz#668bcbd105aeb5f1d92fe25570ec9525c8faa40c"
+ integrity sha512-a0tMB40oefvuInr4Cwb3GerbL9xTj1D5yg0T5xrjGCGyfvbxseIXX7BAO/u/hIXdafzOI5JC3wDwHyf24buOAQ==
+ dependencies:
+ bluebird "^3.5.5"
+ chownr "^1.1.1"
+ figgy-pudding "^3.5.1"
+ glob "^7.1.4"
+ graceful-fs "^4.1.15"
+ infer-owner "^1.0.3"
+ lru-cache "^5.1.1"
+ mississippi "^3.0.0"
+ mkdirp "^0.5.1"
+ move-concurrently "^1.0.1"
+ promise-inflight "^1.0.1"
+ rimraf "^2.6.3"
+ ssri "^6.0.1"
+ unique-filename "^1.1.1"
+ y18n "^4.0.0"
+
+cacache@^15.0.5:
+ version "15.0.5"
+ resolved "https://registry.yarnpkg.com/cacache/-/cacache-15.0.5.tgz#69162833da29170d6732334643c60e005f5f17d0"
+ integrity sha512-lloiL22n7sOjEEXdL8NAjTgv9a1u43xICE9/203qonkZUCj5X1UEWIdf2/Y0d6QcCtMzbKQyhrcDbdvlZTs/+A==
+ dependencies:
+ "@npmcli/move-file" "^1.0.1"
+ chownr "^2.0.0"
+ fs-minipass "^2.0.0"
+ glob "^7.1.4"
+ infer-owner "^1.0.4"
+ lru-cache "^6.0.0"
+ minipass "^3.1.1"
+ minipass-collect "^1.0.2"
+ minipass-flush "^1.0.5"
+ minipass-pipeline "^1.2.2"
+ mkdirp "^1.0.3"
+ p-map "^4.0.0"
+ promise-inflight "^1.0.1"
+ rimraf "^3.0.2"
+ ssri "^8.0.0"
+ tar "^6.0.2"
+ unique-filename "^1.1.1"
+
+cache-base@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2"
+ integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==
+ dependencies:
+ collection-visit "^1.0.0"
+ component-emitter "^1.2.1"
+ get-value "^2.0.6"
+ has-value "^1.0.0"
+ isobject "^3.0.1"
+ set-value "^2.0.0"
+ to-object-path "^0.3.0"
+ union-value "^1.0.0"
+ unset-value "^1.0.0"
+
+call-bind@^1.0.0, call-bind@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c"
+ integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==
+ dependencies:
+ function-bind "^1.1.1"
+ get-intrinsic "^1.0.2"
+
+caller-callsite@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134"
+ integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ=
+ dependencies:
+ callsites "^2.0.0"
+
+caller-path@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4"
+ integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ=
+ dependencies:
+ caller-callsite "^2.0.0"
+
+callsites@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50"
+ integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA=
+
+callsites@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
+ integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
+
+camel-case@^4.1.1:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a"
+ integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==
+ dependencies:
+ pascal-case "^3.1.2"
+ tslib "^2.0.3"
+
[email protected], camelcase@^5.0.0, camelcase@^5.3.1:
+ version "5.3.1"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
+ integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
+
+camelcase@^6.0.0, camelcase@^6.1.0, camelcase@^6.2.0:
+ version "6.2.0"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.2.0.tgz#924af881c9d525ac9d87f40d964e5cea982a1809"
+ integrity sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==
+
+caniuse-api@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0"
+ integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==
+ dependencies:
+ browserslist "^4.0.0"
+ caniuse-lite "^1.0.0"
+ lodash.memoize "^4.1.2"
+ lodash.uniq "^4.5.0"
+
+caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000981, caniuse-lite@^1.0.30001109, caniuse-lite@^1.0.30001125, caniuse-lite@^1.0.30001181:
+ version "1.0.30001191"
+ resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001191.tgz#bacb432b6701f690c8c5f7c680166b9a9f0843d9"
+ integrity sha512-xJJqzyd+7GCJXkcoBiQ1GuxEiOBCLQ0aVW9HMekifZsAVGdj5eJ4mFB9fEhSHipq9IOk/QXFJUiIr9lZT+EsGw==
+
+capture-exit@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/capture-exit/-/capture-exit-2.0.0.tgz#fb953bfaebeb781f62898239dabb426d08a509a4"
+ integrity sha512-PiT/hQmTonHhl/HFGN+Lx3JJUznrVYJ3+AQsnthneZbvW7x+f08Tk7yLJTLEOUvBTbduLeeBkxEaYXUOUrRq6g==
+ dependencies:
+ rsvp "^4.8.4"
+
[email protected]:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.3.0.tgz#23ac613cc9a856e4f88ff8bb73bbb5e989825cf7"
+ integrity sha512-/4YgnZS8y1UXXmC02xD5rRrBEu6T5ub+mQHLNRj0fzTRbgdBYhsNo2V5EqwgqrExjxsjtF/OpAKAMkKsxbD5XQ==
+
+caseless@~0.12.0:
+ version "0.12.0"
+ resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc"
+ integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=
+
[email protected], chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2:
+ version "2.4.2"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
+ integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
+ dependencies:
+ ansi-styles "^3.2.1"
+ escape-string-regexp "^1.0.5"
+ supports-color "^5.3.0"
+
+chalk@^4.0.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a"
+ integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==
+ dependencies:
+ ansi-styles "^4.1.0"
+ supports-color "^7.1.0"
+
+char-regex@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf"
+ integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==
+
+check-types@^11.1.1:
+ version "11.1.2"
+ resolved "https://registry.yarnpkg.com/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f"
+ integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ==
+
+chokidar@^2.1.8:
+ version "2.1.8"
+ resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917"
+ integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==
+ dependencies:
+ anymatch "^2.0.0"
+ async-each "^1.0.1"
+ braces "^2.3.2"
+ glob-parent "^3.1.0"
+ inherits "^2.0.3"
+ is-binary-path "^1.0.0"
+ is-glob "^4.0.0"
+ normalize-path "^3.0.0"
+ path-is-absolute "^1.0.0"
+ readdirp "^2.2.1"
+ upath "^1.1.1"
+ optionalDependencies:
+ fsevents "^1.2.7"
+
+chokidar@^3.4.1:
+ version "3.5.1"
+ resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.1.tgz#ee9ce7bbebd2b79f49f304799d5468e31e14e68a"
+ integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==
+ dependencies:
+ anymatch "~3.1.1"
+ braces "~3.0.2"
+ glob-parent "~5.1.0"
+ is-binary-path "~2.1.0"
+ is-glob "~4.0.1"
+ normalize-path "~3.0.0"
+ readdirp "~3.5.0"
+ optionalDependencies:
+ fsevents "~2.3.1"
+
+chownr@^1.1.1:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
+ integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==
+
+chownr@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece"
+ integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==
+
+chrome-trace-event@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4"
+ integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ==
+ dependencies:
+ tslib "^1.9.0"
+
+ci-info@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46"
+ integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==
+
+cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de"
+ integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==
+ dependencies:
+ inherits "^2.0.1"
+ safe-buffer "^5.0.1"
+
+cjs-module-lexer@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-0.6.0.tgz#4186fcca0eae175970aee870b9fe2d6cf8d5655f"
+ integrity sha512-uc2Vix1frTfnuzxxu1Hp4ktSvM3QaI4oXl4ZUqL1wjTu/BGki9TrCWoqLTg/drR1KwAEarXuRFCG2Svr1GxPFw==
+
+class-utils@^0.3.5:
+ version "0.3.6"
+ resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463"
+ integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==
+ dependencies:
+ arr-union "^3.1.0"
+ define-property "^0.2.5"
+ isobject "^3.0.0"
+ static-extend "^0.1.1"
+
+classnames@^2.2.5:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.1.tgz#dfcfa3891e306ec1dad105d0e88f4417b8535e8e"
+ integrity sha512-OlQdbZ7gLfGarSqxesMesDa5uz7KFbID8Kpq/SxIoNGDqY8lSYs0D+hhtBXhcdB3rcbXArFr7vlHheLk1voeNA==
+
+clean-css@^4.2.3:
+ version "4.2.3"
+ resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.3.tgz#507b5de7d97b48ee53d84adb0160ff6216380f78"
+ integrity sha512-VcMWDN54ZN/DS+g58HYL5/n4Zrqe8vHJpGA8KdgUXFU4fuP/aHNw8eld9SyEIyabIMJX/0RaY/fplOo5hYLSFA==
+ dependencies:
+ source-map "~0.6.0"
+
+clean-stack@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b"
+ integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==
+
+cliui@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5"
+ integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==
+ dependencies:
+ string-width "^3.1.0"
+ strip-ansi "^5.2.0"
+ wrap-ansi "^5.1.0"
+
+cliui@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
+ integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
+ dependencies:
+ string-width "^4.2.0"
+ strip-ansi "^6.0.0"
+ wrap-ansi "^6.2.0"
+
+cliui@^7.0.2:
+ version "7.0.4"
+ resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f"
+ integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==
+ dependencies:
+ string-width "^4.2.0"
+ strip-ansi "^6.0.0"
+ wrap-ansi "^7.0.0"
+
+co@^4.6.0:
+ version "4.6.0"
+ resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184"
+ integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=
+
+coa@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3"
+ integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA==
+ dependencies:
+ "@types/q" "^1.5.1"
+ chalk "^2.4.1"
+ q "^1.1.2"
+
+collect-v8-coverage@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59"
+ integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==
+
+collection-visit@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0"
+ integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=
+ dependencies:
+ map-visit "^1.0.0"
+ object-visit "^1.0.0"
+
+color-convert@^1.9.0, color-convert@^1.9.1:
+ version "1.9.3"
+ resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
+ integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
+ dependencies:
+ color-name "1.1.3"
+
+color-convert@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
+ integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==
+ dependencies:
+ color-name "~1.1.4"
+
[email protected]:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
+ integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=
+
+color-name@^1.0.0, color-name@~1.1.4:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
+ integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
+
+color-string@^1.5.4:
+ version "1.5.4"
+ resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.4.tgz#dd51cd25cfee953d138fe4002372cc3d0e504cb6"
+ integrity sha512-57yF5yt8Xa3czSEW1jfQDE79Idk0+AkN/4KWad6tbdxUmAs3MvjxlWSWD4deYytcRfoZ9nhKyFl1kj5tBvidbw==
+ dependencies:
+ color-name "^1.0.0"
+ simple-swizzle "^0.2.2"
+
+color@^3.0.0:
+ version "3.1.3"
+ resolved "https://registry.yarnpkg.com/color/-/color-3.1.3.tgz#ca67fb4e7b97d611dcde39eceed422067d91596e"
+ integrity sha512-xgXAcTHa2HeFCGLE9Xs/R82hujGtu9Jd9x4NW3T34+OMs7VoPsjwzRczKHvTAHeJwWFwX5j15+MgAppE8ztObQ==
+ dependencies:
+ color-convert "^1.9.1"
+ color-string "^1.5.4"
+
+colorette@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.1.tgz#4d0b921325c14faf92633086a536db6e89564b1b"
+ integrity sha512-puCDz0CzydiSYOrnXpz/PKd69zRrribezjtE9yd4zvytoRc8+RY/KJPvtPFKZS3E3wP6neGyMe0vOTlHO5L3Pw==
+
+combined-stream@^1.0.6, combined-stream@~1.0.6:
+ version "1.0.8"
+ resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
+ integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==
+ dependencies:
+ delayed-stream "~1.0.0"
+
+commander@^2.20.0:
+ version "2.20.3"
+ resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33"
+ integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==
+
+commander@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068"
+ integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==
+
+common-tags@^1.8.0:
+ version "1.8.0"
+ resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.0.tgz#8e3153e542d4a39e9b10554434afaaf98956a937"
+ integrity sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==
+
+commondir@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b"
+ integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=
+
+component-emitter@^1.2.1:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0"
+ integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==
+
[email protected]:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/compose-function/-/compose-function-3.0.3.tgz#9ed675f13cc54501d30950a486ff6a7ba3ab185f"
+ integrity sha1-ntZ18TzFRQHTCVCkhv9qe6OrGF8=
+ dependencies:
+ arity-n "^1.0.4"
+
+compressible@~2.0.16:
+ version "2.0.18"
+ resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba"
+ integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==
+ dependencies:
+ mime-db ">= 1.43.0 < 2"
+
+compression@^1.7.4:
+ version "1.7.4"
+ resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f"
+ integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==
+ dependencies:
+ accepts "~1.3.5"
+ bytes "3.0.0"
+ compressible "~2.0.16"
+ debug "2.6.9"
+ on-headers "~1.0.2"
+ safe-buffer "5.1.2"
+ vary "~1.1.2"
+
[email protected]:
+ version "1.0.14"
+ resolved "https://registry.yarnpkg.com/compute-scroll-into-view/-/compute-scroll-into-view-1.0.14.tgz#80e3ebb25d6aa89f42e533956cb4b16a04cfe759"
+ integrity sha512-mKDjINe3tc6hGelUMNDzuhorIUZ7kS7BwyY0r2wQd2HOH2tRuJykiC06iSEX8y1TuhNzvz4GcJnK16mM2J1NMQ==
+
[email protected]:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
+ integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
+
+concat-stream@^1.5.0:
+ version "1.6.2"
+ resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34"
+ integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==
+ dependencies:
+ buffer-from "^1.0.0"
+ inherits "^2.0.3"
+ readable-stream "^2.2.2"
+ typedarray "^0.0.6"
+
+confusing-browser-globals@^1.0.10:
+ version "1.0.10"
+ resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.10.tgz#30d1e7f3d1b882b25ec4933d1d1adac353d20a59"
+ integrity sha512-gNld/3lySHwuhaVluJUKLePYirM3QNCKzVxqAdhJII9/WXKVX5PURzMVJspS1jTslSqjeuG4KMVTSouit5YPHA==
+
+connect-history-api-fallback@^1.6.0:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc"
+ integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg==
+
+console-browserify@^1.1.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336"
+ integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==
+
+constants-browserify@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75"
+ integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U=
+
+contains-path@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a"
+ integrity sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=
+
[email protected]:
+ version "0.5.3"
+ resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd"
+ integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==
+ dependencies:
+ safe-buffer "5.1.2"
+
+content-type@~1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
+ integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==
+
[email protected], convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0:
+ version "1.7.0"
+ resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442"
+ integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==
+ dependencies:
+ safe-buffer "~5.1.1"
+
+convert-source-map@^0.3.3:
+ version "0.3.5"
+ resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-0.3.5.tgz#f1d802950af7dd2631a1febe0596550c86ab3190"
+ integrity sha1-8dgClQr33SYxof6+BZZVDIarMZA=
+
+convert-source-map@^1.5.0:
+ version "1.8.0"
+ resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369"
+ integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==
+ dependencies:
+ safe-buffer "~5.1.1"
+
[email protected]:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
+ integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw=
+
[email protected]:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba"
+ integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==
+
+copy-concurrently@^1.0.0:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0"
+ integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A==
+ dependencies:
+ aproba "^1.1.1"
+ fs-write-stream-atomic "^1.0.8"
+ iferr "^0.1.5"
+ mkdirp "^0.5.1"
+ rimraf "^2.5.4"
+ run-queue "^1.0.0"
+
+copy-descriptor@^0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d"
+ integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=
+
[email protected]:
+ version "3.3.1"
+ resolved "https://registry.yarnpkg.com/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz#115aa1a9998ffab6196f93076ad6da3b913662ae"
+ integrity sha512-i13qo6kIHTTpCm8/Wup+0b1mVWETvu2kIMzKoK8FpkLkFxlt0znUAHcMzox+T8sPlqtZXq3CulEjQHsYiGFJUw==
+ dependencies:
+ toggle-selection "^1.0.6"
+
+core-js-compat@^3.6.2, core-js-compat@^3.8.0:
+ version "3.9.0"
+ resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.9.0.tgz#29da39385f16b71e1915565aa0385c4e0963ad56"
+ integrity sha512-YK6fwFjCOKWwGnjFUR3c544YsnA/7DoLL0ysncuOJ4pwbriAtOpvM2bygdlcXbvQCQZ7bBU9CL4t7tGl7ETRpQ==
+ dependencies:
+ browserslist "^4.16.3"
+ semver "7.0.0"
+
+core-js-pure@^3.0.0:
+ version "3.9.0"
+ resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.9.0.tgz#326cc74e1fef8b7443a6a793ddb0adfcd81f9efb"
+ integrity sha512-3pEcmMZC9Cq0D4ZBh3pe2HLtqxpGNJBLXF/kZ2YzK17RbKp94w0HFbdbSx8H8kAlZG5k76hvLrkPm57Uyef+kg==
+
[email protected]:
+ version "3.6.5"
+ resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.6.5.tgz#7395dc273af37fb2e50e9bd3d9fe841285231d1a"
+ integrity sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==
+
+core-js@^2.4.0:
+ version "2.6.12"
+ resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.12.tgz#d9333dfa7b065e347cc5682219d6f690859cc2ec"
+ integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==
+
+core-js@^3.6.5:
+ version "3.9.0"
+ resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.9.0.tgz#790b1bb11553a2272b36e2625c7179db345492f8"
+ integrity sha512-PyFBJaLq93FlyYdsndE5VaueA9K5cNB7CGzeCj191YYLhkQM0gdZR2SKihM70oF0wdqKSKClv/tEBOpoRmdOVQ==
+
[email protected], core-util-is@~1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
+ integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
+
+cosmiconfig@^5.0.0:
+ version "5.2.1"
+ resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a"
+ integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA==
+ dependencies:
+ import-fresh "^2.0.0"
+ is-directory "^0.3.1"
+ js-yaml "^3.13.1"
+ parse-json "^4.0.0"
+
+cosmiconfig@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982"
+ integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==
+ dependencies:
+ "@types/parse-json" "^4.0.0"
+ import-fresh "^3.1.0"
+ parse-json "^5.0.0"
+ path-type "^4.0.0"
+ yaml "^1.7.2"
+
+cosmiconfig@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.0.tgz#ef9b44d773959cae63ddecd122de23853b60f8d3"
+ integrity sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA==
+ dependencies:
+ "@types/parse-json" "^4.0.0"
+ import-fresh "^3.2.1"
+ parse-json "^5.0.0"
+ path-type "^4.0.0"
+ yaml "^1.10.0"
+
+create-ecdh@^4.0.0:
+ version "4.0.4"
+ resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e"
+ integrity sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==
+ dependencies:
+ bn.js "^4.1.0"
+ elliptic "^6.5.3"
+
+create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196"
+ integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==
+ dependencies:
+ cipher-base "^1.0.1"
+ inherits "^2.0.1"
+ md5.js "^1.3.4"
+ ripemd160 "^2.0.1"
+ sha.js "^2.4.0"
+
+create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7:
+ version "1.1.7"
+ resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff"
+ integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==
+ dependencies:
+ cipher-base "^1.0.3"
+ create-hash "^1.1.0"
+ inherits "^2.0.1"
+ ripemd160 "^2.0.0"
+ safe-buffer "^5.0.1"
+ sha.js "^2.4.8"
+
[email protected], cross-spawn@^7.0.0, cross-spawn@^7.0.2:
+ version "7.0.3"
+ resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
+ integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
+ dependencies:
+ path-key "^3.1.0"
+ shebang-command "^2.0.0"
+ which "^2.0.1"
+
+cross-spawn@^6.0.0:
+ version "6.0.5"
+ resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
+ integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==
+ dependencies:
+ nice-try "^1.0.4"
+ path-key "^2.0.1"
+ semver "^5.5.0"
+ shebang-command "^1.2.0"
+ which "^1.2.9"
+
+crypto-browserify@^3.11.0:
+ version "3.12.0"
+ resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec"
+ integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==
+ dependencies:
+ browserify-cipher "^1.0.0"
+ browserify-sign "^4.0.0"
+ create-ecdh "^4.0.0"
+ create-hash "^1.1.0"
+ create-hmac "^1.1.0"
+ diffie-hellman "^5.0.0"
+ inherits "^2.0.1"
+ pbkdf2 "^3.0.3"
+ public-encrypt "^4.0.0"
+ randombytes "^2.0.0"
+ randomfill "^1.0.3"
+
+crypto-random-string@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e"
+ integrity sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4=
+
+css-blank-pseudo@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/css-blank-pseudo/-/css-blank-pseudo-0.1.4.tgz#dfdefd3254bf8a82027993674ccf35483bfcb3c5"
+ integrity sha512-LHz35Hr83dnFeipc7oqFDmsjHdljj3TQtxGGiNWSOsTLIAubSm4TEz8qCaKFpk7idaQ1GfWscF4E6mgpBysA1w==
+ dependencies:
+ postcss "^7.0.5"
+
[email protected]:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/css-box-model/-/css-box-model-1.2.1.tgz#59951d3b81fd6b2074a62d49444415b0d2b4d7c1"
+ integrity sha512-a7Vr4Q/kd/aw96bnJG332W9V9LkJO69JRcaCYDUqjp6/z0w6VcZjgAcTbgFxEPfBgdnAwlh3iwu+hLopa+flJw==
+ dependencies:
+ tiny-invariant "^1.0.6"
+
[email protected], css-color-names@^0.0.4:
+ version "0.0.4"
+ resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-0.0.4.tgz#808adc2e79cf84738069b646cb20ec27beb629e0"
+ integrity sha1-gIrcLnnPhHOAabZGyyDsJ762KeA=
+
+css-declaration-sorter@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-4.0.1.tgz#c198940f63a76d7e36c1e71018b001721054cb22"
+ integrity sha512-BcxQSKTSEEQUftYpBVnsH4SF05NTuBokb19/sBt6asXGKZ/6VP7PLG1CBCkFDYOnhXhPh0jMhO6xZ71oYHXHBA==
+ dependencies:
+ postcss "^7.0.1"
+ timsort "^0.3.0"
+
+css-has-pseudo@^0.10.0:
+ version "0.10.0"
+ resolved "https://registry.yarnpkg.com/css-has-pseudo/-/css-has-pseudo-0.10.0.tgz#3c642ab34ca242c59c41a125df9105841f6966ee"
+ integrity sha512-Z8hnfsZu4o/kt+AuFzeGpLVhFOGO9mluyHBaA2bA8aCGTwah5sT3WV/fTHH8UNZUytOIImuGPrl/prlb4oX4qQ==
+ dependencies:
+ postcss "^7.0.6"
+ postcss-selector-parser "^5.0.0-rc.4"
+
[email protected]:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-4.3.0.tgz#c888af64b2a5b2e85462c72c0f4a85c7e2e0821e"
+ integrity sha512-rdezjCjScIrsL8BSYszgT4s476IcNKt6yX69t0pHjJVnPUTDpn4WfIpDQTN3wCJvUvfsz/mFjuGOekf3PY3NUg==
+ dependencies:
+ camelcase "^6.0.0"
+ cssesc "^3.0.0"
+ icss-utils "^4.1.1"
+ loader-utils "^2.0.0"
+ postcss "^7.0.32"
+ postcss-modules-extract-imports "^2.0.0"
+ postcss-modules-local-by-default "^3.0.3"
+ postcss-modules-scope "^2.2.0"
+ postcss-modules-values "^3.0.0"
+ postcss-value-parser "^4.1.0"
+ schema-utils "^2.7.1"
+ semver "^7.3.2"
+
+css-prefers-color-scheme@^3.1.1:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/css-prefers-color-scheme/-/css-prefers-color-scheme-3.1.1.tgz#6f830a2714199d4f0d0d0bb8a27916ed65cff1f4"
+ integrity sha512-MTu6+tMs9S3EUqzmqLXEcgNRbNkkD/TGFvowpeoWJn5Vfq7FMgsmRQs9X5NXAURiOBmOxm/lLjsDNXDE6k9bhg==
+ dependencies:
+ postcss "^7.0.5"
+
+css-select-base-adapter@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7"
+ integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w==
+
+css-select@^2.0.0, css-select@^2.0.2:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef"
+ integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ==
+ dependencies:
+ boolbase "^1.0.0"
+ css-what "^3.2.1"
+ domutils "^1.7.0"
+ nth-check "^1.0.2"
+
[email protected]:
+ version "1.0.0-alpha.37"
+ resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22"
+ integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg==
+ dependencies:
+ mdn-data "2.0.4"
+ source-map "^0.6.1"
+
+css-tree@^1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.2.tgz#9ae393b5dafd7dae8a622475caec78d3d8fbd7b5"
+ integrity sha512-wCoWush5Aeo48GLhfHPbmvZs59Z+M7k5+B1xDnXbdWNcEF423DoFdqSWE0PM5aNk5nI5cp1q7ms36zGApY/sKQ==
+ dependencies:
+ mdn-data "2.0.14"
+ source-map "^0.6.1"
+
+css-unit-converter@^1.1.1:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/css-unit-converter/-/css-unit-converter-1.1.2.tgz#4c77f5a1954e6dbff60695ecb214e3270436ab21"
+ integrity sha512-IiJwMC8rdZE0+xiEZHeru6YoONC4rfPMqGm2W85jMIbkFvv5nFTwJVFHam2eFrN6txmoUYFAFXiv8ICVeTO0MA==
+
+css-what@^3.2.1:
+ version "3.4.2"
+ resolved "https://registry.yarnpkg.com/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4"
+ integrity sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ==
+
+css@^2.0.0:
+ version "2.2.4"
+ resolved "https://registry.yarnpkg.com/css/-/css-2.2.4.tgz#c646755c73971f2bba6a601e2cf2fd71b1298929"
+ integrity sha512-oUnjmWpy0niI3x/mPL8dVEI1l7MnG3+HHyRPHf+YFSbK+svOhXpmSOcDURUh2aOCgl2grzrOPt1nHLuCVFULLw==
+ dependencies:
+ inherits "^2.0.3"
+ source-map "^0.6.1"
+ source-map-resolve "^0.5.2"
+ urix "^0.1.0"
+
+cssdb@^4.4.0:
+ version "4.4.0"
+ resolved "https://registry.yarnpkg.com/cssdb/-/cssdb-4.4.0.tgz#3bf2f2a68c10f5c6a08abd92378331ee803cddb0"
+ integrity sha512-LsTAR1JPEM9TpGhl/0p3nQecC2LJ0kD8X5YARu1hk/9I1gril5vDtMZyNxcEpxxDj34YNck/ucjuoUd66K03oQ==
+
+cssesc@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-2.0.0.tgz#3b13bd1bb1cb36e1bcb5a4dcd27f54c5dcb35703"
+ integrity sha512-MsCAG1z9lPdoO/IUMLSBWBSVxVtJ1395VGIQ+Fc2gNdkQ1hNDnQdw3YhA71WJCBW1vdwA0cAnk/DnW6bqoEUYg==
+
+cssesc@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee"
+ integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==
+
+cssnano-preset-default@^4.0.7:
+ version "4.0.7"
+ resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-4.0.7.tgz#51ec662ccfca0f88b396dcd9679cdb931be17f76"
+ integrity sha512-x0YHHx2h6p0fCl1zY9L9roD7rnlltugGu7zXSKQx6k2rYw0Hi3IqxcoAGF7u9Q5w1nt7vK0ulxV8Lo+EvllGsA==
+ dependencies:
+ css-declaration-sorter "^4.0.1"
+ cssnano-util-raw-cache "^4.0.1"
+ postcss "^7.0.0"
+ postcss-calc "^7.0.1"
+ postcss-colormin "^4.0.3"
+ postcss-convert-values "^4.0.1"
+ postcss-discard-comments "^4.0.2"
+ postcss-discard-duplicates "^4.0.2"
+ postcss-discard-empty "^4.0.1"
+ postcss-discard-overridden "^4.0.1"
+ postcss-merge-longhand "^4.0.11"
+ postcss-merge-rules "^4.0.3"
+ postcss-minify-font-values "^4.0.2"
+ postcss-minify-gradients "^4.0.2"
+ postcss-minify-params "^4.0.2"
+ postcss-minify-selectors "^4.0.2"
+ postcss-normalize-charset "^4.0.1"
+ postcss-normalize-display-values "^4.0.2"
+ postcss-normalize-positions "^4.0.2"
+ postcss-normalize-repeat-style "^4.0.2"
+ postcss-normalize-string "^4.0.2"
+ postcss-normalize-timing-functions "^4.0.2"
+ postcss-normalize-unicode "^4.0.1"
+ postcss-normalize-url "^4.0.1"
+ postcss-normalize-whitespace "^4.0.2"
+ postcss-ordered-values "^4.1.2"
+ postcss-reduce-initial "^4.0.3"
+ postcss-reduce-transforms "^4.0.2"
+ postcss-svgo "^4.0.2"
+ postcss-unique-selectors "^4.0.1"
+
+cssnano-util-get-arguments@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/cssnano-util-get-arguments/-/cssnano-util-get-arguments-4.0.0.tgz#ed3a08299f21d75741b20f3b81f194ed49cc150f"
+ integrity sha1-7ToIKZ8h11dBsg87gfGU7UnMFQ8=
+
+cssnano-util-get-match@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/cssnano-util-get-match/-/cssnano-util-get-match-4.0.0.tgz#c0e4ca07f5386bb17ec5e52250b4f5961365156d"
+ integrity sha1-wOTKB/U4a7F+xeUiULT1lhNlFW0=
+
+cssnano-util-raw-cache@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/cssnano-util-raw-cache/-/cssnano-util-raw-cache-4.0.1.tgz#b26d5fd5f72a11dfe7a7846fb4c67260f96bf282"
+ integrity sha512-qLuYtWK2b2Dy55I8ZX3ky1Z16WYsx544Q0UWViebptpwn/xDBmog2TLg4f+DBMg1rJ6JDWtn96WHbOKDWt1WQA==
+ dependencies:
+ postcss "^7.0.0"
+
+cssnano-util-same-parent@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/cssnano-util-same-parent/-/cssnano-util-same-parent-4.0.1.tgz#574082fb2859d2db433855835d9a8456ea18bbf3"
+ integrity sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q==
+
+cssnano@^4.1.10:
+ version "4.1.10"
+ resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-4.1.10.tgz#0ac41f0b13d13d465487e111b778d42da631b8b2"
+ integrity sha512-5wny+F6H4/8RgNlaqab4ktc3e0/blKutmq8yNlBFXA//nSFFAqAngjNVRzUvCgYROULmZZUoosL/KSoZo5aUaQ==
+ dependencies:
+ cosmiconfig "^5.0.0"
+ cssnano-preset-default "^4.0.7"
+ is-resolvable "^1.0.0"
+ postcss "^7.0.0"
+
+csso@^4.0.2:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529"
+ integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==
+ dependencies:
+ css-tree "^1.1.2"
+
+cssom@^0.4.4:
+ version "0.4.4"
+ resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10"
+ integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw==
+
+cssom@~0.3.6:
+ version "0.3.8"
+ resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a"
+ integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==
+
+cssstyle@^2.2.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852"
+ integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==
+ dependencies:
+ cssom "~0.3.6"
+
+csstype@^3.0.2, csstype@^3.0.6:
+ version "3.0.8"
+ resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.8.tgz#d2266a792729fb227cd216fb572f43728e1ad340"
+ integrity sha512-jXKhWqXPmlUeoQnF/EhTtTl4C9SnrxSH/jZUih3jmO6lBKr99rP3/+FmrMj4EFpOXzMtXHAZkd3x0E6h6Fgflw==
+
+cyclist@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9"
+ integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk=
+
+d3-array@2, d3-array@^2.3.0:
+ version "2.12.1"
+ resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-2.12.1.tgz#e20b41aafcdffdf5d50928004ececf815a465e81"
+ integrity sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==
+ dependencies:
+ internmap "^1.0.0"
+
+"d3-color@1 - 2":
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-2.0.0.tgz#8d625cab42ed9b8f601a1760a389f7ea9189d62e"
+ integrity sha512-SPXi0TSKPD4g9tw0NMZFnR95XVgUZiBH+uUTqQuDu1OsE2zomHU7ho0FISciaPvosimixwHFl3WHLGabv6dDgQ==
+
+"d3-format@1 - 2":
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-2.0.0.tgz#a10bcc0f986c372b729ba447382413aabf5b0767"
+ integrity sha512-Ab3S6XuE/Q+flY96HXT0jOXcM4EAClYFnRGY5zsjRGNy6qCYrQsMffs7cV5Q9xejb35zxW5hf/guKw34kvIKsA==
+
+"[email protected] - 2", d3-interpolate@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-2.0.1.tgz#98be499cfb8a3b94d4ff616900501a64abc91163"
+ integrity sha512-c5UhwwTs/yybcmTpAVqwSFl6vrQ8JZJoT5F7xNFK9pymv5C0Ymcc9/LIJHtYIggg/yS9YHw8i8O8tgb9pupjeQ==
+ dependencies:
+ d3-color "1 - 2"
+
+"d3-path@1 - 2":
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-2.0.0.tgz#55d86ac131a0548adae241eebfb56b4582dd09d8"
+ integrity sha512-ZwZQxKhBnv9yHaiWd6ZU4x5BtCQ7pXszEV9CU6kRgwIQVQGLMv1oiL4M+MK/n79sYzsj+gcgpPQSctJUsLN7fA==
+
+d3-scale@^3.2.3:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-3.3.0.tgz#28c600b29f47e5b9cd2df9749c206727966203f3"
+ integrity sha512-1JGp44NQCt5d1g+Yy+GeOnZP7xHo0ii8zsQp6PGzd+C1/dl0KGsp9A7Mxwp+1D1o4unbTTxVdU/ZOIEBoeZPbQ==
+ dependencies:
+ d3-array "^2.3.0"
+ d3-format "1 - 2"
+ d3-interpolate "1.2.0 - 2"
+ d3-time "^2.1.1"
+ d3-time-format "2 - 3"
+
+d3-shape@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-2.1.0.tgz#3b6a82ccafbc45de55b57fcf956c584ded3b666f"
+ integrity sha512-PnjUqfM2PpskbSLTJvAzp2Wv4CZsnAgTfcVRTwW03QR3MkXF8Uo7B1y/lWkAsmbKwuecto++4NlsYcvYpXpTHA==
+ dependencies:
+ d3-path "1 - 2"
+
+"d3-time-format@2 - 3":
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-3.0.0.tgz#df8056c83659e01f20ac5da5fdeae7c08d5f1bb6"
+ integrity sha512-UXJh6EKsHBTjopVqZBhFysQcoXSv/5yLONZvkQ5Kk3qbwiUYkdX17Xa1PT6U1ZWXGGfB1ey5L8dKMlFq2DO0Ag==
+ dependencies:
+ d3-time "1 - 2"
+
+"d3-time@1 - 2", d3-time@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-2.1.1.tgz#e9d8a8a88691f4548e68ca085e5ff956724a6682"
+ integrity sha512-/eIQe/eR4kCQwq7yxi7z4c6qEXf2IYGcjoWB5OOQy4Tq9Uv39/947qlDcN2TLkiTzQWzvnsuYPB9TrWaNfipKQ==
+ dependencies:
+ d3-array "2"
+
+d@1, d@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a"
+ integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==
+ dependencies:
+ es5-ext "^0.10.50"
+ type "^1.0.1"
+
+damerau-levenshtein@^1.0.6:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.6.tgz#143c1641cb3d85c60c32329e26899adea8701791"
+ integrity sha512-JVrozIeElnj3QzfUIt8tB8YMluBJom4Vw9qTPpjGYQ9fYlB3D/rb6OordUxf3xeFB35LKWs0xqcO5U6ySvBtug==
+
+dashdash@^1.12.0:
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0"
+ integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=
+ dependencies:
+ assert-plus "^1.0.0"
+
+data-urls@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b"
+ integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ==
+ dependencies:
+ abab "^2.0.3"
+ whatwg-mimetype "^2.3.0"
+ whatwg-url "^8.0.0"
+
[email protected], debug@^2.2.0, debug@^2.3.3, debug@^2.6.0, debug@^2.6.9:
+ version "2.6.9"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
+ integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
+ dependencies:
+ ms "2.0.0"
+
+debug@^3.1.1, debug@^3.2.6, debug@^3.2.7:
+ version "3.2.7"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a"
+ integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==
+ dependencies:
+ ms "^2.1.1"
+
+debug@^4.0.1, debug@^4.1.0, debug@^4.1.1:
+ version "4.3.1"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee"
+ integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==
+ dependencies:
+ ms "2.1.2"
+
+decamelize@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
+ integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=
+
+decimal.js-light@^2.4.1:
+ version "2.5.1"
+ resolved "https://registry.yarnpkg.com/decimal.js-light/-/decimal.js-light-2.5.1.tgz#134fd32508f19e208f4fb2f8dac0d2626a867934"
+ integrity sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==
+
+decimal.js@^10.2.0:
+ version "10.2.1"
+ resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.2.1.tgz#238ae7b0f0c793d3e3cea410108b35a2c01426a3"
+ integrity sha512-KaL7+6Fw6i5A2XSnsbhm/6B+NuEA7TZ4vqxnd5tXz9sbKtrN9Srj8ab4vKVdK8YAqZO9P1kg45Y6YLoduPf+kw==
+
+decode-uri-component@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545"
+ integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=
+
+dedent@^0.7.0:
+ version "0.7.0"
+ resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c"
+ integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw=
+
+deep-equal@^1.0.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a"
+ integrity sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g==
+ dependencies:
+ is-arguments "^1.0.4"
+ is-date-object "^1.0.1"
+ is-regex "^1.0.4"
+ object-is "^1.0.1"
+ object-keys "^1.1.1"
+ regexp.prototype.flags "^1.2.0"
+
+deep-is@^0.1.3, deep-is@~0.1.3:
+ version "0.1.3"
+ resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34"
+ integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=
+
+deepmerge@^4.2.2:
+ version "4.2.2"
+ resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955"
+ integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==
+
+default-gateway@^4.2.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b"
+ integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA==
+ dependencies:
+ execa "^1.0.0"
+ ip-regex "^2.1.0"
+
+define-properties@^1.1.2, define-properties@^1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1"
+ integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==
+ dependencies:
+ object-keys "^1.0.12"
+
+define-property@^0.2.5:
+ version "0.2.5"
+ resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116"
+ integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=
+ dependencies:
+ is-descriptor "^0.1.0"
+
+define-property@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6"
+ integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY=
+ dependencies:
+ is-descriptor "^1.0.0"
+
+define-property@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d"
+ integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==
+ dependencies:
+ is-descriptor "^1.0.2"
+ isobject "^3.0.1"
+
+del@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4"
+ integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ==
+ dependencies:
+ "@types/glob" "^7.1.1"
+ globby "^6.1.0"
+ is-path-cwd "^2.0.0"
+ is-path-in-cwd "^2.0.0"
+ p-map "^2.0.0"
+ pify "^4.0.1"
+ rimraf "^2.6.3"
+
+delayed-stream@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
+ integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk=
+
+depd@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
+ integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=
+
+des.js@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843"
+ integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA==
+ dependencies:
+ inherits "^2.0.1"
+ minimalistic-assert "^1.0.0"
+
+destroy@~1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80"
+ integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=
+
+detect-newline@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651"
+ integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==
+
+detect-node-es@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/detect-node-es/-/detect-node-es-1.1.0.tgz#163acdf643330caa0b4cd7c21e7ee7755d6fa493"
+ integrity sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==
+
+detect-node@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c"
+ integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw==
+
[email protected]:
+ version "1.1.6"
+ resolved "https://registry.yarnpkg.com/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275"
+ integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q==
+ dependencies:
+ address "^1.0.1"
+ debug "^2.6.0"
+
+diff-sequences@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-26.6.2.tgz#48ba99157de1923412eed41db6b6d4aa9ca7c0b1"
+ integrity sha512-Mv/TDa3nZ9sbc5soK+OoA74BsS3mL37yixCvUAQkiuA4Wz6YtwP/K47n2rv2ovzHZvoiQeA5FTQOschKkEwB0Q==
+
+diffie-hellman@^5.0.0:
+ version "5.0.3"
+ resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875"
+ integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==
+ dependencies:
+ bn.js "^4.1.0"
+ miller-rabin "^4.0.0"
+ randombytes "^2.0.0"
+
+dir-glob@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f"
+ integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==
+ dependencies:
+ path-type "^4.0.0"
+
+dns-equal@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d"
+ integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0=
+
+dns-packet@^1.3.1:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-1.3.1.tgz#12aa426981075be500b910eedcd0b47dd7deda5a"
+ integrity sha512-0UxfQkMhYAUaZI+xrNZOz/as5KgDU0M/fQ9b6SpkyLbk3GEswDi6PADJVaYJradtRVsRIlF1zLyOodbcTCDzUg==
+ dependencies:
+ ip "^1.1.0"
+ safe-buffer "^5.0.1"
+
+dns-txt@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/dns-txt/-/dns-txt-2.0.2.tgz#b91d806f5d27188e4ab3e7d107d881a1cc4642b6"
+ integrity sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY=
+ dependencies:
+ buffer-indexof "^1.0.0"
+
[email protected]:
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-1.5.0.tgz#379dce730f6166f76cefa4e6707a159b02c5a6fa"
+ integrity sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=
+ dependencies:
+ esutils "^2.0.2"
+ isarray "^1.0.0"
+
+doctrine@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d"
+ integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==
+ dependencies:
+ esutils "^2.0.2"
+
+doctrine@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961"
+ integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==
+ dependencies:
+ esutils "^2.0.2"
+
+dom-converter@^0.2:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768"
+ integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==
+ dependencies:
+ utila "~0.4"
+
+dom-helpers@^3.4.0:
+ version "3.4.0"
+ resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-3.4.0.tgz#e9b369700f959f62ecde5a6babde4bccd9169af8"
+ integrity sha512-LnuPJ+dwqKDIyotW1VzmOZ5TONUN7CwkCR5hrgawTUbkBGYdeoNLZo6nNfGkCrjtE1nXXaj7iMMpDa8/d9WoIA==
+ dependencies:
+ "@babel/runtime" "^7.1.2"
+
+dom-serializer@0:
+ version "0.2.2"
+ resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51"
+ integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g==
+ dependencies:
+ domelementtype "^2.0.1"
+ entities "^2.0.0"
+
[email protected]:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/dom-storage/-/dom-storage-2.1.0.tgz#00fb868bc9201357ea243c7bcfd3304c1e34ea39"
+ integrity sha512-g6RpyWXzl0RR6OTElHKBl7nwnK87GUyZMYC7JWsB/IA73vpqK2K6LT39x4VepLxlSsWBFrPVLnsSR5Jyty0+2Q==
+
+domain-browser@^1.1.1:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda"
+ integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA==
+
+domelementtype@1, domelementtype@^1.3.1:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f"
+ integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w==
+
+domelementtype@^2.0.1:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.1.0.tgz#a851c080a6d1c3d94344aed151d99f669edf585e"
+ integrity sha512-LsTgx/L5VpD+Q8lmsXSHW2WpA+eBlZ9HPf3erD1IoPF00/3JKHZ3BknUVA2QGDNu69ZNmyFmCWBSO45XjYKC5w==
+
+domexception@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304"
+ integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg==
+ dependencies:
+ webidl-conversions "^5.0.0"
+
+domhandler@^2.3.0:
+ version "2.4.2"
+ resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803"
+ integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA==
+ dependencies:
+ domelementtype "1"
+
+domutils@^1.5.1, domutils@^1.7.0:
+ version "1.7.0"
+ resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a"
+ integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg==
+ dependencies:
+ dom-serializer "0"
+ domelementtype "1"
+
+dot-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751"
+ integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+
+dot-prop@^5.2.0:
+ version "5.3.0"
+ resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88"
+ integrity sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==
+ dependencies:
+ is-obj "^2.0.0"
+
[email protected]:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0"
+ integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==
+
[email protected]:
+ version "8.2.0"
+ resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.2.0.tgz#97e619259ada750eea3e4ea3e26bceea5424b16a"
+ integrity sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==
+
+duplexer@^0.1.1:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6"
+ integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==
+
+duplexify@^3.4.2, duplexify@^3.6.0:
+ version "3.7.1"
+ resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309"
+ integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==
+ dependencies:
+ end-of-stream "^1.0.0"
+ inherits "^2.0.1"
+ readable-stream "^2.0.0"
+ stream-shift "^1.0.0"
+
+ecc-jsbn@~0.1.1:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9"
+ integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=
+ dependencies:
+ jsbn "~0.1.0"
+ safer-buffer "^2.1.0"
+
[email protected]:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
+ integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=
+
+ejs@^2.6.1:
+ version "2.7.4"
+ resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.7.4.tgz#48661287573dcc53e366c7a1ae52c3a120eec9ba"
+ integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==
+
+electron-to-chromium@^1.3.564, electron-to-chromium@^1.3.649:
+ version "1.3.671"
+ resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.671.tgz#8feaed6eae42d279fa4611f58c42a5a1eb81b2a0"
+ integrity sha512-RTD97QkdrJKaKwRv9h/wGAaoR2lGxNXEcBXS31vjitgTPwTWAbLdS7cEsBK68eEQy7p6YyT8D5BxBEYHu2SuwQ==
+
+elliptic@^6.5.3:
+ version "6.5.4"
+ resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb"
+ integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==
+ dependencies:
+ bn.js "^4.11.9"
+ brorand "^1.1.0"
+ hash.js "^1.0.0"
+ hmac-drbg "^1.0.1"
+ inherits "^2.0.4"
+ minimalistic-assert "^1.0.1"
+ minimalistic-crypto-utils "^1.0.1"
+
+emittery@^0.7.1:
+ version "0.7.2"
+ resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.7.2.tgz#25595908e13af0f5674ab419396e2fb394cdfa82"
+ integrity sha512-A8OG5SR/ij3SsJdWDJdkkSYUjQdCUx6APQXem0SaEePBSRg4eymGYwBkKo1Y6DU+af/Jn2dBQqDBvjnr9Vi8nQ==
+
+emoji-regex@^7.0.1:
+ version "7.0.3"
+ resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156"
+ integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==
+
+emoji-regex@^8.0.0:
+ version "8.0.0"
+ resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37"
+ integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==
+
+emoji-regex@^9.0.0:
+ version "9.2.1"
+ resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.1.tgz#c9b25604256bb3428964bead3ab63069d736f7ee"
+ integrity sha512-117l1H6U4X3Krn+MrzYrL57d5H7siRHWraBs7s+LjRuFK7Fe7hJqnJ0skWlinqsycVLU5YAo6L8CsEYQ0V5prg==
+
+emojis-list@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-2.1.0.tgz#4daa4d9db00f9819880c79fa457ae5b09a1fd389"
+ integrity sha1-TapNnbAPmBmIDHn6RXrlsJof04k=
+
+emojis-list@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78"
+ integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==
+
+encodeurl@~1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
+ integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=
+
+end-of-stream@^1.0.0, end-of-stream@^1.1.0:
+ version "1.4.4"
+ resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
+ integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
+ dependencies:
+ once "^1.4.0"
+
+enhanced-resolve@^4.3.0:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.5.0.tgz#2f3cfd84dbe3b487f18f2db2ef1e064a571ca5ec"
+ integrity sha512-Nv9m36S/vxpsI+Hc4/ZGRs0n9mXqSWGGq49zxb/cJfPAQMbUtttJAlNPS4AQzaBdw/pKskw5bMbekT/Y7W/Wlg==
+ dependencies:
+ graceful-fs "^4.1.2"
+ memory-fs "^0.5.0"
+ tapable "^1.0.0"
+
+enquirer@^2.3.5:
+ version "2.3.6"
+ resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d"
+ integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==
+ dependencies:
+ ansi-colors "^4.1.1"
+
+entities@^1.1.1:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56"
+ integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==
+
+entities@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55"
+ integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==
+
+errno@^0.1.3, errno@~0.1.7:
+ version "0.1.8"
+ resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f"
+ integrity sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==
+ dependencies:
+ prr "~1.0.1"
+
+error-ex@^1.2.0, error-ex@^1.3.1:
+ version "1.3.2"
+ resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
+ integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==
+ dependencies:
+ is-arrayish "^0.2.1"
+
+error-stack-parser@^2.0.6:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/error-stack-parser/-/error-stack-parser-2.0.6.tgz#5a99a707bd7a4c58a797902d48d82803ede6aad8"
+ integrity sha512-d51brTeqC+BHlwF0BhPtcYgF5nlzf9ZZ0ZIUQNZpc9ZB9qw5IJ2diTrBY9jlCJkTLITYPjmiX6OWCwH+fuyNgQ==
+ dependencies:
+ stackframe "^1.1.1"
+
+es-abstract@^1.17.2:
+ version "1.17.7"
+ resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.7.tgz#a4de61b2f66989fc7421676c1cb9787573ace54c"
+ integrity sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==
+ dependencies:
+ es-to-primitive "^1.2.1"
+ function-bind "^1.1.1"
+ has "^1.0.3"
+ has-symbols "^1.0.1"
+ is-callable "^1.2.2"
+ is-regex "^1.1.1"
+ object-inspect "^1.8.0"
+ object-keys "^1.1.1"
+ object.assign "^4.1.1"
+ string.prototype.trimend "^1.0.1"
+ string.prototype.trimstart "^1.0.1"
+
+es-abstract@^1.18.0-next.1, es-abstract@^1.18.0-next.2:
+ version "1.18.0-next.2"
+ resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.0-next.2.tgz#088101a55f0541f595e7e057199e27ddc8f3a5c2"
+ integrity sha512-Ih4ZMFHEtZupnUh6497zEL4y2+w8+1ljnCyaTa+adcoafI1GOvMwFlDjBLfWR7y9VLfrjRJe9ocuHY1PSR9jjw==
+ dependencies:
+ call-bind "^1.0.2"
+ es-to-primitive "^1.2.1"
+ function-bind "^1.1.1"
+ get-intrinsic "^1.0.2"
+ has "^1.0.3"
+ has-symbols "^1.0.1"
+ is-callable "^1.2.2"
+ is-negative-zero "^2.0.1"
+ is-regex "^1.1.1"
+ object-inspect "^1.9.0"
+ object-keys "^1.1.1"
+ object.assign "^4.1.2"
+ string.prototype.trimend "^1.0.3"
+ string.prototype.trimstart "^1.0.3"
+
+es-abstract@^1.18.2:
+ version "1.18.5"
+ resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.5.tgz#9b10de7d4c206a3581fd5b2124233e04db49ae19"
+ integrity sha512-DDggyJLoS91CkJjgauM5c0yZMjiD1uK3KcaCeAmffGwZ+ODWzOkPN4QwRbsK5DOFf06fywmyLci3ZD8jLGhVYA==
+ dependencies:
+ call-bind "^1.0.2"
+ es-to-primitive "^1.2.1"
+ function-bind "^1.1.1"
+ get-intrinsic "^1.1.1"
+ has "^1.0.3"
+ has-symbols "^1.0.2"
+ internal-slot "^1.0.3"
+ is-callable "^1.2.3"
+ is-negative-zero "^2.0.1"
+ is-regex "^1.1.3"
+ is-string "^1.0.6"
+ object-inspect "^1.11.0"
+ object-keys "^1.1.1"
+ object.assign "^4.1.2"
+ string.prototype.trimend "^1.0.4"
+ string.prototype.trimstart "^1.0.4"
+ unbox-primitive "^1.0.1"
+
+es-to-primitive@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a"
+ integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==
+ dependencies:
+ is-callable "^1.1.4"
+ is-date-object "^1.0.1"
+ is-symbol "^1.0.2"
+
+es5-ext@^0.10.35, es5-ext@^0.10.50:
+ version "0.10.53"
+ resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1"
+ integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q==
+ dependencies:
+ es6-iterator "~2.0.3"
+ es6-symbol "~3.1.3"
+ next-tick "~1.0.0"
+
+es6-iterator@2.0.3, es6-iterator@~2.0.3:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7"
+ integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c=
+ dependencies:
+ d "1"
+ es5-ext "^0.10.35"
+ es6-symbol "^3.1.1"
+
+es6-symbol@^3.1.1, es6-symbol@~3.1.3:
+ version "3.1.3"
+ resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18"
+ integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==
+ dependencies:
+ d "^1.0.1"
+ ext "^1.1.2"
+
+escalade@^3.0.2, escalade@^3.1.1:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40"
+ integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==
+
+escape-html@~1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
+ integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=
+
+escape-string-regexp@2.0.0, escape-string-regexp@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344"
+ integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==
+
+escape-string-regexp@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
+ integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
+
+escape-string-regexp@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34"
+ integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==
+
+escodegen@^1.14.1:
+ version "1.14.3"
+ resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.14.3.tgz#4e7b81fba61581dc97582ed78cab7f0e8d63f503"
+ integrity sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==
+ dependencies:
+ esprima "^4.0.1"
+ estraverse "^4.2.0"
+ esutils "^2.0.2"
+ optionator "^0.8.1"
+ optionalDependencies:
+ source-map "~0.6.1"
+
+eslint-config-airbnb-base@^14.2.1:
+ version "14.2.1"
+ resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-14.2.1.tgz#8a2eb38455dc5a312550193b319cdaeef042cd1e"
+ integrity sha512-GOrQyDtVEc1Xy20U7vsB2yAoB4nBlfH5HZJeatRXHleO+OS5Ot+MWij4Dpltw4/DyIkqUfqz1epfhVR5XWWQPA==
+ dependencies:
+ confusing-browser-globals "^1.0.10"
+ object.assign "^4.1.2"
+ object.entries "^1.1.2"
+
+eslint-config-airbnb@^18.2.1:
+ version "18.2.1"
+ resolved "https://registry.yarnpkg.com/eslint-config-airbnb/-/eslint-config-airbnb-18.2.1.tgz#b7fe2b42f9f8173e825b73c8014b592e449c98d9"
+ integrity sha512-glZNDEZ36VdlZWoxn/bUR1r/sdFKPd1mHPbqUtkctgNG4yT2DLLtJ3D+yCV+jzZCc2V1nBVkmdknOJBZ5Hc0fg==
+ dependencies:
+ eslint-config-airbnb-base "^14.2.1"
+ object.assign "^4.1.2"
+ object.entries "^1.1.2"
+
+eslint-config-react-app@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/eslint-config-react-app/-/eslint-config-react-app-6.0.0.tgz#ccff9fc8e36b322902844cbd79197982be355a0e"
+ integrity sha512-bpoAAC+YRfzq0dsTk+6v9aHm/uqnDwayNAXleMypGl6CpxI9oXXscVHo4fk3eJPIn+rsbtNetB4r/ZIidFIE8A==
+ dependencies:
+ confusing-browser-globals "^1.0.10"
+
+eslint-import-resolver-node@^0.3.4:
+ version "0.3.4"
+ resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.4.tgz#85ffa81942c25012d8231096ddf679c03042c717"
+ integrity sha512-ogtf+5AB/O+nM6DIeBUNr2fuT7ot9Qg/1harBfBtaP13ekEWFQEEMP94BCB7zaNW3gyY+8SHYF00rnqYwXKWOA==
+ dependencies:
+ debug "^2.6.9"
+ resolve "^1.13.1"
+
+eslint-module-utils@^2.6.0:
+ version "2.6.0"
+ resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.6.0.tgz#579ebd094f56af7797d19c9866c9c9486629bfa6"
+ integrity sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA==
+ dependencies:
+ debug "^2.6.9"
+ pkg-dir "^2.0.0"
+
+eslint-module-utils@^2.6.1:
+ version "2.6.1"
+ resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.6.1.tgz#b51be1e473dd0de1c5ea638e22429c2490ea8233"
+ integrity sha512-ZXI9B8cxAJIH4nfkhTwcRTEAnrVfobYqwjWy/QMCZ8rHkZHFjf9yO4BzpiF9kCSfNlMG54eKigISHpX0+AaT4A==
+ dependencies:
+ debug "^3.2.7"
+ pkg-dir "^2.0.0"
+
+eslint-plugin-flowtype@^5.2.0:
+ version "5.2.2"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-flowtype/-/eslint-plugin-flowtype-5.2.2.tgz#c6e5dd2fad4e757a1c63e652da6cff597659554f"
+ integrity sha512-C4PlPYpszr9h1cBfUbTNRI1IdxUCF0qrXAHkXS2+bESp7WUUCnvb3UBBnYlaQLvJYJ2lRz+2SPQQ/WyV7p/Tow==
+ dependencies:
+ lodash "^4.17.15"
+ string-natural-compare "^3.0.1"
+
+eslint-plugin-import@^2.22.1:
+ version "2.22.1"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.22.1.tgz#0896c7e6a0cf44109a2d97b95903c2bb689d7702"
+ integrity sha512-8K7JjINHOpH64ozkAhpT3sd+FswIZTfMZTjdx052pnWrgRCVfp8op9tbjpAk3DdUeI/Ba4C8OjdC0r90erHEOw==
+ dependencies:
+ array-includes "^3.1.1"
+ array.prototype.flat "^1.2.3"
+ contains-path "^0.1.0"
+ debug "^2.6.9"
+ doctrine "1.5.0"
+ eslint-import-resolver-node "^0.3.4"
+ eslint-module-utils "^2.6.0"
+ has "^1.0.3"
+ minimatch "^3.0.4"
+ object.values "^1.1.1"
+ read-pkg-up "^2.0.0"
+ resolve "^1.17.0"
+ tsconfig-paths "^3.9.0"
+
+eslint-plugin-import@^2.23.4:
+ version "2.23.4"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.23.4.tgz#8dceb1ed6b73e46e50ec9a5bb2411b645e7d3d97"
+ integrity sha512-6/wP8zZRsnQFiR3iaPFgh5ImVRM1WN5NUWfTIRqwOdeiGJlBcSk82o1FEVq8yXmy4lkIzTo7YhHCIxlU/2HyEQ==
+ dependencies:
+ array-includes "^3.1.3"
+ array.prototype.flat "^1.2.4"
+ debug "^2.6.9"
+ doctrine "^2.1.0"
+ eslint-import-resolver-node "^0.3.4"
+ eslint-module-utils "^2.6.1"
+ find-up "^2.0.0"
+ has "^1.0.3"
+ is-core-module "^2.4.0"
+ minimatch "^3.0.4"
+ object.values "^1.1.3"
+ pkg-up "^2.0.0"
+ read-pkg-up "^3.0.0"
+ resolve "^1.20.0"
+ tsconfig-paths "^3.9.0"
+
+eslint-plugin-jest@^24.1.0:
+ version "24.1.5"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-24.1.5.tgz#1e866a9f0deac587d0a3d5d7cefe99815a580de2"
+ integrity sha512-FIP3lwC8EzEG+rOs1y96cOJmMVpdFNreoDJv29B5vIupVssRi8zrSY3QadogT0K3h1Y8TMxJ6ZSAzYUmFCp2hg==
+ dependencies:
+ "@typescript-eslint/experimental-utils" "^4.0.1"
+
+eslint-plugin-jsx-a11y@^6.3.1, eslint-plugin-jsx-a11y@^6.4.1:
+ version "6.4.1"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.4.1.tgz#a2d84caa49756942f42f1ffab9002436391718fd"
+ integrity sha512-0rGPJBbwHoGNPU73/QCLP/vveMlM1b1Z9PponxO87jfr6tuH5ligXbDT6nHSSzBC8ovX2Z+BQu7Bk5D/Xgq9zg==
+ dependencies:
+ "@babel/runtime" "^7.11.2"
+ aria-query "^4.2.2"
+ array-includes "^3.1.1"
+ ast-types-flow "^0.0.7"
+ axe-core "^4.0.2"
+ axobject-query "^2.2.0"
+ damerau-levenshtein "^1.0.6"
+ emoji-regex "^9.0.0"
+ has "^1.0.3"
+ jsx-ast-utils "^3.1.0"
+ language-tags "^1.0.5"
+
+eslint-plugin-react-hooks@^4.2.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.2.0.tgz#8c229c268d468956334c943bb45fc860280f5556"
+ integrity sha512-623WEiZJqxR7VdxFCKLI6d6LLpwJkGPYKODnkH3D7WpOG5KM8yWueBd8TLsNAetEJNF5iJmolaAKO3F8yzyVBQ==
+
+eslint-plugin-react@^7.21.5:
+ version "7.22.0"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.22.0.tgz#3d1c542d1d3169c45421c1215d9470e341707269"
+ integrity sha512-p30tuX3VS+NWv9nQot9xIGAHBXR0+xJVaZriEsHoJrASGCJZDJ8JLNM0YqKqI0AKm6Uxaa1VUHoNEibxRCMQHA==
+ dependencies:
+ array-includes "^3.1.1"
+ array.prototype.flatmap "^1.2.3"
+ doctrine "^2.1.0"
+ has "^1.0.3"
+ jsx-ast-utils "^2.4.1 || ^3.0.0"
+ object.entries "^1.1.2"
+ object.fromentries "^2.0.2"
+ object.values "^1.1.1"
+ prop-types "^15.7.2"
+ resolve "^1.18.1"
+ string.prototype.matchall "^4.0.2"
+
+eslint-plugin-react@^7.24.0:
+ version "7.24.0"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.24.0.tgz#eadedfa351a6f36b490aa17f4fa9b14e842b9eb4"
+ integrity sha512-KJJIx2SYx7PBx3ONe/mEeMz4YE0Lcr7feJTCMyyKb/341NcjuAgim3Acgan89GfPv7nxXK2+0slu0CWXYM4x+Q==
+ dependencies:
+ array-includes "^3.1.3"
+ array.prototype.flatmap "^1.2.4"
+ doctrine "^2.1.0"
+ has "^1.0.3"
+ jsx-ast-utils "^2.4.1 || ^3.0.0"
+ minimatch "^3.0.4"
+ object.entries "^1.1.4"
+ object.fromentries "^2.0.4"
+ object.values "^1.1.4"
+ prop-types "^15.7.2"
+ resolve "^2.0.0-next.3"
+ string.prototype.matchall "^4.0.5"
+
+eslint-plugin-testing-library@^3.9.2:
+ version "3.10.1"
+ resolved "https://registry.yarnpkg.com/eslint-plugin-testing-library/-/eslint-plugin-testing-library-3.10.1.tgz#4dd02306d601c3238fdabf1d1dbc5f2a8e85d531"
+ integrity sha512-nQIFe2muIFv2oR2zIuXE4vTbcFNx8hZKRzgHZqJg8rfopIWwoTwtlbCCNELT/jXzVe1uZF68ALGYoDXjLczKiQ==
+ dependencies:
+ "@typescript-eslint/experimental-utils" "^3.10.1"
+
+eslint-scope@^4.0.3:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848"
+ integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==
+ dependencies:
+ esrecurse "^4.1.0"
+ estraverse "^4.1.1"
+
+eslint-scope@^5.0.0, eslint-scope@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c"
+ integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==
+ dependencies:
+ esrecurse "^4.3.0"
+ estraverse "^4.1.1"
+
+eslint-utils@^2.0.0, eslint-utils@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27"
+ integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==
+ dependencies:
+ eslint-visitor-keys "^1.1.0"
+
+eslint-visitor-keys@^1.0.0, eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e"
+ integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==
+
+eslint-visitor-keys@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz#21fdc8fbcd9c795cc0321f0563702095751511a8"
+ integrity sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ==
+
+eslint-webpack-plugin@^2.5.2:
+ version "2.5.2"
+ resolved "https://registry.yarnpkg.com/eslint-webpack-plugin/-/eslint-webpack-plugin-2.5.2.tgz#4ee17577d6392bf72048080a1678d6237183db81"
+ integrity sha512-ndD9chZ/kaGnjjx7taRg7c6FK/YKb29SSYzaLtPBIYLYJQmZtuKqtQbAvTS2ymiMQT6X0VW9vZIHK0KLstv93Q==
+ dependencies:
+ "@types/eslint" "^7.2.6"
+ arrify "^2.0.1"
+ jest-worker "^26.6.2"
+ micromatch "^4.0.2"
+ schema-utils "^3.0.0"
+
+eslint@^7.11.0:
+ version "7.20.0"
+ resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.20.0.tgz#db07c4ca4eda2e2316e7aa57ac7fc91ec550bdc7"
+ integrity sha512-qGi0CTcOGP2OtCQBgWZlQjcTuP0XkIpYFj25XtRTQSHC+umNnp7UMshr2G8SLsRFYDdAPFeHOsiteadmMH02Yw==
+ dependencies:
+ "@babel/code-frame" "7.12.11"
+ "@eslint/eslintrc" "^0.3.0"
+ ajv "^6.10.0"
+ chalk "^4.0.0"
+ cross-spawn "^7.0.2"
+ debug "^4.0.1"
+ doctrine "^3.0.0"
+ enquirer "^2.3.5"
+ eslint-scope "^5.1.1"
+ eslint-utils "^2.1.0"
+ eslint-visitor-keys "^2.0.0"
+ espree "^7.3.1"
+ esquery "^1.4.0"
+ esutils "^2.0.2"
+ file-entry-cache "^6.0.0"
+ functional-red-black-tree "^1.0.1"
+ glob-parent "^5.0.0"
+ globals "^12.1.0"
+ ignore "^4.0.6"
+ import-fresh "^3.0.0"
+ imurmurhash "^0.1.4"
+ is-glob "^4.0.0"
+ js-yaml "^3.13.1"
+ json-stable-stringify-without-jsonify "^1.0.1"
+ levn "^0.4.1"
+ lodash "^4.17.20"
+ minimatch "^3.0.4"
+ natural-compare "^1.4.0"
+ optionator "^0.9.1"
+ progress "^2.0.0"
+ regexpp "^3.1.0"
+ semver "^7.2.1"
+ strip-ansi "^6.0.0"
+ strip-json-comments "^3.1.0"
+ table "^6.0.4"
+ text-table "^0.2.0"
+ v8-compile-cache "^2.0.3"
+
+eslint@^7.32.0:
+ version "7.32.0"
+ resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.32.0.tgz#c6d328a14be3fb08c8d1d21e12c02fdb7a2a812d"
+ integrity sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==
+ dependencies:
+ "@babel/code-frame" "7.12.11"
+ "@eslint/eslintrc" "^0.4.3"
+ "@humanwhocodes/config-array" "^0.5.0"
+ ajv "^6.10.0"
+ chalk "^4.0.0"
+ cross-spawn "^7.0.2"
+ debug "^4.0.1"
+ doctrine "^3.0.0"
+ enquirer "^2.3.5"
+ escape-string-regexp "^4.0.0"
+ eslint-scope "^5.1.1"
+ eslint-utils "^2.1.0"
+ eslint-visitor-keys "^2.0.0"
+ espree "^7.3.1"
+ esquery "^1.4.0"
+ esutils "^2.0.2"
+ fast-deep-equal "^3.1.3"
+ file-entry-cache "^6.0.1"
+ functional-red-black-tree "^1.0.1"
+ glob-parent "^5.1.2"
+ globals "^13.6.0"
+ ignore "^4.0.6"
+ import-fresh "^3.0.0"
+ imurmurhash "^0.1.4"
+ is-glob "^4.0.0"
+ js-yaml "^3.13.1"
+ json-stable-stringify-without-jsonify "^1.0.1"
+ levn "^0.4.1"
+ lodash.merge "^4.6.2"
+ minimatch "^3.0.4"
+ natural-compare "^1.4.0"
+ optionator "^0.9.1"
+ progress "^2.0.0"
+ regexpp "^3.1.0"
+ semver "^7.2.1"
+ strip-ansi "^6.0.0"
+ strip-json-comments "^3.1.0"
+ table "^6.0.9"
+ text-table "^0.2.0"
+ v8-compile-cache "^2.0.3"
+
+espree@^7.3.0, espree@^7.3.1:
+ version "7.3.1"
+ resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6"
+ integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==
+ dependencies:
+ acorn "^7.4.0"
+ acorn-jsx "^5.3.1"
+ eslint-visitor-keys "^1.3.0"
+
+esprima@^4.0.0, esprima@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
+ integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
+
+esquery@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5"
+ integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==
+ dependencies:
+ estraverse "^5.1.0"
+
+esrecurse@^4.1.0, esrecurse@^4.3.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921"
+ integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==
+ dependencies:
+ estraverse "^5.2.0"
+
+estraverse@^4.1.1, estraverse@^4.2.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d"
+ integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==
+
+estraverse@^5.1.0, estraverse@^5.2.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880"
+ integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==
+
+estree-walker@^0.6.1:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.6.1.tgz#53049143f40c6eb918b23671d1fe3219f3a1b362"
+ integrity sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==
+
+estree-walker@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700"
+ integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg==
+
+esutils@^2.0.2:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
+ integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
+
+etag@~1.8.1:
+ version "1.8.1"
+ resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
+ integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=
+
+eventemitter3@^4.0.0, eventemitter3@^4.0.1:
+ version "4.0.7"
+ resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f"
+ integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==
+
+events@^3.0.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/events/-/events-3.2.0.tgz#93b87c18f8efcd4202a461aec4dfc0556b639379"
+ integrity sha512-/46HWwbfCX2xTawVfkKLGxMifJYQBWMwY1mjywRtb4c9x8l5NP3KoJtnIOiL1hfdRkIuYhETxQlo62IF8tcnlg==
+
+eventsource@^1.0.7:
+ version "1.0.7"
+ resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0"
+ integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ==
+ dependencies:
+ original "^1.0.0"
+
+evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02"
+ integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==
+ dependencies:
+ md5.js "^1.3.4"
+ safe-buffer "^5.1.1"
+
+exec-sh@^0.3.2:
+ version "0.3.4"
+ resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.3.4.tgz#3a018ceb526cc6f6df2bb504b2bfe8e3a4934ec5"
+ integrity sha512-sEFIkc61v75sWeOe72qyrqg2Qg0OuLESziUDk/O/z2qgS15y2gWVFrI6f2Qn/qw/0/NCfCEsmNA4zOjkwEZT1A==
+
+execa@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8"
+ integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==
+ dependencies:
+ cross-spawn "^6.0.0"
+ get-stream "^4.0.0"
+ is-stream "^1.1.0"
+ npm-run-path "^2.0.0"
+ p-finally "^1.0.0"
+ signal-exit "^3.0.0"
+ strip-eof "^1.0.0"
+
+execa@^4.0.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/execa/-/execa-4.1.0.tgz#4e5491ad1572f2f17a77d388c6c857135b22847a"
+ integrity sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==
+ dependencies:
+ cross-spawn "^7.0.0"
+ get-stream "^5.0.0"
+ human-signals "^1.1.1"
+ is-stream "^2.0.0"
+ merge-stream "^2.0.0"
+ npm-run-path "^4.0.0"
+ onetime "^5.1.0"
+ signal-exit "^3.0.2"
+ strip-final-newline "^2.0.0"
+
+exit@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c"
+ integrity sha1-BjJjj42HfMghB9MKD/8aF8uhzQw=
+
+expand-brackets@^2.1.4:
+ version "2.1.4"
+ resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622"
+ integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI=
+ dependencies:
+ debug "^2.3.3"
+ define-property "^0.2.5"
+ extend-shallow "^2.0.1"
+ posix-character-classes "^0.1.0"
+ regex-not "^1.0.0"
+ snapdragon "^0.8.1"
+ to-regex "^3.0.1"
+
+expect@^26.6.0, expect@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/expect/-/expect-26.6.2.tgz#c6b996bf26bf3fe18b67b2d0f51fc981ba934417"
+ integrity sha512-9/hlOBkQl2l/PLHJx6JjoDF6xPKcJEsUlWKb23rKE7KzeDqUZKXKNMW27KIue5JMdBV9HgmoJPcc8HtO85t9IA==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ ansi-styles "^4.0.0"
+ jest-get-type "^26.3.0"
+ jest-matcher-utils "^26.6.2"
+ jest-message-util "^26.6.2"
+ jest-regex-util "^26.0.0"
+
+express@^4.17.1:
+ version "4.17.1"
+ resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134"
+ integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==
+ dependencies:
+ accepts "~1.3.7"
+ array-flatten "1.1.1"
+ body-parser "1.19.0"
+ content-disposition "0.5.3"
+ content-type "~1.0.4"
+ cookie "0.4.0"
+ cookie-signature "1.0.6"
+ debug "2.6.9"
+ depd "~1.1.2"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ etag "~1.8.1"
+ finalhandler "~1.1.2"
+ fresh "0.5.2"
+ merge-descriptors "1.0.1"
+ methods "~1.1.2"
+ on-finished "~2.3.0"
+ parseurl "~1.3.3"
+ path-to-regexp "0.1.7"
+ proxy-addr "~2.0.5"
+ qs "6.7.0"
+ range-parser "~1.2.1"
+ safe-buffer "5.1.2"
+ send "0.17.1"
+ serve-static "1.14.1"
+ setprototypeof "1.1.1"
+ statuses "~1.5.0"
+ type-is "~1.6.18"
+ utils-merge "1.0.1"
+ vary "~1.1.2"
+
+ext@^1.1.2:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/ext/-/ext-1.4.0.tgz#89ae7a07158f79d35517882904324077e4379244"
+ integrity sha512-Key5NIsUxdqKg3vIsdw9dSuXpPCQ297y6wBjL30edxwPgt2E44WcWBZey/ZvUc6sERLTxKdyCu4gZFmUbk1Q7A==
+ dependencies:
+ type "^2.0.0"
+
+extend-shallow@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f"
+ integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=
+ dependencies:
+ is-extendable "^0.1.0"
+
+extend-shallow@^3.0.0, extend-shallow@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8"
+ integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=
+ dependencies:
+ assign-symbols "^1.0.0"
+ is-extendable "^1.0.1"
+
+extend@~3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
+ integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==
+
+extglob@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543"
+ integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==
+ dependencies:
+ array-unique "^0.3.2"
+ define-property "^1.0.0"
+ expand-brackets "^2.1.4"
+ extend-shallow "^2.0.1"
+ fragment-cache "^0.2.1"
+ regex-not "^1.0.0"
+ snapdragon "^0.8.1"
+ to-regex "^3.0.1"
+
+extsprintf@1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05"
+ integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=
+
+extsprintf@^1.2.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f"
+ integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8=
+
+fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3:
+ version "3.1.3"
+ resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
+ integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==
+
+fast-equals@^2.0.0:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/fast-equals/-/fast-equals-2.0.3.tgz#7039b0a039909f345a2ce53f6202a14e5f392efc"
+ integrity sha512-0EMw4TTUxsMDpDkCg0rXor2gsg+npVrMIHbEhvD0HZyIhUX6AktC/yasm+qKwfyswd06Qy95ZKk8p2crTo0iPA==
+
+fast-glob@^3.1.1:
+ version "3.2.5"
+ resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.5.tgz#7939af2a656de79a4f1901903ee8adcaa7cb9661"
+ integrity sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg==
+ dependencies:
+ "@nodelib/fs.stat" "^2.0.2"
+ "@nodelib/fs.walk" "^1.2.3"
+ glob-parent "^5.1.0"
+ merge2 "^1.3.0"
+ micromatch "^4.0.2"
+ picomatch "^2.2.1"
+
+fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
+ integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
+
+fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
+ integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=
+
+fastq@^1.6.0:
+ version "1.10.1"
+ resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.10.1.tgz#8b8f2ac8bf3632d67afcd65dac248d5fdc45385e"
+ integrity sha512-AWuv6Ery3pM+dY7LYS8YIaCiQvUaos9OB1RyNgaOWnaX+Tik7Onvcsf8x8c+YtDeT0maYLniBip2hox5KtEXXA==
+ dependencies:
+ reusify "^1.0.4"
+
+faye-websocket@0.11.3, faye-websocket@^0.11.3:
+ version "0.11.3"
+ resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.3.tgz#5c0e9a8968e8912c286639fde977a8b209f2508e"
+ integrity sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA==
+ dependencies:
+ websocket-driver ">=0.5.1"
+
+fb-watchman@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85"
+ integrity sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg==
+ dependencies:
+ bser "2.1.1"
+
+figgy-pudding@^3.5.1:
+ version "3.5.2"
+ resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e"
+ integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw==
+
+file-entry-cache@^6.0.0, file-entry-cache@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027"
+ integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==
+ dependencies:
+ flat-cache "^3.0.4"
+
+file-loader@6.1.1:
+ version "6.1.1"
+ resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.1.1.tgz#a6f29dfb3f5933a1c350b2dbaa20ac5be0539baa"
+ integrity sha512-Klt8C4BjWSXYQAfhpYYkG4qHNTna4toMHEbWrI5IuVoxbU6uiDKeKAP99R8mmbJi3lvewn/jQBOgU4+NS3tDQw==
+ dependencies:
+ loader-utils "^2.0.0"
+ schema-utils "^3.0.0"
+
+file-uri-to-path@1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd"
+ integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==
+
+filesize@6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/filesize/-/filesize-6.1.0.tgz#e81bdaa780e2451d714d71c0d7a4f3238d37ad00"
+ integrity sha512-LpCHtPQ3sFx67z+uh2HnSyWSLLu5Jxo21795uRDuar/EOuYWXib5EmPaGIBuSnRqH2IODiKA2k5re/K9OnN/Yg==
+
+fill-range@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7"
+ integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=
+ dependencies:
+ extend-shallow "^2.0.1"
+ is-number "^3.0.0"
+ repeat-string "^1.6.1"
+ to-regex-range "^2.1.0"
+
+fill-range@^7.0.1:
+ version "7.0.1"
+ resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
+ integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
+ dependencies:
+ to-regex-range "^5.0.1"
+
+finalhandler@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d"
+ integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==
+ dependencies:
+ debug "2.6.9"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ on-finished "~2.3.0"
+ parseurl "~1.3.3"
+ statuses "~1.5.0"
+ unpipe "~1.0.0"
+
+find-cache-dir@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7"
+ integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==
+ dependencies:
+ commondir "^1.0.1"
+ make-dir "^2.0.0"
+ pkg-dir "^3.0.0"
+
+find-cache-dir@^3.3.1:
+ version "3.3.1"
+ resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.1.tgz#89b33fad4a4670daa94f855f7fbe31d6d84fe880"
+ integrity sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==
+ dependencies:
+ commondir "^1.0.1"
+ make-dir "^3.0.2"
+ pkg-dir "^4.1.0"
+
+find-root@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4"
+ integrity sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==
+
+find-up@4.1.0, find-up@^4.0.0, find-up@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
+ integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
+ dependencies:
+ locate-path "^5.0.0"
+ path-exists "^4.0.0"
+
+find-up@^2.0.0, find-up@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
+ integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c=
+ dependencies:
+ locate-path "^2.0.0"
+
+find-up@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
+ integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==
+ dependencies:
+ locate-path "^3.0.0"
+
+firebase@^8.8.1:
+ version "8.8.1"
+ resolved "https://registry.yarnpkg.com/firebase/-/firebase-8.8.1.tgz#b18dbb7283a9d50d3f3ef5b9deb9c1e15c9466b1"
+ integrity sha512-dzqQn3wwHhsStsD2gDs3XfSJ/SIqv5IA9Ht+MySnvrIsljk0V8bI/+EMPsh0h2VlYPSk51bmyNQZ4LvuSKNvlA==
+ dependencies:
+ "@firebase/analytics" "0.6.16"
+ "@firebase/app" "0.6.29"
+ "@firebase/app-check" "0.2.1"
+ "@firebase/app-types" "0.6.3"
+ "@firebase/auth" "0.16.8"
+ "@firebase/database" "0.10.9"
+ "@firebase/firestore" "2.3.10"
+ "@firebase/functions" "0.6.14"
+ "@firebase/installations" "0.4.31"
+ "@firebase/messaging" "0.7.15"
+ "@firebase/performance" "0.4.17"
+ "@firebase/polyfill" "0.3.36"
+ "@firebase/remote-config" "0.1.42"
+ "@firebase/storage" "0.6.1"
+ "@firebase/util" "1.2.0"
+
+flat-cache@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11"
+ integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==
+ dependencies:
+ flatted "^3.1.0"
+ rimraf "^3.0.2"
+
+flatted@^3.1.0:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.1.1.tgz#c4b489e80096d9df1dfc97c79871aea7c617c469"
+ integrity sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA==
+
+flatten@^1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/flatten/-/flatten-1.0.3.tgz#c1283ac9f27b368abc1e36d1ff7b04501a30356b"
+ integrity sha512-dVsPA/UwQ8+2uoFe5GHtiBMu48dWLTdsuEd7CKGlZlD78r1TTWBvDuFaFGKCo/ZfEr95Uk56vZoX86OsHkUeIg==
+
+flush-write-stream@^1.0.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8"
+ integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w==
+ dependencies:
+ inherits "^2.0.3"
+ readable-stream "^2.3.6"
+
+focus-lock@^0.8.1:
+ version "0.8.1"
+ resolved "https://registry.yarnpkg.com/focus-lock/-/focus-lock-0.8.1.tgz#bb36968abf77a2063fa173cb6c47b12ac8599d33"
+ integrity sha512-/LFZOIo82WDsyyv7h7oc0MJF9ACOvDRdx9rWPZ2pgMfNWu/z8hQDBtOchuB/0BVLmuFOZjV02YwUVzNsWx/EzA==
+ dependencies:
+ tslib "^1.9.3"
+
+follow-redirects@^1.0.0:
+ version "1.13.2"
+ resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.2.tgz#dd73c8effc12728ba5cf4259d760ea5fb83e3147"
+ integrity sha512-6mPTgLxYm3r6Bkkg0vNM0HTjfGrOEtsfbhagQvbxDEsEkpNhw582upBaoRZylzen6krEmxXJgt9Ju6HiI4O7BA==
+
+for-in@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
+ integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=
+
+forever-agent@~0.6.1:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
+ integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=
+
+fork-ts-checker-webpack-plugin@4.1.6:
+ version "4.1.6"
+ resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-4.1.6.tgz#5055c703febcf37fa06405d400c122b905167fc5"
+ integrity sha512-DUxuQaKoqfNne8iikd14SAkh5uw4+8vNifp6gmA73yYNS6ywLIWSLD/n/mBzHQRpW3J7rbATEakmiA8JvkTyZw==
+ dependencies:
+ "@babel/code-frame" "^7.5.5"
+ chalk "^2.4.1"
+ micromatch "^3.1.10"
+ minimatch "^3.0.4"
+ semver "^5.6.0"
+ tapable "^1.0.0"
+ worker-rpc "^0.1.0"
+
+form-data@~2.3.2:
+ version "2.3.3"
+ resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6"
+ integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==
+ dependencies:
+ asynckit "^0.4.0"
+ combined-stream "^1.0.6"
+ mime-types "^2.1.12"
+
+forwarded@~0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84"
+ integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=
+
+fragment-cache@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19"
+ integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=
+ dependencies:
+ map-cache "^0.2.2"
+
+framer-motion@4:
+ version "4.1.17"
+ resolved "https://registry.yarnpkg.com/framer-motion/-/framer-motion-4.1.17.tgz#4029469252a62ea599902e5a92b537120cc89721"
+ integrity sha512-thx1wvKzblzbs0XaK2X0G1JuwIdARcoNOW7VVwjO8BUltzXPyONGAElLu6CiCScsOQRI7FIk/45YTFtJw5Yozw==
+ dependencies:
+ framesync "5.3.0"
+ hey-listen "^1.0.8"
+ popmotion "9.3.6"
+ style-value-types "4.1.4"
+ tslib "^2.1.0"
+ optionalDependencies:
+ "@emotion/is-prop-valid" "^0.8.2"
+
+framesync@5.3.0:
+ version "5.3.0"
+ resolved "https://registry.yarnpkg.com/framesync/-/framesync-5.3.0.tgz#0ecfc955e8f5a6ddc8fdb0cc024070947e1a0d9b"
+ integrity sha512-oc5m68HDO/tuK2blj7ZcdEBRx3p1PjrgHazL8GYEpvULhrtGIFbQArN6cQS2QhW8mitffaB+VYzMjDqBxxQeoA==
+ dependencies:
+ tslib "^2.1.0"
+
+fresh@0.5.2:
+ version "0.5.2"
+ resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7"
+ integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=
+
+from2@^2.1.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af"
+ integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8=
+ dependencies:
+ inherits "^2.0.1"
+ readable-stream "^2.0.0"
+
+fs-extra@^7.0.0:
+ version "7.0.1"
+ resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9"
+ integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==
+ dependencies:
+ graceful-fs "^4.1.2"
+ jsonfile "^4.0.0"
+ universalify "^0.1.0"
+
+fs-extra@^8.1.0:
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0"
+ integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==
+ dependencies:
+ graceful-fs "^4.2.0"
+ jsonfile "^4.0.0"
+ universalify "^0.1.0"
+
+fs-extra@^9.0.1:
+ version "9.1.0"
+ resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d"
+ integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==
+ dependencies:
+ at-least-node "^1.0.0"
+ graceful-fs "^4.2.0"
+ jsonfile "^6.0.1"
+ universalify "^2.0.0"
+
+fs-minipass@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
+ integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==
+ dependencies:
+ minipass "^3.0.0"
+
+fs-write-stream-atomic@^1.0.8:
+ version "1.0.10"
+ resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9"
+ integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk=
+ dependencies:
+ graceful-fs "^4.1.2"
+ iferr "^0.1.5"
+ imurmurhash "^0.1.4"
+ readable-stream "1 || 2"
+
+fs.realpath@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
+ integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
+
+fsevents@^1.2.7:
+ version "1.2.13"
+ resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.13.tgz#f325cb0455592428bcf11b383370ef70e3bfcc38"
+ integrity sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==
+ dependencies:
+ bindings "^1.5.0"
+ nan "^2.12.1"
+
+fsevents@^2.1.2, fsevents@^2.1.3, fsevents@~2.3.1:
+ version "2.3.2"
+ resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a"
+ integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==
+
+function-bind@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
+ integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
+
+functional-red-black-tree@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327"
+ integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=
+
+gensync@^1.0.0-beta.1:
+ version "1.0.0-beta.2"
+ resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0"
+ integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==
+
+get-caller-file@^2.0.1, get-caller-file@^2.0.5:
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
+ integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
+
+get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6"
+ integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==
+ dependencies:
+ function-bind "^1.1.1"
+ has "^1.0.3"
+ has-symbols "^1.0.1"
+
+get-nonce@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/get-nonce/-/get-nonce-1.0.1.tgz#fdf3f0278073820d2ce9426c18f07481b1e0cdf3"
+ integrity sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==
+
+get-own-enumerable-property-symbols@^3.0.0:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664"
+ integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==
+
+get-package-type@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a"
+ integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==
+
+get-stream@^4.0.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
+ integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
+ dependencies:
+ pump "^3.0.0"
+
+get-stream@^5.0.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3"
+ integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==
+ dependencies:
+ pump "^3.0.0"
+
+get-value@^2.0.3, get-value@^2.0.6:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28"
+ integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=
+
+getpass@^0.1.1:
+ version "0.1.7"
+ resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa"
+ integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=
+ dependencies:
+ assert-plus "^1.0.0"
+
+glob-parent@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae"
+ integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=
+ dependencies:
+ is-glob "^3.1.0"
+ path-dirname "^1.0.0"
+
+glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@~5.1.0:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229"
+ integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==
+ dependencies:
+ is-glob "^4.0.1"
+
+glob-parent@^5.1.2:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
+ integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
+ dependencies:
+ is-glob "^4.0.1"
+
+glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6:
+ version "7.1.6"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6"
+ integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==
+ dependencies:
+ fs.realpath "^1.0.0"
+ inflight "^1.0.4"
+ inherits "2"
+ minimatch "^3.0.4"
+ once "^1.3.0"
+ path-is-absolute "^1.0.0"
+
+global-modules@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780"
+ integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==
+ dependencies:
+ global-prefix "^3.0.0"
+
+global-prefix@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97"
+ integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==
+ dependencies:
+ ini "^1.3.5"
+ kind-of "^6.0.2"
+ which "^1.3.1"
+
+globals@^11.1.0:
+ version "11.12.0"
+ resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e"
+ integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==
+
+globals@^12.1.0:
+ version "12.4.0"
+ resolved "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8"
+ integrity sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==
+ dependencies:
+ type-fest "^0.8.1"
+
+globals@^13.6.0, globals@^13.9.0:
+ version "13.10.0"
+ resolved "https://registry.yarnpkg.com/globals/-/globals-13.10.0.tgz#60ba56c3ac2ca845cfbf4faeca727ad9dd204676"
+ integrity sha512-piHC3blgLGFjvOuMmWZX60f+na1lXFDhQXBf1UYp2fXPXqvEUbOhNwi6BsQ0bQishwedgnjkwv1d9zKf+MWw3g==
+ dependencies:
+ type-fest "^0.20.2"
+
+globby@11.0.1:
+ version "11.0.1"
+ resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.1.tgz#9a2bf107a068f3ffeabc49ad702c79ede8cfd357"
+ integrity sha512-iH9RmgwCmUJHi2z5o2l3eTtGBtXek1OYlHrbcxOYugyHLmAsZrPj43OtHThd62Buh/Vv6VyCBD2bdyWcGNQqoQ==
+ dependencies:
+ array-union "^2.1.0"
+ dir-glob "^3.0.1"
+ fast-glob "^3.1.1"
+ ignore "^5.1.4"
+ merge2 "^1.3.0"
+ slash "^3.0.0"
+
+globby@^11.0.1:
+ version "11.0.2"
+ resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.2.tgz#1af538b766a3b540ebfb58a32b2e2d5897321d83"
+ integrity sha512-2ZThXDvvV8fYFRVIxnrMQBipZQDr7MxKAmQK1vujaj9/7eF0efG7BPUKJ7jP7G5SLF37xKDXvO4S/KKLj/Z0og==
+ dependencies:
+ array-union "^2.1.0"
+ dir-glob "^3.0.1"
+ fast-glob "^3.1.1"
+ ignore "^5.1.4"
+ merge2 "^1.3.0"
+ slash "^3.0.0"
+
+globby@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c"
+ integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=
+ dependencies:
+ array-union "^1.0.1"
+ glob "^7.0.3"
+ object-assign "^4.0.1"
+ pify "^2.0.0"
+ pinkie-promise "^2.0.0"
+
+graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4:
+ version "4.2.6"
+ resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee"
+ integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==
+
+growly@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081"
+ integrity sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=
+
+gzip-size@5.1.1:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-5.1.1.tgz#cb9bee692f87c0612b232840a873904e4c135274"
+ integrity sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA==
+ dependencies:
+ duplexer "^0.1.1"
+ pify "^4.0.1"
+
+handle-thing@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e"
+ integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==
+
+har-schema@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92"
+ integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=
+
+har-validator@~5.1.3:
+ version "5.1.5"
+ resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd"
+ integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==
+ dependencies:
+ ajv "^6.12.3"
+ har-schema "^2.0.0"
+
+harmony-reflect@^1.4.6:
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/harmony-reflect/-/harmony-reflect-1.6.1.tgz#c108d4f2bb451efef7a37861fdbdae72c9bdefa9"
+ integrity sha512-WJTeyp0JzGtHcuMsi7rw2VwtkvLa+JyfEKJCFyfcS0+CDkjQ5lHPu7zEhFZP+PDSRrEgXa5Ah0l1MbgbE41XjA==
+
+has-bigints@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113"
+ integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA==
+
+has-flag@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
+ integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=
+
+has-flag@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
+ integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
+
+has-symbols@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8"
+ integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==
+
+has-symbols@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423"
+ integrity sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==
+
+has-value@^0.3.1:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f"
+ integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=
+ dependencies:
+ get-value "^2.0.3"
+ has-values "^0.1.4"
+ isobject "^2.0.0"
+
+has-value@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177"
+ integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=
+ dependencies:
+ get-value "^2.0.6"
+ has-values "^1.0.0"
+ isobject "^3.0.0"
+
+has-values@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771"
+ integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E=
+
+has-values@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f"
+ integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=
+ dependencies:
+ is-number "^3.0.0"
+ kind-of "^4.0.0"
+
+has@^1.0.0, has@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
+ integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
+ dependencies:
+ function-bind "^1.1.1"
+
+hash-base@^3.0.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33"
+ integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==
+ dependencies:
+ inherits "^2.0.4"
+ readable-stream "^3.6.0"
+ safe-buffer "^5.2.0"
+
+hash.js@^1.0.0, hash.js@^1.0.3:
+ version "1.1.7"
+ resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42"
+ integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==
+ dependencies:
+ inherits "^2.0.3"
+ minimalistic-assert "^1.0.1"
+
+he@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
+ integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
+
+hex-color-regex@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/hex-color-regex/-/hex-color-regex-1.1.0.tgz#4c06fccb4602fe2602b3c93df82d7e7dbf1a8a8e"
+ integrity sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ==
+
+hey-listen@^1.0.8:
+ version "1.0.8"
+ resolved "https://registry.yarnpkg.com/hey-listen/-/hey-listen-1.0.8.tgz#8e59561ff724908de1aa924ed6ecc84a56a9aa68"
+ integrity sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==
+
+history@^4.9.0:
+ version "4.10.1"
+ resolved "https://registry.yarnpkg.com/history/-/history-4.10.1.tgz#33371a65e3a83b267434e2b3f3b1b4c58aad4cf3"
+ integrity sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==
+ dependencies:
+ "@babel/runtime" "^7.1.2"
+ loose-envify "^1.2.0"
+ resolve-pathname "^3.0.0"
+ tiny-invariant "^1.0.2"
+ tiny-warning "^1.0.0"
+ value-equal "^1.0.1"
+
+hmac-drbg@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1"
+ integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE=
+ dependencies:
+ hash.js "^1.0.3"
+ minimalistic-assert "^1.0.0"
+ minimalistic-crypto-utils "^1.0.1"
+
+hoist-non-react-statics@^3.1.0, hoist-non-react-statics@^3.3.1:
+ version "3.3.2"
+ resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45"
+ integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==
+ dependencies:
+ react-is "^16.7.0"
+
+hoopy@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d"
+ integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ==
+
+hosted-git-info@^2.1.4:
+ version "2.8.8"
+ resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.8.tgz#7539bd4bc1e0e0a895815a2e0262420b12858488"
+ integrity sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==
+
+hpack.js@^2.1.6:
+ version "2.1.6"
+ resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2"
+ integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI=
+ dependencies:
+ inherits "^2.0.1"
+ obuf "^1.0.0"
+ readable-stream "^2.0.1"
+ wbuf "^1.1.0"
+
+hsl-regex@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/hsl-regex/-/hsl-regex-1.0.0.tgz#d49330c789ed819e276a4c0d272dffa30b18fe6e"
+ integrity sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4=
+
+hsla-regex@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/hsla-regex/-/hsla-regex-1.0.0.tgz#c1ce7a3168c8c6614033a4b5f7877f3b225f9c38"
+ integrity sha1-wc56MWjIxmFAM6S194d/OyJfnDg=
+
+html-comment-regex@^1.1.0:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/html-comment-regex/-/html-comment-regex-1.1.2.tgz#97d4688aeb5c81886a364faa0cad1dda14d433a7"
+ integrity sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ==
+
+html-encoding-sniffer@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3"
+ integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ==
+ dependencies:
+ whatwg-encoding "^1.0.5"
+
+html-entities@^1.2.1, html-entities@^1.3.1:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.4.0.tgz#cfbd1b01d2afaf9adca1b10ae7dffab98c71d2dc"
+ integrity sha512-8nxjcBcd8wovbeKx7h3wTji4e6+rhaVuPNpMqwWgnHh+N9ToqsCs6XztWRBPQ+UtzsoMAdKZtUENoVzU/EMtZA==
+
+html-escaper@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453"
+ integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==
+
+html-minifier-terser@^5.0.1:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-5.1.1.tgz#922e96f1f3bb60832c2634b79884096389b1f054"
+ integrity sha512-ZPr5MNObqnV/T9akshPKbVgyOqLmy+Bxo7juKCfTfnjNniTAMdy4hz21YQqoofMBJD2kdREaqPPdThoR78Tgxg==
+ dependencies:
+ camel-case "^4.1.1"
+ clean-css "^4.2.3"
+ commander "^4.1.1"
+ he "^1.2.0"
+ param-case "^3.0.3"
+ relateurl "^0.2.7"
+ terser "^4.6.3"
+
[email protected]:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-4.5.0.tgz#625097650886b97ea5dae331c320e3238f6c121c"
+ integrity sha512-MouoXEYSjTzCrjIxWwg8gxL5fE2X2WZJLmBYXlaJhQUH5K/b5OrqmV7T4dB7iu0xkmJ6JlUuV6fFVtnqbPopZw==
+ dependencies:
+ "@types/html-minifier-terser" "^5.0.0"
+ "@types/tapable" "^1.0.5"
+ "@types/webpack" "^4.41.8"
+ html-minifier-terser "^5.0.1"
+ loader-utils "^1.2.3"
+ lodash "^4.17.15"
+ pretty-error "^2.1.1"
+ tapable "^1.1.3"
+ util.promisify "1.0.0"
+
+htmlparser2@^3.10.1:
+ version "3.10.1"
+ resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f"
+ integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ==
+ dependencies:
+ domelementtype "^1.3.1"
+ domhandler "^2.3.0"
+ domutils "^1.5.1"
+ entities "^1.1.1"
+ inherits "^2.0.1"
+ readable-stream "^3.1.1"
+
+http-deceiver@^1.2.7:
+ version "1.2.7"
+ resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87"
+ integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc=
+
[email protected]:
+ version "1.7.2"
+ resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f"
+ integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==
+ dependencies:
+ depd "~1.1.2"
+ inherits "2.0.3"
+ setprototypeof "1.1.1"
+ statuses ">= 1.5.0 < 2"
+ toidentifier "1.0.0"
+
+http-errors@~1.6.2:
+ version "1.6.3"
+ resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d"
+ integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=
+ dependencies:
+ depd "~1.1.2"
+ inherits "2.0.3"
+ setprototypeof "1.1.0"
+ statuses ">= 1.4.0 < 2"
+
+http-errors@~1.7.2:
+ version "1.7.3"
+ resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06"
+ integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==
+ dependencies:
+ depd "~1.1.2"
+ inherits "2.0.4"
+ setprototypeof "1.1.1"
+ statuses ">= 1.5.0 < 2"
+ toidentifier "1.0.0"
+
+http-parser-js@>=0.5.1:
+ version "0.5.3"
+ resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.3.tgz#01d2709c79d41698bb01d4decc5e9da4e4a033d9"
+ integrity sha512-t7hjvef/5HEK7RWTdUzVUhl8zkEu+LlaE0IYzdMuvbSDipxBRpOn4Uhw8ZyECEa808iVT8XCjzo6xmYt4CiLZg==
+
[email protected]:
+ version "0.19.1"
+ resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a"
+ integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q==
+ dependencies:
+ http-proxy "^1.17.0"
+ is-glob "^4.0.0"
+ lodash "^4.17.11"
+ micromatch "^3.1.10"
+
+http-proxy@^1.17.0:
+ version "1.18.1"
+ resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549"
+ integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==
+ dependencies:
+ eventemitter3 "^4.0.0"
+ follow-redirects "^1.0.0"
+ requires-port "^1.0.0"
+
+http-signature@~1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1"
+ integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=
+ dependencies:
+ assert-plus "^1.0.0"
+ jsprim "^1.2.2"
+ sshpk "^1.7.0"
+
+https-browserify@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73"
+ integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=
+
+human-signals@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3"
+ integrity sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==
+
[email protected]:
+ version "0.4.24"
+ resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
+ integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
+ dependencies:
+ safer-buffer ">= 2.1.2 < 3"
+
+icss-utils@^4.0.0, icss-utils@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-4.1.1.tgz#21170b53789ee27447c2f47dd683081403f9a467"
+ integrity sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA==
+ dependencies:
+ postcss "^7.0.14"
+
[email protected]:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/idb/-/idb-3.0.2.tgz#c8e9122d5ddd40f13b60ae665e4862f8b13fa384"
+ integrity sha512-+FLa/0sTXqyux0o6C+i2lOR0VoS60LU/jzUo5xjfY6+7sEEgy4Gz1O7yFBXvjd7N0NyIGWIRg8DcQSLEG+VSPw==
+
[email protected]:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14"
+ integrity sha1-lNK9qWCERT7zb7xarsN+D3nx/BQ=
+ dependencies:
+ harmony-reflect "^1.4.6"
+
+ieee754@^1.1.4:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
+ integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==
+
+iferr@^0.1.5:
+ version "0.1.5"
+ resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501"
+ integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE=
+
+ignore@^4.0.6:
+ version "4.0.6"
+ resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc"
+ integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==
+
+ignore@^5.1.4:
+ version "5.1.8"
+ resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57"
+ integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==
+
[email protected]:
+ version "8.0.1"
+ resolved "https://registry.yarnpkg.com/immer/-/immer-8.0.1.tgz#9c73db683e2b3975c424fb0572af5889877ae656"
+ integrity sha512-aqXhGP7//Gui2+UrEtvxZxSquQVXTpZ7KDxfCcKAF3Vysvw0CViVaW9RZ1j1xlIYqaaaipBoqdqeibkc18PNvA==
+
+import-cwd@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/import-cwd/-/import-cwd-2.1.0.tgz#aa6cf36e722761285cb371ec6519f53e2435b0a9"
+ integrity sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk=
+ dependencies:
+ import-from "^2.1.0"
+
+import-fresh@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546"
+ integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY=
+ dependencies:
+ caller-path "^2.0.0"
+ resolve-from "^3.0.0"
+
+import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b"
+ integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==
+ dependencies:
+ parent-module "^1.0.0"
+ resolve-from "^4.0.0"
+
+import-from@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/import-from/-/import-from-2.1.0.tgz#335db7f2a7affd53aaa471d4b8021dee36b7f3b1"
+ integrity sha1-M1238qev/VOqpHHUuAId7ja387E=
+ dependencies:
+ resolve-from "^3.0.0"
+
+import-local@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d"
+ integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ==
+ dependencies:
+ pkg-dir "^3.0.0"
+ resolve-cwd "^2.0.0"
+
+import-local@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.0.2.tgz#a8cfd0431d1de4a2199703d003e3e62364fa6db6"
+ integrity sha512-vjL3+w0oulAVZ0hBHnxa/Nm5TAurf9YLQJDhqRZyqb+VKGOB6LU8t9H1Nr5CIo16vh9XfJTOoHwU0B71S557gA==
+ dependencies:
+ pkg-dir "^4.2.0"
+ resolve-cwd "^3.0.0"
+
+imurmurhash@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
+ integrity sha1-khi5srkoojixPcT7a21XbyMUU+o=
+
+indent-string@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251"
+ integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==
+
+indexes-of@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607"
+ integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc=
+
+infer-owner@^1.0.3, infer-owner@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467"
+ integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==
+
+inflight@^1.0.4:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
+ integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
+ dependencies:
+ once "^1.3.0"
+ wrappy "1"
+
+inherits@2, [email protected], inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
+ integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
+
[email protected]:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1"
+ integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE=
+
[email protected]:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
+ integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
+
+ini@^1.3.5:
+ version "1.3.8"
+ resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c"
+ integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==
+
+internal-ip@^4.3.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907"
+ integrity sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg==
+ dependencies:
+ default-gateway "^4.2.0"
+ ipaddr.js "^1.9.0"
+
+internal-slot@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c"
+ integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==
+ dependencies:
+ get-intrinsic "^1.1.0"
+ has "^1.0.3"
+ side-channel "^1.0.4"
+
+internmap@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/internmap/-/internmap-1.0.1.tgz#0017cc8a3b99605f0302f2b198d272e015e5df95"
+ integrity sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==
+
+invariant@^2.2.4:
+ version "2.2.4"
+ resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6"
+ integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==
+ dependencies:
+ loose-envify "^1.0.0"
+
+ip-regex@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9"
+ integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=
+
+ip@^1.1.0, ip@^1.1.5:
+ version "1.1.5"
+ resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a"
+ integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=
+
[email protected], ipaddr.js@^1.9.0:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3"
+ integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==
+
+is-absolute-url@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-2.1.0.tgz#50530dfb84fcc9aa7dbe7852e83a37b93b9f2aa6"
+ integrity sha1-UFMN+4T8yap9vnhS6Do3uTufKqY=
+
+is-absolute-url@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698"
+ integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==
+
+is-accessor-descriptor@^0.1.6:
+ version "0.1.6"
+ resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6"
+ integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=
+ dependencies:
+ kind-of "^3.0.2"
+
+is-accessor-descriptor@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656"
+ integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==
+ dependencies:
+ kind-of "^6.0.0"
+
+is-arguments@^1.0.4:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.0.tgz#62353031dfbee07ceb34656a6bde59efecae8dd9"
+ integrity sha512-1Ij4lOMPl/xB5kBDn7I+b2ttPMKa8szhEIrXDuXQD/oe3HJLTLhqhgGspwgyGd6MOywBUqVvYicF72lkgDnIHg==
+ dependencies:
+ call-bind "^1.0.0"
+
+is-arrayish@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
+ integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=
+
+is-arrayish@^0.3.1:
+ version "0.3.2"
+ resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03"
+ integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==
+
+is-bigint@^1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.2.tgz#ffb381442503235ad245ea89e45b3dbff040ee5a"
+ integrity sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA==
+
+is-binary-path@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898"
+ integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=
+ dependencies:
+ binary-extensions "^1.0.0"
+
+is-binary-path@~2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
+ integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
+ dependencies:
+ binary-extensions "^2.0.0"
+
+is-boolean-object@^1.1.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.1.tgz#3c0878f035cb821228d350d2e1e36719716a3de8"
+ integrity sha512-bXdQWkECBUIAcCkeH1unwJLIpZYaa5VvuygSyS/c2lf719mTKZDU5UdDRlpd01UjADgmW8RfqaP+mRaVPdr/Ng==
+ dependencies:
+ call-bind "^1.0.2"
+
+is-buffer@^1.1.5:
+ version "1.1.6"
+ resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
+ integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
+
+is-callable@^1.1.4, is-callable@^1.2.2, is-callable@^1.2.3:
+ version "1.2.3"
+ resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.3.tgz#8b1e0500b73a1d76c70487636f368e519de8db8e"
+ integrity sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==
+
+is-ci@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c"
+ integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==
+ dependencies:
+ ci-info "^2.0.0"
+
+is-color-stop@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-color-stop/-/is-color-stop-1.1.0.tgz#cfff471aee4dd5c9e158598fbe12967b5cdad345"
+ integrity sha1-z/9HGu5N1cnhWFmPvhKWe1za00U=
+ dependencies:
+ css-color-names "^0.0.4"
+ hex-color-regex "^1.1.0"
+ hsl-regex "^1.0.0"
+ hsla-regex "^1.0.0"
+ rgb-regex "^1.0.1"
+ rgba-regex "^1.0.0"
+
+is-core-module@^2.0.0, is-core-module@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a"
+ integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==
+ dependencies:
+ has "^1.0.3"
+
+is-core-module@^2.4.0:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.5.0.tgz#f754843617c70bfd29b7bd87327400cda5c18491"
+ integrity sha512-TXCMSDsEHMEEZ6eCA8rwRDbLu55MRGmrctljsBX/2v1d9/GzqHOxW5c5oPSgrUt2vBFXebu9rGqckXGPWOlYpg==
+ dependencies:
+ has "^1.0.3"
+
+is-data-descriptor@^0.1.4:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56"
+ integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=
+ dependencies:
+ kind-of "^3.0.2"
+
+is-data-descriptor@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7"
+ integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==
+ dependencies:
+ kind-of "^6.0.0"
+
+is-date-object@^1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e"
+ integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==
+
+is-descriptor@^0.1.0:
+ version "0.1.6"
+ resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca"
+ integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==
+ dependencies:
+ is-accessor-descriptor "^0.1.6"
+ is-data-descriptor "^0.1.4"
+ kind-of "^5.0.0"
+
+is-descriptor@^1.0.0, is-descriptor@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec"
+ integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==
+ dependencies:
+ is-accessor-descriptor "^1.0.0"
+ is-data-descriptor "^1.0.0"
+ kind-of "^6.0.2"
+
+is-directory@^0.3.1:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1"
+ integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE=
+
+is-docker@^2.0.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.1.1.tgz#4125a88e44e450d384e09047ede71adc2d144156"
+ integrity sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw==
+
+is-extendable@^0.1.0, is-extendable@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89"
+ integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=
+
+is-extendable@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4"
+ integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==
+ dependencies:
+ is-plain-object "^2.0.4"
+
+is-extglob@^2.1.0, is-extglob@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
+ integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=
+
+is-fullwidth-code-point@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
+ integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=
+
+is-fullwidth-code-point@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d"
+ integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
+
+is-generator-fn@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118"
+ integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==
+
+is-glob@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a"
+ integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=
+ dependencies:
+ is-extglob "^2.1.0"
+
+is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
+ integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
+ dependencies:
+ is-extglob "^2.1.1"
+
+is-module@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591"
+ integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=
+
+is-negative-zero@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24"
+ integrity sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==
+
+is-number-object@^1.0.4:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.5.tgz#6edfaeed7950cff19afedce9fbfca9ee6dd289eb"
+ integrity sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw==
+
+is-number@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195"
+ integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=
+ dependencies:
+ kind-of "^3.0.2"
+
+is-number@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
+ integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
+
+is-obj@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f"
+ integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8=
+
+is-obj@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982"
+ integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==
+
+is-path-cwd@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb"
+ integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==
+
+is-path-in-cwd@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb"
+ integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ==
+ dependencies:
+ is-path-inside "^2.1.0"
+
+is-path-inside@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2"
+ integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg==
+ dependencies:
+ path-is-inside "^1.0.2"
+
+is-plain-obj@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e"
+ integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4=
+
+is-plain-object@^2.0.3, is-plain-object@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677"
+ integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==
+ dependencies:
+ isobject "^3.0.1"
+
+is-potential-custom-element-name@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.0.tgz#0c52e54bcca391bb2c494b21e8626d7336c6e397"
+ integrity sha1-DFLlS8yjkbssSUsh6GJtczbG45c=
+
+is-regex@^1.0.4, is-regex@^1.1.1:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.2.tgz#81c8ebde4db142f2cf1c53fc86d6a45788266251"
+ integrity sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg==
+ dependencies:
+ call-bind "^1.0.2"
+ has-symbols "^1.0.1"
+
+is-regex@^1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.3.tgz#d029f9aff6448b93ebbe3f33dac71511fdcbef9f"
+ integrity sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ==
+ dependencies:
+ call-bind "^1.0.2"
+ has-symbols "^1.0.2"
+
+is-regexp@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069"
+ integrity sha1-/S2INUXEa6xaYz57mgnof6LLUGk=
+
+is-resolvable@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88"
+ integrity sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==
+
[email protected]:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c"
+ integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg==
+
+is-stream@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
+ integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ=
+
+is-stream@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3"
+ integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==
+
+is-string@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.5.tgz#40493ed198ef3ff477b8c7f92f644ec82a5cd3a6"
+ integrity sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==
+
+is-string@^1.0.6:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.6.tgz#3fe5d5992fb0d93404f32584d4b0179a71b54a5f"
+ integrity sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w==
+
+is-svg@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-3.0.0.tgz#9321dbd29c212e5ca99c4fa9794c714bcafa2f75"
+ integrity sha512-gi4iHK53LR2ujhLVVj+37Ykh9GLqYHX6JOVXbLAucaG/Cqw9xwdFOjDM2qeifLs1sF1npXXFvDu0r5HNgCMrzQ==
+ dependencies:
+ html-comment-regex "^1.1.0"
+
+is-symbol@^1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937"
+ integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==
+ dependencies:
+ has-symbols "^1.0.1"
+
+is-symbol@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c"
+ integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==
+ dependencies:
+ has-symbols "^1.0.2"
+
+is-typedarray@^1.0.0, is-typedarray@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
+ integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=
+
+is-windows@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d"
+ integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==
+
+is-wsl@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d"
+ integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0=
+
+is-wsl@^2.1.1, is-wsl@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271"
+ integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==
+ dependencies:
+ is-docker "^2.0.0"
+
[email protected]:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
+ integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=
+
[email protected], isarray@^1.0.0, isarray@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
+ integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
+
+isexe@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
+ integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
+
+isobject@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89"
+ integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=
+ dependencies:
+ isarray "1.0.0"
+
+isobject@^3.0.0, isobject@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
+ integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8=
+
+isstream@~0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a"
+ integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=
+
+istanbul-lib-coverage@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz#f5944a37c70b550b02a78a5c3b2055b280cec8ec"
+ integrity sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg==
+
+istanbul-lib-instrument@^4.0.0, istanbul-lib-instrument@^4.0.3:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d"
+ integrity sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==
+ dependencies:
+ "@babel/core" "^7.7.5"
+ "@istanbuljs/schema" "^0.1.2"
+ istanbul-lib-coverage "^3.0.0"
+ semver "^6.3.0"
+
+istanbul-lib-report@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6"
+ integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==
+ dependencies:
+ istanbul-lib-coverage "^3.0.0"
+ make-dir "^3.0.0"
+ supports-color "^7.1.0"
+
+istanbul-lib-source-maps@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz#75743ce6d96bb86dc7ee4352cf6366a23f0b1ad9"
+ integrity sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg==
+ dependencies:
+ debug "^4.1.1"
+ istanbul-lib-coverage "^3.0.0"
+ source-map "^0.6.1"
+
+istanbul-reports@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.2.tgz#d593210e5000683750cb09fc0644e4b6e27fd53b"
+ integrity sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw==
+ dependencies:
+ html-escaper "^2.0.0"
+ istanbul-lib-report "^3.0.0"
+
+jest-changed-files@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-26.6.2.tgz#f6198479e1cc66f22f9ae1e22acaa0b429c042d0"
+ integrity sha512-fDS7szLcY9sCtIip8Fjry9oGf3I2ht/QT21bAHm5Dmf0mD4X3ReNUf17y+bO6fR8WgbIZTlbyG1ak/53cbRzKQ==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ execa "^4.0.0"
+ throat "^5.0.0"
+
[email protected]:
+ version "26.6.0"
+ resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-26.6.0.tgz#7d9647b2e7f921181869faae1f90a2629fd70705"
+ integrity sha512-L2/Y9szN6FJPWFK8kzWXwfp+FOR7xq0cUL4lIsdbIdwz3Vh6P1nrpcqOleSzr28zOtSHQNV9Z7Tl+KkuK7t5Ng==
+ dependencies:
+ "@babel/traverse" "^7.1.0"
+ "@jest/environment" "^26.6.0"
+ "@jest/test-result" "^26.6.0"
+ "@jest/types" "^26.6.0"
+ "@types/babel__traverse" "^7.0.4"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ co "^4.6.0"
+ dedent "^0.7.0"
+ expect "^26.6.0"
+ is-generator-fn "^2.0.0"
+ jest-each "^26.6.0"
+ jest-matcher-utils "^26.6.0"
+ jest-message-util "^26.6.0"
+ jest-runner "^26.6.0"
+ jest-runtime "^26.6.0"
+ jest-snapshot "^26.6.0"
+ jest-util "^26.6.0"
+ pretty-format "^26.6.0"
+ stack-utils "^2.0.2"
+ throat "^5.0.0"
+
+jest-cli@^26.6.0:
+ version "26.6.3"
+ resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-26.6.3.tgz#43117cfef24bc4cd691a174a8796a532e135e92a"
+ integrity sha512-GF9noBSa9t08pSyl3CY4frMrqp+aQXFGFkf5hEPbh/pIUFYWMK6ZLTfbmadxJVcJrdRoChlWQsA2VkJcDFK8hg==
+ dependencies:
+ "@jest/core" "^26.6.3"
+ "@jest/test-result" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ chalk "^4.0.0"
+ exit "^0.1.2"
+ graceful-fs "^4.2.4"
+ import-local "^3.0.2"
+ is-ci "^2.0.0"
+ jest-config "^26.6.3"
+ jest-util "^26.6.2"
+ jest-validate "^26.6.2"
+ prompts "^2.0.1"
+ yargs "^15.4.1"
+
+jest-config@^26.6.3:
+ version "26.6.3"
+ resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-26.6.3.tgz#64f41444eef9eb03dc51d5c53b75c8c71f645349"
+ integrity sha512-t5qdIj/bCj2j7NFVHb2nFB4aUdfucDn3JRKgrZnplb8nieAirAzRSHP8uDEd+qV6ygzg9Pz4YG7UTJf94LPSyg==
+ dependencies:
+ "@babel/core" "^7.1.0"
+ "@jest/test-sequencer" "^26.6.3"
+ "@jest/types" "^26.6.2"
+ babel-jest "^26.6.3"
+ chalk "^4.0.0"
+ deepmerge "^4.2.2"
+ glob "^7.1.1"
+ graceful-fs "^4.2.4"
+ jest-environment-jsdom "^26.6.2"
+ jest-environment-node "^26.6.2"
+ jest-get-type "^26.3.0"
+ jest-jasmine2 "^26.6.3"
+ jest-regex-util "^26.0.0"
+ jest-resolve "^26.6.2"
+ jest-util "^26.6.2"
+ jest-validate "^26.6.2"
+ micromatch "^4.0.2"
+ pretty-format "^26.6.2"
+
+jest-diff@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-26.6.2.tgz#1aa7468b52c3a68d7d5c5fdcdfcd5e49bd164394"
+ integrity sha512-6m+9Z3Gv9wN0WFVasqjCL/06+EFCMTqDEUl/b87HYK2rAPTyfz4ZIuSlPhY51PIQRWx5TaxeF1qmXKe9gfN3sA==
+ dependencies:
+ chalk "^4.0.0"
+ diff-sequences "^26.6.2"
+ jest-get-type "^26.3.0"
+ pretty-format "^26.6.2"
+
+jest-docblock@^26.0.0:
+ version "26.0.0"
+ resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-26.0.0.tgz#3e2fa20899fc928cb13bd0ff68bd3711a36889b5"
+ integrity sha512-RDZ4Iz3QbtRWycd8bUEPxQsTlYazfYn/h5R65Fc6gOfwozFhoImx+affzky/FFBuqISPTqjXomoIGJVKBWoo0w==
+ dependencies:
+ detect-newline "^3.0.0"
+
+jest-each@^26.6.0, jest-each@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-26.6.2.tgz#02526438a77a67401c8a6382dfe5999952c167cb"
+ integrity sha512-Mer/f0KaATbjl8MCJ+0GEpNdqmnVmDYqCTJYTvoo7rqmRiDllmp2AYN+06F93nXcY3ur9ShIjS+CO/uD+BbH4A==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ chalk "^4.0.0"
+ jest-get-type "^26.3.0"
+ jest-util "^26.6.2"
+ pretty-format "^26.6.2"
+
+jest-environment-jsdom@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-26.6.2.tgz#78d09fe9cf019a357009b9b7e1f101d23bd1da3e"
+ integrity sha512-jgPqCruTlt3Kwqg5/WVFyHIOJHsiAvhcp2qiR2QQstuG9yWox5+iHpU3ZrcBxW14T4fe5Z68jAfLRh7joCSP2Q==
+ dependencies:
+ "@jest/environment" "^26.6.2"
+ "@jest/fake-timers" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ "@types/node" "*"
+ jest-mock "^26.6.2"
+ jest-util "^26.6.2"
+ jsdom "^16.4.0"
+
+jest-environment-node@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-26.6.2.tgz#824e4c7fb4944646356f11ac75b229b0035f2b0c"
+ integrity sha512-zhtMio3Exty18dy8ee8eJ9kjnRyZC1N4C1Nt/VShN1apyXc8rWGtJ9lI7vqiWcyyXS4BVSEn9lxAM2D+07/Tag==
+ dependencies:
+ "@jest/environment" "^26.6.2"
+ "@jest/fake-timers" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ "@types/node" "*"
+ jest-mock "^26.6.2"
+ jest-util "^26.6.2"
+
+jest-get-type@^26.3.0:
+ version "26.3.0"
+ resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-26.3.0.tgz#e97dc3c3f53c2b406ca7afaed4493b1d099199e0"
+ integrity sha512-TpfaviN1R2pQWkIihlfEanwOXK0zcxrKEE4MlU6Tn7keoXdN6/3gK/xl0yEh8DOunn5pOVGKf8hB4R9gVh04ig==
+
+jest-haste-map@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-26.6.2.tgz#dd7e60fe7dc0e9f911a23d79c5ff7fb5c2cafeaa"
+ integrity sha512-easWIJXIw71B2RdR8kgqpjQrbMRWQBgiBwXYEhtGUTaX+doCjBheluShdDMeR8IMfJiTqH4+zfhtg29apJf/8w==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ "@types/graceful-fs" "^4.1.2"
+ "@types/node" "*"
+ anymatch "^3.0.3"
+ fb-watchman "^2.0.0"
+ graceful-fs "^4.2.4"
+ jest-regex-util "^26.0.0"
+ jest-serializer "^26.6.2"
+ jest-util "^26.6.2"
+ jest-worker "^26.6.2"
+ micromatch "^4.0.2"
+ sane "^4.0.3"
+ walker "^1.0.7"
+ optionalDependencies:
+ fsevents "^2.1.2"
+
+jest-jasmine2@^26.6.3:
+ version "26.6.3"
+ resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-26.6.3.tgz#adc3cf915deacb5212c93b9f3547cd12958f2edd"
+ integrity sha512-kPKUrQtc8aYwBV7CqBg5pu+tmYXlvFlSFYn18ev4gPFtrRzB15N2gW/Roew3187q2w2eHuu0MU9TJz6w0/nPEg==
+ dependencies:
+ "@babel/traverse" "^7.1.0"
+ "@jest/environment" "^26.6.2"
+ "@jest/source-map" "^26.6.2"
+ "@jest/test-result" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ co "^4.6.0"
+ expect "^26.6.2"
+ is-generator-fn "^2.0.0"
+ jest-each "^26.6.2"
+ jest-matcher-utils "^26.6.2"
+ jest-message-util "^26.6.2"
+ jest-runtime "^26.6.3"
+ jest-snapshot "^26.6.2"
+ jest-util "^26.6.2"
+ pretty-format "^26.6.2"
+ throat "^5.0.0"
+
+jest-leak-detector@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-26.6.2.tgz#7717cf118b92238f2eba65054c8a0c9c653a91af"
+ integrity sha512-i4xlXpsVSMeKvg2cEKdfhh0H39qlJlP5Ex1yQxwF9ubahboQYMgTtz5oML35AVA3B4Eu+YsmwaiKVev9KCvLxg==
+ dependencies:
+ jest-get-type "^26.3.0"
+ pretty-format "^26.6.2"
+
+jest-matcher-utils@^26.6.0, jest-matcher-utils@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-26.6.2.tgz#8e6fd6e863c8b2d31ac6472eeb237bc595e53e7a"
+ integrity sha512-llnc8vQgYcNqDrqRDXWwMr9i7rS5XFiCwvh6DTP7Jqa2mqpcCBBlpCbn+trkG0KNhPu/h8rzyBkriOtBstvWhw==
+ dependencies:
+ chalk "^4.0.0"
+ jest-diff "^26.6.2"
+ jest-get-type "^26.3.0"
+ pretty-format "^26.6.2"
+
+jest-message-util@^26.6.0, jest-message-util@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-26.6.2.tgz#58173744ad6fc0506b5d21150b9be56ef001ca07"
+ integrity sha512-rGiLePzQ3AzwUshu2+Rn+UMFk0pHN58sOG+IaJbk5Jxuqo3NYO1U2/MIR4S1sKgsoYSXSzdtSa0TgrmtUwEbmA==
+ dependencies:
+ "@babel/code-frame" "^7.0.0"
+ "@jest/types" "^26.6.2"
+ "@types/stack-utils" "^2.0.0"
+ chalk "^4.0.0"
+ graceful-fs "^4.2.4"
+ micromatch "^4.0.2"
+ pretty-format "^26.6.2"
+ slash "^3.0.0"
+ stack-utils "^2.0.2"
+
+jest-mock@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-26.6.2.tgz#d6cb712b041ed47fe0d9b6fc3474bc6543feb302"
+ integrity sha512-YyFjePHHp1LzpzYcmgqkJ0nm0gg/lJx2aZFzFy1S6eUqNjXsOqTK10zNRff2dNfssgokjkG65OlWNcIlgd3zew==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ "@types/node" "*"
+
+jest-pnp-resolver@^1.2.2:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c"
+ integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==
+
+jest-regex-util@^26.0.0:
+ version "26.0.0"
+ resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-26.0.0.tgz#d25e7184b36e39fd466c3bc41be0971e821fee28"
+ integrity sha512-Gv3ZIs/nA48/Zvjrl34bf+oD76JHiGDUxNOVgUjh3j890sblXryjY4rss71fPtD/njchl6PSE2hIhvyWa1eT0A==
+
+jest-resolve-dependencies@^26.6.3:
+ version "26.6.3"
+ resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-26.6.3.tgz#6680859ee5d22ee5dcd961fe4871f59f4c784fb6"
+ integrity sha512-pVwUjJkxbhe4RY8QEWzN3vns2kqyuldKpxlxJlzEYfKSvY6/bMvxoFrYYzUO1Gx28yKWN37qyV7rIoIp2h8fTg==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ jest-regex-util "^26.0.0"
+ jest-snapshot "^26.6.2"
+
[email protected]:
+ version "26.6.0"
+ resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-26.6.0.tgz#070fe7159af87b03e50f52ea5e17ee95bbee40e1"
+ integrity sha512-tRAz2bwraHufNp+CCmAD8ciyCpXCs1NQxB5EJAmtCFy6BN81loFEGWKzYu26Y62lAJJe4X4jg36Kf+NsQyiStQ==
+ dependencies:
+ "@jest/types" "^26.6.0"
+ chalk "^4.0.0"
+ graceful-fs "^4.2.4"
+ jest-pnp-resolver "^1.2.2"
+ jest-util "^26.6.0"
+ read-pkg-up "^7.0.1"
+ resolve "^1.17.0"
+ slash "^3.0.0"
+
+jest-resolve@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-26.6.2.tgz#a3ab1517217f469b504f1b56603c5bb541fbb507"
+ integrity sha512-sOxsZOq25mT1wRsfHcbtkInS+Ek7Q8jCHUB0ZUTP0tc/c41QHriU/NunqMfCUWsL4H3MHpvQD4QR9kSYhS7UvQ==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ chalk "^4.0.0"
+ graceful-fs "^4.2.4"
+ jest-pnp-resolver "^1.2.2"
+ jest-util "^26.6.2"
+ read-pkg-up "^7.0.1"
+ resolve "^1.18.1"
+ slash "^3.0.0"
+
+jest-runner@^26.6.0, jest-runner@^26.6.3:
+ version "26.6.3"
+ resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-26.6.3.tgz#2d1fed3d46e10f233fd1dbd3bfaa3fe8924be159"
+ integrity sha512-atgKpRHnaA2OvByG/HpGA4g6CSPS/1LK0jK3gATJAoptC1ojltpmVlYC3TYgdmGp+GLuhzpH30Gvs36szSL2JQ==
+ dependencies:
+ "@jest/console" "^26.6.2"
+ "@jest/environment" "^26.6.2"
+ "@jest/test-result" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ emittery "^0.7.1"
+ exit "^0.1.2"
+ graceful-fs "^4.2.4"
+ jest-config "^26.6.3"
+ jest-docblock "^26.0.0"
+ jest-haste-map "^26.6.2"
+ jest-leak-detector "^26.6.2"
+ jest-message-util "^26.6.2"
+ jest-resolve "^26.6.2"
+ jest-runtime "^26.6.3"
+ jest-util "^26.6.2"
+ jest-worker "^26.6.2"
+ source-map-support "^0.5.6"
+ throat "^5.0.0"
+
+jest-runtime@^26.6.0, jest-runtime@^26.6.3:
+ version "26.6.3"
+ resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-26.6.3.tgz#4f64efbcfac398331b74b4b3c82d27d401b8fa2b"
+ integrity sha512-lrzyR3N8sacTAMeonbqpnSka1dHNux2uk0qqDXVkMv2c/A3wYnvQ4EXuI013Y6+gSKSCxdaczvf4HF0mVXHRdw==
+ dependencies:
+ "@jest/console" "^26.6.2"
+ "@jest/environment" "^26.6.2"
+ "@jest/fake-timers" "^26.6.2"
+ "@jest/globals" "^26.6.2"
+ "@jest/source-map" "^26.6.2"
+ "@jest/test-result" "^26.6.2"
+ "@jest/transform" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ "@types/yargs" "^15.0.0"
+ chalk "^4.0.0"
+ cjs-module-lexer "^0.6.0"
+ collect-v8-coverage "^1.0.0"
+ exit "^0.1.2"
+ glob "^7.1.3"
+ graceful-fs "^4.2.4"
+ jest-config "^26.6.3"
+ jest-haste-map "^26.6.2"
+ jest-message-util "^26.6.2"
+ jest-mock "^26.6.2"
+ jest-regex-util "^26.0.0"
+ jest-resolve "^26.6.2"
+ jest-snapshot "^26.6.2"
+ jest-util "^26.6.2"
+ jest-validate "^26.6.2"
+ slash "^3.0.0"
+ strip-bom "^4.0.0"
+ yargs "^15.4.1"
+
+jest-serializer@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-26.6.2.tgz#d139aafd46957d3a448f3a6cdabe2919ba0742d1"
+ integrity sha512-S5wqyz0DXnNJPd/xfIzZ5Xnp1HrJWBczg8mMfMpN78OJ5eDxXyf+Ygld9wX1DnUWbIbhM1YDY95NjR4CBXkb2g==
+ dependencies:
+ "@types/node" "*"
+ graceful-fs "^4.2.4"
+
+jest-snapshot@^26.6.0, jest-snapshot@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-26.6.2.tgz#f3b0af1acb223316850bd14e1beea9837fb39c84"
+ integrity sha512-OLhxz05EzUtsAmOMzuupt1lHYXCNib0ECyuZ/PZOx9TrZcC8vL0x+DUG3TL+GLX3yHG45e6YGjIm0XwDc3q3og==
+ dependencies:
+ "@babel/types" "^7.0.0"
+ "@jest/types" "^26.6.2"
+ "@types/babel__traverse" "^7.0.4"
+ "@types/prettier" "^2.0.0"
+ chalk "^4.0.0"
+ expect "^26.6.2"
+ graceful-fs "^4.2.4"
+ jest-diff "^26.6.2"
+ jest-get-type "^26.3.0"
+ jest-haste-map "^26.6.2"
+ jest-matcher-utils "^26.6.2"
+ jest-message-util "^26.6.2"
+ jest-resolve "^26.6.2"
+ natural-compare "^1.4.0"
+ pretty-format "^26.6.2"
+ semver "^7.3.2"
+
+jest-util@^26.6.0, jest-util@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-26.6.2.tgz#907535dbe4d5a6cb4c47ac9b926f6af29576cbc1"
+ integrity sha512-MDW0fKfsn0OI7MS7Euz6h8HNDXVQ0gaM9uW6RjfDmd1DAFcaxX9OqIakHIqhbnmF08Cf2DLDG+ulq8YQQ0Lp0Q==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ "@types/node" "*"
+ chalk "^4.0.0"
+ graceful-fs "^4.2.4"
+ is-ci "^2.0.0"
+ micromatch "^4.0.2"
+
+jest-validate@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-26.6.2.tgz#23d380971587150467342911c3d7b4ac57ab20ec"
+ integrity sha512-NEYZ9Aeyj0i5rQqbq+tpIOom0YS1u2MVu6+euBsvpgIme+FOfRmoC4R5p0JiAUpaFvFy24xgrpMknarR/93XjQ==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ camelcase "^6.0.0"
+ chalk "^4.0.0"
+ jest-get-type "^26.3.0"
+ leven "^3.1.0"
+ pretty-format "^26.6.2"
+
[email protected]:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/jest-watch-typeahead/-/jest-watch-typeahead-0.6.1.tgz#45221b86bb6710b7e97baaa1640ae24a07785e63"
+ integrity sha512-ITVnHhj3Jd/QkqQcTqZfRgjfyRhDFM/auzgVo2RKvSwi18YMvh0WvXDJFoFED6c7jd/5jxtu4kSOb9PTu2cPVg==
+ dependencies:
+ ansi-escapes "^4.3.1"
+ chalk "^4.0.0"
+ jest-regex-util "^26.0.0"
+ jest-watcher "^26.3.0"
+ slash "^3.0.0"
+ string-length "^4.0.1"
+ strip-ansi "^6.0.0"
+
+jest-watcher@^26.3.0, jest-watcher@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-26.6.2.tgz#a5b683b8f9d68dbcb1d7dae32172d2cca0592975"
+ integrity sha512-WKJob0P/Em2csiVthsI68p6aGKTIcsfjH9Gsx1f0A3Italz43e3ho0geSAVsmj09RWOELP1AZ/DXyJgOgDKxXQ==
+ dependencies:
+ "@jest/test-result" "^26.6.2"
+ "@jest/types" "^26.6.2"
+ "@types/node" "*"
+ ansi-escapes "^4.2.1"
+ chalk "^4.0.0"
+ jest-util "^26.6.2"
+ string-length "^4.0.1"
+
+jest-worker@^24.9.0:
+ version "24.9.0"
+ resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-24.9.0.tgz#5dbfdb5b2d322e98567898238a9697bcce67b3e5"
+ integrity sha512-51PE4haMSXcHohnSMdM42anbvZANYTqMrr52tVKPqqsPJMzoP6FYYDVqahX/HrAoKEKz3uUPzSvKs9A3qR4iVw==
+ dependencies:
+ merge-stream "^2.0.0"
+ supports-color "^6.1.0"
+
+jest-worker@^26.5.0, jest-worker@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed"
+ integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ==
+ dependencies:
+ "@types/node" "*"
+ merge-stream "^2.0.0"
+ supports-color "^7.0.0"
+
[email protected]:
+ version "26.6.0"
+ resolved "https://registry.yarnpkg.com/jest/-/jest-26.6.0.tgz#546b25a1d8c888569dbbe93cae131748086a4a25"
+ integrity sha512-jxTmrvuecVISvKFFhOkjsWRZV7sFqdSUAd1ajOKY+/QE/aLBVstsJ/dX8GczLzwiT6ZEwwmZqtCUHLHHQVzcfA==
+ dependencies:
+ "@jest/core" "^26.6.0"
+ import-local "^3.0.2"
+ jest-cli "^26.6.0"
+
+"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
+ integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
+
+js-yaml@^3.13.1:
+ version "3.14.1"
+ resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537"
+ integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==
+ dependencies:
+ argparse "^1.0.7"
+ esprima "^4.0.0"
+
+jsbn@~0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513"
+ integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM=
+
+jsdom@^16.4.0:
+ version "16.4.0"
+ resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.4.0.tgz#36005bde2d136f73eee1a830c6d45e55408edddb"
+ integrity sha512-lYMm3wYdgPhrl7pDcRmvzPhhrGVBeVhPIqeHjzeiHN3DFmD1RBpbExbi8vU7BJdH8VAZYovR8DMt0PNNDM7k8w==
+ dependencies:
+ abab "^2.0.3"
+ acorn "^7.1.1"
+ acorn-globals "^6.0.0"
+ cssom "^0.4.4"
+ cssstyle "^2.2.0"
+ data-urls "^2.0.0"
+ decimal.js "^10.2.0"
+ domexception "^2.0.1"
+ escodegen "^1.14.1"
+ html-encoding-sniffer "^2.0.1"
+ is-potential-custom-element-name "^1.0.0"
+ nwsapi "^2.2.0"
+ parse5 "5.1.1"
+ request "^2.88.2"
+ request-promise-native "^1.0.8"
+ saxes "^5.0.0"
+ symbol-tree "^3.2.4"
+ tough-cookie "^3.0.1"
+ w3c-hr-time "^1.0.2"
+ w3c-xmlserializer "^2.0.0"
+ webidl-conversions "^6.1.0"
+ whatwg-encoding "^1.0.5"
+ whatwg-mimetype "^2.3.0"
+ whatwg-url "^8.0.0"
+ ws "^7.2.3"
+ xml-name-validator "^3.0.0"
+
+jsesc@^2.5.1:
+ version "2.5.2"
+ resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4"
+ integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==
+
+jsesc@~0.5.0:
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d"
+ integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=
+
+json-parse-better-errors@^1.0.1, json-parse-better-errors@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9"
+ integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==
+
+json-parse-even-better-errors@^2.3.0:
+ version "2.3.1"
+ resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d"
+ integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==
+
+json-schema-traverse@^0.4.1:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
+ integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==
+
+json-schema-traverse@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2"
+ integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==
+
[email protected]:
+ version "0.2.3"
+ resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13"
+ integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=
+
+json-stable-stringify-without-jsonify@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
+ integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=
+
+json-stringify-safe@~5.0.1:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
+ integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=
+
+json3@^3.3.3:
+ version "3.3.3"
+ resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81"
+ integrity sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA==
+
+json5@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe"
+ integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==
+ dependencies:
+ minimist "^1.2.0"
+
+json5@^2.1.2:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.0.tgz#2dfefe720c6ba525d9ebd909950f0515316c89a3"
+ integrity sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==
+ dependencies:
+ minimist "^1.2.5"
+
+jsonfile@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb"
+ integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=
+ optionalDependencies:
+ graceful-fs "^4.1.6"
+
+jsonfile@^6.0.1:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae"
+ integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==
+ dependencies:
+ universalify "^2.0.0"
+ optionalDependencies:
+ graceful-fs "^4.1.6"
+
+jsprim@^1.2.2:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2"
+ integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=
+ dependencies:
+ assert-plus "1.0.0"
+ extsprintf "1.3.0"
+ json-schema "0.2.3"
+ verror "1.10.0"
+
+"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.1.0:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.2.0.tgz#41108d2cec408c3453c1bbe8a4aae9e1e2bd8f82"
+ integrity sha512-EIsmt3O3ljsU6sot/J4E1zDRxfBNrhjyf/OKjlydwgEimQuznlM4Wv7U+ueONJMyEn1WRE0K8dhi3dVAXYT24Q==
+ dependencies:
+ array-includes "^3.1.2"
+ object.assign "^4.1.2"
+
+killable@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892"
+ integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg==
+
+kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0:
+ version "3.2.2"
+ resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64"
+ integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=
+ dependencies:
+ is-buffer "^1.1.5"
+
+kind-of@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57"
+ integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc=
+ dependencies:
+ is-buffer "^1.1.5"
+
+kind-of@^5.0.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d"
+ integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==
+
+kind-of@^6.0.0, kind-of@^6.0.2:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd"
+ integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==
+
+kleur@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e"
+ integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==
+
+klona@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.4.tgz#7bb1e3affb0cb8624547ef7e8f6708ea2e39dfc0"
+ integrity sha512-ZRbnvdg/NxqzC7L9Uyqzf4psi1OM4Cuc+sJAkQPjO6XkQIJTNbfK2Rsmbw8fx1p2mkZdp2FZYo2+LwXYY/uwIA==
+
+language-subtag-registry@~0.3.2:
+ version "0.3.21"
+ resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.21.tgz#04ac218bea46f04cb039084602c6da9e788dd45a"
+ integrity sha512-L0IqwlIXjilBVVYKFT37X9Ih11Um5NEl9cbJIuU/SwP/zEEAbBPOnEeeuxVMf45ydWQRDQN3Nqc96OgbH1K+Pg==
+
+language-tags@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a"
+ integrity sha1-0yHbxNowuovzAk4ED6XBRmH5GTo=
+ dependencies:
+ language-subtag-registry "~0.3.2"
+
+last-call-webpack-plugin@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/last-call-webpack-plugin/-/last-call-webpack-plugin-3.0.0.tgz#9742df0e10e3cf46e5c0381c2de90d3a7a2d7555"
+ integrity sha512-7KI2l2GIZa9p2spzPIVZBYyNKkN+e/SQPpnjlTiPhdbDW3F86tdKKELxKpzJ5sgU19wQWsACULZmpTPYHeWO5w==
+ dependencies:
+ lodash "^4.17.5"
+ webpack-sources "^1.1.0"
+
+leven@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2"
+ integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==
+
+levn@^0.4.1:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade"
+ integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==
+ dependencies:
+ prelude-ls "^1.2.1"
+ type-check "~0.4.0"
+
+levn@~0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee"
+ integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=
+ dependencies:
+ prelude-ls "~1.1.2"
+ type-check "~0.3.2"
+
+lines-and-columns@^1.1.6:
+ version "1.1.6"
+ resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00"
+ integrity sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=
+
+load-json-file@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8"
+ integrity sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=
+ dependencies:
+ graceful-fs "^4.1.2"
+ parse-json "^2.2.0"
+ pify "^2.0.0"
+ strip-bom "^3.0.0"
+
+load-json-file@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b"
+ integrity sha1-L19Fq5HjMhYjT9U62rZo607AmTs=
+ dependencies:
+ graceful-fs "^4.1.2"
+ parse-json "^4.0.0"
+ pify "^3.0.0"
+ strip-bom "^3.0.0"
+
+loader-runner@^2.4.0:
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-2.4.0.tgz#ed47066bfe534d7e84c4c7b9998c2a75607d9357"
+ integrity sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw==
+
[email protected]:
+ version "1.2.3"
+ resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.2.3.tgz#1ff5dc6911c9f0a062531a4c04b609406108c2c7"
+ integrity sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA==
+ dependencies:
+ big.js "^5.2.2"
+ emojis-list "^2.0.0"
+ json5 "^1.0.1"
+
[email protected], loader-utils@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.0.tgz#e4cace5b816d425a166b5f097e10cd12b36064b0"
+ integrity sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ==
+ dependencies:
+ big.js "^5.2.2"
+ emojis-list "^3.0.0"
+ json5 "^2.1.2"
+
+loader-utils@^1.1.0, loader-utils@^1.2.3, loader-utils@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.0.tgz#c579b5e34cb34b1a74edc6c1fb36bfa371d5a613"
+ integrity sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==
+ dependencies:
+ big.js "^5.2.2"
+ emojis-list "^3.0.0"
+ json5 "^1.0.1"
+
+locate-path@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
+ integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=
+ dependencies:
+ p-locate "^2.0.0"
+ path-exists "^3.0.0"
+
+locate-path@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
+ integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==
+ dependencies:
+ p-locate "^3.0.0"
+ path-exists "^3.0.0"
+
+locate-path@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0"
+ integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==
+ dependencies:
+ p-locate "^4.1.0"
+
+lodash._reinterpolate@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d"
+ integrity sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=
+
+lodash.camelcase@^4.3.0:
+ version "4.3.0"
+ resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6"
+ integrity sha1-soqmKIorn8ZRA1x3EfZathkDMaY=
+
+lodash.clonedeep@^4.5.0:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef"
+ integrity sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=
+
+lodash.debounce@^4.0.8:
+ version "4.0.8"
+ resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af"
+ integrity sha1-gteb/zCmfEAF/9XiUVMArZyk168=
+
+lodash.memoize@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe"
+ integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=
+
+lodash.merge@^4.6.2:
+ version "4.6.2"
+ resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a"
+ integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==
+
[email protected]:
+ version "4.6.2"
+ resolved "https://registry.yarnpkg.com/lodash.mergewith/-/lodash.mergewith-4.6.2.tgz#617121f89ac55f59047c7aec1ccd6654c6590f55"
+ integrity sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ==
+
+lodash.sortby@^4.7.0:
+ version "4.7.0"
+ resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438"
+ integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=
+
+lodash.template@^4.5.0:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-4.5.0.tgz#f976195cf3f347d0d5f52483569fe8031ccce8ab"
+ integrity sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A==
+ dependencies:
+ lodash._reinterpolate "^3.0.0"
+ lodash.templatesettings "^4.0.0"
+
+lodash.templatesettings@^4.0.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz#e481310f049d3cf6d47e912ad09313b154f0fb33"
+ integrity sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ==
+ dependencies:
+ lodash._reinterpolate "^3.0.0"
+
+lodash.throttle@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/lodash.throttle/-/lodash.throttle-4.1.1.tgz#c23e91b710242ac70c37f1e1cda9274cc39bf2f4"
+ integrity sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ=
+
+lodash.truncate@^4.4.2:
+ version "4.4.2"
+ resolved "https://registry.yarnpkg.com/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193"
+ integrity sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=
+
+lodash.uniq@^4.5.0:
+ version "4.5.0"
+ resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
+ integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=
+
+"lodash@>=3.5 <5", lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.5:
+ version "4.17.21"
+ resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
+ integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
+
+loglevel@^1.6.8:
+ version "1.7.1"
+ resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.7.1.tgz#005fde2f5e6e47068f935ff28573e125ef72f197"
+ integrity sha512-Hesni4s5UkWkwCGJMQGAh71PaLUmKFM60dHvq0zi/vDhhrzuk+4GgNbTXJ12YYQJn6ZKBDNIjYcuQGKudvqrIw==
+
+long@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28"
+ integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==
+
+loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf"
+ integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==
+ dependencies:
+ js-tokens "^3.0.0 || ^4.0.0"
+
+lower-case@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28"
+ integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==
+ dependencies:
+ tslib "^2.0.3"
+
+lru-cache@^5.1.1:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920"
+ integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==
+ dependencies:
+ yallist "^3.0.2"
+
+lru-cache@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94"
+ integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==
+ dependencies:
+ yallist "^4.0.0"
+
+magic-string@^0.25.0, magic-string@^0.25.7:
+ version "0.25.7"
+ resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.7.tgz#3f497d6fd34c669c6798dcb821f2ef31f5445051"
+ integrity sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA==
+ dependencies:
+ sourcemap-codec "^1.4.4"
+
+make-dir@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5"
+ integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==
+ dependencies:
+ pify "^4.0.1"
+ semver "^5.6.0"
+
+make-dir@^3.0.0, make-dir@^3.0.2:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f"
+ integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==
+ dependencies:
+ semver "^6.0.0"
+
[email protected]:
+ version "1.0.11"
+ resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.11.tgz#e01a5c9109f2af79660e4e8b9587790184f5a96c"
+ integrity sha1-4BpckQnyr3lmDk6LlYd5AYT1qWw=
+ dependencies:
+ tmpl "1.0.x"
+
+map-cache@^0.2.2:
+ version "0.2.2"
+ resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf"
+ integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=
+
+map-visit@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f"
+ integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=
+ dependencies:
+ object-visit "^1.0.0"
+
+md5.js@^1.3.4:
+ version "1.3.5"
+ resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f"
+ integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==
+ dependencies:
+ hash-base "^3.0.0"
+ inherits "^2.0.1"
+ safe-buffer "^5.1.2"
+
[email protected]:
+ version "2.0.14"
+ resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50"
+ integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==
+
[email protected]:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b"
+ integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA==
+
[email protected]:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
+ integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=
+
+memory-fs@^0.4.1:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552"
+ integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI=
+ dependencies:
+ errno "^0.1.3"
+ readable-stream "^2.0.1"
+
+memory-fs@^0.5.0:
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c"
+ integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA==
+ dependencies:
+ errno "^0.1.3"
+ readable-stream "^2.0.1"
+
[email protected]:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
+ integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=
+
+merge-stream@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60"
+ integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==
+
+merge2@^1.3.0:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae"
+ integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==
+
+methods@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
+ integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=
+
+microevent.ts@~0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/microevent.ts/-/microevent.ts-0.1.1.tgz#70b09b83f43df5172d0205a63025bce0f7357fa0"
+ integrity sha512-jo1OfR4TaEwd5HOrt5+tAZ9mqT4jmpNAusXtyfNzqVm9uiSYFZlKM1wYL4oU7azZW/PxQW53wM0S6OR1JHNa2g==
+
+micromatch@^3.1.10, micromatch@^3.1.4:
+ version "3.1.10"
+ resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23"
+ integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==
+ dependencies:
+ arr-diff "^4.0.0"
+ array-unique "^0.3.2"
+ braces "^2.3.1"
+ define-property "^2.0.2"
+ extend-shallow "^3.0.2"
+ extglob "^2.0.4"
+ fragment-cache "^0.2.1"
+ kind-of "^6.0.2"
+ nanomatch "^1.2.9"
+ object.pick "^1.3.0"
+ regex-not "^1.0.0"
+ snapdragon "^0.8.1"
+ to-regex "^3.0.2"
+
+micromatch@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.2.tgz#4fcb0999bf9fbc2fcbdd212f6d629b9a56c39259"
+ integrity sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==
+ dependencies:
+ braces "^3.0.1"
+ picomatch "^2.0.5"
+
+miller-rabin@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d"
+ integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==
+ dependencies:
+ bn.js "^4.0.0"
+ brorand "^1.0.1"
+
[email protected], "mime-db@>= 1.43.0 < 2":
+ version "1.46.0"
+ resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.46.0.tgz#6267748a7f799594de3cbc8cde91def349661cee"
+ integrity sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ==
+
+mime-types@^2.1.12, mime-types@^2.1.27, mime-types@~2.1.17, mime-types@~2.1.19, mime-types@~2.1.24:
+ version "2.1.29"
+ resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.29.tgz#1d4ab77da64b91f5f72489df29236563754bb1b2"
+ integrity sha512-Y/jMt/S5sR9OaqteJtslsFZKWOIIqMACsJSiHghlCAyhf7jfVYjKBmLiX8OgpWeW+fjJ2b+Az69aPFPkUOY6xQ==
+ dependencies:
+ mime-db "1.46.0"
+
[email protected]:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
+ integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
+
+mime@^2.4.4:
+ version "2.5.2"
+ resolved "https://registry.yarnpkg.com/mime/-/mime-2.5.2.tgz#6e3dc6cc2b9510643830e5f19d5cb753da5eeabe"
+ integrity sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg==
+
+mimic-fn@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b"
+ integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==
+
+mini-create-react-context@^0.4.0:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/mini-create-react-context/-/mini-create-react-context-0.4.1.tgz#072171561bfdc922da08a60c2197a497cc2d1d5e"
+ integrity sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ==
+ dependencies:
+ "@babel/runtime" "^7.12.1"
+ tiny-warning "^1.0.3"
+
[email protected]:
+ version "0.11.3"
+ resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-0.11.3.tgz#15b0910a7f32e62ffde4a7430cfefbd700724ea6"
+ integrity sha512-n9BA8LonkOkW1/zn+IbLPQmovsL0wMb9yx75fMJQZf2X1Zoec9yTZtyMePcyu19wPkmFbzZZA6fLTotpFhQsOA==
+ dependencies:
+ loader-utils "^1.1.0"
+ normalize-url "1.9.1"
+ schema-utils "^1.0.0"
+ webpack-sources "^1.1.0"
+
+minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7"
+ integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==
+
+minimalistic-crypto-utils@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a"
+ integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo=
+
[email protected], minimatch@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
+ integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
+ dependencies:
+ brace-expansion "^1.1.7"
+
+minimist@^1.1.1, minimist@^1.2.0, minimist@^1.2.5:
+ version "1.2.5"
+ resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
+ integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==
+
+minipass-collect@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/minipass-collect/-/minipass-collect-1.0.2.tgz#22b813bf745dc6edba2576b940022ad6edc8c617"
+ integrity sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==
+ dependencies:
+ minipass "^3.0.0"
+
+minipass-flush@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/minipass-flush/-/minipass-flush-1.0.5.tgz#82e7135d7e89a50ffe64610a787953c4c4cbb373"
+ integrity sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==
+ dependencies:
+ minipass "^3.0.0"
+
+minipass-pipeline@^1.2.2:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz#68472f79711c084657c067c5c6ad93cddea8214c"
+ integrity sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==
+ dependencies:
+ minipass "^3.0.0"
+
+minipass@^3.0.0, minipass@^3.1.1:
+ version "3.1.3"
+ resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.3.tgz#7d42ff1f39635482e15f9cdb53184deebd5815fd"
+ integrity sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==
+ dependencies:
+ yallist "^4.0.0"
+
+minizlib@^2.1.1:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931"
+ integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==
+ dependencies:
+ minipass "^3.0.0"
+ yallist "^4.0.0"
+
+mississippi@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022"
+ integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA==
+ dependencies:
+ concat-stream "^1.5.0"
+ duplexify "^3.4.2"
+ end-of-stream "^1.1.0"
+ flush-write-stream "^1.0.0"
+ from2 "^2.1.0"
+ parallel-transform "^1.1.0"
+ pump "^3.0.0"
+ pumpify "^1.3.3"
+ stream-each "^1.1.0"
+ through2 "^2.0.0"
+
+mixin-deep@^1.2.0:
+ version "1.3.2"
+ resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566"
+ integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==
+ dependencies:
+ for-in "^1.0.2"
+ is-extendable "^1.0.1"
+
+mkdirp@^0.5.1, mkdirp@^0.5.3, mkdirp@^0.5.5, mkdirp@~0.5.1:
+ version "0.5.5"
+ resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"
+ integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==
+ dependencies:
+ minimist "^1.2.5"
+
+mkdirp@^1.0.3, mkdirp@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e"
+ integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==
+
+move-concurrently@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92"
+ integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I=
+ dependencies:
+ aproba "^1.1.1"
+ copy-concurrently "^1.0.0"
+ fs-write-stream-atomic "^1.0.8"
+ mkdirp "^0.5.1"
+ rimraf "^2.5.4"
+ run-queue "^1.0.3"
+
[email protected]:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
+ integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
+
[email protected]:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
+ integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==
+
[email protected]:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
+ integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
+
+ms@^2.1.1:
+ version "2.1.3"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
+ integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
+
+multicast-dns-service-types@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz#899f11d9686e5e05cb91b35d5f0e63b773cfc901"
+ integrity sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE=
+
+multicast-dns@^6.0.1:
+ version "6.2.3"
+ resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-6.2.3.tgz#a0ec7bd9055c4282f790c3c82f4e28db3b31b229"
+ integrity sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g==
+ dependencies:
+ dns-packet "^1.3.1"
+ thunky "^1.0.2"
+
+nan@^2.12.1:
+ version "2.14.2"
+ resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.2.tgz#f5376400695168f4cc694ac9393d0c9585eeea19"
+ integrity sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ==
+
+nanoid@^3.1.20:
+ version "3.1.20"
+ resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.20.tgz#badc263c6b1dcf14b71efaa85f6ab4c1d6cfc788"
+ integrity sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==
+
+nanomatch@^1.2.9:
+ version "1.2.13"
+ resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"
+ integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==
+ dependencies:
+ arr-diff "^4.0.0"
+ array-unique "^0.3.2"
+ define-property "^2.0.2"
+ extend-shallow "^3.0.2"
+ fragment-cache "^0.2.1"
+ is-windows "^1.0.2"
+ kind-of "^6.0.2"
+ object.pick "^1.3.0"
+ regex-not "^1.0.0"
+ snapdragon "^0.8.1"
+ to-regex "^3.0.1"
+
+native-url@^0.2.6:
+ version "0.2.6"
+ resolved "https://registry.yarnpkg.com/native-url/-/native-url-0.2.6.tgz#ca1258f5ace169c716ff44eccbddb674e10399ae"
+ integrity sha512-k4bDC87WtgrdD362gZz6zoiXQrl40kYlBmpfmSjwRO1VU0V5ccwJTlxuE72F6m3V0vc1xOf6n3UCP9QyerRqmA==
+ dependencies:
+ querystring "^0.2.0"
+
+natural-compare@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
+ integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
+
[email protected]:
+ version "0.6.2"
+ resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb"
+ integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==
+
+neo-async@^2.5.0, neo-async@^2.6.1, neo-async@^2.6.2:
+ version "2.6.2"
+ resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f"
+ integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==
+
+next-tick@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c"
+ integrity sha1-yobR/ogoFpsBICCOPchCS524NCw=
+
+nice-try@^1.0.4:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
+ integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
+
+no-case@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d"
+ integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==
+ dependencies:
+ lower-case "^2.0.2"
+ tslib "^2.0.3"
+
[email protected]:
+ version "2.6.1"
+ resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
+ integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==
+
+node-forge@^0.10.0:
+ version "0.10.0"
+ resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3"
+ integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==
+
+node-int64@^0.4.0:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b"
+ integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs=
+
+node-libs-browser@^2.2.1:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425"
+ integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q==
+ dependencies:
+ assert "^1.1.1"
+ browserify-zlib "^0.2.0"
+ buffer "^4.3.0"
+ console-browserify "^1.1.0"
+ constants-browserify "^1.0.0"
+ crypto-browserify "^3.11.0"
+ domain-browser "^1.1.1"
+ events "^3.0.0"
+ https-browserify "^1.0.0"
+ os-browserify "^0.3.0"
+ path-browserify "0.0.1"
+ process "^0.11.10"
+ punycode "^1.2.4"
+ querystring-es3 "^0.2.0"
+ readable-stream "^2.3.3"
+ stream-browserify "^2.0.1"
+ stream-http "^2.7.2"
+ string_decoder "^1.0.0"
+ timers-browserify "^2.0.4"
+ tty-browserify "0.0.0"
+ url "^0.11.0"
+ util "^0.11.0"
+ vm-browserify "^1.0.1"
+
+node-modules-regexp@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40"
+ integrity sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA=
+
+node-notifier@^8.0.0:
+ version "8.0.1"
+ resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-8.0.1.tgz#f86e89bbc925f2b068784b31f382afdc6ca56be1"
+ integrity sha512-BvEXF+UmsnAfYfoapKM9nGxnP+Wn7P91YfXmrKnfcYCx6VBeoN5Ez5Ogck6I8Bi5k4RlpqRYaw75pAwzX9OphA==
+ dependencies:
+ growly "^1.3.0"
+ is-wsl "^2.2.0"
+ semver "^7.3.2"
+ shellwords "^0.1.1"
+ uuid "^8.3.0"
+ which "^2.0.2"
+
+node-releases@^1.1.61, node-releases@^1.1.70:
+ version "1.1.70"
+ resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.70.tgz#66e0ed0273aa65666d7fe78febe7634875426a08"
+ integrity sha512-Slf2s69+2/uAD79pVVQo8uSiC34+g8GWY8UH2Qtqv34ZfhYrxpYpfzs9Js9d6O0mbDmALuxaTlplnBTnSELcrw==
+
+normalize-package-data@^2.3.2, normalize-package-data@^2.5.0:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8"
+ integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==
+ dependencies:
+ hosted-git-info "^2.1.4"
+ resolve "^1.10.0"
+ semver "2 || 3 || 4 || 5"
+ validate-npm-package-license "^3.0.1"
+
+normalize-path@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9"
+ integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=
+ dependencies:
+ remove-trailing-separator "^1.0.1"
+
+normalize-path@^3.0.0, normalize-path@~3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
+ integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
+
+normalize-range@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942"
+ integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=
+
[email protected]:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-1.9.1.tgz#2cc0d66b31ea23036458436e3620d85954c66c3c"
+ integrity sha1-LMDWazHqIwNkWENuNiDYWVTGbDw=
+ dependencies:
+ object-assign "^4.0.1"
+ prepend-http "^1.0.0"
+ query-string "^4.1.0"
+ sort-keys "^1.0.0"
+
+normalize-url@^3.0.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-3.3.0.tgz#b2e1c4dc4f7c6d57743df733a4f5978d18650559"
+ integrity sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg==
+
+npm-run-path@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
+ integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=
+ dependencies:
+ path-key "^2.0.0"
+
+npm-run-path@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea"
+ integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==
+ dependencies:
+ path-key "^3.0.0"
+
+nth-check@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c"
+ integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==
+ dependencies:
+ boolbase "~1.0.0"
+
+num2fraction@^1.2.2:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede"
+ integrity sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4=
+
+nwsapi@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7"
+ integrity sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==
+
+oauth-sign@~0.9.0:
+ version "0.9.0"
+ resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455"
+ integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==
+
+object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
+ integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=
+
+object-copy@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c"
+ integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw=
+ dependencies:
+ copy-descriptor "^0.1.0"
+ define-property "^0.2.5"
+ kind-of "^3.0.3"
+
+object-inspect@^1.11.0:
+ version "1.11.0"
+ resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.11.0.tgz#9dceb146cedd4148a0d9e51ab88d34cf509922b1"
+ integrity sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==
+
+object-inspect@^1.8.0, object-inspect@^1.9.0:
+ version "1.9.0"
+ resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.9.0.tgz#c90521d74e1127b67266ded3394ad6116986533a"
+ integrity sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==
+
+object-is@^1.0.1:
+ version "1.1.5"
+ resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac"
+ integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+
+object-keys@^1.0.12, object-keys@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
+ integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==
+
+object-visit@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb"
+ integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=
+ dependencies:
+ isobject "^3.0.0"
+
+object.assign@^4.1.0, object.assign@^4.1.1, object.assign@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940"
+ integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==
+ dependencies:
+ call-bind "^1.0.0"
+ define-properties "^1.1.3"
+ has-symbols "^1.0.1"
+ object-keys "^1.1.1"
+
+object.entries@^1.1.0, object.entries@^1.1.2:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.3.tgz#c601c7f168b62374541a07ddbd3e2d5e4f7711a6"
+ integrity sha512-ym7h7OZebNS96hn5IJeyUmaWhaSM4SVtAPPfNLQEI2MYWCO2egsITb9nab2+i/Pwibx+R0mtn+ltKJXRSeTMGg==
+ dependencies:
+ call-bind "^1.0.0"
+ define-properties "^1.1.3"
+ es-abstract "^1.18.0-next.1"
+ has "^1.0.3"
+
+object.entries@^1.1.4:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.4.tgz#43ccf9a50bc5fd5b649d45ab1a579f24e088cafd"
+ integrity sha512-h4LWKWE+wKQGhtMjZEBud7uLGhqyLwj8fpHOarZhD2uY3C9cRtk57VQ89ke3moByLXMedqs3XCHzyb4AmA2DjA==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.18.2"
+
+object.fromentries@^2.0.2, object.fromentries@^2.0.4:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.4.tgz#26e1ba5c4571c5c6f0890cef4473066456a120b8"
+ integrity sha512-EsFBshs5RUUpQEY1D4q/m59kMfz4YJvxuNCJcv/jWwOJr34EaVnG11ZrZa0UHB3wnzV1wx8m58T4hQL8IuNXlQ==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.18.0-next.2"
+ has "^1.0.3"
+
+object.getownpropertydescriptors@^2.0.3, object.getownpropertydescriptors@^2.1.0:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz#1bd63aeacf0d5d2d2f31b5e393b03a7c601a23f7"
+ integrity sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.18.0-next.2"
+
+object.pick@^1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747"
+ integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=
+ dependencies:
+ isobject "^3.0.1"
+
+object.values@^1.1.0, object.values@^1.1.1:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.2.tgz#7a2015e06fcb0f546bd652486ce8583a4731c731"
+ integrity sha512-MYC0jvJopr8EK6dPBiO8Nb9mvjdypOachO5REGk6MXzujbBrAisKo3HmdEI6kZDL6fC31Mwee/5YbtMebixeag==
+ dependencies:
+ call-bind "^1.0.0"
+ define-properties "^1.1.3"
+ es-abstract "^1.18.0-next.1"
+ has "^1.0.3"
+
+object.values@^1.1.3, object.values@^1.1.4:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.4.tgz#0d273762833e816b693a637d30073e7051535b30"
+ integrity sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.18.2"
+
+obuf@^1.0.0, obuf@^1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e"
+ integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==
+
+on-finished@~2.3.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947"
+ integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=
+ dependencies:
+ ee-first "1.1.1"
+
+on-headers@~1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f"
+ integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==
+
+once@^1.3.0, once@^1.3.1, once@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
+ integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
+ dependencies:
+ wrappy "1"
+
+onetime@^5.1.0:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e"
+ integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==
+ dependencies:
+ mimic-fn "^2.1.0"
+
+open@^7.0.2:
+ version "7.4.2"
+ resolved "https://registry.yarnpkg.com/open/-/open-7.4.2.tgz#b8147e26dcf3e426316c730089fd71edd29c2321"
+ integrity sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==
+ dependencies:
+ is-docker "^2.0.0"
+ is-wsl "^2.1.1"
+
+opn@^5.5.0:
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc"
+ integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA==
+ dependencies:
+ is-wsl "^1.1.0"
+
[email protected]:
+ version "5.0.4"
+ resolved "https://registry.yarnpkg.com/optimize-css-assets-webpack-plugin/-/optimize-css-assets-webpack-plugin-5.0.4.tgz#85883c6528aaa02e30bbad9908c92926bb52dc90"
+ integrity sha512-wqd6FdI2a5/FdoiCNNkEvLeA//lHHfG24Ln2Xm2qqdIk4aOlsR18jwpyOihqQ8849W3qu2DX8fOYxpvTMj+93A==
+ dependencies:
+ cssnano "^4.1.10"
+ last-call-webpack-plugin "^3.0.0"
+
+optionator@^0.8.1:
+ version "0.8.3"
+ resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495"
+ integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==
+ dependencies:
+ deep-is "~0.1.3"
+ fast-levenshtein "~2.0.6"
+ levn "~0.3.0"
+ prelude-ls "~1.1.2"
+ type-check "~0.3.2"
+ word-wrap "~1.2.3"
+
+optionator@^0.9.1:
+ version "0.9.1"
+ resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499"
+ integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==
+ dependencies:
+ deep-is "^0.1.3"
+ fast-levenshtein "^2.0.6"
+ levn "^0.4.1"
+ prelude-ls "^1.2.1"
+ type-check "^0.4.0"
+ word-wrap "^1.2.3"
+
+original@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f"
+ integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg==
+ dependencies:
+ url-parse "^1.4.3"
+
+os-browserify@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27"
+ integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=
+
+p-each-series@^2.1.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/p-each-series/-/p-each-series-2.2.0.tgz#105ab0357ce72b202a8a8b94933672657b5e2a9a"
+ integrity sha512-ycIL2+1V32th+8scbpTvyHNaHe02z0sjgh91XXjAk+ZeXoPN4Z46DVUnzdso0aX4KckKw0FNNFHdjZ2UsZvxiA==
+
+p-finally@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
+ integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=
+
+p-limit@^1.1.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8"
+ integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==
+ dependencies:
+ p-try "^1.0.0"
+
+p-limit@^2.0.0, p-limit@^2.2.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
+ integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
+ dependencies:
+ p-try "^2.0.0"
+
+p-limit@^3.0.2:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b"
+ integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==
+ dependencies:
+ yocto-queue "^0.1.0"
+
+p-locate@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43"
+ integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=
+ dependencies:
+ p-limit "^1.1.0"
+
+p-locate@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4"
+ integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==
+ dependencies:
+ p-limit "^2.0.0"
+
+p-locate@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07"
+ integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==
+ dependencies:
+ p-limit "^2.2.0"
+
+p-map@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175"
+ integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==
+
+p-map@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b"
+ integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==
+ dependencies:
+ aggregate-error "^3.0.0"
+
+p-retry@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328"
+ integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w==
+ dependencies:
+ retry "^0.12.0"
+
+p-try@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3"
+ integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=
+
+p-try@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
+ integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
+
+pako@~1.0.5:
+ version "1.0.11"
+ resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf"
+ integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==
+
+parallel-transform@^1.1.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc"
+ integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg==
+ dependencies:
+ cyclist "^1.0.1"
+ inherits "^2.0.3"
+ readable-stream "^2.1.5"
+
+param-case@^3.0.3:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5"
+ integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==
+ dependencies:
+ dot-case "^3.0.4"
+ tslib "^2.0.3"
+
+parent-module@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2"
+ integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==
+ dependencies:
+ callsites "^3.0.0"
+
+parse-asn1@^5.0.0, parse-asn1@^5.1.5:
+ version "5.1.6"
+ resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.6.tgz#385080a3ec13cb62a62d39409cb3e88844cdaed4"
+ integrity sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==
+ dependencies:
+ asn1.js "^5.2.0"
+ browserify-aes "^1.0.0"
+ evp_bytestokey "^1.0.0"
+ pbkdf2 "^3.0.3"
+ safe-buffer "^5.1.1"
+
+parse-json@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9"
+ integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=
+ dependencies:
+ error-ex "^1.2.0"
+
+parse-json@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0"
+ integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=
+ dependencies:
+ error-ex "^1.3.1"
+ json-parse-better-errors "^1.0.1"
+
+parse-json@^5.0.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd"
+ integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==
+ dependencies:
+ "@babel/code-frame" "^7.0.0"
+ error-ex "^1.3.1"
+ json-parse-even-better-errors "^2.3.0"
+ lines-and-columns "^1.1.6"
+
[email protected]:
+ version "5.1.1"
+ resolved "https://registry.yarnpkg.com/parse5/-/parse5-5.1.1.tgz#f68e4e5ba1852ac2cadc00f4555fff6c2abb6178"
+ integrity sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==
+
+parseurl@~1.3.2, parseurl@~1.3.3:
+ version "1.3.3"
+ resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4"
+ integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==
+
+pascal-case@^3.1.2:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb"
+ integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==
+ dependencies:
+ no-case "^3.0.4"
+ tslib "^2.0.3"
+
+pascalcase@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14"
+ integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=
+
[email protected]:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a"
+ integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ==
+
+path-dirname@^1.0.0:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0"
+ integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=
+
+path-exists@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
+ integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=
+
+path-exists@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3"
+ integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==
+
+path-is-absolute@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
+ integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
+
+path-is-inside@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53"
+ integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=
+
+path-key@^2.0.0, path-key@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
+ integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=
+
+path-key@^3.0.0, path-key@^3.1.0:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375"
+ integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
+
+path-parse@^1.0.6:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c"
+ integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==
+
[email protected]:
+ version "0.1.7"
+ resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
+ integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=
+
+path-to-regexp@^1.7.0:
+ version "1.8.0"
+ resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a"
+ integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==
+ dependencies:
+ isarray "0.0.1"
+
+path-type@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73"
+ integrity sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=
+ dependencies:
+ pify "^2.0.0"
+
+path-type@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f"
+ integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==
+ dependencies:
+ pify "^3.0.0"
+
+path-type@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b"
+ integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==
+
+pbkdf2@^3.0.3:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.1.tgz#cb8724b0fada984596856d1a6ebafd3584654b94"
+ integrity sha512-4Ejy1OPxi9f2tt1rRV7Go7zmfDQ+ZectEQz3VGUQhgq62HtIRPDyG/JtnwIxs6x3uNMwo2V7q1fMvKjb+Tnpqg==
+ dependencies:
+ create-hash "^1.1.2"
+ create-hmac "^1.1.4"
+ ripemd160 "^2.0.1"
+ safe-buffer "^5.0.1"
+ sha.js "^2.4.8"
+
+performance-now@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b"
+ integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=
+
+picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1, picomatch@^2.2.2:
+ version "2.2.2"
+ resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
+ integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
+
+pify@^2.0.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
+ integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw=
+
+pify@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176"
+ integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=
+
+pify@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231"
+ integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==
+
+pinkie-promise@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa"
+ integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o=
+ dependencies:
+ pinkie "^2.0.0"
+
+pinkie@^2.0.0:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870"
+ integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA=
+
+pirates@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.1.tgz#643a92caf894566f91b2b986d2c66950a8e2fb87"
+ integrity sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA==
+ dependencies:
+ node-modules-regexp "^1.0.0"
+
+pkg-dir@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b"
+ integrity sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=
+ dependencies:
+ find-up "^2.1.0"
+
+pkg-dir@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3"
+ integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==
+ dependencies:
+ find-up "^3.0.0"
+
+pkg-dir@^4.1.0, pkg-dir@^4.2.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3"
+ integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==
+ dependencies:
+ find-up "^4.0.0"
+
[email protected]:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5"
+ integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==
+ dependencies:
+ find-up "^3.0.0"
+
+pkg-up@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-2.0.0.tgz#c819ac728059a461cab1c3889a2be3c49a004d7f"
+ integrity sha1-yBmscoBZpGHKscOImivjxJoATX8=
+ dependencies:
+ find-up "^2.1.0"
+
[email protected]:
+ version "1.6.4"
+ resolved "https://registry.yarnpkg.com/pnp-webpack-plugin/-/pnp-webpack-plugin-1.6.4.tgz#c9711ac4dc48a685dabafc86f8b6dd9f8df84149"
+ integrity sha512-7Wjy+9E3WwLOEL30D+m8TSTF7qJJUJLONBnwQp0518siuMxUQUbgZwssaFX+QKlZkjHZcw/IpZCt/H0srrntSg==
+ dependencies:
+ ts-pnp "^1.1.6"
+
[email protected]:
+ version "9.3.6"
+ resolved "https://registry.yarnpkg.com/popmotion/-/popmotion-9.3.6.tgz#b5236fa28f242aff3871b9e23721f093133248d1"
+ integrity sha512-ZTbXiu6zIggXzIliMi8LGxXBF5ST+wkpXGEjeTUDUOCdSQ356hij/xjeUdv0F8zCQNeqB1+PR5/BB+gC+QLAPw==
+ dependencies:
+ framesync "5.3.0"
+ hey-listen "^1.0.8"
+ style-value-types "4.1.4"
+ tslib "^2.1.0"
+
+portfinder@^1.0.26:
+ version "1.0.28"
+ resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.28.tgz#67c4622852bd5374dd1dd900f779f53462fac778"
+ integrity sha512-Se+2isanIcEqf2XMHjyUKskczxbPH7dQnlMjXX6+dybayyHvAf/TCgyMRlzf/B6QDhAEFOGes0pzRo3by4AbMA==
+ dependencies:
+ async "^2.6.2"
+ debug "^3.1.1"
+ mkdirp "^0.5.5"
+
+posix-character-classes@^0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab"
+ integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=
+
+postcss-attribute-case-insensitive@^4.0.1:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-4.0.2.tgz#d93e46b504589e94ac7277b0463226c68041a880"
+ integrity sha512-clkFxk/9pcdb4Vkn0hAHq3YnxBQ2p0CGD1dy24jN+reBck+EWxMbxSUqN4Yj7t0w8csl87K6p0gxBe1utkJsYA==
+ dependencies:
+ postcss "^7.0.2"
+ postcss-selector-parser "^6.0.2"
+
+postcss-browser-comments@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-browser-comments/-/postcss-browser-comments-3.0.0.tgz#1248d2d935fb72053c8e1f61a84a57292d9f65e9"
+ integrity sha512-qfVjLfq7HFd2e0HW4s1dvU8X080OZdG46fFbIBFjW7US7YPDcWfRvdElvwMJr2LI6hMmD+7LnH2HcmXTs+uOig==
+ dependencies:
+ postcss "^7"
+
+postcss-calc@^7.0.1:
+ version "7.0.5"
+ resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-7.0.5.tgz#f8a6e99f12e619c2ebc23cf6c486fdc15860933e"
+ integrity sha512-1tKHutbGtLtEZF6PT4JSihCHfIVldU72mZ8SdZHIYriIZ9fh9k9aWSppaT8rHsyI3dX+KSR+W+Ix9BMY3AODrg==
+ dependencies:
+ postcss "^7.0.27"
+ postcss-selector-parser "^6.0.2"
+ postcss-value-parser "^4.0.2"
+
+postcss-color-functional-notation@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-2.0.1.tgz#5efd37a88fbabeb00a2966d1e53d98ced93f74e0"
+ integrity sha512-ZBARCypjEDofW4P6IdPVTLhDNXPRn8T2s1zHbZidW6rPaaZvcnCS2soYFIQJrMZSxiePJ2XIYTlcb2ztr/eT2g==
+ dependencies:
+ postcss "^7.0.2"
+ postcss-values-parser "^2.0.0"
+
+postcss-color-gray@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-color-gray/-/postcss-color-gray-5.0.0.tgz#532a31eb909f8da898ceffe296fdc1f864be8547"
+ integrity sha512-q6BuRnAGKM/ZRpfDascZlIZPjvwsRye7UDNalqVz3s7GDxMtqPY6+Q871liNxsonUw8oC61OG+PSaysYpl1bnw==
+ dependencies:
+ "@csstools/convert-colors" "^1.4.0"
+ postcss "^7.0.5"
+ postcss-values-parser "^2.0.0"
+
+postcss-color-hex-alpha@^5.0.3:
+ version "5.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-color-hex-alpha/-/postcss-color-hex-alpha-5.0.3.tgz#a8d9ca4c39d497c9661e374b9c51899ef0f87388"
+ integrity sha512-PF4GDel8q3kkreVXKLAGNpHKilXsZ6xuu+mOQMHWHLPNyjiUBOr75sp5ZKJfmv1MCus5/DWUGcK9hm6qHEnXYw==
+ dependencies:
+ postcss "^7.0.14"
+ postcss-values-parser "^2.0.1"
+
+postcss-color-mod-function@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-color-mod-function/-/postcss-color-mod-function-3.0.3.tgz#816ba145ac11cc3cb6baa905a75a49f903e4d31d"
+ integrity sha512-YP4VG+xufxaVtzV6ZmhEtc+/aTXH3d0JLpnYfxqTvwZPbJhWqp8bSY3nfNzNRFLgB4XSaBA82OE4VjOOKpCdVQ==
+ dependencies:
+ "@csstools/convert-colors" "^1.4.0"
+ postcss "^7.0.2"
+ postcss-values-parser "^2.0.0"
+
+postcss-color-rebeccapurple@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-4.0.1.tgz#c7a89be872bb74e45b1e3022bfe5748823e6de77"
+ integrity sha512-aAe3OhkS6qJXBbqzvZth2Au4V3KieR5sRQ4ptb2b2O8wgvB3SJBsdG+jsn2BZbbwekDG8nTfcCNKcSfe/lEy8g==
+ dependencies:
+ postcss "^7.0.2"
+ postcss-values-parser "^2.0.0"
+
+postcss-colormin@^4.0.3:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-4.0.3.tgz#ae060bce93ed794ac71264f08132d550956bd381"
+ integrity sha512-WyQFAdDZpExQh32j0U0feWisZ0dmOtPl44qYmJKkq9xFWY3p+4qnRzCHeNrkeRhwPHz9bQ3mo0/yVkaply0MNw==
+ dependencies:
+ browserslist "^4.0.0"
+ color "^3.0.0"
+ has "^1.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-convert-values@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-4.0.1.tgz#ca3813ed4da0f812f9d43703584e449ebe189a7f"
+ integrity sha512-Kisdo1y77KUC0Jmn0OXU/COOJbzM8cImvw1ZFsBgBgMgb1iL23Zs/LXRe3r+EZqM3vGYKdQ2YJVQ5VkJI+zEJQ==
+ dependencies:
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-custom-media@^7.0.8:
+ version "7.0.8"
+ resolved "https://registry.yarnpkg.com/postcss-custom-media/-/postcss-custom-media-7.0.8.tgz#fffd13ffeffad73621be5f387076a28b00294e0c"
+ integrity sha512-c9s5iX0Ge15o00HKbuRuTqNndsJUbaXdiNsksnVH8H4gdc+zbLzr/UasOwNG6CTDpLFekVY4672eWdiiWu2GUg==
+ dependencies:
+ postcss "^7.0.14"
+
+postcss-custom-properties@^8.0.11:
+ version "8.0.11"
+ resolved "https://registry.yarnpkg.com/postcss-custom-properties/-/postcss-custom-properties-8.0.11.tgz#2d61772d6e92f22f5e0d52602df8fae46fa30d97"
+ integrity sha512-nm+o0eLdYqdnJ5abAJeXp4CEU1c1k+eB2yMCvhgzsds/e0umabFrN6HoTy/8Q4K5ilxERdl/JD1LO5ANoYBeMA==
+ dependencies:
+ postcss "^7.0.17"
+ postcss-values-parser "^2.0.1"
+
+postcss-custom-selectors@^5.1.2:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/postcss-custom-selectors/-/postcss-custom-selectors-5.1.2.tgz#64858c6eb2ecff2fb41d0b28c9dd7b3db4de7fba"
+ integrity sha512-DSGDhqinCqXqlS4R7KGxL1OSycd1lydugJ1ky4iRXPHdBRiozyMHrdu0H3o7qNOCiZwySZTUI5MV0T8QhCLu+w==
+ dependencies:
+ postcss "^7.0.2"
+ postcss-selector-parser "^5.0.0-rc.3"
+
+postcss-dir-pseudo-class@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-5.0.0.tgz#6e3a4177d0edb3abcc85fdb6fbb1c26dabaeaba2"
+ integrity sha512-3pm4oq8HYWMZePJY+5ANriPs3P07q+LW6FAdTlkFH2XqDdP4HeeJYMOzn0HYLhRSjBO3fhiqSwwU9xEULSrPgw==
+ dependencies:
+ postcss "^7.0.2"
+ postcss-selector-parser "^5.0.0-rc.3"
+
+postcss-discard-comments@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-4.0.2.tgz#1fbabd2c246bff6aaad7997b2b0918f4d7af4033"
+ integrity sha512-RJutN259iuRf3IW7GZyLM5Sw4GLTOH8FmsXBnv8Ab/Tc2k4SR4qbV4DNbyyY4+Sjo362SyDmW2DQ7lBSChrpkg==
+ dependencies:
+ postcss "^7.0.0"
+
+postcss-discard-duplicates@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-4.0.2.tgz#3fe133cd3c82282e550fc9b239176a9207b784eb"
+ integrity sha512-ZNQfR1gPNAiXZhgENFfEglF93pciw0WxMkJeVmw8eF+JZBbMD7jp6C67GqJAXVZP2BWbOztKfbsdmMp/k8c6oQ==
+ dependencies:
+ postcss "^7.0.0"
+
+postcss-discard-empty@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-4.0.1.tgz#c8c951e9f73ed9428019458444a02ad90bb9f765"
+ integrity sha512-B9miTzbznhDjTfjvipfHoqbWKwd0Mj+/fL5s1QOz06wufguil+Xheo4XpOnc4NqKYBCNqqEzgPv2aPBIJLox0w==
+ dependencies:
+ postcss "^7.0.0"
+
+postcss-discard-overridden@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-4.0.1.tgz#652aef8a96726f029f5e3e00146ee7a4e755ff57"
+ integrity sha512-IYY2bEDD7g1XM1IDEsUT4//iEYCxAmP5oDSFMVU/JVvT7gh+l4fmjciLqGgwjdWpQIdb0Che2VX00QObS5+cTg==
+ dependencies:
+ postcss "^7.0.0"
+
+postcss-double-position-gradients@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-double-position-gradients/-/postcss-double-position-gradients-1.0.0.tgz#fc927d52fddc896cb3a2812ebc5df147e110522e"
+ integrity sha512-G+nV8EnQq25fOI8CH/B6krEohGWnF5+3A6H/+JEpOncu5dCnkS1QQ6+ct3Jkaepw1NGVqqOZH6lqrm244mCftA==
+ dependencies:
+ postcss "^7.0.5"
+ postcss-values-parser "^2.0.0"
+
+postcss-env-function@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-env-function/-/postcss-env-function-2.0.2.tgz#0f3e3d3c57f094a92c2baf4b6241f0b0da5365d7"
+ integrity sha512-rwac4BuZlITeUbiBq60h/xbLzXY43qOsIErngWa4l7Mt+RaSkT7QBjXVGTcBHupykkblHMDrBFh30zchYPaOUw==
+ dependencies:
+ postcss "^7.0.2"
+ postcss-values-parser "^2.0.0"
+
[email protected]:
+ version "4.2.1"
+ resolved "https://registry.yarnpkg.com/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-4.2.1.tgz#9218a65249f30897deab1033aced8578562a6690"
+ integrity sha512-9SiofaZ9CWpQWxOwRh1b/r85KD5y7GgvsNt1056k6OYLvWUun0czCvogfJgylC22uJTwW1KzY3Gz65NZRlvoiQ==
+ dependencies:
+ postcss "^7.0.26"
+
+postcss-focus-visible@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-focus-visible/-/postcss-focus-visible-4.0.0.tgz#477d107113ade6024b14128317ade2bd1e17046e"
+ integrity sha512-Z5CkWBw0+idJHSV6+Bgf2peDOFf/x4o+vX/pwcNYrWpXFrSfTkQ3JQ1ojrq9yS+upnAlNRHeg8uEwFTgorjI8g==
+ dependencies:
+ postcss "^7.0.2"
+
+postcss-focus-within@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-focus-within/-/postcss-focus-within-3.0.0.tgz#763b8788596cee9b874c999201cdde80659ef680"
+ integrity sha512-W0APui8jQeBKbCGZudW37EeMCjDeVxKgiYfIIEo8Bdh5SpB9sxds/Iq8SEuzS0Q4YFOlG7EPFulbbxujpkrV2w==
+ dependencies:
+ postcss "^7.0.2"
+
+postcss-font-variant@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-font-variant/-/postcss-font-variant-4.0.1.tgz#42d4c0ab30894f60f98b17561eb5c0321f502641"
+ integrity sha512-I3ADQSTNtLTTd8uxZhtSOrTCQ9G4qUVKPjHiDk0bV75QSxXjVWiJVJ2VLdspGUi9fbW9BcjKJoRvxAH1pckqmA==
+ dependencies:
+ postcss "^7.0.2"
+
+postcss-gap-properties@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-gap-properties/-/postcss-gap-properties-2.0.0.tgz#431c192ab3ed96a3c3d09f2ff615960f902c1715"
+ integrity sha512-QZSqDaMgXCHuHTEzMsS2KfVDOq7ZFiknSpkrPJY6jmxbugUPTuSzs/vuE5I3zv0WAS+3vhrlqhijiprnuQfzmg==
+ dependencies:
+ postcss "^7.0.2"
+
+postcss-image-set-function@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-image-set-function/-/postcss-image-set-function-3.0.1.tgz#28920a2f29945bed4c3198d7df6496d410d3f288"
+ integrity sha512-oPTcFFip5LZy8Y/whto91L9xdRHCWEMs3e1MdJxhgt4jy2WYXfhkng59fH5qLXSCPN8k4n94p1Czrfe5IOkKUw==
+ dependencies:
+ postcss "^7.0.2"
+ postcss-values-parser "^2.0.0"
+
+postcss-initial@^3.0.0:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-initial/-/postcss-initial-3.0.2.tgz#f018563694b3c16ae8eaabe3c585ac6319637b2d"
+ integrity sha512-ugA2wKonC0xeNHgirR4D3VWHs2JcU08WAi1KFLVcnb7IN89phID6Qtg2RIctWbnvp1TM2BOmDtX8GGLCKdR8YA==
+ dependencies:
+ lodash.template "^4.5.0"
+ postcss "^7.0.2"
+
+postcss-lab-function@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-lab-function/-/postcss-lab-function-2.0.1.tgz#bb51a6856cd12289ab4ae20db1e3821ef13d7d2e"
+ integrity sha512-whLy1IeZKY+3fYdqQFuDBf8Auw+qFuVnChWjmxm/UhHWqNHZx+B99EwxTvGYmUBqe3Fjxs4L1BoZTJmPu6usVg==
+ dependencies:
+ "@csstools/convert-colors" "^1.4.0"
+ postcss "^7.0.2"
+ postcss-values-parser "^2.0.0"
+
+postcss-load-config@^2.0.0:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-2.1.2.tgz#c5ea504f2c4aef33c7359a34de3573772ad7502a"
+ integrity sha512-/rDeGV6vMUo3mwJZmeHfEDvwnTKKqQ0S7OHUi/kJvvtx3aWtyWG2/0ZWnzCt2keEclwN6Tf0DST2v9kITdOKYw==
+ dependencies:
+ cosmiconfig "^5.0.0"
+ import-cwd "^2.0.0"
+
[email protected]:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-3.0.0.tgz#6b97943e47c72d845fa9e03f273773d4e8dd6c2d"
+ integrity sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA==
+ dependencies:
+ loader-utils "^1.1.0"
+ postcss "^7.0.0"
+ postcss-load-config "^2.0.0"
+ schema-utils "^1.0.0"
+
+postcss-logical@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-logical/-/postcss-logical-3.0.0.tgz#2495d0f8b82e9f262725f75f9401b34e7b45d5b5"
+ integrity sha512-1SUKdJc2vuMOmeItqGuNaC+N8MzBWFWEkAnRnLpFYj1tGGa7NqyVBujfRtgNa2gXR+6RkGUiB2O5Vmh7E2RmiA==
+ dependencies:
+ postcss "^7.0.2"
+
+postcss-media-minmax@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-media-minmax/-/postcss-media-minmax-4.0.0.tgz#b75bb6cbc217c8ac49433e12f22048814a4f5ed5"
+ integrity sha512-fo9moya6qyxsjbFAYl97qKO9gyre3qvbMnkOZeZwlsW6XYFsvs2DMGDlchVLfAd8LHPZDxivu/+qW2SMQeTHBw==
+ dependencies:
+ postcss "^7.0.2"
+
+postcss-merge-longhand@^4.0.11:
+ version "4.0.11"
+ resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-4.0.11.tgz#62f49a13e4a0ee04e7b98f42bb16062ca2549e24"
+ integrity sha512-alx/zmoeXvJjp7L4mxEMjh8lxVlDFX1gqWHzaaQewwMZiVhLo42TEClKaeHbRf6J7j82ZOdTJ808RtN0ZOZwvw==
+ dependencies:
+ css-color-names "0.0.4"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+ stylehacks "^4.0.0"
+
+postcss-merge-rules@^4.0.3:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-4.0.3.tgz#362bea4ff5a1f98e4075a713c6cb25aefef9a650"
+ integrity sha512-U7e3r1SbvYzO0Jr3UT/zKBVgYYyhAz0aitvGIYOYK5CPmkNih+WDSsS5tvPrJ8YMQYlEMvsZIiqmn7HdFUaeEQ==
+ dependencies:
+ browserslist "^4.0.0"
+ caniuse-api "^3.0.0"
+ cssnano-util-same-parent "^4.0.0"
+ postcss "^7.0.0"
+ postcss-selector-parser "^3.0.0"
+ vendors "^1.0.0"
+
+postcss-minify-font-values@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-4.0.2.tgz#cd4c344cce474343fac5d82206ab2cbcb8afd5a6"
+ integrity sha512-j85oO6OnRU9zPf04+PZv1LYIYOprWm6IA6zkXkrJXyRveDEuQggG6tvoy8ir8ZwjLxLuGfNkCZEQG7zan+Hbtg==
+ dependencies:
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-minify-gradients@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-4.0.2.tgz#93b29c2ff5099c535eecda56c4aa6e665a663471"
+ integrity sha512-qKPfwlONdcf/AndP1U8SJ/uzIJtowHlMaSioKzebAXSG4iJthlWC9iSWznQcX4f66gIWX44RSA841HTHj3wK+Q==
+ dependencies:
+ cssnano-util-get-arguments "^4.0.0"
+ is-color-stop "^1.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-minify-params@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-4.0.2.tgz#6b9cef030c11e35261f95f618c90036d680db874"
+ integrity sha512-G7eWyzEx0xL4/wiBBJxJOz48zAKV2WG3iZOqVhPet/9geefm/Px5uo1fzlHu+DOjT+m0Mmiz3jkQzVHe6wxAWg==
+ dependencies:
+ alphanum-sort "^1.0.0"
+ browserslist "^4.0.0"
+ cssnano-util-get-arguments "^4.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+ uniqs "^2.0.0"
+
+postcss-minify-selectors@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-4.0.2.tgz#e2e5eb40bfee500d0cd9243500f5f8ea4262fbd8"
+ integrity sha512-D5S1iViljXBj9kflQo4YutWnJmwm8VvIsU1GeXJGiG9j8CIg9zs4voPMdQDUmIxetUOh60VilsNzCiAFTOqu3g==
+ dependencies:
+ alphanum-sort "^1.0.0"
+ has "^1.0.0"
+ postcss "^7.0.0"
+ postcss-selector-parser "^3.0.0"
+
+postcss-modules-extract-imports@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz#818719a1ae1da325f9832446b01136eeb493cd7e"
+ integrity sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ==
+ dependencies:
+ postcss "^7.0.5"
+
+postcss-modules-local-by-default@^3.0.3:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-3.0.3.tgz#bb14e0cc78279d504dbdcbfd7e0ca28993ffbbb0"
+ integrity sha512-e3xDq+LotiGesympRlKNgaJ0PCzoUIdpH0dj47iWAui/kyTgh3CiAr1qP54uodmJhl6p9rN6BoNcdEDVJx9RDw==
+ dependencies:
+ icss-utils "^4.1.1"
+ postcss "^7.0.32"
+ postcss-selector-parser "^6.0.2"
+ postcss-value-parser "^4.1.0"
+
+postcss-modules-scope@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-2.2.0.tgz#385cae013cc7743f5a7d7602d1073a89eaae62ee"
+ integrity sha512-YyEgsTMRpNd+HmyC7H/mh3y+MeFWevy7V1evVhJWewmMbjDHIbZbOXICC2y+m1xI1UVfIT1HMW/O04Hxyu9oXQ==
+ dependencies:
+ postcss "^7.0.6"
+ postcss-selector-parser "^6.0.0"
+
+postcss-modules-values@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-3.0.0.tgz#5b5000d6ebae29b4255301b4a3a54574423e7f10"
+ integrity sha512-1//E5jCBrZ9DmRX+zCtmQtRSV6PV42Ix7Bzj9GbwJceduuf7IqP8MgeTXuRDHOWj2m0VzZD5+roFWDuU8RQjcg==
+ dependencies:
+ icss-utils "^4.0.0"
+ postcss "^7.0.6"
+
+postcss-nesting@^7.0.0:
+ version "7.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-nesting/-/postcss-nesting-7.0.1.tgz#b50ad7b7f0173e5b5e3880c3501344703e04c052"
+ integrity sha512-FrorPb0H3nuVq0Sff7W2rnc3SmIcruVC6YwpcS+k687VxyxO33iE1amna7wHuRVzM8vfiYofXSBHNAZ3QhLvYg==
+ dependencies:
+ postcss "^7.0.2"
+
+postcss-normalize-charset@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-4.0.1.tgz#8b35add3aee83a136b0471e0d59be58a50285dd4"
+ integrity sha512-gMXCrrlWh6G27U0hF3vNvR3w8I1s2wOBILvA87iNXaPvSNo5uZAMYsZG7XjCUf1eVxuPfyL4TJ7++SGZLc9A3g==
+ dependencies:
+ postcss "^7.0.0"
+
+postcss-normalize-display-values@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-4.0.2.tgz#0dbe04a4ce9063d4667ed2be476bb830c825935a"
+ integrity sha512-3F2jcsaMW7+VtRMAqf/3m4cPFhPD3EFRgNs18u+k3lTJJlVe7d0YPO+bnwqo2xg8YiRpDXJI2u8A0wqJxMsQuQ==
+ dependencies:
+ cssnano-util-get-match "^4.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-normalize-positions@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-4.0.2.tgz#05f757f84f260437378368a91f8932d4b102917f"
+ integrity sha512-Dlf3/9AxpxE+NF1fJxYDeggi5WwV35MXGFnnoccP/9qDtFrTArZ0D0R+iKcg5WsUd8nUYMIl8yXDCtcrT8JrdA==
+ dependencies:
+ cssnano-util-get-arguments "^4.0.0"
+ has "^1.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-normalize-repeat-style@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-4.0.2.tgz#c4ebbc289f3991a028d44751cbdd11918b17910c"
+ integrity sha512-qvigdYYMpSuoFs3Is/f5nHdRLJN/ITA7huIoCyqqENJe9PvPmLhNLMu7QTjPdtnVf6OcYYO5SHonx4+fbJE1+Q==
+ dependencies:
+ cssnano-util-get-arguments "^4.0.0"
+ cssnano-util-get-match "^4.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-normalize-string@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-4.0.2.tgz#cd44c40ab07a0c7a36dc5e99aace1eca4ec2690c"
+ integrity sha512-RrERod97Dnwqq49WNz8qo66ps0swYZDSb6rM57kN2J+aoyEAJfZ6bMx0sx/F9TIEX0xthPGCmeyiam/jXif0eA==
+ dependencies:
+ has "^1.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-normalize-timing-functions@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-4.0.2.tgz#8e009ca2a3949cdaf8ad23e6b6ab99cb5e7d28d9"
+ integrity sha512-acwJY95edP762e++00Ehq9L4sZCEcOPyaHwoaFOhIwWCDfik6YvqsYNxckee65JHLKzuNSSmAdxwD2Cud1Z54A==
+ dependencies:
+ cssnano-util-get-match "^4.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-normalize-unicode@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-4.0.1.tgz#841bd48fdcf3019ad4baa7493a3d363b52ae1cfb"
+ integrity sha512-od18Uq2wCYn+vZ/qCOeutvHjB5jm57ToxRaMeNuf0nWVHaP9Hua56QyMF6fs/4FSUnVIw0CBPsU0K4LnBPwYwg==
+ dependencies:
+ browserslist "^4.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-normalize-url@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-4.0.1.tgz#10e437f86bc7c7e58f7b9652ed878daaa95faae1"
+ integrity sha512-p5oVaF4+IHwu7VpMan/SSpmpYxcJMtkGppYf0VbdH5B6hN8YNmVyJLuY9FmLQTzY3fag5ESUUHDqM+heid0UVA==
+ dependencies:
+ is-absolute-url "^2.0.0"
+ normalize-url "^3.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-normalize-whitespace@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-4.0.2.tgz#bf1d4070fe4fcea87d1348e825d8cc0c5faa7d82"
+ integrity sha512-tO8QIgrsI3p95r8fyqKV+ufKlSHh9hMJqACqbv2XknufqEDhDvbguXGBBqxw9nsQoXWf0qOqppziKJKHMD4GtA==
+ dependencies:
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
[email protected]:
+ version "8.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-normalize/-/postcss-normalize-8.0.1.tgz#90e80a7763d7fdf2da6f2f0f82be832ce4f66776"
+ integrity sha512-rt9JMS/m9FHIRroDDBGSMsyW1c0fkvOJPy62ggxSHUldJO7B195TqFMqIf+lY5ezpDcYOV4j86aUp3/XbxzCCQ==
+ dependencies:
+ "@csstools/normalize.css" "^10.1.0"
+ browserslist "^4.6.2"
+ postcss "^7.0.17"
+ postcss-browser-comments "^3.0.0"
+ sanitize.css "^10.0.0"
+
+postcss-ordered-values@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-4.1.2.tgz#0cf75c820ec7d5c4d280189559e0b571ebac0eee"
+ integrity sha512-2fCObh5UanxvSxeXrtLtlwVThBvHn6MQcu4ksNT2tsaV2Fg76R2CV98W7wNSlX+5/pFwEyaDwKLLoEV7uRybAw==
+ dependencies:
+ cssnano-util-get-arguments "^4.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-overflow-shorthand@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-overflow-shorthand/-/postcss-overflow-shorthand-2.0.0.tgz#31ecf350e9c6f6ddc250a78f0c3e111f32dd4c30"
+ integrity sha512-aK0fHc9CBNx8jbzMYhshZcEv8LtYnBIRYQD5i7w/K/wS9c2+0NSR6B3OVMu5y0hBHYLcMGjfU+dmWYNKH0I85g==
+ dependencies:
+ postcss "^7.0.2"
+
+postcss-page-break@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-page-break/-/postcss-page-break-2.0.0.tgz#add52d0e0a528cabe6afee8b46e2abb277df46bf"
+ integrity sha512-tkpTSrLpfLfD9HvgOlJuigLuk39wVTbbd8RKcy8/ugV2bNBUW3xU+AIqyxhDrQr1VUj1RmyJrBn1YWrqUm9zAQ==
+ dependencies:
+ postcss "^7.0.2"
+
+postcss-place@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-place/-/postcss-place-4.0.1.tgz#e9f39d33d2dc584e46ee1db45adb77ca9d1dcc62"
+ integrity sha512-Zb6byCSLkgRKLODj/5mQugyuj9bvAAw9LqJJjgwz5cYryGeXfFZfSXoP1UfveccFmeq0b/2xxwcTEVScnqGxBg==
+ dependencies:
+ postcss "^7.0.2"
+ postcss-values-parser "^2.0.0"
+
[email protected]:
+ version "6.7.0"
+ resolved "https://registry.yarnpkg.com/postcss-preset-env/-/postcss-preset-env-6.7.0.tgz#c34ddacf8f902383b35ad1e030f178f4cdf118a5"
+ integrity sha512-eU4/K5xzSFwUFJ8hTdTQzo2RBLbDVt83QZrAvI07TULOkmyQlnYlpwep+2yIK+K+0KlZO4BvFcleOCCcUtwchg==
+ dependencies:
+ autoprefixer "^9.6.1"
+ browserslist "^4.6.4"
+ caniuse-lite "^1.0.30000981"
+ css-blank-pseudo "^0.1.4"
+ css-has-pseudo "^0.10.0"
+ css-prefers-color-scheme "^3.1.1"
+ cssdb "^4.4.0"
+ postcss "^7.0.17"
+ postcss-attribute-case-insensitive "^4.0.1"
+ postcss-color-functional-notation "^2.0.1"
+ postcss-color-gray "^5.0.0"
+ postcss-color-hex-alpha "^5.0.3"
+ postcss-color-mod-function "^3.0.3"
+ postcss-color-rebeccapurple "^4.0.1"
+ postcss-custom-media "^7.0.8"
+ postcss-custom-properties "^8.0.11"
+ postcss-custom-selectors "^5.1.2"
+ postcss-dir-pseudo-class "^5.0.0"
+ postcss-double-position-gradients "^1.0.0"
+ postcss-env-function "^2.0.2"
+ postcss-focus-visible "^4.0.0"
+ postcss-focus-within "^3.0.0"
+ postcss-font-variant "^4.0.0"
+ postcss-gap-properties "^2.0.0"
+ postcss-image-set-function "^3.0.1"
+ postcss-initial "^3.0.0"
+ postcss-lab-function "^2.0.1"
+ postcss-logical "^3.0.0"
+ postcss-media-minmax "^4.0.0"
+ postcss-nesting "^7.0.0"
+ postcss-overflow-shorthand "^2.0.0"
+ postcss-page-break "^2.0.0"
+ postcss-place "^4.0.1"
+ postcss-pseudo-class-any-link "^6.0.0"
+ postcss-replace-overflow-wrap "^3.0.0"
+ postcss-selector-matches "^4.0.0"
+ postcss-selector-not "^4.0.0"
+
+postcss-pseudo-class-any-link@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-6.0.0.tgz#2ed3eed393b3702879dec4a87032b210daeb04d1"
+ integrity sha512-lgXW9sYJdLqtmw23otOzrtbDXofUdfYzNm4PIpNE322/swES3VU9XlXHeJS46zT2onFO7V1QFdD4Q9LiZj8mew==
+ dependencies:
+ postcss "^7.0.2"
+ postcss-selector-parser "^5.0.0-rc.3"
+
+postcss-reduce-initial@^4.0.3:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-4.0.3.tgz#7fd42ebea5e9c814609639e2c2e84ae270ba48df"
+ integrity sha512-gKWmR5aUulSjbzOfD9AlJiHCGH6AEVLaM0AV+aSioxUDd16qXP1PCh8d1/BGVvpdWn8k/HiK7n6TjeoXN1F7DA==
+ dependencies:
+ browserslist "^4.0.0"
+ caniuse-api "^3.0.0"
+ has "^1.0.0"
+ postcss "^7.0.0"
+
+postcss-reduce-transforms@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-4.0.2.tgz#17efa405eacc6e07be3414a5ca2d1074681d4e29"
+ integrity sha512-EEVig1Q2QJ4ELpJXMZR8Vt5DQx8/mo+dGWSR7vWXqcob2gQLyQGsionYcGKATXvQzMPn6DSN1vTN7yFximdIAg==
+ dependencies:
+ cssnano-util-get-match "^4.0.0"
+ has "^1.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+
+postcss-replace-overflow-wrap@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-3.0.0.tgz#61b360ffdaedca84c7c918d2b0f0d0ea559ab01c"
+ integrity sha512-2T5hcEHArDT6X9+9dVSPQdo7QHzG4XKclFT8rU5TzJPDN7RIRTbO9c4drUISOVemLj03aezStHCR2AIcr8XLpw==
+ dependencies:
+ postcss "^7.0.2"
+
[email protected]:
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-safe-parser/-/postcss-safe-parser-5.0.2.tgz#459dd27df6bc2ba64608824ba39e45dacf5e852d"
+ integrity sha512-jDUfCPJbKOABhwpUKcqCVbbXiloe/QXMcbJ6Iipf3sDIihEzTqRCeMBfRaOHxhBuTYqtASrI1KJWxzztZU4qUQ==
+ dependencies:
+ postcss "^8.1.0"
+
+postcss-selector-matches@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-selector-matches/-/postcss-selector-matches-4.0.0.tgz#71c8248f917ba2cc93037c9637ee09c64436fcff"
+ integrity sha512-LgsHwQR/EsRYSqlwdGzeaPKVT0Ml7LAT6E75T8W8xLJY62CE4S/l03BWIt3jT8Taq22kXP08s2SfTSzaraoPww==
+ dependencies:
+ balanced-match "^1.0.0"
+ postcss "^7.0.2"
+
+postcss-selector-not@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-selector-not/-/postcss-selector-not-4.0.1.tgz#263016eef1cf219e0ade9a913780fc1f48204cbf"
+ integrity sha512-YolvBgInEK5/79C+bdFMyzqTg6pkYqDbzZIST/PDMqa/o3qtXenD05apBG2jLgT0/BQ77d4U2UK12jWpilqMAQ==
+ dependencies:
+ balanced-match "^1.0.0"
+ postcss "^7.0.2"
+
+postcss-selector-parser@^3.0.0:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-3.1.2.tgz#b310f5c4c0fdaf76f94902bbaa30db6aa84f5270"
+ integrity sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA==
+ dependencies:
+ dot-prop "^5.2.0"
+ indexes-of "^1.0.1"
+ uniq "^1.0.1"
+
+postcss-selector-parser@^5.0.0-rc.3, postcss-selector-parser@^5.0.0-rc.4:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-5.0.0.tgz#249044356697b33b64f1a8f7c80922dddee7195c"
+ integrity sha512-w+zLE5Jhg6Liz8+rQOWEAwtwkyqpfnmsinXjXg6cY7YIONZZtgvE0v2O0uhQBs0peNomOJwWRKt6JBfTdTd3OQ==
+ dependencies:
+ cssesc "^2.0.0"
+ indexes-of "^1.0.1"
+ uniq "^1.0.1"
+
+postcss-selector-parser@^6.0.0, postcss-selector-parser@^6.0.2:
+ version "6.0.4"
+ resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.4.tgz#56075a1380a04604c38b063ea7767a129af5c2b3"
+ integrity sha512-gjMeXBempyInaBqpp8gODmwZ52WaYsVOsfr4L4lDQ7n3ncD6mEyySiDtgzCT+NYC0mmeOLvtsF8iaEf0YT6dBw==
+ dependencies:
+ cssesc "^3.0.0"
+ indexes-of "^1.0.1"
+ uniq "^1.0.1"
+ util-deprecate "^1.0.2"
+
+postcss-svgo@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-4.0.2.tgz#17b997bc711b333bab143aaed3b8d3d6e3d38258"
+ integrity sha512-C6wyjo3VwFm0QgBy+Fu7gCYOkCmgmClghO+pjcxvrcBKtiKt0uCF+hvbMO1fyv5BMImRK90SMb+dwUnfbGd+jw==
+ dependencies:
+ is-svg "^3.0.0"
+ postcss "^7.0.0"
+ postcss-value-parser "^3.0.0"
+ svgo "^1.0.0"
+
+postcss-unique-selectors@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-4.0.1.tgz#9446911f3289bfd64c6d680f073c03b1f9ee4bac"
+ integrity sha512-+JanVaryLo9QwZjKrmJgkI4Fn8SBgRO6WXQBJi7KiAVPlmxikB5Jzc4EvXMT2H0/m0RjrVVm9rGNhZddm/8Spg==
+ dependencies:
+ alphanum-sort "^1.0.0"
+ postcss "^7.0.0"
+ uniqs "^2.0.0"
+
+postcss-value-parser@^3.0.0, postcss-value-parser@^3.3.0:
+ version "3.3.1"
+ resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281"
+ integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==
+
+postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz#443f6a20ced6481a2bda4fa8532a6e55d789a2cb"
+ integrity sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ==
+
+postcss-values-parser@^2.0.0, postcss-values-parser@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/postcss-values-parser/-/postcss-values-parser-2.0.1.tgz#da8b472d901da1e205b47bdc98637b9e9e550e5f"
+ integrity sha512-2tLuBsA6P4rYTNKCXYG/71C7j1pU6pK503suYOmn4xYrQIzW+opD+7FAFNuGSdZC/3Qfy334QbeMu7MEb8gOxg==
+ dependencies:
+ flatten "^1.0.2"
+ indexes-of "^1.0.1"
+ uniq "^1.0.1"
+
[email protected]:
+ version "7.0.21"
+ resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.21.tgz#06bb07824c19c2021c5d056d5b10c35b989f7e17"
+ integrity sha512-uIFtJElxJo29QC753JzhidoAhvp/e/Exezkdhfmt8AymWT6/5B7W1WmponYWkHk2eg6sONyTch0A3nkMPun3SQ==
+ dependencies:
+ chalk "^2.4.2"
+ source-map "^0.6.1"
+ supports-color "^6.1.0"
+
+postcss@^7, postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.14, postcss@^7.0.17, postcss@^7.0.2, postcss@^7.0.26, postcss@^7.0.27, postcss@^7.0.32, postcss@^7.0.5, postcss@^7.0.6:
+ version "7.0.35"
+ resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.35.tgz#d2be00b998f7f211d8a276974079f2e92b970e24"
+ integrity sha512-3QT8bBJeX/S5zKTTjTCIjRF3If4avAT6kqxcASlTWEtAFCb9NH0OUxNDfgZSWdP5fJnBYCMEWkIFfWeugjzYMg==
+ dependencies:
+ chalk "^2.4.2"
+ source-map "^0.6.1"
+ supports-color "^6.1.0"
+
+postcss@^8.1.0:
+ version "8.2.6"
+ resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.2.6.tgz#5d69a974543b45f87e464bc4c3e392a97d6be9fe"
+ integrity sha512-xpB8qYxgPuly166AGlpRjUdEYtmOWx2iCwGmrv4vqZL9YPVviDVPZPRXxnXr6xPZOdxQ9lp3ZBFCRgWJ7LE3Sg==
+ dependencies:
+ colorette "^1.2.1"
+ nanoid "^3.1.20"
+ source-map "^0.6.1"
+
+prelude-ls@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"
+ integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==
+
+prelude-ls@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54"
+ integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=
+
+prepend-http@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc"
+ integrity sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=
+
+pretty-bytes@^5.3.0:
+ version "5.6.0"
+ resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb"
+ integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==
+
+pretty-error@^2.1.1:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.2.tgz#be89f82d81b1c86ec8fdfbc385045882727f93b6"
+ integrity sha512-EY5oDzmsX5wvuynAByrmY0P0hcp+QpnAKbJng2A2MPjVKXCxrDSUkzghVJ4ZGPIv+JC4gX8fPUWscC0RtjsWGw==
+ dependencies:
+ lodash "^4.17.20"
+ renderkid "^2.0.4"
+
+pretty-format@^26.6.0, pretty-format@^26.6.2:
+ version "26.6.2"
+ resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-26.6.2.tgz#e35c2705f14cb7fe2fe94fa078345b444120fc93"
+ integrity sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg==
+ dependencies:
+ "@jest/types" "^26.6.2"
+ ansi-regex "^5.0.0"
+ ansi-styles "^4.0.0"
+ react-is "^17.0.1"
+
+process-nextick-args@~2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
+ integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
+
+process@^0.11.10:
+ version "0.11.10"
+ resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
+ integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI=
+
+progress@^2.0.0:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8"
+ integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==
+
+promise-inflight@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3"
+ integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM=
+
[email protected]:
+ version "8.1.3"
+ resolved "https://registry.yarnpkg.com/promise-polyfill/-/promise-polyfill-8.1.3.tgz#8c99b3cf53f3a91c68226ffde7bde81d7f904116"
+ integrity sha512-MG5r82wBzh7pSKDRa9y+vllNHz3e3d4CNj1PQE4BQYxLme0gKYYBm9YENq+UkEikyZ0XbiGWxYlVw3Rl9O/U8g==
+
+promise@^8.1.0:
+ version "8.1.0"
+ resolved "https://registry.yarnpkg.com/promise/-/promise-8.1.0.tgz#697c25c3dfe7435dd79fcd58c38a135888eaf05e"
+ integrity sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q==
+ dependencies:
+ asap "~2.0.6"
+
[email protected], prompts@^2.0.1:
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.0.tgz#4aa5de0723a231d1ee9121c40fdf663df73f61d7"
+ integrity sha512-awZAKrk3vN6CroQukBL+R9051a4R3zCZBlJm/HBfrSZ8iTpYix3VX1vU4mveiLpiwmOJT4wokTF9m6HUk4KqWQ==
+ dependencies:
+ kleur "^3.0.3"
+ sisteransi "^1.0.5"
+
+prop-types@^15.6.2, prop-types@^15.7.2:
+ version "15.7.2"
+ resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5"
+ integrity sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==
+ dependencies:
+ loose-envify "^1.4.0"
+ object-assign "^4.1.1"
+ react-is "^16.8.1"
+
+protobufjs@^6.10.0:
+ version "6.11.2"
+ resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.2.tgz#de39fabd4ed32beaa08e9bb1e30d08544c1edf8b"
+ integrity sha512-4BQJoPooKJl2G9j3XftkIXjoC9C0Av2NOrWmbLWT1vH32GcSUHjM0Arra6UfTsVyfMAuFzaLucXn1sadxJydAw==
+ dependencies:
+ "@protobufjs/aspromise" "^1.1.2"
+ "@protobufjs/base64" "^1.1.2"
+ "@protobufjs/codegen" "^2.0.4"
+ "@protobufjs/eventemitter" "^1.1.0"
+ "@protobufjs/fetch" "^1.1.0"
+ "@protobufjs/float" "^1.0.2"
+ "@protobufjs/inquire" "^1.1.0"
+ "@protobufjs/path" "^1.1.2"
+ "@protobufjs/pool" "^1.1.0"
+ "@protobufjs/utf8" "^1.1.0"
+ "@types/long" "^4.0.1"
+ "@types/node" ">=13.7.0"
+ long "^4.0.0"
+
+proxy-addr@~2.0.5:
+ version "2.0.6"
+ resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.6.tgz#fdc2336505447d3f2f2c638ed272caf614bbb2bf"
+ integrity sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==
+ dependencies:
+ forwarded "~0.1.2"
+ ipaddr.js "1.9.1"
+
+prr@~1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476"
+ integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY=
+
+psl@^1.1.28:
+ version "1.8.0"
+ resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24"
+ integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==
+
+public-encrypt@^4.0.0:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0"
+ integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==
+ dependencies:
+ bn.js "^4.1.0"
+ browserify-rsa "^4.0.0"
+ create-hash "^1.1.0"
+ parse-asn1 "^5.0.0"
+ randombytes "^2.0.1"
+ safe-buffer "^5.1.2"
+
+pump@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909"
+ integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==
+ dependencies:
+ end-of-stream "^1.1.0"
+ once "^1.3.1"
+
+pump@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
+ integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
+ dependencies:
+ end-of-stream "^1.1.0"
+ once "^1.3.1"
+
+pumpify@^1.3.3:
+ version "1.5.1"
+ resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce"
+ integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ==
+ dependencies:
+ duplexify "^3.6.0"
+ inherits "^2.0.3"
+ pump "^2.0.0"
+
[email protected]:
+ version "1.3.2"
+ resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d"
+ integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=
+
+punycode@^1.2.4:
+ version "1.4.1"
+ resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e"
+ integrity sha1-wNWmOycYgArY4esPpSachN1BhF4=
+
+punycode@^2.1.0, punycode@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
+ integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
+
+q@^1.1.2:
+ version "1.5.1"
+ resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7"
+ integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=
+
[email protected]:
+ version "6.7.0"
+ resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc"
+ integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==
+
+qs@~6.5.2:
+ version "6.5.2"
+ resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36"
+ integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==
+
+query-string@^4.1.0:
+ version "4.3.4"
+ resolved "https://registry.yarnpkg.com/query-string/-/query-string-4.3.4.tgz#bbb693b9ca915c232515b228b1a02b609043dbeb"
+ integrity sha1-u7aTucqRXCMlFbIosaArYJBD2+s=
+ dependencies:
+ object-assign "^4.1.0"
+ strict-uri-encode "^1.0.0"
+
+querystring-es3@^0.2.0:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73"
+ integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM=
+
[email protected]:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
+ integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=
+
+querystring@^0.2.0:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.1.tgz#40d77615bb09d16902a85c3e38aa8b5ed761c2dd"
+ integrity sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==
+
+querystringify@^2.1.1:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6"
+ integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==
+
+queue-microtask@^1.2.2:
+ version "1.2.2"
+ resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.2.tgz#abf64491e6ecf0f38a6502403d4cda04f372dfd3"
+ integrity sha512-dB15eXv3p2jDlbOiNLyMabYg1/sXvppd8DP2J3EOCQ0AkuSXCW2tP7mnVouVLJKgUMY6yP0kcQDVpLCN13h4Xg==
+
+raf@^3.4.0, raf@^3.4.1:
+ version "3.4.1"
+ resolved "https://registry.yarnpkg.com/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39"
+ integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA==
+ dependencies:
+ performance-now "^2.1.0"
+
+randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a"
+ integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==
+ dependencies:
+ safe-buffer "^5.1.0"
+
+randomfill@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458"
+ integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==
+ dependencies:
+ randombytes "^2.0.5"
+ safe-buffer "^5.1.0"
+
+range-parser@^1.2.1, range-parser@~1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031"
+ integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
+
[email protected]:
+ version "2.4.0"
+ resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332"
+ integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==
+ dependencies:
+ bytes "3.1.0"
+ http-errors "1.7.2"
+ iconv-lite "0.4.24"
+ unpipe "1.0.0"
+
+react-app-polyfill@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-2.0.0.tgz#a0bea50f078b8a082970a9d853dc34b6dcc6a3cf"
+ integrity sha512-0sF4ny9v/B7s6aoehwze9vJNWcmCemAUYBVasscVr92+UYiEqDXOxfKjXN685mDaMRNF3WdhHQs76oTODMocFA==
+ dependencies:
+ core-js "^3.6.5"
+ object-assign "^4.1.1"
+ promise "^8.1.0"
+ raf "^3.4.1"
+ regenerator-runtime "^0.13.7"
+ whatwg-fetch "^3.4.1"
+
+react-clientside-effect@^1.2.2:
+ version "1.2.5"
+ resolved "https://registry.yarnpkg.com/react-clientside-effect/-/react-clientside-effect-1.2.5.tgz#e2c4dc3c9ee109f642fac4f5b6e9bf5bcd2219a3"
+ integrity sha512-2bL8qFW1TGBHozGGbVeyvnggRpMjibeZM2536AKNENLECutp2yfs44IL8Hmpn8qjFQ2K7A9PnYf3vc7aQq/cPA==
+ dependencies:
+ "@babel/runtime" "^7.12.13"
+
+react-dev-utils@^11.0.3:
+ version "11.0.4"
+ resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-11.0.4.tgz#a7ccb60257a1ca2e0efe7a83e38e6700d17aa37a"
+ integrity sha512-dx0LvIGHcOPtKbeiSUM4jqpBl3TcY7CDjZdfOIcKeznE7BWr9dg0iPG90G5yfVQ+p/rGNMXdbfStvzQZEVEi4A==
+ dependencies:
+ "@babel/code-frame" "7.10.4"
+ address "1.1.2"
+ browserslist "4.14.2"
+ chalk "2.4.2"
+ cross-spawn "7.0.3"
+ detect-port-alt "1.1.6"
+ escape-string-regexp "2.0.0"
+ filesize "6.1.0"
+ find-up "4.1.0"
+ fork-ts-checker-webpack-plugin "4.1.6"
+ global-modules "2.0.0"
+ globby "11.0.1"
+ gzip-size "5.1.1"
+ immer "8.0.1"
+ is-root "2.1.0"
+ loader-utils "2.0.0"
+ open "^7.0.2"
+ pkg-up "3.1.0"
+ prompts "2.4.0"
+ react-error-overlay "^6.0.9"
+ recursive-readdir "2.2.2"
+ shell-quote "1.7.2"
+ strip-ansi "6.0.0"
+ text-table "0.2.0"
+
+react-dom@^17.0.2:
+ version "17.0.2"
+ resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-17.0.2.tgz#ecffb6845e3ad8dbfcdc498f0d0a939736502c23"
+ integrity sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==
+ dependencies:
+ loose-envify "^1.1.0"
+ object-assign "^4.1.1"
+ scheduler "^0.20.2"
+
+react-error-overlay@^6.0.9:
+ version "6.0.9"
+ resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.9.tgz#3c743010c9359608c375ecd6bc76f35d93995b0a"
+ integrity sha512-nQTTcUu+ATDbrSD1BZHr5kgSD4oF8OFjxun8uAaL8RwPBacGBNPf/yAuVVdx17N8XNzRDMrZ9XcKZHCjPW+9ew==
+
[email protected]:
+ version "3.2.0"
+ resolved "https://registry.yarnpkg.com/react-fast-compare/-/react-fast-compare-3.2.0.tgz#641a9da81b6a6320f270e89724fb45a0b39e43bb"
+ integrity sha512-rtGImPZ0YyLrscKI9xTpV8psd6I8VAtjKCzQDlzyDvqJA8XOW78TXYQwNRNd8g8JZnDu8q9Fu/1v4HPAVwVdHA==
+
[email protected]:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/react-focus-lock/-/react-focus-lock-2.5.0.tgz#12e3a3940e897c26e2c2a0408cd25ea3c99b3709"
+ integrity sha512-XLxj6uTXgz0US8TmqNU2jMfnXwZG0mH2r/afQqvPEaX6nyEll5LHVcEXk2XDUQ34RVeLPkO/xK5x6c/qiuSq/A==
+ dependencies:
+ "@babel/runtime" "^7.0.0"
+ focus-lock "^0.8.1"
+ prop-types "^15.6.2"
+ react-clientside-effect "^1.2.2"
+ use-callback-ref "^1.2.1"
+ use-sidecar "^1.0.1"
+
[email protected]:
+ version "16.10.2"
+ resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.10.2.tgz#984120fd4d16800e9a738208ab1fba422d23b5ab"
+ integrity sha512-INBT1QEgtcCCgvccr5/86CfD71fw9EPmDxgiJX4I2Ddr6ZsV6iFXsuby+qWJPtmNuMY0zByTsG4468P7nHuNWA==
+
+react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1:
+ version "16.13.1"
+ resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
+ integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==
+
+react-is@^17.0.1:
+ version "17.0.1"
+ resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.1.tgz#5b3531bd76a645a4c9fb6e693ed36419e3301339"
+ integrity sha512-NAnt2iGDXohE5LI7uBnLnqvLQMtzhkiAOLXTmv+qnF9Ky7xAPcX8Up/xWIhxvLVGJvuLiNc4xQLtuqDRzb4fSA==
+
+react-lifecycles-compat@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz#4f1a273afdfc8f3488a8c516bfda78f872352362"
+ integrity sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==
+
+react-refresh@^0.8.3:
+ version "0.8.3"
+ resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.8.3.tgz#721d4657672d400c5e3c75d063c4a85fb2d5d68f"
+ integrity sha512-X8jZHc7nCMjaCqoU+V2I0cOhNW+QMBwSUkeXnTi8IPe6zaRWfn60ZzvFDZqWPfmSJfjub7dDW1SP0jaHWLu/hg==
+
+react-remove-scroll-bar@^2.1.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/react-remove-scroll-bar/-/react-remove-scroll-bar-2.2.0.tgz#d4d545a7df024f75d67e151499a6ab5ac97c8cdd"
+ integrity sha512-UU9ZBP1wdMR8qoUs7owiVcpaPwsQxUDC2lypP6mmixaGlARZa7ZIBx1jcuObLdhMOvCsnZcvetOho0wzPa9PYg==
+ dependencies:
+ react-style-singleton "^2.1.0"
+ tslib "^1.0.0"
+
[email protected]:
+ version "2.4.1"
+ resolved "https://registry.yarnpkg.com/react-remove-scroll/-/react-remove-scroll-2.4.1.tgz#e0af6126621083a5064591d367291a81b2d107f5"
+ integrity sha512-K7XZySEzOHMTq7dDwcHsZA6Y7/1uX5RsWhRXVYv8rdh+y9Qz2nMwl9RX/Mwnj/j7JstCGmxyfyC0zbVGXYh3mA==
+ dependencies:
+ react-remove-scroll-bar "^2.1.0"
+ react-style-singleton "^2.1.0"
+ tslib "^1.0.0"
+ use-callback-ref "^1.2.3"
+ use-sidecar "^1.0.1"
+
+react-resize-detector@^6.6.3:
+ version "6.7.4"
+ resolved "https://registry.yarnpkg.com/react-resize-detector/-/react-resize-detector-6.7.4.tgz#594cc026115af05484e8011157b5dc2137492680"
+ integrity sha512-wzvGmUdEDMhiUHVZGnl4kuyj/TEQhvbB5LyAGkbYXetwJ2O+u/zftmPvU+kxiO1h+d9aUqQBKcNLS7TvB3ytqA==
+ dependencies:
+ "@types/resize-observer-browser" "^0.1.5"
+ lodash.debounce "^4.0.8"
+ lodash.throttle "^4.1.1"
+ resize-observer-polyfill "^1.5.1"
+
+react-router-dom@^5.2.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-5.2.0.tgz#9e65a4d0c45e13289e66c7b17c7e175d0ea15662"
+ integrity sha512-gxAmfylo2QUjcwxI63RhQ5G85Qqt4voZpUXSEqCwykV0baaOTQDR1f0PmY8AELqIyVc0NEZUj0Gov5lNGcXgsA==
+ dependencies:
+ "@babel/runtime" "^7.1.2"
+ history "^4.9.0"
+ loose-envify "^1.3.1"
+ prop-types "^15.6.2"
+ react-router "5.2.0"
+ tiny-invariant "^1.0.2"
+ tiny-warning "^1.0.0"
+
[email protected]:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.2.0.tgz#424e75641ca8747fbf76e5ecca69781aa37ea293"
+ integrity sha512-smz1DUuFHRKdcJC0jobGo8cVbhO3x50tCL4icacOlcwDOEQPq4TMqwx3sY1TP+DvtTgz4nm3thuo7A+BK2U0Dw==
+ dependencies:
+ "@babel/runtime" "^7.1.2"
+ history "^4.9.0"
+ hoist-non-react-statics "^3.1.0"
+ loose-envify "^1.3.1"
+ mini-create-react-context "^0.4.0"
+ path-to-regexp "^1.7.0"
+ prop-types "^15.6.2"
+ react-is "^16.6.0"
+ tiny-invariant "^1.0.2"
+ tiny-warning "^1.0.0"
+
[email protected]:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-4.0.3.tgz#b1cafed7c3fa603e7628ba0f187787964cb5d345"
+ integrity sha512-S5eO4vjUzUisvkIPB7jVsKtuH2HhWcASREYWHAQ1FP5HyCv3xgn+wpILAEWkmy+A+tTNbSZClhxjT3qz6g4L1A==
+ dependencies:
+ "@babel/core" "7.12.3"
+ "@pmmmwh/react-refresh-webpack-plugin" "0.4.3"
+ "@svgr/webpack" "5.5.0"
+ "@typescript-eslint/eslint-plugin" "^4.5.0"
+ "@typescript-eslint/parser" "^4.5.0"
+ babel-eslint "^10.1.0"
+ babel-jest "^26.6.0"
+ babel-loader "8.1.0"
+ babel-plugin-named-asset-import "^0.3.7"
+ babel-preset-react-app "^10.0.0"
+ bfj "^7.0.2"
+ camelcase "^6.1.0"
+ case-sensitive-paths-webpack-plugin "2.3.0"
+ css-loader "4.3.0"
+ dotenv "8.2.0"
+ dotenv-expand "5.1.0"
+ eslint "^7.11.0"
+ eslint-config-react-app "^6.0.0"
+ eslint-plugin-flowtype "^5.2.0"
+ eslint-plugin-import "^2.22.1"
+ eslint-plugin-jest "^24.1.0"
+ eslint-plugin-jsx-a11y "^6.3.1"
+ eslint-plugin-react "^7.21.5"
+ eslint-plugin-react-hooks "^4.2.0"
+ eslint-plugin-testing-library "^3.9.2"
+ eslint-webpack-plugin "^2.5.2"
+ file-loader "6.1.1"
+ fs-extra "^9.0.1"
+ html-webpack-plugin "4.5.0"
+ identity-obj-proxy "3.0.0"
+ jest "26.6.0"
+ jest-circus "26.6.0"
+ jest-resolve "26.6.0"
+ jest-watch-typeahead "0.6.1"
+ mini-css-extract-plugin "0.11.3"
+ optimize-css-assets-webpack-plugin "5.0.4"
+ pnp-webpack-plugin "1.6.4"
+ postcss-flexbugs-fixes "4.2.1"
+ postcss-loader "3.0.0"
+ postcss-normalize "8.0.1"
+ postcss-preset-env "6.7.0"
+ postcss-safe-parser "5.0.2"
+ prompts "2.4.0"
+ react-app-polyfill "^2.0.0"
+ react-dev-utils "^11.0.3"
+ react-refresh "^0.8.3"
+ resolve "1.18.1"
+ resolve-url-loader "^3.1.2"
+ sass-loader "^10.0.5"
+ semver "7.3.2"
+ style-loader "1.3.0"
+ terser-webpack-plugin "4.2.3"
+ ts-pnp "1.2.0"
+ url-loader "4.1.1"
+ webpack "4.44.2"
+ webpack-dev-server "3.11.1"
+ webpack-manifest-plugin "2.2.0"
+ workbox-webpack-plugin "5.1.4"
+ optionalDependencies:
+ fsevents "^2.1.3"
+
+react-smooth@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/react-smooth/-/react-smooth-2.0.0.tgz#561647b33e498b2e25f449b3c6689b2e9111bf91"
+ integrity sha512-wK4dBBR6P21otowgMT9toZk+GngMplGS1O5gk+2WSiHEXIrQgDvhR5IIlT74Vtu//qpTcipkgo21dD7a7AUNxw==
+ dependencies:
+ fast-equals "^2.0.0"
+ raf "^3.4.0"
+ react-transition-group "2.9.0"
+
+react-style-singleton@^2.1.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/react-style-singleton/-/react-style-singleton-2.1.1.tgz#ce7f90b67618be2b6b94902a30aaea152ce52e66"
+ integrity sha512-jNRp07Jza6CBqdRKNgGhT3u9umWvils1xsuMOjZlghBDH2MU0PL2WZor4PGYjXpnRCa9DQSlHMs/xnABWOwYbA==
+ dependencies:
+ get-nonce "^1.0.0"
+ invariant "^2.2.4"
+ tslib "^1.0.0"
+
[email protected]:
+ version "2.9.0"
+ resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-2.9.0.tgz#df9cdb025796211151a436c69a8f3b97b5b07c8d"
+ integrity sha512-+HzNTCHpeQyl4MJ/bdE0u6XRMe9+XG/+aL4mCxVN4DnPBQ0/5bfHWPDuOZUzYdMj94daZaZdCCc1Dzt9R/xSSg==
+ dependencies:
+ dom-helpers "^3.4.0"
+ loose-envify "^1.4.0"
+ prop-types "^15.6.2"
+ react-lifecycles-compat "^3.0.4"
+
+react@^17.0.2:
+ version "17.0.2"
+ resolved "https://registry.yarnpkg.com/react/-/react-17.0.2.tgz#d0b5cc516d29eb3eee383f75b62864cfb6800037"
+ integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==
+ dependencies:
+ loose-envify "^1.1.0"
+ object-assign "^4.1.1"
+
+read-pkg-up@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be"
+ integrity sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=
+ dependencies:
+ find-up "^2.0.0"
+ read-pkg "^2.0.0"
+
+read-pkg-up@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-3.0.0.tgz#3ed496685dba0f8fe118d0691dc51f4a1ff96f07"
+ integrity sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=
+ dependencies:
+ find-up "^2.0.0"
+ read-pkg "^3.0.0"
+
+read-pkg-up@^7.0.1:
+ version "7.0.1"
+ resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507"
+ integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==
+ dependencies:
+ find-up "^4.1.0"
+ read-pkg "^5.2.0"
+ type-fest "^0.8.1"
+
+read-pkg@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8"
+ integrity sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=
+ dependencies:
+ load-json-file "^2.0.0"
+ normalize-package-data "^2.3.2"
+ path-type "^2.0.0"
+
+read-pkg@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-3.0.0.tgz#9cbc686978fee65d16c00e2b19c237fcf6e38389"
+ integrity sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=
+ dependencies:
+ load-json-file "^4.0.0"
+ normalize-package-data "^2.3.2"
+ path-type "^3.0.0"
+
+read-pkg@^5.2.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc"
+ integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==
+ dependencies:
+ "@types/normalize-package-data" "^2.4.0"
+ normalize-package-data "^2.5.0"
+ parse-json "^5.0.0"
+ type-fest "^0.6.0"
+
+"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6:
+ version "2.3.7"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
+ integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
+ dependencies:
+ core-util-is "~1.0.0"
+ inherits "~2.0.3"
+ isarray "~1.0.0"
+ process-nextick-args "~2.0.0"
+ safe-buffer "~5.1.1"
+ string_decoder "~1.1.1"
+ util-deprecate "~1.0.1"
+
+readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.6.0:
+ version "3.6.0"
+ resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
+ integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
+ dependencies:
+ inherits "^2.0.3"
+ string_decoder "^1.1.1"
+ util-deprecate "^1.0.1"
+
+readdirp@^2.2.1:
+ version "2.2.1"
+ resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525"
+ integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==
+ dependencies:
+ graceful-fs "^4.1.11"
+ micromatch "^3.1.10"
+ readable-stream "^2.0.2"
+
+readdirp@~3.5.0:
+ version "3.5.0"
+ resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.5.0.tgz#9ba74c019b15d365278d2e91bb8c48d7b4d42c9e"
+ integrity sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==
+ dependencies:
+ picomatch "^2.2.1"
+
+recharts-scale@^0.4.4:
+ version "0.4.5"
+ resolved "https://registry.yarnpkg.com/recharts-scale/-/recharts-scale-0.4.5.tgz#0969271f14e732e642fcc5bd4ab270d6e87dd1d9"
+ integrity sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w==
+ dependencies:
+ decimal.js-light "^2.4.1"
+
+recharts@^2.0.10:
+ version "2.0.10"
+ resolved "https://registry.yarnpkg.com/recharts/-/recharts-2.0.10.tgz#d5b212b06aeb4ba346c20c6987fe8f9f97d57b4d"
+ integrity sha512-yVIlHXPiSgxg8z+qApVnBcGVCLpluqNbXJenK3jKxsCb/FJzh77wyEztS6kXLXSC1RCZa7UABmBPBhqspMCF+g==
+ dependencies:
+ "@types/d3-scale" "^3.0.0"
+ "@types/d3-shape" "^2.0.0"
+ classnames "^2.2.5"
+ d3-interpolate "^2.0.1"
+ d3-scale "^3.2.3"
+ d3-shape "^2.0.0"
+ eventemitter3 "^4.0.1"
+ lodash "^4.17.19"
+ react-is "16.10.2"
+ react-resize-detector "^6.6.3"
+ react-smooth "^2.0.0"
+ recharts-scale "^0.4.4"
+ reduce-css-calc "^2.1.8"
+
+recursive-readdir@2.2.2:
+ version "2.2.2"
+ resolved "https://registry.yarnpkg.com/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f"
+ integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==
+ dependencies:
+ minimatch "3.0.4"
+
+reduce-css-calc@^2.1.8:
+ version "2.1.8"
+ resolved "https://registry.yarnpkg.com/reduce-css-calc/-/reduce-css-calc-2.1.8.tgz#7ef8761a28d614980dc0c982f772c93f7a99de03"
+ integrity sha512-8liAVezDmUcH+tdzoEGrhfbGcP7nOV4NkGE3a74+qqvE7nt9i4sKLGBuZNOnpI4WiGksiNPklZxva80061QiPg==
+ dependencies:
+ css-unit-converter "^1.1.1"
+ postcss-value-parser "^3.3.0"
+
+regenerate-unicode-properties@^8.2.0:
+ version "8.2.0"
+ resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.2.0.tgz#e5de7111d655e7ba60c057dbe9ff37c87e65cdec"
+ integrity sha512-F9DjY1vKLo/tPePDycuH3dn9H1OTPIkVD9Kz4LODu+F2C75mgjAJ7x/gwy6ZcSNRAAkhNlJSOHRe8k3p+K9WhA==
+ dependencies:
+ regenerate "^1.4.0"
+
+regenerate@^1.4.0:
+ version "1.4.2"
+ resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a"
+ integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==
+
+regenerator-runtime@^0.11.0:
+ version "0.11.1"
+ resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9"
+ integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==
+
+regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.7:
+ version "0.13.7"
+ resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55"
+ integrity sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew==
+
+regenerator-transform@^0.14.2:
+ version "0.14.5"
+ resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.5.tgz#c98da154683671c9c4dcb16ece736517e1b7feb4"
+ integrity sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw==
+ dependencies:
+ "@babel/runtime" "^7.8.4"
+
+regex-not@^1.0.0, regex-not@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c"
+ integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==
+ dependencies:
+ extend-shallow "^3.0.2"
+ safe-regex "^1.1.0"
+
+regex-parser@^2.2.11:
+ version "2.2.11"
+ resolved "https://registry.yarnpkg.com/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58"
+ integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q==
+
+regexp.prototype.flags@^1.2.0, regexp.prototype.flags@^1.3.1:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.3.1.tgz#7ef352ae8d159e758c0eadca6f8fcb4eef07be26"
+ integrity sha512-JiBdRBq91WlY7uRJ0ds7R+dU02i6LKi8r3BuQhNXn+kmeLN+EfHhfjqMRis1zJxnlu88hq/4dx0P2OP3APRTOA==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+
+regexpp@^3.0.0, regexpp@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.1.0.tgz#206d0ad0a5648cffbdb8ae46438f3dc51c9f78e2"
+ integrity sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==
+
+regexpu-core@^4.7.1:
+ version "4.7.1"
+ resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.7.1.tgz#2dea5a9a07233298fbf0db91fa9abc4c6e0f8ad6"
+ integrity sha512-ywH2VUraA44DZQuRKzARmw6S66mr48pQVva4LBeRhcOltJ6hExvWly5ZjFLYo67xbIxb6W1q4bAGtgfEl20zfQ==
+ dependencies:
+ regenerate "^1.4.0"
+ regenerate-unicode-properties "^8.2.0"
+ regjsgen "^0.5.1"
+ regjsparser "^0.6.4"
+ unicode-match-property-ecmascript "^1.0.4"
+ unicode-match-property-value-ecmascript "^1.2.0"
+
+regjsgen@^0.5.1:
+ version "0.5.2"
+ resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.2.tgz#92ff295fb1deecbf6ecdab2543d207e91aa33733"
+ integrity sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A==
+
+regjsparser@^0.6.4:
+ version "0.6.7"
+ resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.7.tgz#c00164e1e6713c2e3ee641f1701c4b7aa0a7f86c"
+ integrity sha512-ib77G0uxsA2ovgiYbCVGx4Pv3PSttAx2vIwidqQzbL2U5S4Q+j00HdSAneSBuyVcMvEnTXMjiGgB+DlXozVhpQ==
+ dependencies:
+ jsesc "~0.5.0"
+
+relateurl@^0.2.7:
+ version "0.2.7"
+ resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9"
+ integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk=
+
+remove-trailing-separator@^1.0.1:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
+ integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8=
+
+renderkid@^2.0.4:
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.5.tgz#483b1ac59c6601ab30a7a596a5965cabccfdd0a5"
+ integrity sha512-ccqoLg+HLOHq1vdfYNm4TBeaCDIi1FLt3wGojTDSvdewUv65oTmI3cnT2E4hRjl1gzKZIPK+KZrXzlUYKnR+vQ==
+ dependencies:
+ css-select "^2.0.2"
+ dom-converter "^0.2"
+ htmlparser2 "^3.10.1"
+ lodash "^4.17.20"
+ strip-ansi "^3.0.0"
+
+repeat-element@^1.1.2:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce"
+ integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==
+
+repeat-string@^1.6.1:
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637"
+ integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc=
+
+request-promise-core@1.1.4:
+ version "1.1.4"
+ resolved "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.4.tgz#3eedd4223208d419867b78ce815167d10593a22f"
+ integrity sha512-TTbAfBBRdWD7aNNOoVOBH4pN/KigV6LyapYNNlAPA8JwbovRti1E88m3sYAwsLi5ryhPKsE9APwnjFTgdUjTpw==
+ dependencies:
+ lodash "^4.17.19"
+
+request-promise-native@^1.0.8:
+ version "1.0.9"
+ resolved "https://registry.yarnpkg.com/request-promise-native/-/request-promise-native-1.0.9.tgz#e407120526a5efdc9a39b28a5679bf47b9d9dc28"
+ integrity sha512-wcW+sIUiWnKgNY0dqCpOZkUbF/I+YPi+f09JZIDa39Ec+q82CpSYniDp+ISgTTbKmnpJWASeJBPZmoxH84wt3g==
+ dependencies:
+ request-promise-core "1.1.4"
+ stealthy-require "^1.1.1"
+ tough-cookie "^2.3.3"
+
+request@^2.88.2:
+ version "2.88.2"
+ resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3"
+ integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==
+ dependencies:
+ aws-sign2 "~0.7.0"
+ aws4 "^1.8.0"
+ caseless "~0.12.0"
+ combined-stream "~1.0.6"
+ extend "~3.0.2"
+ forever-agent "~0.6.1"
+ form-data "~2.3.2"
+ har-validator "~5.1.3"
+ http-signature "~1.2.0"
+ is-typedarray "~1.0.0"
+ isstream "~0.1.2"
+ json-stringify-safe "~5.0.1"
+ mime-types "~2.1.19"
+ oauth-sign "~0.9.0"
+ performance-now "^2.1.0"
+ qs "~6.5.2"
+ safe-buffer "^5.1.2"
+ tough-cookie "~2.5.0"
+ tunnel-agent "^0.6.0"
+ uuid "^3.3.2"
+
+require-directory@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
+ integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=
+
+require-from-string@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909"
+ integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==
+
+require-main-filename@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
+ integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
+
+requires-port@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"
+ integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=
+
+resize-observer-polyfill@^1.5.1:
+ version "1.5.1"
+ resolved "https://registry.yarnpkg.com/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz#0e9020dd3d21024458d4ebd27e23e40269810464"
+ integrity sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==
+
+resolve-cwd@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a"
+ integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo=
+ dependencies:
+ resolve-from "^3.0.0"
+
+resolve-cwd@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d"
+ integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==
+ dependencies:
+ resolve-from "^5.0.0"
+
+resolve-from@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748"
+ integrity sha1-six699nWiBvItuZTM17rywoYh0g=
+
+resolve-from@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
+ integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
+
+resolve-from@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69"
+ integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==
+
+resolve-pathname@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/resolve-pathname/-/resolve-pathname-3.0.0.tgz#99d02224d3cf263689becbb393bc560313025dcd"
+ integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==
+
+resolve-url-loader@^3.1.2:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/resolve-url-loader/-/resolve-url-loader-3.1.2.tgz#235e2c28e22e3e432ba7a5d4e305c59a58edfc08"
+ integrity sha512-QEb4A76c8Mi7I3xNKXlRKQSlLBwjUV/ULFMP+G7n3/7tJZ8MG5wsZ3ucxP1Jz8Vevn6fnJsxDx9cIls+utGzPQ==
+ dependencies:
+ adjust-sourcemap-loader "3.0.0"
+ camelcase "5.3.1"
+ compose-function "3.0.3"
+ convert-source-map "1.7.0"
+ es6-iterator "2.0.3"
+ loader-utils "1.2.3"
+ postcss "7.0.21"
+ rework "1.0.1"
+ rework-visit "1.0.0"
+ source-map "0.6.1"
+
+resolve-url@^0.2.1:
+ version "0.2.1"
+ resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a"
+ integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=
+
+resolve@1.18.1:
+ version "1.18.1"
+ resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.18.1.tgz#018fcb2c5b207d2a6424aee361c5a266da8f4130"
+ integrity sha512-lDfCPaMKfOJXjy0dPayzPdF1phampNWr3qFCjAu+rw/qbQmr5jWH5xN2hwh9QKfw9E5v4hwV7A+jrCmL8yjjqA==
+ dependencies:
+ is-core-module "^2.0.0"
+ path-parse "^1.0.6"
+
+resolve@^1.10.0, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.14.2, resolve@^1.17.0, resolve@^1.18.1, resolve@^1.20.0, resolve@^1.3.2, resolve@^1.8.1:
+ version "1.20.0"
+ resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975"
+ integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==
+ dependencies:
+ is-core-module "^2.2.0"
+ path-parse "^1.0.6"
+
+resolve@^2.0.0-next.3:
+ version "2.0.0-next.3"
+ resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.3.tgz#d41016293d4a8586a39ca5d9b5f15cbea1f55e46"
+ integrity sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q==
+ dependencies:
+ is-core-module "^2.2.0"
+ path-parse "^1.0.6"
+
+ret@~0.1.10:
+ version "0.1.15"
+ resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc"
+ integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==
+
+retry@^0.12.0:
+ version "0.12.0"
+ resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b"
+ integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=
+
+reusify@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76"
+ integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==
+
+rework-visit@1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/rework-visit/-/rework-visit-1.0.0.tgz#9945b2803f219e2f7aca00adb8bc9f640f842c9a"
+ integrity sha1-mUWygD8hni96ygCtuLyfZA+ELJo=
+
+rework@1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/rework/-/rework-1.0.1.tgz#30806a841342b54510aa4110850cd48534144aa7"
+ integrity sha1-MIBqhBNCtUUQqkEQhQzUhTQUSqc=
+ dependencies:
+ convert-source-map "^0.3.3"
+ css "^2.0.0"
+
+rgb-regex@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/rgb-regex/-/rgb-regex-1.0.1.tgz#c0e0d6882df0e23be254a475e8edd41915feaeb1"
+ integrity sha1-wODWiC3w4jviVKR16O3UGRX+rrE=
+
+rgba-regex@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/rgba-regex/-/rgba-regex-1.0.0.tgz#43374e2e2ca0968b0ef1523460b7d730ff22eeb3"
+ integrity sha1-QzdOLiyglosO8VI0YLfXMP8i7rM=
+
+rimraf@^2.5.4, rimraf@^2.6.3:
+ version "2.7.1"
+ resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
+ integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
+ dependencies:
+ glob "^7.1.3"
+
+rimraf@^3.0.0, rimraf@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a"
+ integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==
+ dependencies:
+ glob "^7.1.3"
+
+ripemd160@^2.0.0, ripemd160@^2.0.1:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c"
+ integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==
+ dependencies:
+ hash-base "^3.0.0"
+ inherits "^2.0.1"
+
+rollup-plugin-babel@^4.3.3:
+ version "4.4.0"
+ resolved "https://registry.yarnpkg.com/rollup-plugin-babel/-/rollup-plugin-babel-4.4.0.tgz#d15bd259466a9d1accbdb2fe2fff17c52d030acb"
+ integrity sha512-Lek/TYp1+7g7I+uMfJnnSJ7YWoD58ajo6Oarhlex7lvUce+RCKRuGRSgztDO3/MF/PuGKmUL5iTHKf208UNszw==
+ dependencies:
+ "@babel/helper-module-imports" "^7.0.0"
+ rollup-pluginutils "^2.8.1"
+
+rollup-plugin-terser@^5.3.1:
+ version "5.3.1"
+ resolved "https://registry.yarnpkg.com/rollup-plugin-terser/-/rollup-plugin-terser-5.3.1.tgz#8c650062c22a8426c64268548957463bf981b413"
+ integrity sha512-1pkwkervMJQGFYvM9nscrUoncPwiKR/K+bHdjv6PFgRo3cgPHoRT83y2Aa3GvINj4539S15t/tpFPb775TDs6w==
+ dependencies:
+ "@babel/code-frame" "^7.5.5"
+ jest-worker "^24.9.0"
+ rollup-pluginutils "^2.8.2"
+ serialize-javascript "^4.0.0"
+ terser "^4.6.2"
+
+rollup-pluginutils@^2.8.1, rollup-pluginutils@^2.8.2:
+ version "2.8.2"
+ resolved "https://registry.yarnpkg.com/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz#72f2af0748b592364dbd3389e600e5a9444a351e"
+ integrity sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==
+ dependencies:
+ estree-walker "^0.6.1"
+
+rollup@^1.31.1:
+ version "1.32.1"
+ resolved "https://registry.yarnpkg.com/rollup/-/rollup-1.32.1.tgz#4480e52d9d9e2ae4b46ba0d9ddeaf3163940f9c4"
+ integrity sha512-/2HA0Ec70TvQnXdzynFffkjA6XN+1e2pEv/uKS5Ulca40g2L7KuOE3riasHoNVHOsFD5KKZgDsMk1CP3Tw9s+A==
+ dependencies:
+ "@types/estree" "*"
+ "@types/node" "*"
+ acorn "^7.1.0"
+
+rsvp@^4.8.4:
+ version "4.8.5"
+ resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-4.8.5.tgz#c8f155311d167f68f21e168df71ec5b083113734"
+ integrity sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA==
+
+run-parallel@^1.1.9:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee"
+ integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==
+ dependencies:
+ queue-microtask "^1.2.2"
+
+run-queue@^1.0.0, run-queue@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47"
+ integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec=
+ dependencies:
+ aproba "^1.1.1"
+
+safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
+ version "5.1.2"
+ resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
+ integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
+
+safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0:
+ version "5.2.1"
+ resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
+ integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
+
+safe-regex@^1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e"
+ integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4=
+ dependencies:
+ ret "~0.1.10"
+
+"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
+ integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
+
+sane@^4.0.3:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/sane/-/sane-4.1.0.tgz#ed881fd922733a6c461bc189dc2b6c006f3ffded"
+ integrity sha512-hhbzAgTIX8O7SHfp2c8/kREfEn4qO/9q8C9beyY6+tvZ87EpoZ3i1RIEvp27YBswnNbY9mWd6paKVmKbAgLfZA==
+ dependencies:
+ "@cnakazawa/watch" "^1.0.3"
+ anymatch "^2.0.0"
+ capture-exit "^2.0.0"
+ exec-sh "^0.3.2"
+ execa "^1.0.0"
+ fb-watchman "^2.0.0"
+ micromatch "^3.1.4"
+ minimist "^1.1.1"
+ walker "~1.0.5"
+
+sanitize.css@^10.0.0:
+ version "10.0.0"
+ resolved "https://registry.yarnpkg.com/sanitize.css/-/sanitize.css-10.0.0.tgz#b5cb2547e96d8629a60947544665243b1dc3657a"
+ integrity sha512-vTxrZz4dX5W86M6oVWVdOVe72ZiPs41Oi7Z6Km4W5Turyz28mrXSJhhEBZoRtzJWIv3833WKVwLSDWWkEfupMg==
+
+sass-loader@^10.0.5:
+ version "10.1.1"
+ resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-10.1.1.tgz#4ddd5a3d7638e7949065dd6e9c7c04037f7e663d"
+ integrity sha512-W6gVDXAd5hR/WHsPicvZdjAWHBcEJ44UahgxcIE196fW2ong0ZHMPO1kZuI5q0VlvMQZh32gpv69PLWQm70qrw==
+ dependencies:
+ klona "^2.0.4"
+ loader-utils "^2.0.0"
+ neo-async "^2.6.2"
+ schema-utils "^3.0.0"
+ semver "^7.3.2"
+
+sax@~1.2.4:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
+ integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
+
+saxes@^5.0.0:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d"
+ integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw==
+ dependencies:
+ xmlchars "^2.2.0"
+
+scheduler@^0.20.2:
+ version "0.20.2"
+ resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.20.2.tgz#4baee39436e34aa93b4874bddcbf0fe8b8b50e91"
+ integrity sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==
+ dependencies:
+ loose-envify "^1.1.0"
+ object-assign "^4.1.1"
+
+schema-utils@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770"
+ integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==
+ dependencies:
+ ajv "^6.1.0"
+ ajv-errors "^1.0.0"
+ ajv-keywords "^3.1.0"
+
+schema-utils@^2.6.5, schema-utils@^2.7.0, schema-utils@^2.7.1:
+ version "2.7.1"
+ resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7"
+ integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==
+ dependencies:
+ "@types/json-schema" "^7.0.5"
+ ajv "^6.12.4"
+ ajv-keywords "^3.5.2"
+
+schema-utils@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.0.0.tgz#67502f6aa2b66a2d4032b4279a2944978a0913ef"
+ integrity sha512-6D82/xSzO094ajanoOSbe4YvXWMfn2A//8Y1+MUqFAJul5Bs+yn36xbK9OtNDcRVSBJ9jjeoXftM6CfztsjOAA==
+ dependencies:
+ "@types/json-schema" "^7.0.6"
+ ajv "^6.12.5"
+ ajv-keywords "^3.5.2"
+
+select-hose@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca"
+ integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo=
+
+selfsigned@^1.10.8:
+ version "1.10.8"
+ resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.8.tgz#0d17208b7d12c33f8eac85c41835f27fc3d81a30"
+ integrity sha512-2P4PtieJeEwVgTU9QEcwIRDQ/mXJLX8/+I3ur+Pg16nS8oNbrGxEso9NyYWy8NAmXiNl4dlAp5MwoNeCWzON4w==
+ dependencies:
+ node-forge "^0.10.0"
+
+"semver@2 || 3 || 4 || 5", semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0:
+ version "5.7.1"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
+ integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
+
+semver@7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e"
+ integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==
+
+semver@7.3.2:
+ version "7.3.2"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.2.tgz#604962b052b81ed0786aae84389ffba70ffd3938"
+ integrity sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==
+
+semver@^6.0.0, semver@^6.3.0:
+ version "6.3.0"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
+ integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
+
+semver@^7.2.1, semver@^7.3.2:
+ version "7.3.4"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.4.tgz#27aaa7d2e4ca76452f98d3add093a72c943edc97"
+ integrity sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==
+ dependencies:
+ lru-cache "^6.0.0"
+
+send@0.17.1:
+ version "0.17.1"
+ resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8"
+ integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==
+ dependencies:
+ debug "2.6.9"
+ depd "~1.1.2"
+ destroy "~1.0.4"
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ etag "~1.8.1"
+ fresh "0.5.2"
+ http-errors "~1.7.2"
+ mime "1.6.0"
+ ms "2.1.1"
+ on-finished "~2.3.0"
+ range-parser "~1.2.1"
+ statuses "~1.5.0"
+
+serialize-javascript@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa"
+ integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw==
+ dependencies:
+ randombytes "^2.1.0"
+
+serialize-javascript@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-5.0.1.tgz#7886ec848049a462467a97d3d918ebb2aaf934f4"
+ integrity sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==
+ dependencies:
+ randombytes "^2.1.0"
+
+serve-index@^1.9.1:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239"
+ integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk=
+ dependencies:
+ accepts "~1.3.4"
+ batch "0.6.1"
+ debug "2.6.9"
+ escape-html "~1.0.3"
+ http-errors "~1.6.2"
+ mime-types "~2.1.17"
+ parseurl "~1.3.2"
+
+serve-static@1.14.1:
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9"
+ integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==
+ dependencies:
+ encodeurl "~1.0.2"
+ escape-html "~1.0.3"
+ parseurl "~1.3.3"
+ send "0.17.1"
+
+set-blocking@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
+ integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
+
+set-value@^2.0.0, set-value@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b"
+ integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==
+ dependencies:
+ extend-shallow "^2.0.1"
+ is-extendable "^0.1.1"
+ is-plain-object "^2.0.3"
+ split-string "^3.0.1"
+
+setimmediate@^1.0.4:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285"
+ integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=
+
+setprototypeof@1.1.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656"
+ integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==
+
+setprototypeof@1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683"
+ integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==
+
+sha.js@^2.4.0, sha.js@^2.4.8:
+ version "2.4.11"
+ resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7"
+ integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==
+ dependencies:
+ inherits "^2.0.1"
+ safe-buffer "^5.0.1"
+
+shebang-command@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
+ integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=
+ dependencies:
+ shebang-regex "^1.0.0"
+
+shebang-command@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
+ integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==
+ dependencies:
+ shebang-regex "^3.0.0"
+
+shebang-regex@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
+ integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=
+
+shebang-regex@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
+ integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
+
+shell-quote@1.7.2:
+ version "1.7.2"
+ resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.2.tgz#67a7d02c76c9da24f99d20808fcaded0e0e04be2"
+ integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg==
+
+shellwords@^0.1.1:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b"
+ integrity sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww==
+
+side-channel@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf"
+ integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==
+ dependencies:
+ call-bind "^1.0.0"
+ get-intrinsic "^1.0.2"
+ object-inspect "^1.9.0"
+
+signal-exit@^3.0.0, signal-exit@^3.0.2:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c"
+ integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==
+
+simple-swizzle@^0.2.2:
+ version "0.2.2"
+ resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a"
+ integrity sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=
+ dependencies:
+ is-arrayish "^0.3.1"
+
+sisteransi@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed"
+ integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==
+
+slash@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
+ integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
+
+slice-ansi@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b"
+ integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==
+ dependencies:
+ ansi-styles "^4.0.0"
+ astral-regex "^2.0.0"
+ is-fullwidth-code-point "^3.0.0"
+
+snapdragon-node@^2.0.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b"
+ integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==
+ dependencies:
+ define-property "^1.0.0"
+ isobject "^3.0.0"
+ snapdragon-util "^3.0.1"
+
+snapdragon-util@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2"
+ integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==
+ dependencies:
+ kind-of "^3.2.0"
+
+snapdragon@^0.8.1:
+ version "0.8.2"
+ resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d"
+ integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==
+ dependencies:
+ base "^0.11.1"
+ debug "^2.2.0"
+ define-property "^0.2.5"
+ extend-shallow "^2.0.1"
+ map-cache "^0.2.2"
+ source-map "^0.5.6"
+ source-map-resolve "^0.5.0"
+ use "^3.1.0"
+
+sockjs-client@^1.5.0:
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.5.0.tgz#2f8ff5d4b659e0d092f7aba0b7c386bd2aa20add"
+ integrity sha512-8Dt3BDi4FYNrCFGTL/HtwVzkARrENdwOUf1ZoW/9p3M8lZdFT35jVdrHza+qgxuG9H3/shR4cuX/X9umUrjP8Q==
+ dependencies:
+ debug "^3.2.6"
+ eventsource "^1.0.7"
+ faye-websocket "^0.11.3"
+ inherits "^2.0.4"
+ json3 "^3.3.3"
+ url-parse "^1.4.7"
+
+sockjs@^0.3.21:
+ version "0.3.21"
+ resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.21.tgz#b34ffb98e796930b60a0cfa11904d6a339a7d417"
+ integrity sha512-DhbPFGpxjc6Z3I+uX07Id5ZO2XwYsWOrYjaSeieES78cq+JaJvVe5q/m1uvjIQhXinhIeCFRH6JgXe+mvVMyXw==
+ dependencies:
+ faye-websocket "^0.11.3"
+ uuid "^3.4.0"
+ websocket-driver "^0.7.4"
+
+sort-keys@^1.0.0:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad"
+ integrity sha1-RBttTTRnmPG05J6JIK37oOVD+a0=
+ dependencies:
+ is-plain-obj "^1.0.0"
+
+source-list-map@^2.0.0:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34"
+ integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==
+
+source-map-resolve@^0.5.0, source-map-resolve@^0.5.2:
+ version "0.5.3"
+ resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a"
+ integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==
+ dependencies:
+ atob "^2.1.2"
+ decode-uri-component "^0.2.0"
+ resolve-url "^0.2.1"
+ source-map-url "^0.4.0"
+ urix "^0.1.0"
+
+source-map-support@^0.5.6, source-map-support@~0.5.12, source-map-support@~0.5.19:
+ version "0.5.19"
+ resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61"
+ integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==
+ dependencies:
+ buffer-from "^1.0.0"
+ source-map "^0.6.0"
+
+source-map-url@^0.4.0:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56"
+ integrity sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw==
+
+source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1:
+ version "0.6.1"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
+ integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
+
+source-map@^0.5.0, source-map@^0.5.6, source-map@^0.5.7:
+ version "0.5.7"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
+ integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=
+
+source-map@^0.7.3, source-map@~0.7.2:
+ version "0.7.3"
+ resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383"
+ integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==
+
+sourcemap-codec@^1.4.4:
+ version "1.4.8"
+ resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4"
+ integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==
+
+spdx-correct@^3.0.0:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9"
+ integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==
+ dependencies:
+ spdx-expression-parse "^3.0.0"
+ spdx-license-ids "^3.0.0"
+
+spdx-exceptions@^2.1.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d"
+ integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==
+
+spdx-expression-parse@^3.0.0:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679"
+ integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==
+ dependencies:
+ spdx-exceptions "^2.1.0"
+ spdx-license-ids "^3.0.0"
+
+spdx-license-ids@^3.0.0:
+ version "3.0.7"
+ resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz#e9c18a410e5ed7e12442a549fbd8afa767038d65"
+ integrity sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ==
+
+spdy-transport@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31"
+ integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==
+ dependencies:
+ debug "^4.1.0"
+ detect-node "^2.0.4"
+ hpack.js "^2.1.6"
+ obuf "^1.1.2"
+ readable-stream "^3.0.6"
+ wbuf "^1.7.3"
+
+spdy@^4.0.2:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b"
+ integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==
+ dependencies:
+ debug "^4.1.0"
+ handle-thing "^2.0.0"
+ http-deceiver "^1.2.7"
+ select-hose "^2.0.0"
+ spdy-transport "^3.0.0"
+
+split-string@^3.0.1, split-string@^3.0.2:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2"
+ integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==
+ dependencies:
+ extend-shallow "^3.0.0"
+
+sprintf-js@~1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
+ integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=
+
+sshpk@^1.7.0:
+ version "1.16.1"
+ resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877"
+ integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==
+ dependencies:
+ asn1 "~0.2.3"
+ assert-plus "^1.0.0"
+ bcrypt-pbkdf "^1.0.0"
+ dashdash "^1.12.0"
+ ecc-jsbn "~0.1.1"
+ getpass "^0.1.1"
+ jsbn "~0.1.0"
+ safer-buffer "^2.0.2"
+ tweetnacl "~0.14.0"
+
+ssri@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.1.tgz#2a3c41b28dd45b62b63676ecb74001265ae9edd8"
+ integrity sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA==
+ dependencies:
+ figgy-pudding "^3.5.1"
+
+ssri@^8.0.0:
+ version "8.0.1"
+ resolved "https://registry.yarnpkg.com/ssri/-/ssri-8.0.1.tgz#638e4e439e2ffbd2cd289776d5ca457c4f51a2af"
+ integrity sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==
+ dependencies:
+ minipass "^3.1.1"
+
+stable@^0.1.8:
+ version "0.1.8"
+ resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf"
+ integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==
+
+stack-utils@^2.0.2:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.3.tgz#cd5f030126ff116b78ccb3c027fe302713b61277"
+ integrity sha512-gL//fkxfWUsIlFL2Tl42Cl6+HFALEaB1FU76I/Fy+oZjRreP7OPMXFlGbxM7NQsI0ZpUfw76sHnv0WNYuTb7Iw==
+ dependencies:
+ escape-string-regexp "^2.0.0"
+
+stackframe@^1.1.1:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.2.0.tgz#52429492d63c62eb989804c11552e3d22e779303"
+ integrity sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA==
+
+static-extend@^0.1.1:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6"
+ integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=
+ dependencies:
+ define-property "^0.2.5"
+ object-copy "^0.1.0"
+
+"statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0:
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c"
+ integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=
+
+stealthy-require@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b"
+ integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=
+
+stream-browserify@^2.0.1:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b"
+ integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg==
+ dependencies:
+ inherits "~2.0.1"
+ readable-stream "^2.0.2"
+
+stream-each@^1.1.0:
+ version "1.2.3"
+ resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae"
+ integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw==
+ dependencies:
+ end-of-stream "^1.1.0"
+ stream-shift "^1.0.0"
+
+stream-http@^2.7.2:
+ version "2.8.3"
+ resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc"
+ integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw==
+ dependencies:
+ builtin-status-codes "^3.0.0"
+ inherits "^2.0.1"
+ readable-stream "^2.3.6"
+ to-arraybuffer "^1.0.0"
+ xtend "^4.0.0"
+
+stream-shift@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d"
+ integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==
+
+strict-uri-encode@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713"
+ integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM=
+
+string-length@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.1.tgz#4a973bf31ef77c4edbceadd6af2611996985f8a1"
+ integrity sha512-PKyXUd0LK0ePjSOnWn34V2uD6acUWev9uy0Ft05k0E8xRW+SKcA0F7eMr7h5xlzfn+4O3N+55rduYyet3Jk+jw==
+ dependencies:
+ char-regex "^1.0.2"
+ strip-ansi "^6.0.0"
+
+string-natural-compare@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4"
+ integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw==
+
+string-width@^3.0.0, string-width@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961"
+ integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==
+ dependencies:
+ emoji-regex "^7.0.1"
+ is-fullwidth-code-point "^2.0.0"
+ strip-ansi "^5.1.0"
+
+string-width@^4.1.0, string-width@^4.2.0:
+ version "4.2.0"
+ resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
+ integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==
+ dependencies:
+ emoji-regex "^8.0.0"
+ is-fullwidth-code-point "^3.0.0"
+ strip-ansi "^6.0.0"
+
+string.prototype.matchall@^4.0.2:
+ version "4.0.4"
+ resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.4.tgz#608f255e93e072107f5de066f81a2dfb78cf6b29"
+ integrity sha512-pknFIWVachNcyqRfaQSeu/FUfpvJTe4uskUSZ9Wc1RijsPuzbZ8TyYT8WCNnntCjUEqQ3vUHMAfVj2+wLAisPQ==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.18.0-next.2"
+ has-symbols "^1.0.1"
+ internal-slot "^1.0.3"
+ regexp.prototype.flags "^1.3.1"
+ side-channel "^1.0.4"
+
+string.prototype.matchall@^4.0.5:
+ version "4.0.5"
+ resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.5.tgz#59370644e1db7e4c0c045277690cf7b01203c4da"
+ integrity sha512-Z5ZaXO0svs0M2xd/6By3qpeKpLKd9mO4v4q3oMEQrk8Ck4xOD5d5XeBOOjGrmVZZ/AHB1S0CgG4N5r1G9N3E2Q==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+ es-abstract "^1.18.2"
+ get-intrinsic "^1.1.1"
+ has-symbols "^1.0.2"
+ internal-slot "^1.0.3"
+ regexp.prototype.flags "^1.3.1"
+ side-channel "^1.0.4"
+
+string.prototype.trimend@^1.0.1, string.prototype.trimend@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz#a22bd53cca5c7cf44d7c9d5c732118873d6cd18b"
+ integrity sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==
+ dependencies:
+ call-bind "^1.0.0"
+ define-properties "^1.1.3"
+
+string.prototype.trimend@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz#e75ae90c2942c63504686c18b287b4a0b1a45f80"
+ integrity sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+
+string.prototype.trimstart@^1.0.1, string.prototype.trimstart@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz#9b4cb590e123bb36564401d59824298de50fd5aa"
+ integrity sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==
+ dependencies:
+ call-bind "^1.0.0"
+ define-properties "^1.1.3"
+
+string.prototype.trimstart@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz#b36399af4ab2999b4c9c648bd7a3fb2bb26feeed"
+ integrity sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw==
+ dependencies:
+ call-bind "^1.0.2"
+ define-properties "^1.1.3"
+
+string_decoder@^1.0.0, string_decoder@^1.1.1:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
+ integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
+ dependencies:
+ safe-buffer "~5.2.0"
+
+string_decoder@~1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
+ integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
+ dependencies:
+ safe-buffer "~5.1.0"
+
+stringify-object@^3.3.0:
+ version "3.3.0"
+ resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629"
+ integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==
+ dependencies:
+ get-own-enumerable-property-symbols "^3.0.0"
+ is-obj "^1.0.1"
+ is-regexp "^1.0.0"
+
+strip-ansi@6.0.0, strip-ansi@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532"
+ integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==
+ dependencies:
+ ansi-regex "^5.0.0"
+
+strip-ansi@^3.0.0, strip-ansi@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
+ integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=
+ dependencies:
+ ansi-regex "^2.0.0"
+
+strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae"
+ integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==
+ dependencies:
+ ansi-regex "^4.1.0"
+
+strip-bom@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
+ integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=
+
+strip-bom@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878"
+ integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==
+
+strip-comments@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-1.0.2.tgz#82b9c45e7f05873bee53f37168af930aa368679d"
+ integrity sha512-kL97alc47hoyIQSV165tTt9rG5dn4w1dNnBhOQ3bOU1Nc1hel09jnXANaHJ7vzHLd4Ju8kseDGzlev96pghLFw==
+ dependencies:
+ babel-extract-comments "^1.0.0"
+ babel-plugin-transform-object-rest-spread "^6.26.0"
+
+strip-eof@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
+ integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=
+
+strip-final-newline@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad"
+ integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==
+
+strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
+ integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
+
+style-loader@1.3.0:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-1.3.0.tgz#828b4a3b3b7e7aa5847ce7bae9e874512114249e"
+ integrity sha512-V7TCORko8rs9rIqkSrlMfkqA63DfoGBBJmK1kKGCcSi+BWb4cqz0SRsnp4l6rU5iwOEd0/2ePv68SV22VXon4Q==
+ dependencies:
+ loader-utils "^2.0.0"
+ schema-utils "^2.7.0"
+
+style-value-types@4.1.4:
+ version "4.1.4"
+ resolved "https://registry.yarnpkg.com/style-value-types/-/style-value-types-4.1.4.tgz#80f37cb4fb024d6394087403dfb275e8bb627e75"
+ integrity sha512-LCJL6tB+vPSUoxgUBt9juXIlNJHtBMy8jkXzUJSBzeHWdBu6lhzHqCvLVkXFGsFIlNa2ln1sQHya/gzaFmB2Lg==
+ dependencies:
+ hey-listen "^1.0.8"
+ tslib "^2.1.0"
+
+stylehacks@^4.0.0:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-4.0.3.tgz#6718fcaf4d1e07d8a1318690881e8d96726a71d5"
+ integrity sha512-7GlLk9JwlElY4Y6a/rmbH2MhVlTyVmiJd1PfTCqFaIBEGMYNsrO/v3SeGTdhBThLg4Z+NbOk/qFMwCa+J+3p/g==
+ dependencies:
+ browserslist "^4.0.0"
+ postcss "^7.0.0"
+ postcss-selector-parser "^3.0.0"
+
+stylis@^4.0.3:
+ version "4.0.10"
+ resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.0.10.tgz#446512d1097197ab3f02fb3c258358c3f7a14240"
+ integrity sha512-m3k+dk7QeJw660eIKRRn3xPF6uuvHs/FFzjX3HQ5ove0qYsiygoAhwn5a3IYKaZPo5LrYD0rfVmtv1gNY1uYwg==
+
+supports-color@^5.3.0:
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
+ integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
+ dependencies:
+ has-flag "^3.0.0"
+
+supports-color@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3"
+ integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==
+ dependencies:
+ has-flag "^3.0.0"
+
+supports-color@^7.0.0, supports-color@^7.1.0:
+ version "7.2.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da"
+ integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==
+ dependencies:
+ has-flag "^4.0.0"
+
+supports-hyperlinks@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.1.0.tgz#f663df252af5f37c5d49bbd7eeefa9e0b9e59e47"
+ integrity sha512-zoE5/e+dnEijk6ASB6/qrK+oYdm2do1hjoLWrqUC/8WEIW1gbxFcKuBof7sW8ArN6e+AYvsE8HBGiVRWL/F5CA==
+ dependencies:
+ has-flag "^4.0.0"
+ supports-color "^7.0.0"
+
+svg-parser@^2.0.2:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5"
+ integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==
+
+svgo@^1.0.0, svgo@^1.2.2:
+ version "1.3.2"
+ resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167"
+ integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw==
+ dependencies:
+ chalk "^2.4.1"
+ coa "^2.0.2"
+ css-select "^2.0.0"
+ css-select-base-adapter "^0.1.1"
+ css-tree "1.0.0-alpha.37"
+ csso "^4.0.2"
+ js-yaml "^3.13.1"
+ mkdirp "~0.5.1"
+ object.values "^1.1.0"
+ sax "~1.2.4"
+ stable "^0.1.8"
+ unquote "~1.1.1"
+ util.promisify "~1.0.0"
+
+symbol-tree@^3.2.4:
+ version "3.2.4"
+ resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2"
+ integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==
+
+table@^6.0.4:
+ version "6.0.7"
+ resolved "https://registry.yarnpkg.com/table/-/table-6.0.7.tgz#e45897ffbcc1bcf9e8a87bf420f2c9e5a7a52a34"
+ integrity sha512-rxZevLGTUzWna/qBLObOe16kB2RTnnbhciwgPbMMlazz1yZGVEgnZK762xyVdVznhqxrfCeBMmMkgOOaPwjH7g==
+ dependencies:
+ ajv "^7.0.2"
+ lodash "^4.17.20"
+ slice-ansi "^4.0.0"
+ string-width "^4.2.0"
+
+table@^6.0.9:
+ version "6.7.1"
+ resolved "https://registry.yarnpkg.com/table/-/table-6.7.1.tgz#ee05592b7143831a8c94f3cee6aae4c1ccef33e2"
+ integrity sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg==
+ dependencies:
+ ajv "^8.0.1"
+ lodash.clonedeep "^4.5.0"
+ lodash.truncate "^4.4.2"
+ slice-ansi "^4.0.0"
+ string-width "^4.2.0"
+ strip-ansi "^6.0.0"
+
+tapable@^1.0.0, tapable@^1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2"
+ integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==
+
+tar@^6.0.2:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.0.tgz#d1724e9bcc04b977b18d5c573b333a2207229a83"
+ integrity sha512-DUCttfhsnLCjwoDoFcI+B2iJgYa93vBnDUATYEeRx6sntCTdN01VnqsIuTlALXla/LWooNg0yEGeB+Y8WdFxGA==
+ dependencies:
+ chownr "^2.0.0"
+ fs-minipass "^2.0.0"
+ minipass "^3.0.0"
+ minizlib "^2.1.1"
+ mkdirp "^1.0.3"
+ yallist "^4.0.0"
+
+temp-dir@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d"
+ integrity sha1-CnwOom06Oa+n4OvqnB/AvE2qAR0=
+
+tempy@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/tempy/-/tempy-0.3.0.tgz#6f6c5b295695a16130996ad5ab01a8bd726e8bf8"
+ integrity sha512-WrH/pui8YCwmeiAoxV+lpRH9HpRtgBhSR2ViBPgpGb/wnYDzp21R4MN45fsCGvLROvY67o3byhJRYRONJyImVQ==
+ dependencies:
+ temp-dir "^1.0.0"
+ type-fest "^0.3.1"
+ unique-string "^1.0.0"
+
+terminal-link@^2.0.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994"
+ integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==
+ dependencies:
+ ansi-escapes "^4.2.1"
+ supports-hyperlinks "^2.0.0"
+
[email protected]:
+ version "4.2.3"
+ resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-4.2.3.tgz#28daef4a83bd17c1db0297070adc07fc8cfc6a9a"
+ integrity sha512-jTgXh40RnvOrLQNgIkwEKnQ8rmHjHK4u+6UBEi+W+FPmvb+uo+chJXntKe7/3lW5mNysgSWD60KyesnhW8D6MQ==
+ dependencies:
+ cacache "^15.0.5"
+ find-cache-dir "^3.3.1"
+ jest-worker "^26.5.0"
+ p-limit "^3.0.2"
+ schema-utils "^3.0.0"
+ serialize-javascript "^5.0.1"
+ source-map "^0.6.1"
+ terser "^5.3.4"
+ webpack-sources "^1.4.3"
+
+terser-webpack-plugin@^1.4.3:
+ version "1.4.5"
+ resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.5.tgz#a217aefaea330e734ffacb6120ec1fa312d6040b"
+ integrity sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw==
+ dependencies:
+ cacache "^12.0.2"
+ find-cache-dir "^2.1.0"
+ is-wsl "^1.1.0"
+ schema-utils "^1.0.0"
+ serialize-javascript "^4.0.0"
+ source-map "^0.6.1"
+ terser "^4.1.2"
+ webpack-sources "^1.4.0"
+ worker-farm "^1.7.0"
+
+terser@^4.1.2, terser@^4.6.2, terser@^4.6.3:
+ version "4.8.0"
+ resolved "https://registry.yarnpkg.com/terser/-/terser-4.8.0.tgz#63056343d7c70bb29f3af665865a46fe03a0df17"
+ integrity sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw==
+ dependencies:
+ commander "^2.20.0"
+ source-map "~0.6.1"
+ source-map-support "~0.5.12"
+
+terser@^5.3.4:
+ version "5.6.0"
+ resolved "https://registry.yarnpkg.com/terser/-/terser-5.6.0.tgz#138cdf21c5e3100b1b3ddfddf720962f88badcd2"
+ integrity sha512-vyqLMoqadC1uR0vywqOZzriDYzgEkNJFK4q9GeyOBHIbiECHiWLKcWfbQWAUaPfxkjDhapSlZB9f7fkMrvkVjA==
+ dependencies:
+ commander "^2.20.0"
+ source-map "~0.7.2"
+ source-map-support "~0.5.19"
+
+test-exclude@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e"
+ integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==
+ dependencies:
+ "@istanbuljs/schema" "^0.1.2"
+ glob "^7.1.4"
+ minimatch "^3.0.4"
+
[email protected], text-table@^0.2.0:
+ version "0.2.0"
+ resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
+ integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=
+
+throat@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/throat/-/throat-5.0.0.tgz#c5199235803aad18754a667d659b5e72ce16764b"
+ integrity sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA==
+
+through2@^2.0.0:
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd"
+ integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==
+ dependencies:
+ readable-stream "~2.3.6"
+ xtend "~4.0.1"
+
+thunky@^1.0.2:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d"
+ integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==
+
+timers-browserify@^2.0.4:
+ version "2.0.12"
+ resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.12.tgz#44a45c11fbf407f34f97bccd1577c652361b00ee"
+ integrity sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==
+ dependencies:
+ setimmediate "^1.0.4"
+
+timsort@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/timsort/-/timsort-0.3.0.tgz#405411a8e7e6339fe64db9a234de11dc31e02bd4"
+ integrity sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=
+
+tiny-invariant@^1.0.2, tiny-invariant@^1.0.6:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.1.0.tgz#634c5f8efdc27714b7f386c35e6760991d230875"
+ integrity sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw==
+
+tiny-warning@^1.0.0, tiny-warning@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754"
+ integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==
+
[email protected]:
+ version "1.4.2"
+ resolved "https://registry.yarnpkg.com/tinycolor2/-/tinycolor2-1.4.2.tgz#3f6a4d1071ad07676d7fa472e1fac40a719d8803"
+ integrity sha512-vJhccZPs965sV/L2sU4oRQVAos0pQXwsvTLkWYdqJ+a8Q5kPFzJTuOFwy7UniPli44NKQGAglksjvOcpo95aZA==
+
[email protected]:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1"
+ integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=
+
+to-arraybuffer@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43"
+ integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=
+
+to-fast-properties@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e"
+ integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=
+
+to-object-path@^0.3.0:
+ version "0.3.0"
+ resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af"
+ integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=
+ dependencies:
+ kind-of "^3.0.2"
+
+to-regex-range@^2.1.0:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38"
+ integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=
+ dependencies:
+ is-number "^3.0.0"
+ repeat-string "^1.6.1"
+
+to-regex-range@^5.0.1:
+ version "5.0.1"
+ resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
+ integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
+ dependencies:
+ is-number "^7.0.0"
+
+to-regex@^3.0.1, to-regex@^3.0.2:
+ version "3.0.2"
+ resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce"
+ integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==
+ dependencies:
+ define-property "^2.0.2"
+ extend-shallow "^3.0.2"
+ regex-not "^1.0.2"
+ safe-regex "^1.1.0"
+
+toggle-selection@^1.0.6:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/toggle-selection/-/toggle-selection-1.0.6.tgz#6e45b1263f2017fa0acc7d89d78b15b8bf77da32"
+ integrity sha1-bkWxJj8gF/oKzH2J14sVuL932jI=
+
[email protected]:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553"
+ integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==
+
+tough-cookie@^2.3.3, tough-cookie@~2.5.0:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2"
+ integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==
+ dependencies:
+ psl "^1.1.28"
+ punycode "^2.1.1"
+
+tough-cookie@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-3.0.1.tgz#9df4f57e739c26930a018184887f4adb7dca73b2"
+ integrity sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==
+ dependencies:
+ ip-regex "^2.1.0"
+ psl "^1.1.28"
+ punycode "^2.1.1"
+
+tr46@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.0.2.tgz#03273586def1595ae08fedb38d7733cee91d2479"
+ integrity sha512-3n1qG+/5kg+jrbTzwAykB5yRYtQCTqOGKq5U5PE3b0a1/mzo6snDhjGS0zJVJunO0NrT3Dg1MLy5TjWP/UJppg==
+ dependencies:
+ punycode "^2.1.1"
+
+tryer@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8"
+ integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA==
+
[email protected], ts-pnp@^1.1.6:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/ts-pnp/-/ts-pnp-1.2.0.tgz#a500ad084b0798f1c3071af391e65912c86bca92"
+ integrity sha512-csd+vJOb/gkzvcCHgTGSChYpy5f1/XKNsmvBGO4JXS+z1v2HobugDz4s1IeFXM3wZB44uczs+eazB5Q/ccdhQw==
+
+tsconfig-paths@^3.9.0:
+ version "3.9.0"
+ resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz#098547a6c4448807e8fcb8eae081064ee9a3c90b"
+ integrity sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw==
+ dependencies:
+ "@types/json5" "^0.0.29"
+ json5 "^1.0.1"
+ minimist "^1.2.0"
+ strip-bom "^3.0.0"
+
+tslib@^1.0.0, tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3:
+ version "1.14.1"
+ resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
+ integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
+
+tslib@^2.0.3:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a"
+ integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A==
+
+tslib@^2.1.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.0.tgz#803b8cdab3e12ba581a4ca41c8839bbb0dacb09e"
+ integrity sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==
+
+tsutils@^3.17.1:
+ version "3.20.0"
+ resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.20.0.tgz#ea03ea45462e146b53d70ce0893de453ff24f698"
+ integrity sha512-RYbuQuvkhuqVeXweWT3tJLKOEJ/UUw9GjNEZGWdrLLlM+611o1gwLHBpxoFJKKl25fLprp2eVthtKs5JOrNeXg==
+ dependencies:
+ tslib "^1.8.1"
+
[email protected]:
+ version "0.0.0"
+ resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6"
+ integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=
+
+tunnel-agent@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd"
+ integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=
+ dependencies:
+ safe-buffer "^5.0.1"
+
+tweetnacl@^0.14.3, tweetnacl@~0.14.0:
+ version "0.14.5"
+ resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64"
+ integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=
+
+type-check@^0.4.0, type-check@~0.4.0:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1"
+ integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==
+ dependencies:
+ prelude-ls "^1.2.1"
+
+type-check@~0.3.2:
+ version "0.3.2"
+ resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72"
+ integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=
+ dependencies:
+ prelude-ls "~1.1.2"
+
[email protected]:
+ version "4.0.8"
+ resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
+ integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==
+
+type-fest@^0.11.0:
+ version "0.11.0"
+ resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.11.0.tgz#97abf0872310fed88a5c466b25681576145e33f1"
+ integrity sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==
+
+type-fest@^0.20.2:
+ version "0.20.2"
+ resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
+ integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==
+
+type-fest@^0.3.1:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1"
+ integrity sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==
+
+type-fest@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b"
+ integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==
+
+type-fest@^0.8.1:
+ version "0.8.1"
+ resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
+ integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
+
+type-is@~1.6.17, type-is@~1.6.18:
+ version "1.6.18"
+ resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
+ integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
+ dependencies:
+ media-typer "0.3.0"
+ mime-types "~2.1.24"
+
+type@^1.0.1:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0"
+ integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==
+
+type@^2.0.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/type/-/type-2.3.0.tgz#ada7c045f07ead08abf9e2edd29be1a0c0661132"
+ integrity sha512-rgPIqOdfK/4J9FhiVrZ3cveAjRRo5rsQBAIhnylX874y1DX/kEKSVdLsnuHB6l1KTjHyU01VjiMBHgU2adejyg==
+
+typedarray-to-buffer@^3.1.5:
+ version "3.1.5"
+ resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"
+ integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==
+ dependencies:
+ is-typedarray "^1.0.0"
+
+typedarray@^0.0.6:
+ version "0.0.6"
+ resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
+ integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=
+
+unbox-primitive@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.1.tgz#085e215625ec3162574dc8859abee78a59b14471"
+ integrity sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==
+ dependencies:
+ function-bind "^1.1.1"
+ has-bigints "^1.0.1"
+ has-symbols "^1.0.2"
+ which-boxed-primitive "^1.0.2"
+
+unicode-canonical-property-names-ecmascript@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818"
+ integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ==
+
+unicode-match-property-ecmascript@^1.0.4:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c"
+ integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg==
+ dependencies:
+ unicode-canonical-property-names-ecmascript "^1.0.4"
+ unicode-property-aliases-ecmascript "^1.0.4"
+
+unicode-match-property-value-ecmascript@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.2.0.tgz#0d91f600eeeb3096aa962b1d6fc88876e64ea531"
+ integrity sha512-wjuQHGQVofmSJv1uVISKLE5zO2rNGzM/KCYZch/QQvez7C1hUhBIuZ701fYXExuufJFMPhv2SyL8CyoIfMLbIQ==
+
+unicode-property-aliases-ecmascript@^1.0.4:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.1.0.tgz#dd57a99f6207bedff4628abefb94c50db941c8f4"
+ integrity sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg==
+
+union-value@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847"
+ integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==
+ dependencies:
+ arr-union "^3.1.0"
+ get-value "^2.0.6"
+ is-extendable "^0.1.1"
+ set-value "^2.0.1"
+
+uniq@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff"
+ integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=
+
+uniqs@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/uniqs/-/uniqs-2.0.0.tgz#ffede4b36b25290696e6e165d4a59edb998e6b02"
+ integrity sha1-/+3ks2slKQaW5uFl1KWe25mOawI=
+
+unique-filename@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230"
+ integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==
+ dependencies:
+ unique-slug "^2.0.0"
+
+unique-slug@^2.0.0:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c"
+ integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==
+ dependencies:
+ imurmurhash "^0.1.4"
+
+unique-string@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-1.0.0.tgz#9e1057cca851abb93398f8b33ae187b99caec11a"
+ integrity sha1-nhBXzKhRq7kzmPizOuGHuZyuwRo=
+ dependencies:
+ crypto-random-string "^1.0.0"
+
+universalify@^0.1.0:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
+ integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==
+
+universalify@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717"
+ integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==
+
[email protected], unpipe@~1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
+ integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=
+
+unquote@~1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544"
+ integrity sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ=
+
+unset-value@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559"
+ integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=
+ dependencies:
+ has-value "^0.3.1"
+ isobject "^3.0.0"
+
+upath@^1.1.1, upath@^1.1.2, upath@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894"
+ integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==
+
+uri-js@^4.2.2:
+ version "4.4.1"
+ resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e"
+ integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==
+ dependencies:
+ punycode "^2.1.0"
+
+urix@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72"
+ integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=
+
[email protected]:
+ version "4.1.1"
+ resolved "https://registry.yarnpkg.com/url-loader/-/url-loader-4.1.1.tgz#28505e905cae158cf07c92ca622d7f237e70a4e2"
+ integrity sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==
+ dependencies:
+ loader-utils "^2.0.0"
+ mime-types "^2.1.27"
+ schema-utils "^3.0.0"
+
+url-parse@^1.4.3, url-parse@^1.4.7:
+ version "1.5.1"
+ resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.1.tgz#d5fa9890af8a5e1f274a2c98376510f6425f6e3b"
+ integrity sha512-HOfCOUJt7iSYzEx/UqgtwKRMC6EU91NFhsCHMv9oM03VJcVo2Qrp8T8kI9D7amFf1cu+/3CEhgb3rF9zL7k85Q==
+ dependencies:
+ querystringify "^2.1.1"
+ requires-port "^1.0.0"
+
+url@^0.11.0:
+ version "0.11.0"
+ resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1"
+ integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=
+ dependencies:
+ punycode "1.3.2"
+ querystring "0.2.0"
+
+use-callback-ref@^1.2.1, use-callback-ref@^1.2.3:
+ version "1.2.5"
+ resolved "https://registry.yarnpkg.com/use-callback-ref/-/use-callback-ref-1.2.5.tgz#6115ed242cfbaed5915499c0a9842ca2912f38a5"
+ integrity sha512-gN3vgMISAgacF7sqsLPByqoePooY3n2emTH59Ur5d/M8eg4WTWu1xp8i8DHjohftIyEx0S08RiYxbffr4j8Peg==
+
+use-sidecar@^1.0.1:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/use-sidecar/-/use-sidecar-1.0.5.tgz#ffff2a17c1df42e348624b699ba6e5c220527f2b"
+ integrity sha512-k9jnrjYNwN6xYLj1iaGhonDghfvmeTmYjAiGvOr7clwKfPjMXJf4/HOr7oT5tJwYafgp2tG2l3eZEOfoELiMcA==
+ dependencies:
+ detect-node-es "^1.1.0"
+ tslib "^1.9.3"
+
+use@^3.1.0:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f"
+ integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==
+
+util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
+ integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
+
[email protected]:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030"
+ integrity sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA==
+ dependencies:
+ define-properties "^1.1.2"
+ object.getownpropertydescriptors "^2.0.3"
+
+util.promisify@~1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee"
+ integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA==
+ dependencies:
+ define-properties "^1.1.3"
+ es-abstract "^1.17.2"
+ has-symbols "^1.0.1"
+ object.getownpropertydescriptors "^2.1.0"
+
[email protected]:
+ version "0.10.3"
+ resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9"
+ integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk=
+ dependencies:
+ inherits "2.0.1"
+
+util@^0.11.0:
+ version "0.11.1"
+ resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61"
+ integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ==
+ dependencies:
+ inherits "2.0.3"
+
+utila@~0.4:
+ version "0.4.0"
+ resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c"
+ integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw=
+
[email protected]:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
+ integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=
+
+uuid@^3.3.2, uuid@^3.4.0:
+ version "3.4.0"
+ resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
+ integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
+
+uuid@^8.3.0:
+ version "8.3.2"
+ resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
+ integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
+
+v8-compile-cache@^2.0.3:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.2.0.tgz#9471efa3ef9128d2f7c6a7ca39c4dd6b5055b132"
+ integrity sha512-gTpR5XQNKFwOd4clxfnhaqvfqMpqEwr4tOtCyz4MtYZX2JYhfr1JvBFKdS+7K/9rfpZR3VLX+YWBbKoxCgS43Q==
+
+v8-to-istanbul@^7.0.0:
+ version "7.1.0"
+ resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-7.1.0.tgz#5b95cef45c0f83217ec79f8fc7ee1c8b486aee07"
+ integrity sha512-uXUVqNUCLa0AH1vuVxzi+MI4RfxEOKt9pBgKwHbgH7st8Kv2P1m+jvWNnektzBh5QShF3ODgKmUFCf38LnVz1g==
+ dependencies:
+ "@types/istanbul-lib-coverage" "^2.0.1"
+ convert-source-map "^1.6.0"
+ source-map "^0.7.3"
+
+validate-npm-package-license@^3.0.1:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a"
+ integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==
+ dependencies:
+ spdx-correct "^3.0.0"
+ spdx-expression-parse "^3.0.0"
+
+value-equal@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c"
+ integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==
+
+vary@~1.1.2:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
+ integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=
+
+vendors@^1.0.0:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/vendors/-/vendors-1.0.4.tgz#e2b800a53e7a29b93506c3cf41100d16c4c4ad8e"
+ integrity sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w==
+
[email protected]:
+ version "1.10.0"
+ resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400"
+ integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=
+ dependencies:
+ assert-plus "^1.0.0"
+ core-util-is "1.0.2"
+ extsprintf "^1.2.0"
+
+vm-browserify@^1.0.1:
+ version "1.1.2"
+ resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0"
+ integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==
+
+w3c-hr-time@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd"
+ integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==
+ dependencies:
+ browser-process-hrtime "^1.0.0"
+
+w3c-xmlserializer@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a"
+ integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA==
+ dependencies:
+ xml-name-validator "^3.0.0"
+
+walker@^1.0.7, walker@~1.0.5:
+ version "1.0.7"
+ resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb"
+ integrity sha1-L3+bj9ENZ3JisYqITijRlhjgKPs=
+ dependencies:
+ makeerror "1.0.x"
+
+warning@^4.0.3:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/warning/-/warning-4.0.3.tgz#16e9e077eb8a86d6af7d64aa1e05fd85b4678ca3"
+ integrity sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==
+ dependencies:
+ loose-envify "^1.0.0"
+
+watchpack-chokidar2@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/watchpack-chokidar2/-/watchpack-chokidar2-2.0.1.tgz#38500072ee6ece66f3769936950ea1771be1c957"
+ integrity sha512-nCFfBIPKr5Sh61s4LPpy1Wtfi0HE8isJ3d2Yb5/Ppw2P2B/3eVSEBjKfN0fmHJSK14+31KwMKmcrzs2GM4P0Ww==
+ dependencies:
+ chokidar "^2.1.8"
+
+watchpack@^1.7.4:
+ version "1.7.5"
+ resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-1.7.5.tgz#1267e6c55e0b9b5be44c2023aed5437a2c26c453"
+ integrity sha512-9P3MWk6SrKjHsGkLT2KHXdQ/9SNkyoJbabxnKOoJepsvJjJG8uYTR3yTPxPQvNDI3w4Nz1xnE0TLHK4RIVe/MQ==
+ dependencies:
+ graceful-fs "^4.1.2"
+ neo-async "^2.5.0"
+ optionalDependencies:
+ chokidar "^3.4.1"
+ watchpack-chokidar2 "^2.0.1"
+
+wbuf@^1.1.0, wbuf@^1.7.3:
+ version "1.7.3"
+ resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df"
+ integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==
+ dependencies:
+ minimalistic-assert "^1.0.0"
+
+webidl-conversions@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff"
+ integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==
+
+webidl-conversions@^6.1.0:
+ version "6.1.0"
+ resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514"
+ integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w==
+
+webpack-dev-middleware@^3.7.2:
+ version "3.7.3"
+ resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.3.tgz#0639372b143262e2b84ab95d3b91a7597061c2c5"
+ integrity sha512-djelc/zGiz9nZj/U7PTBi2ViorGJXEWo/3ltkPbDyxCXhhEXkW0ce99falaok4TPj+AsxLiXJR0EBOb0zh9fKQ==
+ dependencies:
+ memory-fs "^0.4.1"
+ mime "^2.4.4"
+ mkdirp "^0.5.1"
+ range-parser "^1.2.1"
+ webpack-log "^2.0.0"
+
[email protected]:
+ version "3.11.1"
+ resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.11.1.tgz#c74028bf5ba8885aaf230e48a20e8936ab8511f0"
+ integrity sha512-u4R3mRzZkbxQVa+MBWi2uVpB5W59H3ekZAJsQlKUTdl7Elcah2EhygTPLmeFXybQkf9i2+L0kn7ik9SnXa6ihQ==
+ dependencies:
+ ansi-html "0.0.7"
+ bonjour "^3.5.0"
+ chokidar "^2.1.8"
+ compression "^1.7.4"
+ connect-history-api-fallback "^1.6.0"
+ debug "^4.1.1"
+ del "^4.1.1"
+ express "^4.17.1"
+ html-entities "^1.3.1"
+ http-proxy-middleware "0.19.1"
+ import-local "^2.0.0"
+ internal-ip "^4.3.0"
+ ip "^1.1.5"
+ is-absolute-url "^3.0.3"
+ killable "^1.0.1"
+ loglevel "^1.6.8"
+ opn "^5.5.0"
+ p-retry "^3.0.1"
+ portfinder "^1.0.26"
+ schema-utils "^1.0.0"
+ selfsigned "^1.10.8"
+ semver "^6.3.0"
+ serve-index "^1.9.1"
+ sockjs "^0.3.21"
+ sockjs-client "^1.5.0"
+ spdy "^4.0.2"
+ strip-ansi "^3.0.1"
+ supports-color "^6.1.0"
+ url "^0.11.0"
+ webpack-dev-middleware "^3.7.2"
+ webpack-log "^2.0.0"
+ ws "^6.2.1"
+ yargs "^13.3.2"
+
+webpack-log@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f"
+ integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg==
+ dependencies:
+ ansi-colors "^3.0.0"
+ uuid "^3.3.2"
+
[email protected]:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/webpack-manifest-plugin/-/webpack-manifest-plugin-2.2.0.tgz#19ca69b435b0baec7e29fbe90fb4015de2de4f16"
+ integrity sha512-9S6YyKKKh/Oz/eryM1RyLVDVmy3NSPV0JXMRhZ18fJsq+AwGxUY34X54VNwkzYcEmEkDwNxuEOboCZEebJXBAQ==
+ dependencies:
+ fs-extra "^7.0.0"
+ lodash ">=3.5 <5"
+ object.entries "^1.1.0"
+ tapable "^1.0.0"
+
+webpack-sources@^1.1.0, webpack-sources@^1.3.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1, webpack-sources@^1.4.3:
+ version "1.4.3"
+ resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933"
+ integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==
+ dependencies:
+ source-list-map "^2.0.0"
+ source-map "~0.6.1"
+
[email protected]:
+ version "4.44.2"
+ resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.44.2.tgz#6bfe2b0af055c8b2d1e90ed2cd9363f841266b72"
+ integrity sha512-6KJVGlCxYdISyurpQ0IPTklv+DULv05rs2hseIXer6D7KrUicRDLFb4IUM1S6LUAKypPM/nSiVSuv8jHu1m3/Q==
+ dependencies:
+ "@webassemblyjs/ast" "1.9.0"
+ "@webassemblyjs/helper-module-context" "1.9.0"
+ "@webassemblyjs/wasm-edit" "1.9.0"
+ "@webassemblyjs/wasm-parser" "1.9.0"
+ acorn "^6.4.1"
+ ajv "^6.10.2"
+ ajv-keywords "^3.4.1"
+ chrome-trace-event "^1.0.2"
+ enhanced-resolve "^4.3.0"
+ eslint-scope "^4.0.3"
+ json-parse-better-errors "^1.0.2"
+ loader-runner "^2.4.0"
+ loader-utils "^1.2.3"
+ memory-fs "^0.4.1"
+ micromatch "^3.1.10"
+ mkdirp "^0.5.3"
+ neo-async "^2.6.1"
+ node-libs-browser "^2.2.1"
+ schema-utils "^1.0.0"
+ tapable "^1.1.3"
+ terser-webpack-plugin "^1.4.3"
+ watchpack "^1.7.4"
+ webpack-sources "^1.4.1"
+
+websocket-driver@>=0.5.1, websocket-driver@^0.7.4:
+ version "0.7.4"
+ resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760"
+ integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==
+ dependencies:
+ http-parser-js ">=0.5.1"
+ safe-buffer ">=5.1.0"
+ websocket-extensions ">=0.1.1"
+
+websocket-extensions@>=0.1.1:
+ version "0.1.4"
+ resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42"
+ integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==
+
+whatwg-encoding@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0"
+ integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==
+ dependencies:
+ iconv-lite "0.4.24"
+
[email protected]:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-2.0.4.tgz#dde6a5df315f9d39991aa17621853d720b85566f"
+ integrity sha512-dcQ1GWpOD/eEQ97k66aiEVpNnapVj90/+R+SXTPYGHpYBBypfKJEQjLrvMZ7YXbKm21gXd4NcuxUTjiv1YtLng==
+
+whatwg-fetch@^3.4.1:
+ version "3.6.1"
+ resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.1.tgz#93bc4005af6c2cc30ba3e42ec3125947c8f54ed3"
+ integrity sha512-IEmN/ZfmMw6G1hgZpVd0LuZXOQDisrMOZrzYd5x3RAK4bMPlJohKUZWZ9t/QsTvH0dV9TbPDcc2OSuIDcihnHA==
+
+whatwg-mimetype@^2.3.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf"
+ integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==
+
+whatwg-url@^8.0.0:
+ version "8.4.0"
+ resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.4.0.tgz#50fb9615b05469591d2b2bd6dfaed2942ed72837"
+ integrity sha512-vwTUFf6V4zhcPkWp/4CQPr1TW9Ml6SF4lVyaIMBdJw5i6qUUJ1QWM4Z6YYVkfka0OUIzVo/0aNtGVGk256IKWw==
+ dependencies:
+ lodash.sortby "^4.7.0"
+ tr46 "^2.0.2"
+ webidl-conversions "^6.1.0"
+
+which-boxed-primitive@^1.0.2:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6"
+ integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==
+ dependencies:
+ is-bigint "^1.0.1"
+ is-boolean-object "^1.1.0"
+ is-number-object "^1.0.4"
+ is-string "^1.0.5"
+ is-symbol "^1.0.3"
+
+which-module@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
+ integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
+
+which@^1.2.9, which@^1.3.1:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
+ integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
+ dependencies:
+ isexe "^2.0.0"
+
+which@^2.0.1, which@^2.0.2:
+ version "2.0.2"
+ resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1"
+ integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==
+ dependencies:
+ isexe "^2.0.0"
+
+word-wrap@^1.2.3, word-wrap@~1.2.3:
+ version "1.2.3"
+ resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c"
+ integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==
+
+workbox-background-sync@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-background-sync/-/workbox-background-sync-5.1.4.tgz#5ae0bbd455f4e9c319e8d827c055bb86c894fd12"
+ integrity sha512-AH6x5pYq4vwQvfRDWH+vfOePfPIYQ00nCEB7dJRU1e0n9+9HMRyvI63FlDvtFT2AvXVRsXvUt7DNMEToyJLpSA==
+ dependencies:
+ workbox-core "^5.1.4"
+
+workbox-broadcast-update@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-broadcast-update/-/workbox-broadcast-update-5.1.4.tgz#0eeb89170ddca7f6914fa3523fb14462891f2cfc"
+ integrity sha512-HTyTWkqXvHRuqY73XrwvXPud/FN6x3ROzkfFPsRjtw/kGZuZkPzfeH531qdUGfhtwjmtO/ZzXcWErqVzJNdXaA==
+ dependencies:
+ workbox-core "^5.1.4"
+
+workbox-build@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-build/-/workbox-build-5.1.4.tgz#23d17ed5c32060c363030c8823b39d0eabf4c8c7"
+ integrity sha512-xUcZn6SYU8usjOlfLb9Y2/f86Gdo+fy1fXgH8tJHjxgpo53VVsqRX0lUDw8/JuyzNmXuo8vXX14pXX2oIm9Bow==
+ dependencies:
+ "@babel/core" "^7.8.4"
+ "@babel/preset-env" "^7.8.4"
+ "@babel/runtime" "^7.8.4"
+ "@hapi/joi" "^15.1.0"
+ "@rollup/plugin-node-resolve" "^7.1.1"
+ "@rollup/plugin-replace" "^2.3.1"
+ "@surma/rollup-plugin-off-main-thread" "^1.1.1"
+ common-tags "^1.8.0"
+ fast-json-stable-stringify "^2.1.0"
+ fs-extra "^8.1.0"
+ glob "^7.1.6"
+ lodash.template "^4.5.0"
+ pretty-bytes "^5.3.0"
+ rollup "^1.31.1"
+ rollup-plugin-babel "^4.3.3"
+ rollup-plugin-terser "^5.3.1"
+ source-map "^0.7.3"
+ source-map-url "^0.4.0"
+ stringify-object "^3.3.0"
+ strip-comments "^1.0.2"
+ tempy "^0.3.0"
+ upath "^1.2.0"
+ workbox-background-sync "^5.1.4"
+ workbox-broadcast-update "^5.1.4"
+ workbox-cacheable-response "^5.1.4"
+ workbox-core "^5.1.4"
+ workbox-expiration "^5.1.4"
+ workbox-google-analytics "^5.1.4"
+ workbox-navigation-preload "^5.1.4"
+ workbox-precaching "^5.1.4"
+ workbox-range-requests "^5.1.4"
+ workbox-routing "^5.1.4"
+ workbox-strategies "^5.1.4"
+ workbox-streams "^5.1.4"
+ workbox-sw "^5.1.4"
+ workbox-window "^5.1.4"
+
+workbox-cacheable-response@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-cacheable-response/-/workbox-cacheable-response-5.1.4.tgz#9ff26e1366214bdd05cf5a43da9305b274078a54"
+ integrity sha512-0bfvMZs0Of1S5cdswfQK0BXt6ulU5kVD4lwer2CeI+03czHprXR3V4Y8lPTooamn7eHP8Iywi5QjyAMjw0qauA==
+ dependencies:
+ workbox-core "^5.1.4"
+
+workbox-core@^5.1.3, workbox-core@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-core/-/workbox-core-5.1.4.tgz#8bbfb2362ecdff30e25d123c82c79ac65d9264f4"
+ integrity sha512-+4iRQan/1D8I81nR2L5vcbaaFskZC2CL17TLbvWVzQ4qiF/ytOGF6XeV54pVxAvKUtkLANhk8TyIUMtiMw2oDg==
+
+workbox-expiration@^5.1.3, workbox-expiration@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-expiration/-/workbox-expiration-5.1.4.tgz#92b5df461e8126114943a3b15c55e4ecb920b163"
+ integrity sha512-oDO/5iC65h2Eq7jctAv858W2+CeRW5e0jZBMNRXpzp0ZPvuT6GblUiHnAsC5W5lANs1QS9atVOm4ifrBiYY7AQ==
+ dependencies:
+ workbox-core "^5.1.4"
+
+workbox-google-analytics@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-google-analytics/-/workbox-google-analytics-5.1.4.tgz#b3376806b1ac7d7df8418304d379707195fa8517"
+ integrity sha512-0IFhKoEVrreHpKgcOoddV+oIaVXBFKXUzJVBI+nb0bxmcwYuZMdteBTp8AEDJacENtc9xbR0wa9RDCnYsCDLjA==
+ dependencies:
+ workbox-background-sync "^5.1.4"
+ workbox-core "^5.1.4"
+ workbox-routing "^5.1.4"
+ workbox-strategies "^5.1.4"
+
+workbox-navigation-preload@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-navigation-preload/-/workbox-navigation-preload-5.1.4.tgz#30d1b720d26a05efc5fa11503e5cc1ed5a78902a"
+ integrity sha512-Wf03osvK0wTflAfKXba//QmWC5BIaIZARU03JIhAEO2wSB2BDROWI8Q/zmianf54kdV7e1eLaIEZhth4K4MyfQ==
+ dependencies:
+ workbox-core "^5.1.4"
+
+workbox-precaching@^5.1.3, workbox-precaching@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-precaching/-/workbox-precaching-5.1.4.tgz#874f7ebdd750dd3e04249efae9a1b3f48285fe6b"
+ integrity sha512-gCIFrBXmVQLFwvAzuGLCmkUYGVhBb7D1k/IL7pUJUO5xacjLcFUaLnnsoVepBGAiKw34HU1y/YuqvTKim9qAZA==
+ dependencies:
+ workbox-core "^5.1.4"
+
+workbox-range-requests@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-range-requests/-/workbox-range-requests-5.1.4.tgz#7066a12c121df65bf76fdf2b0868016aa2bab859"
+ integrity sha512-1HSujLjgTeoxHrMR2muDW2dKdxqCGMc1KbeyGcmjZZAizJTFwu7CWLDmLv6O1ceWYrhfuLFJO+umYMddk2XMhw==
+ dependencies:
+ workbox-core "^5.1.4"
+
+workbox-routing@^5.1.3, workbox-routing@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-routing/-/workbox-routing-5.1.4.tgz#3e8cd86bd3b6573488d1a2ce7385e547b547e970"
+ integrity sha512-8ljknRfqE1vEQtnMtzfksL+UXO822jJlHTIR7+BtJuxQ17+WPZfsHqvk1ynR/v0EHik4x2+826Hkwpgh4GKDCw==
+ dependencies:
+ workbox-core "^5.1.4"
+
+workbox-strategies@^5.1.3, workbox-strategies@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-strategies/-/workbox-strategies-5.1.4.tgz#96b1418ccdfde5354612914964074d466c52d08c"
+ integrity sha512-VVS57LpaJTdjW3RgZvPwX0NlhNmscR7OQ9bP+N/34cYMDzXLyA6kqWffP6QKXSkca1OFo/v6v7hW7zrrguo6EA==
+ dependencies:
+ workbox-core "^5.1.4"
+ workbox-routing "^5.1.4"
+
+workbox-streams@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-streams/-/workbox-streams-5.1.4.tgz#05754e5e3667bdc078df2c9315b3f41210d8cac0"
+ integrity sha512-xU8yuF1hI/XcVhJUAfbQLa1guQUhdLMPQJkdT0kn6HP5CwiPOGiXnSFq80rAG4b1kJUChQQIGPrq439FQUNVrw==
+ dependencies:
+ workbox-core "^5.1.4"
+ workbox-routing "^5.1.4"
+
+workbox-sw@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-sw/-/workbox-sw-5.1.4.tgz#2bb34c9f7381f90d84cef644816d45150011d3db"
+ integrity sha512-9xKnKw95aXwSNc8kk8gki4HU0g0W6KXu+xks7wFuC7h0sembFnTrKtckqZxbSod41TDaGh+gWUA5IRXrL0ECRA==
+
[email protected]:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-webpack-plugin/-/workbox-webpack-plugin-5.1.4.tgz#7bfe8c16e40fe9ed8937080ac7ae9c8bde01e79c"
+ integrity sha512-PZafF4HpugZndqISi3rZ4ZK4A4DxO8rAqt2FwRptgsDx7NF8TVKP86/huHquUsRjMGQllsNdn4FNl8CD/UvKmQ==
+ dependencies:
+ "@babel/runtime" "^7.5.5"
+ fast-json-stable-stringify "^2.0.0"
+ source-map-url "^0.4.0"
+ upath "^1.1.2"
+ webpack-sources "^1.3.0"
+ workbox-build "^5.1.4"
+
+workbox-window@^5.1.4:
+ version "5.1.4"
+ resolved "https://registry.yarnpkg.com/workbox-window/-/workbox-window-5.1.4.tgz#2740f7dea7f93b99326179a62f1cc0ca2c93c863"
+ integrity sha512-vXQtgTeMCUq/4pBWMfQX8Ee7N2wVC4Q7XYFqLnfbXJ2hqew/cU1uMTD2KqGEgEpE4/30luxIxgE+LkIa8glBYw==
+ dependencies:
+ workbox-core "^5.1.4"
+
+worker-farm@^1.7.0:
+ version "1.7.0"
+ resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8"
+ integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw==
+ dependencies:
+ errno "~0.1.7"
+
+worker-rpc@^0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/worker-rpc/-/worker-rpc-0.1.1.tgz#cb565bd6d7071a8f16660686051e969ad32f54d5"
+ integrity sha512-P1WjMrUB3qgJNI9jfmpZ/htmBEjFh//6l/5y8SD9hg1Ef5zTTVVoRjTrTEzPrNBQvmhMxkoTsjOXN10GWU7aCg==
+ dependencies:
+ microevent.ts "~0.1.1"
+
+wrap-ansi@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09"
+ integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==
+ dependencies:
+ ansi-styles "^3.2.0"
+ string-width "^3.0.0"
+ strip-ansi "^5.0.0"
+
+wrap-ansi@^6.2.0:
+ version "6.2.0"
+ resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
+ integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
+ dependencies:
+ ansi-styles "^4.0.0"
+ string-width "^4.1.0"
+ strip-ansi "^6.0.0"
+
+wrap-ansi@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
+ integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
+ dependencies:
+ ansi-styles "^4.0.0"
+ string-width "^4.1.0"
+ strip-ansi "^6.0.0"
+
+wrappy@1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
+ integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
+
+write-file-atomic@^3.0.0:
+ version "3.0.3"
+ resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8"
+ integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==
+ dependencies:
+ imurmurhash "^0.1.4"
+ is-typedarray "^1.0.0"
+ signal-exit "^3.0.2"
+ typedarray-to-buffer "^3.1.5"
+
+ws@^6.2.1:
+ version "6.2.1"
+ resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb"
+ integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==
+ dependencies:
+ async-limiter "~1.0.0"
+
+ws@^7.2.3:
+ version "7.4.3"
+ resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.3.tgz#1f9643de34a543b8edb124bdcbc457ae55a6e5cd"
+ integrity sha512-hr6vCR76GsossIRsr8OLR9acVVm1jyfEWvhbNjtgPOrfvAlKzvyeg/P6r8RuDjRyrcQoPQT7K0DGEPc7Ae6jzA==
+
+xml-name-validator@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a"
+ integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==
+
+xmlchars@^2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb"
+ integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==
+
[email protected]:
+ version "1.8.0"
+ resolved "https://registry.yarnpkg.com/xmlhttprequest/-/xmlhttprequest-1.8.0.tgz#67fe075c5c24fef39f9d65f5f7b7fe75171968fc"
+ integrity sha1-Z/4HXFwk/vOfnWX197f+dRcZaPw=
+
+xtend@^4.0.0, xtend@~4.0.1:
+ version "4.0.2"
+ resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
+ integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
+
+y18n@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4"
+ integrity sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==
+
+y18n@^5.0.5:
+ version "5.0.8"
+ resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55"
+ integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==
+
+yallist@^3.0.2:
+ version "3.1.1"
+ resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
+ integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
+
+yallist@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72"
+ integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
+
+yaml@^1.10.0, yaml@^1.7.2:
+ version "1.10.0"
+ resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.0.tgz#3b593add944876077d4d683fee01081bd9fff31e"
+ integrity sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg==
+
+yargs-parser@^13.1.2:
+ version "13.1.2"
+ resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38"
+ integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==
+ dependencies:
+ camelcase "^5.0.0"
+ decamelize "^1.2.0"
+
+yargs-parser@^18.1.2:
+ version "18.1.3"
+ resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
+ integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
+ dependencies:
+ camelcase "^5.0.0"
+ decamelize "^1.2.0"
+
+yargs-parser@^20.2.2:
+ version "20.2.9"
+ resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee"
+ integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==
+
+yargs@^13.3.2:
+ version "13.3.2"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
+ integrity sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==
+ dependencies:
+ cliui "^5.0.0"
+ find-up "^3.0.0"
+ get-caller-file "^2.0.1"
+ require-directory "^2.1.1"
+ require-main-filename "^2.0.0"
+ set-blocking "^2.0.0"
+ string-width "^3.0.0"
+ which-module "^2.0.0"
+ y18n "^4.0.0"
+ yargs-parser "^13.1.2"
+
+yargs@^15.4.1:
+ version "15.4.1"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8"
+ integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==
+ dependencies:
+ cliui "^6.0.0"
+ decamelize "^1.2.0"
+ find-up "^4.1.0"
+ get-caller-file "^2.0.1"
+ require-directory "^2.1.1"
+ require-main-filename "^2.0.0"
+ set-blocking "^2.0.0"
+ string-width "^4.2.0"
+ which-module "^2.0.0"
+ y18n "^4.0.0"
+ yargs-parser "^18.1.2"
+
+yargs@^16.1.1:
+ version "16.2.0"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66"
+ integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==
+ dependencies:
+ cliui "^7.0.2"
+ escalade "^3.1.1"
+ get-caller-file "^2.0.5"
+ require-directory "^2.1.1"
+ string-width "^4.2.0"
+ y18n "^5.0.5"
+ yargs-parser "^20.2.2"
+
+yocto-queue@^0.1.0:
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
+ integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==
| Error with ```PredictBatchEnd``` while running as a package
There was a JSON serialization error while testing out the ```PredictBatchEnd``` submodule.
It seems an array was being passed while converting the metadata to JSON before pushing it to the database; a sketch of the kind of conversion that avoids this is shown below.
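A minimal sketch of such a conversion, assuming the offending values are NumPy arrays or scalars inside the Keras `logs` dict (the helper name `to_json_safe` is illustrative, not from the repository):

```python
import json

import numpy as np


def to_json_safe(data: dict) -> dict:
    # Convert NumPy arrays and scalars into plain Python types json.dumps accepts
    safe = {}
    for key, value in data.items():
        if isinstance(value, np.ndarray):
            safe[key] = value.tolist()
        elif isinstance(value, np.generic):
            safe[key] = value.item()
        else:
            safe[key] = value
    return safe


# json.dumps(np.float32(0.5)) raises TypeError; the sanitized dict serializes fine
print(json.dumps(to_json_safe({"loss": np.float32(0.5), "outputs": np.zeros(2)})))
```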
| 2021-08-06T15:38:08 | 0.0 | [] | [] |
|||
Rishit-dagli/TF-Watcher | Rishit-dagli__TF-Watcher-44 | a43136c0e0ddaa454a48ffef4b1d301f9142e5f1 | diff --git a/tfwatcher/__init__.py b/tfwatcher/__init__.py
index 791e03c..65fc086 100644
--- a/tfwatcher/__init__.py
+++ b/tfwatcher/__init__.py
@@ -1,4 +1,1 @@
from .callbacks import epoch, predict, predict_batch, train_batch
-from .version import __version__
-
-__all__ = ["firebase_config", "firebase_helpers", "callbacks"]
diff --git a/tfwatcher/callbacks/__init__.py b/tfwatcher/callbacks/__init__.py
index c7aa8e4..791e03c 100644
--- a/tfwatcher/callbacks/__init__.py
+++ b/tfwatcher/callbacks/__init__.py
@@ -1,7 +1,4 @@
-from .epoch import EpochEnd
-from .predict import PredictEnd
-from .predict_batch import PredictBatchEnd
-from .test_batch import TestBatchEnd
-from .train_batch import TrainBatchEnd
+from .callbacks import epoch, predict, predict_batch, train_batch
+from .version import __version__
-__all__ = ["epoch", "predict", "predict_batch", "test_batch", "train_batch"]
+__all__ = ["firebase_config", "firebase_helpers", "callbacks"]
| Perform Tests on Package
Perform tests on the ```tfwatcher.callbacks``` package; a smoke-test sketch is given below.
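A minimal smoke-test sketch (class names are taken from the diffs in this dataset; the test module itself is hypothetical and assumes pytest as the runner):

```python
# test_callbacks_smoke.py -- illustrative test module, not from the repository
from tfwatcher.callbacks.epoch import EpochEnd
from tfwatcher.callbacks.predict import PredictEnd
from tfwatcher.callbacks.predict_batch import PredictBatchEnd
from tfwatcher.callbacks.train_batch import TrainBatchEnd


def test_callbacks_are_constructible():
    # Every callback should build with its default arguments
    for callback_cls in (EpochEnd, PredictEnd, PredictBatchEnd, TrainBatchEnd):
        assert callback_cls() is not None
```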
| 2021-08-06T04:47:03 | 0.0 | [] | [] |
|||
Rishit-dagli/TF-Watcher | Rishit-dagli__TF-Watcher-35 | a305b2c53cf6fee6b607bb6f163af54a63d5a5ab | diff --git a/tfwatcher/callbacks/epoch.py b/tfwatcher/callbacks/epoch.py
index 520e41c..ad58559 100644
--- a/tfwatcher/callbacks/epoch.py
+++ b/tfwatcher/callbacks/epoch.py
@@ -56,9 +56,10 @@ def on_epoch_end(self, epoch: int, logs: dict = None):
# Since we have similar logging code use the fact that if first argument of and is False Python doesn't
# execute the second argument
- if (self.is_int and ((epoch + 1) % self.schedule == 0)) or (
- self.is_list and ((epoch + 1) in self.schedule)
- ):
+ if (
+ (self.is_int and ((epoch + 1) % self.schedule == 0))
+ or (self.is_list and ((epoch + 1) in self.schedule))
+ ) or (epoch == 0):
data = logs
data["epoch"] = epoch
data["batch"] = False
diff --git a/tfwatcher/callbacks/predict_batch.py b/tfwatcher/callbacks/predict_batch.py
index 2913b50..2ffc0e0 100644
--- a/tfwatcher/callbacks/predict_batch.py
+++ b/tfwatcher/callbacks/predict_batch.py
@@ -56,9 +56,10 @@ def on_predict_batch_end(self, batch: int, logs: dict = None):
# Since we have similar logging code use the fact that if first argument of and is False Python doesn't
# execute the second argument
- if (self.is_int and ((batch + 1) % self.schedule == 0)) or (
- self.is_list and ((batch + 1) in self.schedule)
- ):
+ if (
+ (self.is_int and ((batch + 1) % self.schedule == 0))
+ or (self.is_list and ((batch + 1) in self.schedule))
+ ) or (batch == 0):
data = logs
data["batch"] = batch
data["epoch"] = False
diff --git a/tfwatcher/callbacks/train_batch.py b/tfwatcher/callbacks/train_batch.py
index d76ded6..a8a1cb0 100644
--- a/tfwatcher/callbacks/train_batch.py
+++ b/tfwatcher/callbacks/train_batch.py
@@ -56,9 +56,11 @@ def on_train_batch_end(self, batch: int, logs: dict = None):
# Since we have similar logging code use the fact that if first argument of and is False Python doesn't
# execute the second argument
- if (self.is_int and ((batch + 1) % self.schedule == 0)) or (
- self.is_list and ((batch + 1) in self.schedule)
- ):
+ if (
+ (self.is_int and ((batch + 1) % self.schedule == 0))
+ or (self.is_list and ((batch + 1) in self.schedule))
+ ) or (batch == 0):
+
data = logs
data["batch"] = batch
data["epoch"] = False
| Strictly log on batch/epoch 0
As of now, if we set `schedule` to 3, say, logging starts only from epoch or batch 3, whereas epoch or batch 0 should also be logged; see the sketch below.
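A minimal sketch of the intended scheduling predicate, mirroring the condition in the diff above (the function name `should_log` is illustrative):

```python
from typing import Union


def should_log(index: int, schedule: Union[int, list]) -> bool:
    # Always log the first batch/epoch, then honour the schedule
    if index == 0:
        return True
    if isinstance(schedule, int):
        return (index + 1) % schedule == 0
    return (index + 1) in schedule


# With schedule=3, batches/epochs 0, 2, 5, ... are logged
assert [i for i in range(7) if should_log(i, 3)] == [0, 2, 5]
```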
| 2021-08-04T16:47:50 | 0.0 | [] | [] |
|||
Rishit-dagli/TF-Watcher | Rishit-dagli__TF-Watcher-29 | 73e92dd3289b83d6ba5fa53944732ce51efa5861 | diff --git a/setup.py b/setup.py
index 0786c46..a6f35a7 100644
--- a/setup.py
+++ b/setup.py
@@ -52,6 +52,6 @@ def get_version(rel_path: str) -> str:
author_email="[email protected]",
install_requires=["tensorflow >= 2.2.0", "pyrebase ~= 3.0.27"],
extras_require={
- "dev": ["check-manifest", "twine", "numpy", "black"],
+ "dev": ["check-manifest", "twine", "numpy", "black", "isort"],
},
)
diff --git a/tfwatcher/callbacks/epoch.py b/tfwatcher/callbacks/epoch.py
index 8bd8485..5d0f931 100644
--- a/tfwatcher/callbacks/epoch.py
+++ b/tfwatcher/callbacks/epoch.py
@@ -3,6 +3,8 @@
import tensorflow as tf
+from ..firebase_helpers import random_char, write_in_callback
+
class EpochEnd(tf.keras.callbacks.Callback):
def __init__(self, schedule: Union[int, list] = 1, round_time: int = 2):
@@ -13,6 +15,9 @@ def __init__(self, schedule: Union[int, list] = 1, round_time: int = 2):
self.times = list()
self.round_time = round_time
+ self.ref_id = random_char(7)
+ print(f"Use this ID to monitor training for this session: {self.ref_id}")
+
self.is_int = False
self.is_list = False
if isinstance(self.schedule, int):
@@ -53,3 +58,5 @@ def on_epoch_end(self, epoch: int, logs: dict = None):
data["batch"] = False
data["avg_time"] = round(mean(self.times), self.round_time)
self.times = list()
+
+ write_in_callback(data=data, ref_id=self.ref_id)
diff --git a/tfwatcher/callbacks/predict.py b/tfwatcher/callbacks/predict.py
index 7e68732..a44acd6 100644
--- a/tfwatcher/callbacks/predict.py
+++ b/tfwatcher/callbacks/predict.py
@@ -1,5 +1,7 @@
import tensorflow as tf
+from ..firebase_helpers import random_char, write_in_callback
+
class PredictEnd(tf.keras.callbacks.Callback):
def __init__(self, round_time: int = 2):
@@ -9,6 +11,9 @@ def __init__(self, round_time: int = 2):
self.end_time = None
self.time = None
+ self.ref_id = random_char(7)
+ print(f"Use this ID to monitor training for this session: {self.ref_id}")
+
def on_predict_begin(self, logs=None):
self.start_time = tf.timestamp()
@@ -23,3 +28,5 @@ def on_predict_end(self, logs=None):
data["epoch"] = False
data["batch"] = False
data["avg_time"] = self.time
+
+ write_in_callback(data=data, ref_id=self.ref_id)
diff --git a/tfwatcher/callbacks/predict_batch.py b/tfwatcher/callbacks/predict_batch.py
index bda7b5e..974da21 100644
--- a/tfwatcher/callbacks/predict_batch.py
+++ b/tfwatcher/callbacks/predict_batch.py
@@ -3,6 +3,8 @@
import tensorflow as tf
+from ..firebase_helpers import random_char, write_in_callback
+
class PredictBatchEnd(tf.keras.callbacks.Callback):
def __init__(self, schedule: Union[int, list] = 1, round_time: int = 2):
@@ -13,6 +15,9 @@ def __init__(self, schedule: Union[int, list] = 1, round_time: int = 2):
self.times = list()
self.round_time = round_time
+ self.ref_id = random_char(7)
+ print(f"Use this ID to monitor training for this session: {self.ref_id}")
+
self.is_int = False
self.is_list = False
if isinstance(self.schedule, int):
@@ -53,3 +58,5 @@ def on_predict_batch_end(self, batch: int, logs: dict = None):
data["epoch"] = False
data["avg_time"] = round(mean(self.times), self.round_time)
self.times = list()
+
+ write_in_callback(data=data, ref_id=self.ref_id)
diff --git a/tfwatcher/callbacks/train_batch.py b/tfwatcher/callbacks/train_batch.py
index 8fe7bd5..6da5169 100644
--- a/tfwatcher/callbacks/train_batch.py
+++ b/tfwatcher/callbacks/train_batch.py
@@ -3,6 +3,8 @@
import tensorflow as tf
+from ..firebase_helpers import random_char, write_in_callback
+
class TrainBatchEnd(tf.keras.callbacks.Callback):
def __init__(self, schedule: Union[int, list] = 1, round_time: int = 2):
@@ -13,6 +15,9 @@ def __init__(self, schedule: Union[int, list] = 1, round_time: int = 2):
self.times = list()
self.round_time = round_time
+ self.ref_id = random_char(7)
+ print(f"Use this ID to monitor training for this session: {self.ref_id}")
+
self.is_int = False
self.is_list = False
if isinstance(self.schedule, int):
@@ -53,3 +58,5 @@ def on_train_batch_end(self, batch: int, logs: dict = None):
data["epoch"] = False
data["avg_time"] = round(mean(self.times), self.round_time)
self.times = list()
+
+ write_in_callback(data=data, ref_id=self.ref_id)
diff --git a/tfwatcher/firebase_helpers.py b/tfwatcher/firebase_helpers.py
index 9444d04..eec5039 100644
--- a/tfwatcher/firebase_helpers.py
+++ b/tfwatcher/firebase_helpers.py
@@ -1,3 +1,6 @@
+import random
+import string
+
import pyrebase
from .firebase_config import get_firebase_config
@@ -12,3 +15,18 @@ def write_to_firebase(data: dict, ref_id: str, level: str):
log_db.child(ref_id).child(1).push(data)
else:
log_db.child(ref_id).child(data[level]).push(data)
+
+
+def write_in_callback(data: dict, ref_id: str):
+ if data["epoch"]:
+ level = "epoch"
+ elif data["batch"]:
+ level = "batch"
+ else:
+ level = "prediction"
+
+ write_to_firebase(data=data, ref_id=ref_id, level=level)
+
+
+def random_char(y: int) -> str:
+ return "".join(random.choice(string.ascii_letters) for _ in range(y))
| Write data to Firebase in each callback class
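For context, a minimal sketch of how the helpers wired up in this patch are meant to be used from a callback (names are taken from the diff; the sample `data` dict is illustrative):

```python
from tfwatcher.firebase_helpers import random_char, write_in_callback

ref_id = random_char(7)  # per-session ID, printed so the user can monitor training
data = {"loss": 0.12, "epoch": 3, "batch": False, "avg_time": 0.05}
write_in_callback(data=data, ref_id=ref_id)  # routed to the "epoch" level
```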
| 2021-08-04T10:20:17 | 0.0 | [] | [] |
|||
Rishit-dagli/TF-Watcher | Rishit-dagli__TF-Watcher-24 | 040989dae6c7a563ad10d98c7e0f254cfd036d0f | diff --git a/setup.py b/setup.py
index 4c3b041..866aa1d 100644
--- a/setup.py
+++ b/setup.py
@@ -1,13 +1,31 @@
+import os.path
+
from setuptools import setup
-exec(open("tfwatcher/version.py").read())
-with open("README.md", "r") as fh:
- long_description = fh.read()
+def read(rel_path: str) -> str:
+ here = os.path.abspath(os.path.dirname(__file__))
+ # intentionally *not* adding an encoding option to open
+ with open(os.path.join(here, rel_path)) as fp:
+ return fp.read()
+
+
+def get_version(rel_path: str) -> str:
+ for line in read(rel_path).splitlines():
+ if line.startswith("__version__"):
+ # __version__ = "0.9"
+ delim = '"' if '"' in line else "'"
+ return line.split(delim)[1]
+ raise RuntimeError("Unable to find version string.")
+
+
+this_directory = os.path.abspath(os.path.dirname(__file__))
+with open(os.path.join(this_directory, "README.md"), encoding="utf-8") as f:
+ long_description = f.read()
setup(
name="tf-watcher",
- version=__version__,
+ version=get_version("tfwatcher/version.py"),
description="Monitor your TensorFlow model training on mobile devices, especially for Google Colab",
packages=["tfwatcher"],
long_description=long_description,
@@ -32,9 +50,7 @@
url="https://github.com/Rishit-dagli/TF-Watcher/",
author="Rishit Dagli",
author_email="[email protected]",
- install_requires=[
- "tensorflow >= 2.2.0",
- ],
+ install_requires=["tensorflow >= 2.2.0", "pyrebase ~= 3.0.0"],
extras_require={
"dev": ["check-manifest", "twine", "numpy", "black"],
},
diff --git a/tfwatcher/callbacks/__init__.py b/tfwatcher/callbacks/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tfwatcher/callbacks/epoch.py b/tfwatcher/callbacks/epoch.py
new file mode 100644
index 0000000..96fdbdb
--- /dev/null
+++ b/tfwatcher/callbacks/epoch.py
@@ -0,0 +1,54 @@
+from statistics import mean
+from typing import Union
+
+import tensorflow as tf
+
+
+class EpochEnd(tf.keras.callbacks.Callback):
+ def __init__(self, schedule: Union[int, list], round_time: int = 2):
+ super(EpochEnd, self).__init__()
+ self.schedule = schedule
+ self.start_time = None
+ self.end_time = None
+ self.times = list()
+ self.round_time = round_time
+
+ self.is_int = False
+ self.is_list = False
+ if isinstance(self.schedule, int):
+ self.is_int = True
+ elif isinstance(self.schedule, list):
+ self.is_list = True
+ else:
+ raise ValueError("schedule should either be an integer or a list")
+
+ if self.is_list:
+ try:
+ self.schedule = list(map(int, self.schedule))
+ except (ValueError, TypeError) as err:
+ raise Exception(
+ "All elements in the list should be convertible to int: {}".format(
+ err
+ )
+ )
+
+ def on_epoch_begin(self, epoch, logs=None):
+ self.start_time = tf.timestamp()
+
+ def on_epoch_end(self, epoch: int, logs: dict = None):
+ self.end_time = tf.timestamp()
+
+ # Use Python built in functions to allow using in @tf.function see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ time = float(self.end_time - self.start_time)
+ self.times.append(time)
+
+ # Since we have similar logging code use the fact that if first argument of and is False Python doesn't
+ # execute the second argument
+ if (self.is_int and ((epoch + 1) % self.schedule == 0)) or (
+ self.is_list and ((epoch + 1) in self.schedule)
+ ):
+ data = logs
+ data["epoch"] = epoch
+ data["avg_time"] = round(mean(self.times), self.round_time)
+ self.times = list()
diff --git a/tfwatcher/callbacks/predict.py b/tfwatcher/callbacks/predict.py
new file mode 100644
index 0000000..407b518
--- /dev/null
+++ b/tfwatcher/callbacks/predict.py
@@ -0,0 +1,23 @@
+import tensorflow as tf
+
+
+class PredictEnd(tf.keras.callbacks.Callback):
+ def __init__(self, round_time: int = 2):
+ super(PredictEnd, self).__init__()
+ self.round_time = round_time
+ self.start_time = None
+ self.end_time = None
+ self.time = None
+
+ def on_predict_begin(self, logs=None):
+ self.start_time = tf.timestamp()
+
+ def on_predict_end(self, logs=None):
+ self.end_time = tf.timestamp()
+
+ # Use Python built in functions to allow using in @tf.function see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ self.time = float(self.end_time - self.start_time)
+
+ data = logs
+ data["avg_time"] = self.time
diff --git a/tfwatcher/callbacks/predict_batch.py b/tfwatcher/callbacks/predict_batch.py
new file mode 100644
index 0000000..7f42619
--- /dev/null
+++ b/tfwatcher/callbacks/predict_batch.py
@@ -0,0 +1,54 @@
+from statistics import mean
+from typing import Union
+
+import tensorflow as tf
+
+
+class PredictBatchEnd(tf.keras.callbacks.Callback):
+ def __init__(self, schedule: Union[int, list], round_time: int = 2):
+ super(PredictBatchEnd, self).__init__()
+ self.schedule = schedule
+ self.start_time = None
+ self.end_time = None
+ self.times = list()
+ self.round_time = round_time
+
+ self.is_int = False
+ self.is_list = False
+ if isinstance(self.schedule, int):
+ self.is_int = True
+ elif isinstance(self.schedule, list):
+ self.is_list = True
+ else:
+ raise ValueError("schedule should either be an integer or a list")
+
+ if self.is_list:
+ try:
+ self.schedule = list(map(int, self.schedule))
+ except (ValueError, TypeError) as err:
+ raise Exception(
+ "All elements in the list should be convertible to int: {}".format(
+ err
+ )
+ )
+
+ def on_predict_batch_begin(self, batch, logs=None):
+ self.start_time = tf.timestamp()
+
+ def on_predict_batch_end(self, batch: int, logs: dict = None):
+ self.end_time = tf.timestamp()
+
+ # Use Python built in functions to allow using in @tf.function see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ time = float(self.end_time - self.start_time)
+ self.times.append(time)
+
+ # Since we have similar logging code use the fact that if first argument of and is False Python doesn't
+ # execute the second argument
+ if (self.is_int and ((batch + 1) % self.schedule == 0)) or (
+ self.is_list and ((batch + 1) in self.schedule)
+ ):
+ data = logs
+ data["epoch"] = batch
+ data["avg_time"] = round(mean(self.times), self.round_time)
+ self.times = list()
diff --git a/tfwatcher/callbacks/train_batch.py b/tfwatcher/callbacks/train_batch.py
new file mode 100644
index 0000000..e118b0c
--- /dev/null
+++ b/tfwatcher/callbacks/train_batch.py
@@ -0,0 +1,54 @@
+from statistics import mean
+from typing import Union
+
+import tensorflow as tf
+
+
+class TrainBatchEnd(tf.keras.callbacks.Callback):
+ def __init__(self, schedule: Union[int, list], round_time: int = 2):
+ super(TrainBatchEnd, self).__init__()
+ self.schedule = schedule
+ self.start_time = None
+ self.end_time = None
+ self.times = list()
+ self.round_time = round_time
+
+ self.is_int = False
+ self.is_list = False
+ if isinstance(self.schedule, int):
+ self.is_int = True
+ elif isinstance(self.schedule, list):
+ self.is_list = True
+ else:
+ raise ValueError("schedule should either be an integer or a list")
+
+ if self.is_list:
+ try:
+ self.schedule = list(map(int, self.schedule))
+ except (ValueError, TypeError) as err:
+ raise Exception(
+ "All elements in the list should be convertible to int: {}".format(
+ err
+ )
+ )
+
+ def on_train_batch_begin(self, batch, logs=None):
+ self.start_time = tf.timestamp()
+
+ def on_train_batch_end(self, batch: int, logs: dict = None):
+ self.end_time = tf.timestamp()
+
+ # Use Python built in functions to allow using in @tf.function see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ time = float(self.end_time - self.start_time)
+ self.times.append(time)
+
+ # Since we have similar logging code use the fact that if first argument of and is False Python doesn't
+ # execute the second argument
+ if (self.is_int and ((batch + 1) % self.schedule == 0)) or (
+ self.is_list and ((batch + 1) in self.schedule)
+ ):
+ data = logs
+ data["epoch"] = batch
+ data["avg_time"] = round(mean(self.times), self.round_time)
+ self.times = list()
diff --git a/tfwatcher/firebase_config.py b/tfwatcher/firebase_config.py
new file mode 100644
index 0000000..47b36f2
--- /dev/null
+++ b/tfwatcher/firebase_config.py
@@ -0,0 +1,12 @@
+# It is safe to expose Firebase apiKey publicly, read: https://stackoverflow.com/a/37484053/11878567
+
+
+def get_firebase_config() -> dict:
+ # It is better using a dict literal, read: https://stackoverflow.com/a/6610783/11878567
+ return {
+ "apiKey": "AIzaSyAfCOOzFtKxTa-_pS3lO6URRGR8sVjK7sk",
+ "authDomain": "tf-watcher.firebaseapp.com",
+ "databaseURL": "https://tf-watcher-default-rtdb.firebaseio.com",
+ "projectId": "tf-watcher",
+ "storageBucket": "tf-watcher.appspot.com",
+ }
diff --git a/tfwatcher/firebase_helpers.py b/tfwatcher/firebase_helpers.py
new file mode 100644
index 0000000..9444d04
--- /dev/null
+++ b/tfwatcher/firebase_helpers.py
@@ -0,0 +1,14 @@
+import pyrebase
+
+from .firebase_config import get_firebase_config
+
+
+def write_to_firebase(data: dict, ref_id: str, level: str):
+ # level can be epoch, batch, prediction
+ firebase = pyrebase.initialize_app(get_firebase_config())
+ log_db = firebase.database()
+
+ if level == "prediction":
+ log_db.child(ref_id).child(1).push(data)
+ else:
+ log_db.child(ref_id).child(data[level]).push(data)
| Create helper functions to write data to Firebase
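For illustration, the helper added here could be called like this (a sketch; the `data` payload and `ref_id` are made up, and `level` must be one of `"epoch"`, `"batch"`, or `"prediction"` per the comment in the diff):

```python
from tfwatcher.firebase_helpers import write_to_firebase

# Pushed under <ref_id>/<data["epoch"]> in the realtime database
write_to_firebase(data={"loss": 0.12, "epoch": 3}, ref_id="abc1234", level="epoch")
```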
| 2021-08-03T16:44:35 | 0.0 | [] | [] |
|||
Rishit-dagli/TF-Watcher | Rishit-dagli__TF-Watcher-16 | 040989dae6c7a563ad10d98c7e0f254cfd036d0f | diff --git a/setup.py b/setup.py
index 4c3b041..32e6687 100644
--- a/setup.py
+++ b/setup.py
@@ -1,13 +1,31 @@
+import os.path
+
from setuptools import setup
-exec(open("tfwatcher/version.py").read())
-with open("README.md", "r") as fh:
- long_description = fh.read()
+def read(rel_path: str) -> str:
+ here = os.path.abspath(os.path.dirname(__file__))
+ # intentionally *not* adding an encoding option to open
+ with open(os.path.join(here, rel_path)) as fp:
+ return fp.read()
+
+
+def get_version(rel_path: str) -> str:
+ for line in read(rel_path).splitlines():
+ if line.startswith("__version__"):
+ # __version__ = "0.9"
+ delim = '"' if '"' in line else "'"
+ return line.split(delim)[1]
+ raise RuntimeError("Unable to find version string.")
+
+
+this_directory = os.path.abspath(os.path.dirname(__file__))
+with open(os.path.join(this_directory, "README.md"), encoding="utf-8") as f:
+ long_description = f.read()
setup(
name="tf-watcher",
- version=__version__,
+ version=get_version("tfwatcher/version.py"),
description="Monitor your TensorFlow model training on mobile devices, especially for Google Colab",
packages=["tfwatcher"],
long_description=long_description,
diff --git a/tfwatcher/callbacks/__init__.py b/tfwatcher/callbacks/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tfwatcher/callbacks/epoch.py b/tfwatcher/callbacks/epoch.py
new file mode 100644
index 0000000..8bd8485
--- /dev/null
+++ b/tfwatcher/callbacks/epoch.py
@@ -0,0 +1,55 @@
+from statistics import mean
+from typing import Union
+
+import tensorflow as tf
+
+
+class EpochEnd(tf.keras.callbacks.Callback):
+ def __init__(self, schedule: Union[int, list] = 1, round_time: int = 2):
+ super(EpochEnd, self).__init__()
+ self.schedule = schedule
+ self.start_time = None
+ self.end_time = None
+ self.times = list()
+ self.round_time = round_time
+
+ self.is_int = False
+ self.is_list = False
+ if isinstance(self.schedule, int):
+ self.is_int = True
+ elif isinstance(self.schedule, list):
+ self.is_list = True
+ else:
+ raise ValueError("schedule should either be an integer or a list")
+
+ if self.is_list:
+ try:
+ self.schedule = list(map(int, self.schedule))
+ except (ValueError, TypeError) as err:
+ raise Exception(
+ "All elements in the list should be convertible to int: {}".format(
+ err
+ )
+ )
+
+ def on_epoch_begin(self, epoch, logs=None):
+ self.start_time = tf.timestamp()
+
+ def on_epoch_end(self, epoch: int, logs: dict = None):
+ self.end_time = tf.timestamp()
+
+ # Use Python built in functions to allow using in @tf.function see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ time = float(self.end_time - self.start_time)
+ self.times.append(time)
+
+ # Since we have similar logging code use the fact that if first argument of and is False Python doesn't
+ # execute the second argument
+ if (self.is_int and ((epoch + 1) % self.schedule == 0)) or (
+ self.is_list and ((epoch + 1) in self.schedule)
+ ):
+ data = logs
+ data["epoch"] = epoch
+ data["batch"] = False
+ data["avg_time"] = round(mean(self.times), self.round_time)
+ self.times = list()
diff --git a/tfwatcher/callbacks/predict.py b/tfwatcher/callbacks/predict.py
new file mode 100644
index 0000000..7e68732
--- /dev/null
+++ b/tfwatcher/callbacks/predict.py
@@ -0,0 +1,25 @@
+import tensorflow as tf
+
+
+class PredictEnd(tf.keras.callbacks.Callback):
+ def __init__(self, round_time: int = 2):
+ super(PredictEnd, self).__init__()
+ self.round_time = round_time
+ self.start_time = None
+ self.end_time = None
+ self.time = None
+
+ def on_predict_begin(self, logs=None):
+ self.start_time = tf.timestamp()
+
+ def on_predict_end(self, logs=None):
+ self.end_time = tf.timestamp()
+
+ # Use Python built in functions to allow using in @tf.function see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ self.time = float(self.end_time - self.start_time)
+
+ data = logs
+ data["epoch"] = False
+ data["batch"] = False
+ data["avg_time"] = self.time
diff --git a/tfwatcher/callbacks/predict_batch.py b/tfwatcher/callbacks/predict_batch.py
new file mode 100644
index 0000000..bda7b5e
--- /dev/null
+++ b/tfwatcher/callbacks/predict_batch.py
@@ -0,0 +1,55 @@
+from statistics import mean
+from typing import Union
+
+import tensorflow as tf
+
+
+class PredictBatchEnd(tf.keras.callbacks.Callback):
+ def __init__(self, schedule: Union[int, list] = 1, round_time: int = 2):
+ super(PredictBatchEnd, self).__init__()
+ self.schedule = schedule
+ self.start_time = None
+ self.end_time = None
+ self.times = list()
+ self.round_time = round_time
+
+ self.is_int = False
+ self.is_list = False
+ if isinstance(self.schedule, int):
+ self.is_int = True
+ elif isinstance(self.schedule, list):
+ self.is_list = True
+ else:
+ raise ValueError("schedule should either be an integer or a list")
+
+ if self.is_list:
+ try:
+ self.schedule = list(map(int, self.schedule))
+ except (ValueError, TypeError) as err:
+ raise Exception(
+ "All elements in the list should be convertible to int: {}".format(
+ err
+ )
+ )
+
+ def on_predict_batch_begin(self, batch, logs=None):
+ self.start_time = tf.timestamp()
+
+ def on_predict_batch_end(self, batch: int, logs: dict = None):
+ self.end_time = tf.timestamp()
+
+ # Use Python built in functions to allow using in @tf.function see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ time = float(self.end_time - self.start_time)
+ self.times.append(time)
+
+ # Since we have similar logging code use the fact that if first argument of and is False Python doesn't
+ # execute the second argument
+ if (self.is_int and ((batch + 1) % self.schedule == 0)) or (
+ self.is_list and ((batch + 1) in self.schedule)
+ ):
+ data = logs
+ data["batch"] = batch
+ data["epoch"] = False
+ data["avg_time"] = round(mean(self.times), self.round_time)
+ self.times = list()
diff --git a/tfwatcher/callbacks/train_batch.py b/tfwatcher/callbacks/train_batch.py
new file mode 100644
index 0000000..8fe7bd5
--- /dev/null
+++ b/tfwatcher/callbacks/train_batch.py
@@ -0,0 +1,55 @@
+from statistics import mean
+from typing import Union
+
+import tensorflow as tf
+
+
+class TrainBatchEnd(tf.keras.callbacks.Callback):
+ def __init__(self, schedule: Union[int, list] = 1, round_time: int = 2):
+ super(TrainBatchEnd, self).__init__()
+ self.schedule = schedule
+ self.start_time = None
+ self.end_time = None
+ self.times = list()
+ self.round_time = round_time
+
+ self.is_int = False
+ self.is_list = False
+ if isinstance(self.schedule, int):
+ self.is_int = True
+ elif isinstance(self.schedule, list):
+ self.is_list = True
+ else:
+ raise ValueError("schedule should either be an integer or a list")
+
+ if self.is_list:
+ try:
+ self.schedule = list(map(int, self.schedule))
+ except (ValueError, TypeError) as err:
+ raise Exception(
+ "All elements in the list should be convertible to int: {}".format(
+ err
+ )
+ )
+
+ def on_train_batch_begin(self, batch, logs=None):
+ self.start_time = tf.timestamp()
+
+ def on_train_batch_end(self, batch: int, logs: dict = None):
+ self.end_time = tf.timestamp()
+
+ # Use Python built in functions to allow using in @tf.function see
+ # https://github.com/tensorflow/tensorflow/issues/27491#issuecomment-890887810
+ time = float(self.end_time - self.start_time)
+ self.times.append(time)
+
+ # Since we have similar logging code use the fact that if first argument of and is False Python doesn't
+ # execute the second argument
+ if (self.is_int and ((batch + 1) % self.schedule == 0)) or (
+ self.is_list and ((batch + 1) in self.schedule)
+ ):
+ data = logs
+ data["batch"] = batch
+ data["epoch"] = False
+ data["avg_time"] = round(mean(self.times), self.round_time)
+ self.times = list()
| Extract logs in Callbacks
This Python package would essentially use TensorFlow callbacks to extract training data and send it over to the database. It would be great to start out by extracting the logs and creating these callback classes.
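For illustration, a callback like the `EpochEnd` added in this patch would attach like any other Keras callback (a runnable sketch; the tiny model and random data are placeholders):

```python
import numpy as np
import tensorflow as tf
from tfwatcher.callbacks.epoch import EpochEnd

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
model.compile(optimizer="adam", loss="mse")
x, y = np.random.rand(32, 4), np.random.rand(32, 1)

# Collect logs every 2nd epoch; timings are averaged since the last collection.
model.fit(x, y, epochs=4, callbacks=[EpochEnd(schedule=2)])
```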
| 2021-08-03T06:13:31 | 0.0 | [] | [] |
|||
casact/chainladder-python | casact__chainladder-python-273 | 77700f8fb512a85046a4069c9d65ecd4f09b9fcd | diff --git a/chainladder/core/common.py b/chainladder/core/common.py
index 9a472302..cece2dbf 100644
--- a/chainladder/core/common.py
+++ b/chainladder/core/common.py
@@ -6,39 +6,73 @@
from chainladder.utils.cupy import cp
from chainladder.utils.sparse import sp
from chainladder.utils.dask import dp
+from chainladder.utils.utility_functions import concat
+from chainladder import options
-def _get_full_expectation(cdf_, ultimate_):
+def _get_full_expectation(cdf_, ultimate_, is_cumulative=True):
""" Private method that builds full expectation"""
- from chainladder.utils.utility_functions import concat
full = ultimate_ / cdf_
- return concat((full, ultimate_.copy().rename('development', [9999])), axis=3)
+ if is_cumulative:
+ return concat((full, ultimate_.copy().rename("development", [9999])), axis=3)
-def _get_full_triangle(X, ultimate, expectation=None, n_iters=None):
+ else:
+ tail_ = full.iloc[:, :, :, -1] - ultimate_
+
+ return concat(
+ (full.cum_to_incr(), tail_.copy().rename("development", [9999])), axis=3
+ )
+
+
+def _get_full_triangle(X, ultimate, expectation=None, n_iters=None, is_cumulative=True):
""" Private method that builds full triangle"""
- from chainladder import options
- cdf = X.ldf_.copy()
- xp = cdf.get_array_module()
- cdf = cdf * (ultimate / ultimate)
- cdf = cdf[cdf.valuation<X.valuation_date] * 0 + 1 + cdf[cdf.valuation>=X.valuation_date]
- cdf.values = cdf.values.cumprod(3)
- cdf.valuation_date = pd.to_datetime(options.ULT_VAL)
- cdf = (1 - 1 / cdf)
- cdf.ddims = cdf.ddims + {'Y': 12, 'Q': 3, 'M':1}[cdf.development_grain]
- cdf.ddims[-1] = 9999
- ld = X.latest_diagonal
- if n_iters is not None:
+ # Getting the LDFs and expand for all origins
+ emergence = X.ldf_.copy() * (ultimate / ultimate)
+
+ # Setting LDFs for all of the known diagonals as 1
+ emergence = (
+ emergence[emergence.valuation < X.valuation_date] * 0
+ + 1
+ + emergence[emergence.valuation >= X.valuation_date]
+ )
+
+ emergence.valuation_date = pd.to_datetime(options.ULT_VAL)
+ emergence.values = 1 - 1 / emergence.values.cumprod(axis=3)
+
+ # Shifting the CDFs by development age, and renaming the last column as 9999
+ emergence.ddims = emergence.ddims + \
+ {"Y": 12, "Q": 3, "M": 1}[emergence.development_grain]
+ emergence.ddims[-1] = 9999
+
+ ld = X.incr_to_cum().latest_diagonal
+
+ if n_iters is None:
+ complement = 1 / (1 - emergence)
+ cum_run_off = ld * complement
+
+ else:
cdf_ = X.cdf_
- cdf_.ddims = cdf.ddims
+ cdf_.ddims = emergence.ddims
+
a = (X.latest_diagonal * 0 + expectation) / cdf_ * X.ldf_.values
- complement = xp.nansum(cdf.values[None] ** xp.arange(n_iters)[:, None, None, None, None], 0)
- new_run_off = ((a * (cdf ** n_iters)) + (ld * complement))
+
+ xp = emergence.get_array_module()
+ complement = xp.nansum(
+ emergence.values[None] ** xp.arange(n_iters)[:,
+ None, None, None, None], 0
+ )
+
+ cum_run_off = (a * (emergence ** n_iters)) + (ld * complement)
+
+ cum_run_off = cum_run_off[cum_run_off.valuation > X.valuation_date]
+ cum_run_off.is_cumulative = True
+
+ if is_cumulative:
+ return X + cum_run_off
+
else:
- complement = (1 / (1 - cdf))
- new_run_off = (ld * complement)
- new_run_off = new_run_off[new_run_off.valuation>X.valuation_date]
- return new_run_off + X
+ return (X.incr_to_cum() + cum_run_off).cum_to_incr()
class Common:
@@ -63,10 +97,11 @@ def ibnr_(self):
if not hasattr(self, "ultimate_"):
x = self.__class__.__name__
raise AttributeError("'" + x + "' object has no attribute 'ibnr_'")
- if hasattr(self, 'X_'):
+ if hasattr(self, "X_"):
ld = self.latest_diagonal
else:
- ld = self.latest_diagonal if self.is_cumulative else self.sum(axis=3)
+ ld = self.latest_diagonal if self.is_cumulative else self.sum(
+ axis=3)
ibnr = self.ultimate_ - ld
ibnr.vdims = self.ultimate_.vdims
return ibnr
@@ -74,11 +109,13 @@ def ibnr_(self):
@property
def full_expectation_(self):
if not hasattr(self, "ultimate_"):
- x = self.__class__.__name__
raise AttributeError(
- "'" + x + "' object has no attribute 'full_expectation_'"
+ "'"
+ + self.__class__.__name__
+ + "' object has no attribute 'full_expectation_'"
)
- return _get_full_expectation(self.cdf_, self.ultimate_)
+
+ return _get_full_expectation(self.cdf_, self.ultimate_, self.X_.is_cumulative)
@property
def full_triangle_(self):
@@ -86,17 +123,29 @@ def full_triangle_(self):
raise AttributeError(
"'"
+ self.__class__.__name__
- + "'"
- + " object has no attribute 'full_triangle_'"
+ + "' object has no attribute 'full_triangle_'"
)
+
if hasattr(self, "X_"):
X = self.X_
else:
X = self
- if hasattr(self, 'n_iters'):
- return _get_full_triangle(X, self.ultimate_, self.expectation_, self.n_iters)
+
+ if hasattr(self, "n_iters"):
+ return _get_full_triangle(
+ X, self.ultimate_, self.expectation_, self.n_iters, X.is_cumulative
+ )
else:
- return _get_full_triangle(X, self.ultimate_)
+ return _get_full_triangle(X, self.ultimate_, None, None, X.is_cumulative)
+
+ # full_expectation = _get_full_expectation(
+ # self.cdf_, self.ultimate_, self.X_.is_cumulative
+ # )
+ # frame = self.X_ + full_expectation * 0
+ # xp = self.X_.get_array_module()
+ # fill = (xp.nan_to_num(frame.values) == 0) * (self.X_ * 0 + full_expectation)
+ #
+ # return self.X_ + fill
def pipe(self, func, *args, **kwargs):
return func(self, *args, **kwargs)
@@ -126,7 +175,7 @@ def set_backend(self, backend, inplace=False, deep=False, **kwargs):
if inplace:
# Coming from dask - compute and then recall this method
# going to dask -
- if old_backend == 'dask' and backend != 'dask':
+ if old_backend == "dask" and backend != "dask":
self = self.compute()
old_backend = self.array_backend
if backend in ["numpy", "sparse", "cupy", "dask"]:
@@ -148,7 +197,7 @@ def set_backend(self, backend, inplace=False, deep=False, **kwargs):
"numpy": lambda x: dp.from_array(x, **kwargs),
"cupy": lambda x: dp.from_array(x, **kwargs),
"sparse": lambda x: dp.from_array(x, **kwargs),
- }
+ },
}
if hasattr(self, "values"):
self.values = lookup[backend].get(old_backend, lambda x: x)(
diff --git a/chainladder/utils/utility_functions.py b/chainladder/utils/utility_functions.py
index 7371ba77..dcf4715a 100644
--- a/chainladder/utils/utility_functions.py
+++ b/chainladder/utils/utility_functions.py
@@ -13,7 +13,8 @@
from sklearn.base import BaseEstimator, TransformerMixin
from typing import Iterable, Union
-def load_sample(key : str, *args, **kwargs):
+
+def load_sample(key: str, *args, **kwargs):
""" Function to load datasets included in the chainladder package.
Parameters
@@ -68,6 +69,7 @@ def load_sample(key : str, *args, **kwargs):
development = "PaymentDate"
cumulative = False
df = pd.read_csv(os.path.join(path, "data", key.lower() + ".csv"))
+
return Triangle(
df,
origin=origin,
@@ -80,7 +82,6 @@ def load_sample(key : str, *args, **kwargs):
)
-
def read_pickle(path):
with open(path, "rb") as pkl:
return dill.load(pkl)
@@ -111,8 +112,7 @@ def read_json(json_str, array_backend=None):
j = json.loads(json_str)
y = pd.read_json(j["data"], orient="split", date_unit="ns")
y["origin"] = pd.to_datetime(y["origin"])
- y.columns= [
- c if c != 'valuation' else 'development' for c in y.columns]
+ y.columns = [c if c != "valuation" else "development" for c in y.columns]
y["development"] = pd.to_datetime(y["development"])
index = list(y.columns[: list(y.columns).index("origin")])
columns = list(y.columns[list(y.columns).index("development") + 1 :])
@@ -133,8 +133,8 @@ def read_json(json_str, array_backend=None):
if "sub_tris" in json_dict.keys():
for k, v in json_dict["sub_tris"].items():
setattr(tri, k, read_json(v, array_backend))
- setattr(getattr(tri, k), 'origin_grain', tri.origin_grain)
- setattr(getattr(tri, k), 'development_grain', tri.development_grain)
+ setattr(getattr(tri, k), "origin_grain", tri.origin_grain)
+ setattr(getattr(tri, k), "development_grain", tri.development_grain)
if "dfs" in json_dict.keys():
for k, v in json_dict["dfs"].items():
df = pd.read_json(v)
@@ -183,16 +183,18 @@ def parallelogram_olf(
def set_common_backend(objs):
from chainladder import options
+
priority = options.ARRAY_PRIORITY
backend = priority[np.min([priority.index(i.array_backend) for i in objs])]
return [i.set_backend(backend) for i in objs]
def concat(
- objs : Iterable,
- axis : Union[int, str],
- ignore_index: bool = False,
- sort: bool = False):
+ objs: Iterable,
+ axis: Union[int, str],
+ ignore_index: bool = False,
+ sort: bool = False,
+):
""" Concatenate Triangle objects along a particular axis.
Parameters
@@ -237,7 +239,7 @@ def concat(
objs = set_common_backend(objs)
mapper = {0: "kdims", 1: "vdims", 2: "odims", 3: "ddims"}
for k in mapper.keys():
- if k != axis and k !=1: # All non-concat axes must be identical
+ if k != axis and k != 1: # All non-concat axes must be identical
a = np.array([getattr(obj, mapper[k]) for obj in objs])
assert np.all(a == a[0])
else: # All elements of concat axis must be unique
@@ -274,7 +276,9 @@ def num_to_value(arr, value):
if arr.fill_value == 0 or sp.isnan(arr.fill_value):
arr.coords = arr.coords[:, arr.data != 0]
arr.data = arr.data[arr.data != 0]
- arr = sp(coords=arr.coords, data=arr.data, fill_value=sp.nan, shape=arr.shape)
+ arr = sp(
+ coords=arr.coords, data=arr.data, fill_value=sp.nan, shape=arr.shape
+ )
else:
arr = sp(num_to_nan(np.nan_to_num(arr.todense())), fill_value=value)
else:
@@ -285,6 +289,7 @@ def num_to_value(arr, value):
def num_to_nan(arr):
""" Function that turns all zeros to nan values in an array """
from chainladder import Triangle
+
xp = Triangle.get_array_module(None, arr=arr)
return num_to_value(arr, xp.nan)
@@ -296,6 +301,7 @@ def minimum(x1, x2):
def maximum(x1, x2):
return x1.maximum(x2)
+
class PatsyFormula(BaseEstimator, TransformerMixin):
""" A sklearn-style Transformer for patsy formulas.
@@ -315,11 +321,13 @@ class PatsyFormula(BaseEstimator, TransformerMixin):
The patsy instructions for generating the design_matrix, X.
"""
+
def __init__(self, formula=None):
self.formula = formula
def _check_X(self, X):
from chainladder.core import Triangle
+
if isinstance(X, Triangle):
raise AttributeError("X must be a pandas dataframe, not a Triangle")
@@ -333,7 +341,7 @@ def transform(self, X):
return dmatrix(self.design_info_, X)
-def model_diagnostics(model, name=None, groupby=None):
+def model_diagnostics(model, name=None, groupby=None):
""" A helper function that summarizes various vectors of an
IBNR model as columns of a Triangle
@@ -360,40 +368,50 @@ def model_diagnostics(model, name=None, groupby=None):
if groupby is not None:
obj.X_ = obj.X_.groupby(groupby).sum().cum_to_incr()
obj.ultimate_ = obj.ultimate_.groupby(groupby).sum()
- if hasattr(obj, 'expectation_'):
+ if hasattr(obj, "expectation_"):
obj.expectation_ = obj.expectation_.groupby(groupby).sum()
else:
obj.X_ = obj.X_.incr_to_cum()
val = obj.X_.valuation
- latest = obj.X_.sum('development')
+ latest = obj.X_.sum("development")
run_off = obj.full_expectation_.iloc[..., :-1].dev_to_val().cum_to_incr()
- run_off = run_off[run_off.development>str(obj.X_.valuation_date)]
- run_off = run_off.iloc[..., :{'M': 12, 'Q': 4, 'Y': 1}[obj.X_.development_grain]]
+ run_off = run_off[run_off.development > str(obj.X_.valuation_date)]
+ run_off = run_off.iloc[..., : {"M": 12, "Q": 4, "Y": 1}[obj.X_.development_grain]]
triangles = []
for col in obj.ultimate_.columns:
idx = latest.index
- idx['Measure'] = col
- idx['Model'] = obj.__class__.__name__ if name is None else name
+ idx["Measure"] = col
+ idx["Model"] = obj.__class__.__name__ if name is None else name
idx = idx[list(idx.columns[-2:]) + list(idx.columns[:-2])]
- out = latest[col].rename('columns', ['Latest'])
- if obj.X_.development_grain in ['M']:
- out['Month Incremental'] = obj.X_[col][val==obj.X_.valuation_date].sum('development')
- if obj.X_.development_grain in ['M', 'Q']:
- out['Quarter Incremental'] = (
- obj.X_ -
- obj.X_[val<pd.Period(out.valuation_date, freq='Q').to_timestamp(how='s').strftime('%Y-%m')]
- ).sum('development')[col]
- out['Year Incremental'] = (
- obj.X_ -
- obj.X_[val<str(obj.X_.valuation_date.year)]
- ).sum('development')[col]
- out['IBNR'] = obj.ibnr_[col]
- out['Ultimate'] = obj.ultimate_[col]
+ out = latest[col].rename("columns", ["Latest"])
+ if obj.X_.development_grain in ["M"]:
+ out["Month Incremental"] = obj.X_[col][val == obj.X_.valuation_date].sum(
+ "development"
+ )
+ if obj.X_.development_grain in ["M", "Q"]:
+ out["Quarter Incremental"] = (
+ obj.X_
+ - obj.X_[
+ val
+ < pd.Period(out.valuation_date, freq="Q")
+ .to_timestamp(how="s")
+ .strftime("%Y-%m")
+ ]
+ ).sum("development")[col]
+ out["Year Incremental"] = (
+ obj.X_ - obj.X_[val < str(obj.X_.valuation_date.year)]
+ ).sum("development")[col]
+ out["IBNR"] = obj.ibnr_[col]
+ out["Ultimate"] = obj.ultimate_[col]
for i in range(run_off.shape[-1]):
- out['Run Off ' + str(i+1)] = run_off[col].iloc[..., i]
- if hasattr(obj, 'expectation_'):
- out['Apriori'] = obj.expectation_ if obj.expectation_.shape[1] == 1 else obj.expectation_[col]
+ out["Run Off " + str(i + 1)] = run_off[col].iloc[..., i]
+ if hasattr(obj, "expectation_"):
+ out["Apriori"] = (
+ obj.expectation_
+ if obj.expectation_.shape[1] == 1
+ else obj.expectation_[col]
+ )
out.index = idx
triangles.append(out)
- return concat(triangles,0)
+ return concat(triangles, 0)
| Bug in .full_triangle_
I think there's a bug in `.full_triangle_`
```python
prism = cl.load_sample("prism")
bf_model = cl.BornhuetterFerguson(apriori=0.90)
bf_fit = bf_model.fit(
    X=prism["Incurred"].sum(), sample_weight=prism["Incurred"].latest_diagonal.sum()
)
```
My understanding is that `.full_expectation_` is the expected triangle, backfilling all known values. This looks good.
```python
bf_fit.ultimate_ - bf_fit.full_expectation_.iloc[:, :, :, -1]
```
However, when calling `.full_triangle_`, which is the expected triangle without backfilling the known values (the upper-left triangle), I am getting non-zero results.
```python
bf_fit.ultimate_ - bf_fit.full_triangle_.iloc[:, :, :, -1]
```
| Interesting. `full_triangle_` is incremental, likely because the underlying triangle is incremental.
```python
print(bf_fit.full_expectation_.is_cumulative)
print(bf_fit.full_triangle_.is_cumulative)
print(bf_fit.ultimate_ - bf_fit.full_triangle_.incr_to_cum().iloc[..., -1])
```
We should probably make `full_expectation_` also behave as the underlying triangle in an incremental fashion. I don't think we should state them as cumulative (unless the user does so with their original triangle). It is better to assume the user is using incremental triangles to save on RAM. Incremental is my preferred format when working with large triangles.
Instead of making it default to incremental, don't you think it's better that the triangles match? If the underlying triangle is cumulative, then `full_expectation_` and `full_triangle_` should both be cumulative. I think this is more intuitive. What do you think?
Sorry, yes, that's what I mean. Whichever `is_cumulative` status the main triangle has should be inherited by both `full_expectation_` and `full_triangle_`.
In the above example, the main triangle is incremental, so I was simply suggesting the `full_triangle_` is fine, but `full_expectation_` is not.
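Concretely, the behavior we're agreeing on can be stated as an invariant (a sketch, reusing `bf_fit` from the original report):

```python
assert bf_fit.full_triangle_.is_cumulative == bf_fit.X_.is_cumulative
assert bf_fit.full_expectation_.is_cumulative == bf_fit.X_.is_cumulative
```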
Ok cool, glad we are on the same page now. Let me try to fix this one. | 2022-03-14T05:27:13 | 0.0 | [] | [] |
||
casact/chainladder-python | casact__chainladder-python-139 | 5d0104fd0d23432a20e69f8a5a67320ec9e5ddaf | diff --git a/README.rst b/README.rst
index 25e43570..cf2f4575 100644
--- a/README.rst
+++ b/README.rst
@@ -115,8 +115,8 @@ To instal using conda: ``conda install -c conda-forge chainladder``
Alternatively for pre-release functionality, install directly from github:
``pip install git+https://github.com/casact/chainladder-python/``
-Note: This package requires Python>=3.5 to Python<3.9, numpy 1.12.0 and
-later, pandas 0.23.0 and later, scikit-learn 0.18.0 and later.
+Note: This package requires Python>=3.5 pandas 0.23.0 and later,
+sparse 0.9 and later, scikit-learn 0.23.0 and later.
Questions?
----------
diff --git a/chainladder/core/base.py b/chainladder/core/base.py
index a6fa2e12..ee3c969f 100644
--- a/chainladder/core/base.py
+++ b/chainladder/core/base.py
@@ -173,7 +173,7 @@ def __init__(
self.array_backend = "sparse"
self.values = num_to_nan(
sp(
- coords.T,
+ coords.T.astype('int64'),
amts,
prune=True,
has_duplicates=False,
diff --git a/chainladder/utils/sparse.py b/chainladder/utils/sparse.py
index 0a08d32b..95536eeb 100644
--- a/chainladder/utils/sparse.py
+++ b/chainladder/utils/sparse.py
@@ -32,9 +32,7 @@ def nan_to_num(a):
if hasattr(a, "fill_value"):
a = a.copy()
a.data[np.isnan(a.data)] = 0.0
- if a.fill_value != 0.0:
- a.fill_value = 0.0
- return sp(a)
+ return sp(coords=a.coords, data=a.data, fill_value=0.0, shape=a.shape)
def ones(*args, **kwargs):
@@ -56,13 +54,10 @@ def nanmedian(a, axis=None, keepdims=None, *args, **kwargs):
def nanmean(a, axis=None, keepdims=None, *args, **kwargs):
n = sp.nansum(a, axis=axis, keepdims=keepdims)
d = sp.nansum(sp.nan_to_num(a) != 0, axis=axis, keepdims=keepdims).astype(n.dtype)
- n.fill_value = np.nan
- d.fill_value = np.nan
- n = sp(n)
- d = sp(d)
+ n = sp(data=n.data, coords=n.coords, fill_value=np.nan, shape=n.shape)
+ d = sp(data=d.data, coords=d.coords, fill_value=np.nan, shape=d.shape)
out = n / d
- out.fill_value = 0
- return sp(out)
+ return sp(data=out.data, coords=out.coords, fill_value=0, shape=out.shape)
def array(a, *args, **kwargs):
diff --git a/chainladder/utils/utility_functions.py b/chainladder/utils/utility_functions.py
index 5b26660c..12bcb07b 100644
--- a/chainladder/utils/utility_functions.py
+++ b/chainladder/utils/utility_functions.py
@@ -255,10 +255,9 @@ def num_to_nan(arr):
backend = arr.__class__.__module__.split(".")[0]
if backend == "sparse":
if arr.fill_value == 0 or sp.isnan(arr.fill_value):
- arr.fill_value = sp.nan
arr.coords = arr.coords[:, arr.data != 0]
arr.data = arr.data[arr.data != 0]
- arr = sp(arr)
+ arr = sp(coords=arr.coords, data=arr.data, fill_value=sp.nan, shape=arr.shape)
else:
arr = sp(num_to_nan(np.nan_to_num(arr.todense())), fill_value=sp.nan)
else:
diff --git a/chainladder/utils/weighted_regression.py b/chainladder/utils/weighted_regression.py
index 2cd6f780..55eea84c 100644
--- a/chainladder/utils/weighted_regression.py
+++ b/chainladder/utils/weighted_regression.py
@@ -52,8 +52,8 @@ def _fit_OLS(self):
y[w == 0] = xp.nan
else:
w2 = w.copy()
- w2.fill_value = sp.nan
- x, y = x * sp(w2), y * sp(w2)
+ w2 = sp(data=w2.data, coords=w2.coords, fill_value=sp.nan, shape=w2.shape)
+ x, y = x * w2, y * w2
slope = num_to_nan(
xp.nansum(w * x * y, axis) - xp.nansum(x * w, axis) * xp.nanmean(y, axis)
) / num_to_nan(
diff --git a/requirements.txt b/requirements.txt
index c71fbe4d..844ca796 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,6 @@
-pandas
-scikit-learn
-sparse
+pandas>=0.23
+scikit-learn>=0.23
+sparse>=0.9
dill
xlcompose
patsy
| Incompatible with sparse 0.12.0
Running
```
cl.load_sample('clrd')
```
yields "IndexError: arrays used as indices must be of integer (or boolean) type"
Fixed by downgrading to sparse==0.9.1.
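For what it's worth, the eventual fix amounts to a defensive dtype cast before the coordinates reach `sparse`; a rough sketch of the pattern (array contents are illustrative, and the `int64` assumption is inferred from the `astype('int64')` in the patch above):

```python
import numpy as np
import sparse

coords = np.array([[0, 0], [0, 1]])  # one column of indices per stored entry
data = np.array([1.0, 2.0])

# Newer sparse releases are stricter about index dtypes, so cast explicitly:
s = sparse.COO(coords.astype('int64'), data, shape=(1, 2))
```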
Installation issues with Python 3.9
There appears to be an issue installing on Python 3.9 due to ```numba``` installation issues, as detailed in the following issue:
```
https://github.com/numba/numba/issues/6345#issue-718596580
```
Is there a workaround available, please?
| This, along with #122, can be covered by a better CI pipeline. I think the [xarray](https://github.com/pydata/xarray) project has a similar but far more robust setup that we can emulate. They are using GitHub Actions, pytest, and conda environments - same as chainladder.
Hi @aegerton, thanks for reporting; I wasn't aware of this. `numba` is a hard requirement by way of the `sparse` package. `chainladder` doesn't have too many dependencies, but the ones it has are major ones.
- `pandas`
- `scikit-learn`
- `sparse`
- `dill`
- `xlcompose` (deprecated)
Most of these packages have professional development teams supporting them, numba included. So if they don't have a workaround, I doubt I'd be skilled enough to find one, specifically a workaround for using chainladder on Python>=3.9. `conda` environments help: if I create one with `chainladder` as the only requirement, it gives me `python==3.8.5`, if you're okay with a downgrade.
#121 is another recent chainladder issue due to upgrading to the latest in other dependencies. I think what I need to do with this issue is to create a "bleeding edge" CI/CD pipeline that tests against the latest releases of all dependencies to at least be aware of these conflicts when they arise.
Oh, it's already in, which means it should be in the next release.
_Originally posted by @hameerabbasi in https://github.com/pydata/sparse/issues/426#issuecomment-762042578_
It looks like this should resolve once the `sparse` package does its next release. It looks like their workaround is:
Sparse now installs for python 3.9 if the exact following version is used as a dependency:
`numba==0.53.0rc1.post1`. But a regular `pip install sparse` does not work unless `numba==0.53.0rc1.post1` is already installed.
_Originally posted by @JackWetherell in https://github.com/pydata/sparse/issues/426#issuecomment-777428853_
Thanks @jbogaardt for the usual prompt and helpful response. It is greatly appreciated.
I'll keep an eye open for the ```sparse``` release and try again then. In the meantime, I have it working with ```Python 3.8.6``` after downgrading.
Is it worth adding a note about dependency issues with ```3.9``` to the ```README```, please?
> Is it worth adding a note about dependency issues with `3.9` to the `README`, please?
Yes, of course.
> It looks like this should resolve once the `sparse` package does its next release. It looks like their workaround is:
Small correction, `sparse` doesn't need a new release, `numba` does. `sparse` should magically "just work".
Thank you @hameerabbasi. You do fantastic work on `sparse`!
I will leave a comment here once the final numba/llvmlite release is published, and the suggested workaround from my sparse issue is no longer needed.
> > Is it worth adding a note about dependency issues with `3.9` to the `README`, please?
>
> Yes, of course.
Thanks again @jbogaardt. Saw the change. Congratulations on the release of ```0.8.0```
The numba/llvmlite release is published. The above workaround is no longer needed. sparse now installs with python 3.9.
The current versions of the relevant dependencies are:
```
sparse==0.11.2
numba==0.53.0
llvmlite==0.36.0
```
Thank you @JackWetherell for letting us know. I'll give it a try. | 2021-03-26T02:42:16 | 0.0 | [] | [] |
||
leonardt/hwtypes | leonardt__hwtypes-148 | f15b8a6e37d0e5c51c94a15464c714c1b1e6b43d | diff --git a/hwtypes/__init__.py b/hwtypes/__init__.py
index 6858443..ea48e32 100644
--- a/hwtypes/__init__.py
+++ b/hwtypes/__init__.py
@@ -7,3 +7,4 @@
from .fp_vector import *
from .smt_fp_vector import *
from .modifiers import *
+from .smt_int import *
diff --git a/hwtypes/smt_bit_vector.py b/hwtypes/smt_bit_vector.py
index 75d515c..0aa8c4a 100644
--- a/hwtypes/smt_bit_vector.py
+++ b/hwtypes/smt_bit_vector.py
@@ -643,11 +643,15 @@ def substitute(self, *subs : tp.List[tp.Tuple["SBV", "SBV"]]):
# def __int__(self):
# return self.as_uint()
#
-# def as_uint(self):
-# return self._value.bv_unsigned_value()
-#
-# def as_sint(self):
-# return self._value.bv_signed_value()
+ def as_uint(self):
+ #Avoids circular import
+ from . import SMTInt
+ return SMTInt(self)
+
+ def as_sint(self):
+ #Avoids circular import
+ from . import SMTInt
+ return SMTInt(self[:-1]) - SMTInt(self & (1<<(self.size-1)))
#
# @classmethod
# def random(cls, width):
diff --git a/hwtypes/smt_int.py b/hwtypes/smt_int.py
new file mode 100644
index 0000000..e4eeb16
--- /dev/null
+++ b/hwtypes/smt_int.py
@@ -0,0 +1,148 @@
+import itertools as it
+import functools as ft
+from .smt_bit_vector import SMTBit, SMTBitVector
+
+import pysmt
+import pysmt.shortcuts as smt
+from pysmt.typing import INT
+
+from collections import defaultdict
+import re
+import warnings
+import weakref
+
+__ALL__ = ['SMTInt']
+
+_var_counters = defaultdict(it.count)
+_name_table = weakref.WeakValueDictionary()
+
+def _gen_name(prefix='V'):
+ name = f'{prefix}_{next(_var_counters[prefix])}'
+ while name in _name_table:
+ name = f'{prefix}_{next(_var_counters[prefix])}'
+ return name
+
+_name_re = re.compile(r'V_\d+')
+
+class _SMYBOLIC:
+ def __repr__(self):
+ return 'SYMBOLIC'
+
+class _AUTOMATIC:
+ def __repr__(self):
+ return 'AUTOMATIC'
+
+SMYBOLIC = _SMYBOLIC()
+AUTOMATIC = _AUTOMATIC()
+
+def int_cast(fn):
+ @ft.wraps(fn)
+ def wrapped(self, other):
+ if isinstance(other, SMTInt):
+ return fn(self, other)
+ else:
+ try:
+ other = SMTInt(other)
+ except TypeError:
+ return NotImplemented
+ return fn(self, other)
+ return wrapped
+
+class SMTInt:
+ def __init__(self, value=SMYBOLIC, *, name=AUTOMATIC, prefix=AUTOMATIC):
+ if (name is not AUTOMATIC or prefix is not AUTOMATIC) and value is not SMYBOLIC:
+ raise TypeError('Can only name symbolic variables')
+ elif name is not AUTOMATIC and prefix is not AUTOMATIC:
+ raise ValueError('Can only set either name or prefix not both')
+ elif name is not AUTOMATIC:
+ if not isinstance(name, str):
+ raise TypeError('Name must be string')
+ elif name in _name_table:
+ raise ValueError(f'Name {name} already in use')
+ elif _name_re.fullmatch(name):
+ warnings.warn('Name looks like an auto generated name, this might break things')
+ _name_table[name] = self
+ elif prefix is not AUTOMATIC:
+ name = _gen_name(prefix)
+ _name_table[name] = self
+ elif name is AUTOMATIC and value is SMYBOLIC:
+ name = _gen_name()
+ _name_table[name] = self
+
+ if value is SMYBOLIC:
+ self._value = smt.Symbol(name, INT)
+ elif isinstance(value, pysmt.fnode.FNode):
+ if value.get_type().is_int_type():
+ self._value = value
+ elif value.get_type().is_bv_type():
+ self._value = smt.BVToNatural(value)
+ else:
+ raise TypeError(f'Expected int type not {value.get_type()}')
+ elif isinstance(value, SMTInt):
+ self._value = value._value
+ elif isinstance(value, SMTBitVector):
+ self._value = smt.BVToNatural(value.value)
+ elif isinstance(value, bool):
+ self._value = smt.Int(int(value))
+ elif isinstance(value, int):
+ self._value = smt.Int(value)
+ elif hasattr(value, '__int__'):
+ self._value = smt.Int(int(value))
+ else:
+ raise TypeError("Can't coerce {} to Int".format(type(value)))
+
+ self._name = name
+ self._value = smt.simplify(self._value)
+
+ def __repr__(self):
+ if self._name is not AUTOMATIC:
+ return f'{type(self)}({self._name})'
+ else:
+ return f'{type(self)}({self._value})'
+
+ @property
+ def value(self):
+ return self._value
+
+ def __neg__(self):
+ return SMTInt(0) - self
+
+ @int_cast
+ def __sub__(self, other: 'SMTInt') -> 'SMTInt':
+ return SMTInt(self.value - other.value)
+
+ @int_cast
+ def __add__(self, other: 'SMTInt') -> 'SMTInt':
+ return SMTInt(self.value + other.value)
+
+ @int_cast
+ def __mul__(self, other: 'SMTInt') -> 'SMTInt':
+ return SMTInt(self.value * other.value)
+
+ @int_cast
+ def __floordiv__(self, other: 'SMTInt') -> 'SMTInt':
+ return SMTInt(smt.Div(self.value, other.value))
+
+ @int_cast
+ def __ge__(self, other: 'SMTInt') -> SMTBit:
+ return SMTBit(self.value >= other.value)
+
+ @int_cast
+ def __gt__(self, other: 'SMTInt') -> SMTBit:
+ return SMTBit(self.value > other.value)
+
+ @int_cast
+ def __le__(self, other: 'SMTInt') -> SMTBit:
+ return SMTBit(self.value <= other.value)
+
+ @int_cast
+ def __lt__(self, other: 'SMTInt') -> SMTBit:
+ return SMTBit(self.value < other.value)
+
+ @int_cast
+ def __eq__(self, other: 'SMTInt') -> SMTBit:
+ return SMTBit(smt.Equals(self.value, other.value))
+
+ @int_cast
+ def __ne__(self, other: 'SMTInt') -> SMTBit:
+ return SMTBit(smt.NotEquals(self.value, other.value))
| SMT Int class [Enhancement]
For building up constraints for synthesis problems, it is useful to have access to an Int class that plays nicely with SMTBit.
Current implementation in branch: smt_int
Semantics defined in: https://smtlib.cs.uiowa.edu/theories-Ints.shtml
Would be nice to include methods to translate between SMTInt and SMTBitVector.
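For illustration, a short sketch of how the pieces compose (constructor and conversion names are taken from the diff; the `name` keyword on `SMTBitVector` is assumed to mirror `SMTInt`):

```python
from hwtypes import SMTInt, SMTBitVector

x = SMTInt(name="x")  # fresh symbolic Int
three = SMTInt(3)     # concrete Int

c1 = (x + three) * x >= SMTInt(0)  # arithmetic -> SMTInt, comparisons -> SMTBit

bv = SMTBitVector[8](name="bv")
c2 = bv.as_uint() == x             # BV -> Int translation via as_uint()
```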
| 2022-09-22T18:47:29 | 0.0 | [] | [] |
|||
AlexHill/django-relativity | AlexHill__django-relativity-20 | e9c40feaac544049510139a3c87dfc29384a65e4 | diff --git a/relativity/fields.py b/relativity/fields.py
index b045964..43603cd 100644
--- a/relativity/fields.py
+++ b/relativity/fields.py
@@ -242,9 +242,11 @@ def get_extra_restriction(self, where_class, alias, related_alias):
def _resolve_expression_local_references(cls, expr, obj):
if isinstance(expr, L):
return expr._relativity_resolve_for_instance(obj)
- else:
+ elif hasattr(expr, "get_source_expressions"):
for source_expr in expr.get_source_expressions():
cls._resolve_expression_local_references(source_expr, obj)
+ else:
+ return expr
return expr
def get_forward_related_filter(self, obj):
| Passing boolean value in predicate raises AttributeError
I have the following `Relationship` defined in my project:
```
active_offer = Relationship(
    to="pricing.Offer",
    predicate=Q(
        deleted=False, product_id=L("product_id"), offer_target=L("offer_target")
    ),
    multiple=False,
)
```
After upgrading to 0.2.4 from 0.2.1, this definition raises an AttributeError. Wrapping False in a call to `django.db.models.Value` seems to fix it.
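For anyone else hitting this, the workaround looks like the following (a sketch of the same field with the wrap applied; imports shown for completeness):

```python
from django.db.models import Q, Value
from relativity.fields import L, Relationship

active_offer = Relationship(
    to="pricing.Offer",
    predicate=Q(
        deleted=Value(False),  # wrapping the bare bool avoids the AttributeError
        product_id=L("product_id"),
        offer_target=L("offer_target"),
    ),
    multiple=False,
)
```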
Attached is a traceback of the error I encountered
[relativity_traceback.txt](https://github.com/AlexHill/django-relativity/files/7288041/relativity_traceback.txt)
| Thanks for the report.
I expect this will happen with other primitives as well. It should be possible to wrap them in Value automatically where necessary - I'll have a look at this tonight.
Alex
| 2021-10-06T02:20:01 | 0.0 | [] | [] |
||
AlexHill/django-relativity | AlexHill__django-relativity-14 | bfa6d2e0ca3dc7e1194d365c7a98b936eb80257a | diff --git a/CHANGELOG.md b/CHANGELOG.md
index a09d577..ca73e57 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
# django-relativity changelog
+## unreleased
+- Fixed crash when reverse_multiple=False
+
## 0.2.1 - 2020-02-10
- Restored Q and F from relativity.compat as aliases to django.db.models, with a deprecation warning on import
- Added monkeypatch for migrations where a model with a Relationship is the target of a ForeignKey
diff --git a/relativity/fields.py b/relativity/fields.py
index 7fcb0f8..d274cc8 100644
--- a/relativity/fields.py
+++ b/relativity/fields.py
@@ -77,11 +77,9 @@ def create_relationship_many_manager(base_manager, rel):
class RelationshipManager(base_manager):
def __init__(self, instance):
super(RelationshipManager, self).__init__()
-
self.instance = instance
self.model = rel.related_model
self.field = rel.field
-
self.core_filters = {self.field.name: instance}
def __call__(self, **kwargs):
@@ -262,7 +260,7 @@ def get_forward_related_filter(self, obj):
# noinspection PyProtectedMember
-class ManyToManyRelationshipDescriptor(ReverseManyToOneDescriptor):
+class MultipleRelationshipDescriptor(ReverseManyToOneDescriptor):
@cached_property
def related_manager_cls(self):
related_model = self.rel.related_model
@@ -272,6 +270,10 @@ def related_manager_cls(self):
return manager
+class SingleRelationshipDescriptor(ReverseOneToOneDescriptor):
+ pass
+
+
# noinspection PyProtectedMember
class Relationship(models.ForeignObject):
"""
@@ -290,18 +292,16 @@ class Relationship(models.ForeignObject):
def __init__(self, to, predicate, **kwargs):
self.multiple = kwargs.pop("multiple", True)
self.reverse_multiple = kwargs.pop("reverse_multiple", True)
- if self.multiple and self.reverse_multiple:
- self.accessor_class = ManyToManyRelationshipDescriptor
- self.related_accessor_class = ManyToManyRelationshipDescriptor
+
+ if self.multiple:
+ self.accessor_class = MultipleRelationshipDescriptor
else:
- if self.multiple:
- self.accessor_class = ManyToManyRelationshipDescriptor
- else:
- self.accessor_class = ReverseOneToOneDescriptor
- if self.reverse_multiple:
- self.related_accessor_class = ManyToManyRelationshipDescriptor
- else:
- self.related_accessor_class = ReverseOneToOneDescriptor
+ self.accessor_class = SingleRelationshipDescriptor
+
+ if self.reverse_multiple:
+ self.related_accessor_class = MultipleRelationshipDescriptor
+ else:
+ self.related_accessor_class = SingleRelationshipDescriptor
kwargs.setdefault("on_delete", models.DO_NOTHING)
kwargs.setdefault("from_fields", [])
@@ -343,21 +343,7 @@ def get_forward_related_filter(self, obj):
Return the filter arguments which select the instances of self.model
that are related to obj.
"""
- q = self.field.predicate
- q = q() if callable(q) else q
-
- # If this is a simple restriction that can be expressed as an AND of
- # two basic field lookups, we can return a dictionary of filters...
- if q.connector == Q.AND and all(type(c) == tuple for c in q.children):
- return {
- lookup: getattr(obj, v.name) if isinstance(v, L) else v
- for lookup, v in q.children
- }
-
- # ...otherwise, we return this lookup and let the compiler figure it
- # out. This will involve a join where the above method might not.
- else:
- return {self.name: obj}
+ return {self.name: obj}
def resolve_related_fields(self):
return []
| Exception when using reverse_multiple=False
I have this model:
```
sep = Value('/')

class TreeNode(Model):
    name = CharField(max_length=30)
    parent_id = CharField(max_length=512, db_index=True, blank=True, default='')
    children = Relationship('self',
                            predicate=Q(parent_id=Concat(L('parent_id'), sep, L('name'))),
                            reverse_multiple=False,
                            related_name='parent')
```
When trying to access the parent field:
```
obj = TreeNode.objects.last()
print(obj.parent)
```
I get this stack trace:
```
Traceback (most recent call last):
File "C:\apps\Python37\lib\site-packages\django\db\models\fields\related_descriptors.py", line 401, in __get__
rel_obj = self.related.get_cached_value(instance)
File "C:\apps\Python37\lib\site-packages\django\db\models\fields\mixins.py", line 13, in get_cached_value
return instance._state.fields_cache[cache_name]
KeyError: 'parent'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\apps\Python37\lib\site-packages\django_admin_shell\views.py", line 42, in run_code
exec(code)
File "<string>", line 11, in <module>
File "C:\apps\Python37\lib\site-packages\django\db\models\fields\related_descriptors.py", line 407, in __get__
filter_args = self.related.field.get_forward_related_filter(instance)
File "C:\apps\Python37\lib\site-packages\relativity\fields.py", line 347, in get_forward_related_filter
q = self.field.predicate
AttributeError: 'CustomForeignObjectRel' object has no attribute 'predicate'
```
| I managed to fix it by changing `related_accessor_class` in `Relationship` to something like this:
```
class ReverseManyToOneRelationshipDescriptor(ReverseManyToOneDescriptor):
    def __get__(self, instance, cls=None):
        if instance is None:
            return self
        return self.related_manager_cls(instance).get()

    def _get_set_deprecation_msg_params(self):
        return (
            'reverse side of a related set',
            'related fields',
        )
```
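To see what the workaround changes, here is a runnable toy sketch (`FakeManager`, `SingleObjectDescriptor`, and `Node` are stand-ins invented for illustration, not relativity or Django APIs): accessing the attribute through the descriptor yields a single object rather than a manager.
```
# Toy stand-ins, not real relativity/Django classes.
class FakeManager:
    def __init__(self, objs):
        self._objs = objs

    def get(self):
        # Mimics QuerySet.get(): exactly one matching object expected.
        assert len(self._objs) == 1
        return self._objs[0]


class SingleObjectDescriptor:
    def __get__(self, instance, cls=None):
        if instance is None:
            return self
        # Delegate to the related manager, then collapse to one object,
        # exactly as the workaround's __get__ does.
        return FakeManager(instance._related).get()


class Node:
    parent = SingleObjectDescriptor()

    def __init__(self, related):
        self._related = related


root = Node(related=[])
child = Node(related=[root])
print(child.parent is root)  # True: a single object, not a manager
```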
| 2020-05-30T07:18:30 | 0.0 | [] | [] |
||
judoscale/judoscale-python | judoscale__judoscale-python-70 | d0e0fed5206f12b08bd69fa0f002cf834a35f293 | diff --git a/judoscale/celery/collector.py b/judoscale/celery/collector.py
index 01e8ce1..1100e8c 100644
--- a/judoscale/celery/collector.py
+++ b/judoscale/celery/collector.py
@@ -129,9 +129,10 @@ def collect(self) -> List[Metric]:
Metric.for_queue(queue_name=queue, oldest_job_ts=published_at)
)
else:
+ task_id = task.get("id", None)
logger.warning(
"Unable to find `published_at` in task properties for "
- f"task ID {task['id']}."
+ f"task ID {task_id}."
)
else:
metrics.append(
| Unable to find `published_at` in task properties when TRACK_BUSY_JOBS is enabled
My old setup worked fine (Django, Celery, Redis on Heroku) with Judoscale, but I recently tried turning on this flag to see more info in the Judoscale UI for my worker dynos:
```
judoscale_celery(app, extra_config={"CELERY": {"TRACK_BUSY_JOBS": True}})
```
But I immediately started seeing errors around `KeyError: 'id'` coming from [here](https://github.com/judoscale/judoscale-python/blob/d0e0fed5206f12b08bd69fa0f002cf834a35f293/judoscale/celery/collector.py#L126-L135).
My env is set up using the steps here: https://github.com/judoscale/judoscale-python#using-judoscale-with-celery-and-redis, so I'm not really sure what I might be missing... Any pointers would be much appreciated, thank you!
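For illustration, here is a minimal, runnable sketch (the `task` payload is hypothetical) of the failing lookup and the guarded `dict.get` that the patch above switches to:
```
# Hypothetical task payload from the broker, missing the "id" key.
task = {"properties": {}}  # neither "published_at" nor "id" present

# Direct indexing is what raised the reported error:
# task["id"]  # KeyError: 'id'

# dict.get with a default degrades gracefully instead:
task_id = task.get("id", None)  # -> None, no exception raised
print(f"Unable to find `published_at` in task properties for task ID {task_id}.")
```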
| Thanks for the report! Could you share your log output up to and including the error? You can email it to Judoscale support if you want to keep it private. | 2023-08-26T14:09:06 | 0.0 | [] | [] |
||
judoscale/judoscale-python | judoscale__judoscale-python-60 | e101123c8fc29d61b71de67cfa1dcee194ee7e4b | diff --git a/judoscale/asgi/middleware.py b/judoscale/asgi/middleware.py
index 7106061..d3c2c56 100644
--- a/judoscale/asgi/middleware.py
+++ b/judoscale/asgi/middleware.py
@@ -3,6 +3,7 @@
from judoscale.core.adapter import Adapter, AdapterInfo
from judoscale.core.config import config as judoconfig
+from judoscale.core.logger import logger
from judoscale.core.metric import Metric
from judoscale.core.metrics_collectors import WebMetricsCollector
from judoscale.core.reporter import reporter
@@ -14,6 +15,11 @@ class RequestQueueTimeMiddleware:
def __init__(self, app, extra_config: Mapping = {}, **kwargs):
self.app = app
judoconfig.update(extra_config)
+
+ if not judoconfig.is_enabled:
+ logger.info("Not activated - no API URL provivded")
+ return
+
self.collector = WebMetricsCollector(judoconfig)
adapter = Adapter(
identifier=f"judoscale-{self.platform}",
@@ -28,14 +34,15 @@ async def __call__(self, scope, receive, send):
await self.app(scope, receive, send)
return
- for header, value in scope["headers"]:
- if header.lower() == b"x-request-start":
- request_start = value.decode()
- if metric := Metric.for_web(request_start):
- self.collector.add(metric)
- break
+ if judoconfig.is_enabled:
+ for header, value in scope["headers"]:
+ if header.lower() == b"x-request-start":
+ request_start = value.decode()
+ if metric := Metric.for_web(request_start):
+ self.collector.add(metric)
+ break
- reporter.ensure_running()
+ reporter.ensure_running()
await self.app(scope, receive, send)
diff --git a/judoscale/celery/__init__.py b/judoscale/celery/__init__.py
index ddcdd3d..5f36405 100644
--- a/judoscale/celery/__init__.py
+++ b/judoscale/celery/__init__.py
@@ -8,6 +8,7 @@
from judoscale.celery.collector import CeleryMetricsCollector
from judoscale.core.adapter import Adapter, AdapterInfo
from judoscale.core.config import config as judoconfig
+from judoscale.core.logger import logger
from judoscale.core.reporter import reporter
@@ -20,6 +21,11 @@ def judoscale_celery(celery: Celery, extra_config: Mapping = {}) -> None:
celery.conf.task_send_sent_event = True
judoconfig.update(extra_config)
+
+ if not judoconfig.is_enabled:
+ logger.info("Not activated - no API URL provivded")
+ return
+
collector = CeleryMetricsCollector(config=judoconfig, broker=celery)
adapter = Adapter(
identifier="judoscale-celery",
diff --git a/judoscale/core/config.py b/judoscale/core/config.py
index bd121e5..7215755 100644
--- a/judoscale/core/config.py
+++ b/judoscale/core/config.py
@@ -71,6 +71,10 @@ def for_render(cls, env: Mapping):
api_base_url = f"https://adapter.judoscale.com/api/{service_id}"
return cls(runtime_container, api_base_url, env)
+ @property
+ def is_enabled(self) -> bool:
+ return bool(self["API_BASE_URL"])
+
def update(self, new_config: Mapping):
for k, v in new_config.items():
k = k.upper()
diff --git a/judoscale/django/apps.py b/judoscale/django/apps.py
index 55d0218..56115f3 100644
--- a/judoscale/django/apps.py
+++ b/judoscale/django/apps.py
@@ -17,7 +17,7 @@ class JudoscaleDjangoConfig(AppConfig):
def ready(self):
judoconfig.update(getattr(settings, "JUDOSCALE", {}))
- if judoconfig["API_BASE_URL"] is None:
+ if not judoconfig.is_enabled:
logger.info("Not activated - No API URL provided")
return
diff --git a/judoscale/flask/judoscale.py b/judoscale/flask/judoscale.py
index 1894e16..87b7a57 100644
--- a/judoscale/flask/judoscale.py
+++ b/judoscale/flask/judoscale.py
@@ -5,6 +5,7 @@
from judoscale.core.adapter import Adapter, AdapterInfo
from judoscale.core.config import config as judoconfig
+from judoscale.core.logger import logger
from judoscale.core.metric import Metric
from judoscale.core.metrics_collectors import WebMetricsCollector
from judoscale.core.reporter import reporter
@@ -28,6 +29,11 @@ def __init__(self, app: Optional[Flask] = None):
def init_app(self, app: Flask):
judoconfig.update(app.config.get("JUDOSCALE", {}))
+
+ if not judoconfig.is_enabled:
+ logger.info("Not activated - no API URL provivded")
+ return
+
collector = WebMetricsCollector(judoconfig)
adapter = Adapter(
identifier="judoscale-flask",
diff --git a/judoscale/rq/__init__.py b/judoscale/rq/__init__.py
index e9af414..14ef574 100644
--- a/judoscale/rq/__init__.py
+++ b/judoscale/rq/__init__.py
@@ -5,12 +5,18 @@
from judoscale.core.adapter import Adapter, AdapterInfo
from judoscale.core.config import config as judoconfig
+from judoscale.core.logger import logger
from judoscale.core.reporter import reporter
from judoscale.rq.collector import RQMetricsCollector
def judoscale_rq(redis: Redis, extra_config: Mapping = {}) -> None:
judoconfig.update(extra_config)
+
+ if not judoconfig.is_enabled:
+ logger.info("Not activated - no API URL provivded")
+ return
+
collector = RQMetricsCollector(config=judoconfig, redis=redis)
adapter = Adapter(
identifier="judoscale-rq",
diff --git a/judoscale/rq/apps.py b/judoscale/rq/apps.py
index 7b0b09f..254e8e4 100644
--- a/judoscale/rq/apps.py
+++ b/judoscale/rq/apps.py
@@ -20,7 +20,7 @@ class JudoscaleRQConfig(AppConfig):
def ready(self):
judoconfig.update(getattr(settings, "JUDOSCALE", {}))
- if judoconfig["API_BASE_URL"] is None:
+ if not judoconfig.is_enabled:
logger.info("Not activated - No API URL provided")
return
diff --git a/sample-apps/django_celery_sample/blog/views.py b/sample-apps/django_celery_sample/blog/views.py
index ec59762..554979c 100644
--- a/sample-apps/django_celery_sample/blog/views.py
+++ b/sample-apps/django_celery_sample/blog/views.py
@@ -32,14 +32,17 @@ def many_tasks(request):
def index(request):
# Log message in level warning as this is Django's default logging level
logger.warning("Hello, world")
- catcher_url = settings.JUDOSCALE["API_BASE_URL"].replace("/inspect/", "/p/")
- return HttpResponse(
- "Judoscale Django Celery Sample App. "
- f"<a target='_blank' href={catcher_url}>Metrics</a>"
- "<form action='/task' method='POST'>"
- "<input type='submit' value='Add task'>"
- "</form>"
- "<form action='/batch_task' method='POST'>"
- "<input type='submit' value='Add 10 tasks'>"
- "</form>"
- )
+ if url := settings.JUDOSCALE.get("API_BASE_URL"):
+ catcher_url = url.replace("/inspect/", "/p/")
+ return HttpResponse(
+ "Judoscale Django Celery Sample App. "
+ f"<a target='_blank' href={catcher_url}>Metrics</a>"
+ "<form action='/task' method='POST'>"
+ "<input type='submit' value='Add task'>"
+ "</form>"
+ "<form action='/batch_task' method='POST'>"
+ "<input type='submit' value='Add 10 tasks'>"
+ "</form>"
+ )
+ else:
+ return HttpResponse("Judoscale Django Celery Sample App. No API URL provided.")
diff --git a/sample-apps/django_rq_sample/blog/views.py b/sample-apps/django_rq_sample/blog/views.py
index ce41d69..48394b2 100644
--- a/sample-apps/django_rq_sample/blog/views.py
+++ b/sample-apps/django_rq_sample/blog/views.py
@@ -38,14 +38,17 @@ def many_tasks(request):
def index(request):
# Log message in level warning as this is Django's default logging level
logger.warning("Hello, world")
- catcher_url = settings.JUDOSCALE["API_BASE_URL"].replace("/inspect/", "/p/")
- return HttpResponse(
- "Judoscale Django RQ Sample App. "
- f"<a target='_blank' href={catcher_url}>Metrics</a>"
- "<form action='/task' method='POST'>"
- "<input type='submit' value='Add task'>"
- "</form>"
- "<form action='/batch_task' method='POST'>"
- "<input type='submit' value='Add 10 tasks'>"
- "</form>"
- )
+ if url := settings.JUDOSCALE.get("API_BASE_URL"):
+ catcher_url = url.replace("/inspect/", "/p/")
+ return HttpResponse(
+ "Judoscale Django RQ Sample App. "
+ f"<a target='_blank' href={catcher_url}>Metrics</a>"
+ "<form action='/task' method='POST'>"
+ "<input type='submit' value='Add task'>"
+ "</form>"
+ "<form action='/batch_task' method='POST'>"
+ "<input type='submit' value='Add 10 tasks'>"
+ "</form>"
+ )
+ else:
+ return HttpResponse("Judoscale Django RQ Sample App. No API URL provided.")
diff --git a/sample-apps/django_sample/blog/views.py b/sample-apps/django_sample/blog/views.py
index 61a6ad8..13eec15 100644
--- a/sample-apps/django_sample/blog/views.py
+++ b/sample-apps/django_sample/blog/views.py
@@ -9,8 +9,11 @@
def index(request):
# Log message in level warning as this is Django's default logging level
logger.warning("Hello, world")
- catcher_url = settings.JUDOSCALE["API_BASE_URL"].replace("/inspect/", "/p/")
- return HttpResponse(
- "Judoscale Django Sample App. "
- f"<a target='_blank' href={catcher_url}>Metrics</a>"
- )
+ if url := settings.JUDOSCALE.get("API_BASE_URL"):
+ catcher_url = url.replace("/inspect/", "/p/")
+ return HttpResponse(
+ "Judoscale Django Sample App. "
+ f"<a target='_blank' href={catcher_url}>Metrics</a>"
+ )
+ else:
+ return HttpResponse("Judoscale Django Sample App. No API URL provided.")
diff --git a/sample-apps/fastapi_celery_sample/app/main.py b/sample-apps/fastapi_celery_sample/app/main.py
index ee2a590..541ccd4 100644
--- a/sample-apps/fastapi_celery_sample/app/main.py
+++ b/sample-apps/fastapi_celery_sample/app/main.py
@@ -24,17 +24,22 @@ def create_app():
@app.get("/")
async def index():
logger.warning("Hello, world")
- catcher_url = settings.JUDOSCALE["API_BASE_URL"].replace("/inspect/", "/p/")
- return HTMLResponse(
- "Judoscale FastAPI Celery Sample App. "
- f"<a target='_blank' href={catcher_url}>Metrics</a>"
- "<form action='/task' method='POST'>"
- "<input type='submit' value='Add task'>"
- "</form>"
- "<form action='/batch_task' method='POST'>"
- "<input type='submit' value='Add 10 tasks'>"
- "</form>"
- )
+ if url := settings.JUDOSCALE.get("API_BASE_URL"):
+ catcher_url = url.replace("/inspect/", "/p/")
+ return HTMLResponse(
+ "Judoscale FastAPI Celery Sample App. "
+ f"<a target='_blank' href={catcher_url}>Metrics</a>"
+ "<form action='/task' method='POST'>"
+ "<input type='submit' value='Add task'>"
+ "</form>"
+ "<form action='/batch_task' method='POST'>"
+ "<input type='submit' value='Add 10 tasks'>"
+ "</form>"
+ )
+ else:
+ return HTMLResponse(
+ "Judoscale FastAPI Celery Sample App. No API URL provided."
+ )
@app.post("/task")
async def task():
diff --git a/sample-apps/fastapi_sample/app/main.py b/sample-apps/fastapi_sample/app/main.py
index 8dedaf3..30c016c 100644
--- a/sample-apps/fastapi_sample/app/main.py
+++ b/sample-apps/fastapi_sample/app/main.py
@@ -15,10 +15,13 @@ def create_app() -> FastAPI:
@app.get("/")
async def index():
- catcher_url = judoconfig["API_BASE_URL"].replace("/inspect/", "/p/")
- return HTMLResponse(
- "Judoscale FastAPI Sample App. "
- f"<a target='_blank' href={catcher_url}>Metrics</a>"
- )
+ if url := judoconfig.get("API_BASE_URL"):
+ catcher_url = url.replace("/inspect/", "/p/")
+ return HTMLResponse(
+ "Judoscale FastAPI Sample App. "
+ f"<a target='_blank' href={catcher_url}>Metrics</a>"
+ )
+ else:
+ return HTMLResponse("Judoscale FastAPI Sample App. No API URL provided.")
return app
diff --git a/sample-apps/flask_celery_sample/app/app.py b/sample-apps/flask_celery_sample/app/app.py
index 628f17c..4aa0400 100644
--- a/sample-apps/flask_celery_sample/app/app.py
+++ b/sample-apps/flask_celery_sample/app/app.py
@@ -23,19 +23,20 @@ def create_app():
@app.get("/")
def index():
current_app.logger.warning("Hello, world")
- catcher_url = current_app.config["JUDOSCALE"]["API_BASE_URL"].replace(
- "/inspect/", "/p/"
- )
- return (
- "Judoscale Flask Celery Sample App. "
- f"<a target='_blank' href={catcher_url}>Metrics</a>"
- "<form action='/task' method='POST'>"
- "<input type='submit' value='Add task'>"
- "</form>"
- "<form action='/batch_task' method='POST'>"
- "<input type='submit' value='Add 10 tasks'>"
- "</form>"
- )
+ if url := current_app.config["JUDOSCALE"].get("API_BASE_URL"):
+ catcher_url = url.replace("/inspect/", "/p/")
+ return (
+ "Judoscale Flask Celery Sample App. "
+ f"<a target='_blank' href={catcher_url}>Metrics</a>"
+ "<form action='/task' method='POST'>"
+ "<input type='submit' value='Add task'>"
+ "</form>"
+ "<form action='/batch_task' method='POST'>"
+ "<input type='submit' value='Add 10 tasks'>"
+ "</form>"
+ )
+ else:
+ return "Judoscale Flask Celery Sample App. No API URL provided."
@app.post("/task")
def task():
diff --git a/sample-apps/flask_rq_sample/app/app.py b/sample-apps/flask_rq_sample/app/app.py
index 884da08..ab55349 100644
--- a/sample-apps/flask_rq_sample/app/app.py
+++ b/sample-apps/flask_rq_sample/app/app.py
@@ -29,19 +29,20 @@ def create_app():
@app.get("/")
def index():
current_app.logger.warning("Hello, world")
- catcher_url = current_app.config["JUDOSCALE"]["API_BASE_URL"].replace(
- "/inspect/", "/p/"
- )
- return (
- "Judoscale Flask RQ Sample App. "
- f"<a target='_blank' href={catcher_url}>Metrics</a>"
- "<form action='/task' method='POST'>"
- "<input type='submit' value='Add task'>"
- "</form>"
- "<form action='/batch_task' method='POST'>"
- "<input type='submit' value='Add 10 tasks'>"
- "</form>"
- )
+ if url := current_app.config["JUDOSCALE"].get("API_BASE_URL"):
+ catcher_url = url.replace("/inspect/", "/p/")
+ return (
+ "Judoscale Flask RQ Sample App. "
+ f"<a target='_blank' href={catcher_url}>Metrics</a>"
+ "<form action='/task' method='POST'>"
+ "<input type='submit' value='Add task'>"
+ "</form>"
+ "<form action='/batch_task' method='POST'>"
+ "<input type='submit' value='Add 10 tasks'>"
+ "</form>"
+ )
+ else:
+ return "Judoscale Flask RQ Sample App. No API URL provided."
@app.post("/task")
def task():
diff --git a/sample-apps/flask_sample/app.py b/sample-apps/flask_sample/app.py
index e31268b..f239054 100644
--- a/sample-apps/flask_sample/app.py
+++ b/sample-apps/flask_sample/app.py
@@ -15,13 +15,14 @@ def create_app():
@app.route("/", methods=["GET"])
def index():
current_app.logger.warning("Hello, world")
- catcher_url = current_app.config["JUDOSCALE"]["API_BASE_URL"].replace(
- "/inspect/", "/p/"
- )
- return (
- "Judoscale Flask Sample App. "
- f"<a target='_blank' href={catcher_url}>Metrics</a>"
- )
+ if url := current_app.config["JUDOSCALE"].get("API_BASE_URL"):
+ catcher_url = url.replace("/inspect/", "/p/")
+ return (
+ "Judoscale Flask Sample App. "
+ f"<a target='_blank' href={catcher_url}>Metrics</a>"
+ )
+ else:
+ return "Judoscale Flask Sample App. No API URL provided."
return app
 | Default config for non-Heroku environments
The default config for non Heroku environments is currently set as follows:
https://github.com/judoscale/judoscale-python/blob/e101123c8fc29d61b71de67cfa1dcee194ee7e4b/judoscale/core/config.py#L53
so `API_BASE_URL` is set as an empty string.
The code that determines if Judoscale should run, though, seems to compare `API_BASE_URL` against `None`:
https://github.com/judoscale/judoscale-python/blob/e101123c8fc29d61b71de67cfa1dcee194ee7e4b/judoscale/django/apps.py#L20
We've been seeing Judoscale attempting to run locally on our dev machines, and we're pretty sure it's because this check returns True.
Should the default config instead `return cls(None, None, env)` so that `API_BASE_URL` is set to `None` instead of `""`? Or, alternatively, should the check be a truthiness test such as `if not judoconfig["API_BASE_URL"]:`? Happy to put up a pull request if this seems reasonable.
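To make the mismatch concrete, a small runnable sketch (the constant mirrors the non-Heroku default from the linked `config.py`):
```
API_BASE_URL = ""  # non-Heroku default: empty string, not None

# The current guard only short-circuits on None, so "" slips through
# and the adapter keeps reporting locally:
if API_BASE_URL is None:
    print("Not activated")  # never reached for ""

# A truthiness check handles both the empty string and None:
if not API_BASE_URL:
    print("Not activated")  # reached for "" and None alike
```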
| Hey @jasonsbrooks,
Thanks for this bug report. I'm able to reproduce this locally and I'm going to patch this today.
Awesome, thanks so much @karls ! | 2023-04-27T17:52:29 | 0.0 | [] | [] |
||
judoscale/judoscale-python | judoscale__judoscale-python-35 | 2e8cf9a50638d4115c1038e9db0688b3a8332034 | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index d3fcf5e..08dd291 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -18,7 +18,7 @@ jobs:
uses: google-github-actions/release-please-action@v3
with:
release-type: python
- package-name: judoscale-python
+ package-name: judoscale
bump-minor-pre-major: true
publish:
@@ -59,11 +59,11 @@ jobs:
key: venv-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }}
- name: Install dependencies
- run: poetry install --no-interaction --no-root
+ run: poetry install --all-extras --no-interaction --no-root
if: ${{ steps.cache-deps.outputs.cache-hit != 'true' }}
- name: Install library
- run: poetry install --no-interaction
+ run: poetry install --all-extras --no-interaction
- name: Publish to PyPI
run: |
diff --git a/README.md b/README.md
index fa6bce8..af318d3 100644
--- a/README.md
+++ b/README.md
@@ -1,33 +1,27 @@
-# judoscale-python
+# Judoscale
This is the official Python adapter for [Judoscale](https://elements.heroku.com/addons/judoscale). You can use Judoscale without it, but this gives you request queue time metrics and job queue time (for supported job processors).
-## Installation
-
-Add judoscale-python to your <code>requirements.txt</code> file or the equivalent:
-
-```
-judoscale-python >= 1.0.0rc1
-```
-
-Then run this from a terminal to install the package:
-
-```sh
-pip install -r requirements.txt
-```
+It is recommended to install the specific web framework and/or background job library support as "extras" to the `judoscale` PyPI package. This ensures that checking if the installed web framework and/or background task processing library is supported happens at dependency resolution time.
## Supported web frameworks
-- [x] Django
-- [x] Flask
+- [x] [Django](#using-judoscale-with-django)
+- [x] [Flask](#using-judoscale-with-flask)
- [ ] FastAPI
## Supported job processors
-- [x] Celery
+- [x] [Celery](#using-judoscale-with-celery-and-redis) (with Redis as the broker)
- [ ] RQ
-## Using Judoscale with Django
+# Using Judoscale with Django
+
+Install Judoscale for Django with:
+
+```sh
+$ pip install 'judoscale[django]'
+```
Add Judoscale app to `settings.py`:
@@ -60,8 +54,13 @@ Once deployed, you will see your request queue time metrics available in the Jud
# Using Judoscale with Flask
-The Flask support for Judoscale is packaged into a Flask extension. Import the extension class and use like you normally would in a Flask application:
+Install Judoscale for Flask with:
+
+```sh
+$ pip install 'judoscale[flask]'
+```
+The Flask support for Judoscale is packaged into a Flask extension. Import the extension class and use like you normally would in a Flask application:
```py
# app.py
@@ -108,9 +107,15 @@ Note the [official recommendations for configuring Flask](https://flask.palletsp
# Using Judoscale with Celery and Redis
+Install Judoscale for Celery with:
+
+```sh
+$ pip install 'judoscale[celery-redis]'
+```
+
> **NOTE 1:** The Judoscale Celery integration currently only works with the [Redis broker](https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/index.html#redis).
-> **NOTE 2:** Using [task priorities](https://docs.celeryq.dev/en/latest/userguide/calling.html#advanced-options) is currently not supported by `judoscale-python`. You can still use task priorities, but `judoscale-python` won't see and report metrics on any queues other than the default, unprioritised queue.
+> **NOTE 2:** Using [task priorities](https://docs.celeryq.dev/en/latest/userguide/calling.html#advanced-options) is currently not supported by `judoscale`. You can still use task priorities, but `judoscale` won't see and report metrics on any queues other than the default, unprioritised queue.
Judoscale can automatically scale the number of Celery workers based on the queue latency (the age of the oldest pending task in the queue).
@@ -138,13 +143,13 @@ judoscale_celery(broker, extra_config={"LOG_LEVEL": "DEBUG"})
## Development
-This repo includes a `sample-apps` directory containing apps you can run locally. These apps use the judoscale-python adapter, but they override `API_BASE_URL` so they're not connected to the real Judoscale API. Instead, they post API requests to https://requestcatcher.com so you can observe the API behavior.
+This repo includes a `sample-apps` directory containing apps you can run locally. These apps use the `judoscale` adapter, but they override `API_BASE_URL` so they're not connected to the real Judoscale API. Instead, they post API requests to https://requestinspector.com so you can observe the API behavior.
See the `README` in a sample app for details on how to set it up and run locally.
### Contributing
-`judoscale-python` uses [Poetry](https://python-poetry.org/) for managing dependencies and packaging the project. Head over to the [installations instructions](https://python-poetry.org/docs/#installing-with-the-official-installer) and install Poetry, if needed.
+`judoscale` uses [Poetry](https://python-poetry.org/) for managing dependencies and packaging the project. Head over to the [installations instructions](https://python-poetry.org/docs/#installing-with-the-official-installer) and install Poetry, if needed.
Clone the repo with
@@ -163,7 +168,7 @@ Poetry (version 1.3.1)
Install dependencies with Poetry and activate the virtualenv
```sh
-$ poetry install
+$ poetry install --all-extras
$ poetry shell
```
diff --git a/judoscale/core/adapter.py b/judoscale/core/adapter.py
index eacef0e..0524669 100644
--- a/judoscale/core/adapter.py
+++ b/judoscale/core/adapter.py
@@ -5,7 +5,7 @@
from judoscale.core.metrics_collectors import Collector
-JUDOSCALE_VERSION = get_distribution("judoscale-python").version
+JUDOSCALE_VERSION = get_distribution("judoscale").version
@dataclass
diff --git a/poetry.lock b/poetry.lock
index 4fbb0e8..f26e9e8 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -4,8 +4,8 @@
name = "amqp"
version = "5.1.1"
description = "Low-level AMQP client for Python (fork of amqplib)."
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.6"
files = [
{file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"},
@@ -19,8 +19,8 @@ vine = ">=5.0.0"
name = "asgiref"
version = "3.5.2"
description = "ASGI specs, helper code, and adapters"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.7"
files = [
{file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"},
@@ -34,8 +34,8 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
name = "async-timeout"
version = "4.0.2"
description = "Timeout context manager for asyncio programs"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.6"
files = [
{file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
@@ -46,8 +46,8 @@ files = [
name = "backports-zoneinfo"
version = "0.2.1"
description = "Backport of the standard library zoneinfo module"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.6"
files = [
{file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"},
@@ -75,8 +75,8 @@ tzdata = ["tzdata"]
name = "billiard"
version = "3.6.4.0"
description = "Python multiprocessing fork with improvements and bugfixes"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = "*"
files = [
{file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"},
@@ -123,8 +123,8 @@ uvloop = ["uvloop (>=0.15.2)"]
name = "celery"
version = "5.2.7"
description = "Distributed Task Queue."
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.7"
files = [
{file = "celery-5.2.7-py3-none-any.whl", hash = "sha256:138420c020cd58d6707e6257b6beda91fd39af7afde5d36c6334d175302c0e14"},
@@ -206,7 +206,7 @@ unicode-backport = ["unicodedata2"]
name = "click"
version = "8.1.3"
description = "Composable command line interface toolkit"
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -221,8 +221,8 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
name = "click-didyoumean"
version = "0.3.0"
description = "Enables git-like *did-you-mean* feature in click"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.6.2,<4.0.0"
files = [
{file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"},
@@ -236,8 +236,8 @@ click = ">=7"
name = "click-plugins"
version = "1.1.1"
description = "An extension module for click to enable registering CLI commands via setuptools entry-points."
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = "*"
files = [
{file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"},
@@ -254,8 +254,8 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"]
name = "click-repl"
version = "0.2.0"
description = "REPL plugin for Click"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = "*"
files = [
{file = "click-repl-0.2.0.tar.gz", hash = "sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8"},
@@ -271,7 +271,7 @@ six = "*"
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
-category = "dev"
+category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
@@ -283,8 +283,8 @@ files = [
name = "django"
version = "4.1.4"
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.8"
files = [
{file = "Django-4.1.4-py3-none-any.whl", hash = "sha256:0b223bfa55511f950ff741983d408d78d772351284c75e9f77d2b830b6b4d148"},
@@ -318,6 +318,29 @@ mccabe = ">=0.7.0,<0.8.0"
pycodestyle = ">=2.10.0,<2.11.0"
pyflakes = ">=3.0.0,<3.1.0"
+[[package]]
+name = "flask"
+version = "2.2.2"
+description = "A simple framework for building complex web applications."
+category = "main"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"},
+ {file = "Flask-2.2.2.tar.gz", hash = "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b"},
+]
+
+[package.dependencies]
+click = ">=8.0"
+importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
+itsdangerous = ">=2.0"
+Jinja2 = ">=3.0"
+Werkzeug = ">=2.2.2"
+
+[package.extras]
+async = ["asgiref (>=3.2)"]
+dotenv = ["python-dotenv"]
+
[[package]]
name = "idna"
version = "3.4"
@@ -330,6 +353,26 @@ files = [
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
+[[package]]
+name = "importlib-metadata"
+version = "6.0.0"
+description = "Read metadata from Python packages"
+category = "main"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"},
+ {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"},
+]
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
+
[[package]]
name = "isort"
version = "5.11.2"
@@ -348,12 +391,42 @@ pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
plugins = ["setuptools"]
requirements-deprecated-finder = ["pip-api", "pipreqs"]
+[[package]]
+name = "itsdangerous"
+version = "2.1.2"
+description = "Safely pass data to untrusted environments and back."
+category = "main"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
+ {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"},
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.2"
+description = "A very fast and expressive template engine."
+category = "main"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
+ {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
[[package]]
name = "kombu"
version = "5.2.4"
description = "Messaging library for Python."
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.7"
files = [
{file = "kombu-5.2.4-py3-none-any.whl", hash = "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4"},
@@ -380,6 +453,66 @@ sqs = ["boto3 (>=1.9.12)", "pycurl (>=7.44.1,<7.45.0)", "urllib3 (>=1.26.7)"]
yaml = ["PyYAML (>=3.10)"]
zookeeper = ["kazoo (>=1.3.1)"]
+[[package]]
+name = "markupsafe"
+version = "2.1.2"
+description = "Safely add untrusted strings to HTML/XML markup."
+category = "main"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"},
+ {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
+]
+
[[package]]
name = "mccabe"
version = "0.7.0"
@@ -436,8 +569,8 @@ test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock
name = "prompt-toolkit"
version = "3.0.36"
description = "Library for building powerful interactive command lines in Python"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.6.2"
files = [
{file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"},
@@ -475,8 +608,8 @@ files = [
name = "pytz"
version = "2022.7.1"
description = "World timezone definitions, modern and historical"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = "*"
files = [
{file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
@@ -487,8 +620,8 @@ files = [
name = "redis"
version = "4.4.2"
description = "Python client for Redis database and key-value store"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.7"
files = [
{file = "redis-4.4.2-py3-none-any.whl", hash = "sha256:e6206448e2f8a432871d07d432c13ed6c2abcf6b74edb436c99752b1371be387"},
@@ -528,8 +661,8 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
@@ -540,8 +673,8 @@ files = [
name = "sqlparse"
version = "0.4.3"
description = "A non-validating SQL parser."
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.5"
files = [
{file = "sqlparse-0.4.3-py3-none-any.whl", hash = "sha256:0323c0ec29cd52bceabc1b4d9d579e311f3e4961b98d174201d5622a23b85e34"},
@@ -576,8 +709,8 @@ files = [
name = "tzdata"
version = "2022.7"
description = "Provider of IANA time zone data"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=2"
files = [
{file = "tzdata-2022.7-py2.py3-none-any.whl", hash = "sha256:2b88858b0e3120792a3c0635c23daf36a7d7eeeca657c323da299d2094402a0d"},
@@ -605,8 +738,8 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
name = "vine"
version = "5.0.0"
description = "Promises, promises, promises."
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = ">=3.6"
files = [
{file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"},
@@ -617,15 +750,54 @@ files = [
name = "wcwidth"
version = "0.2.6"
description = "Measures the displayed width of unicode strings in a terminal"
-category = "dev"
-optional = false
+category = "main"
+optional = true
python-versions = "*"
files = [
{file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"},
{file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"},
]
+[[package]]
+name = "werkzeug"
+version = "2.2.2"
+description = "The comprehensive WSGI web application library."
+category = "main"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"},
+ {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.1.1"
+
+[package.extras]
+watchdog = ["watchdog"]
+
+[[package]]
+name = "zipp"
+version = "3.12.1"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+category = "main"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "zipp-3.12.1-py3-none-any.whl", hash = "sha256:6c4fe274b8f85ec73c37a8e4e3fa00df9fb9335da96fb789e3b96b318e5097b3"},
+ {file = "zipp-3.12.1.tar.gz", hash = "sha256:a3cac813d40993596b39ea9e93a18e8a2076d5c378b8bc88ec32ab264e04ad02"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+
+[extras]
+celery-redis = ["celery"]
+django = ["django"]
+flask = ["flask"]
+
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
-content-hash = "b9cac0fe167474a35bef7b85b96392f7722ba88fd650b92d206708fe66a60f39"
+content-hash = "860c49e8d968c11a3241f938a2a0a23db64f92b60b72233fb883d87cb3d12c3c"
diff --git a/pyproject.toml b/pyproject.toml
index 19d401f..ffb2f39 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,5 @@
[tool.poetry]
-name = "judoscale-python"
+name = "judoscale"
version = "0.1.1"
description = "Official Python adapter for Judoscale — the advanced autoscaler for Heroku"
authors = [
@@ -21,13 +21,19 @@ packages = [{include = "judoscale"}]
[tool.poetry.dependencies]
python = "^3.8"
requests = "<3.0.0"
+django = { version = ">=2.1.0,<5.0.0", optional = true }
+flask = { version = ">=1.1.0,<3.0.0", optional = true }
+celery = { version = ">=4.4.0,<6.0.0", extras = ["redis"], optional = true }
[tool.poetry.group.dev.dependencies]
black = "^22.12.0"
-flake8 = { version = "^6.0.0", python = ">=3.8.1,<4.0.0" }
isort = "^5.11.2"
-django = "^4.1.4"
-celery = {extras = ["redis"], version = "^5.2.7"}
+flake8 = { version = "^6.0.0", python = ">=3.8.1,<4.0.0" }
+
+[tool.poetry.extras]
+django = ["django"]
+flask = ["flask"]
+celery-redis = ["celery"]
[tool.isort]
profile = "black"
diff --git a/sample-apps/celery_sample/README.md b/sample-apps/celery_sample/README.md
index be46673..2f3f509 100644
--- a/sample-apps/celery_sample/README.md
+++ b/sample-apps/celery_sample/README.md
@@ -1,6 +1,6 @@
# Celery Sample App
-This is a minimal Flask app for testing Celery with the judoscale-python package.
+This is a minimal Flask app for testing Celery with the judoscale package.
## Prerequisites
@@ -16,7 +16,7 @@ This is a minimal Flask app for testing Celery with the judoscale-python package
$ poetry install
```
-This will install the dependencies, including `judoscale-python` as a [path dependency](https://python-poetry.org/docs/dependency-specification/#path-dependencies).
+This will install the dependencies, including `judoscale` as a [path dependency](https://python-poetry.org/docs/dependency-specification/#path-dependencies).
## Run the app
diff --git a/sample-apps/celery_sample/bin/dev b/sample-apps/celery_sample/bin/dev
index d4b9819..c3a33c8 100755
--- a/sample-apps/celery_sample/bin/dev
+++ b/sample-apps/celery_sample/bin/dev
@@ -1,7 +1,7 @@
#!/bin/bash
# Heroku sets the DYNO environment variable on every dyno.
-# We're doing the same thing here so we can verify the judoscale-python
+# We're doing the same thing here so we can verify the judoscale
# package is using it as expected.
heroku local
diff --git a/sample-apps/celery_sample/poetry.lock b/sample-apps/celery_sample/poetry.lock
index 0d77f82..fc2ed9a 100644
--- a/sample-apps/celery_sample/poetry.lock
+++ b/sample-apps/celery_sample/poetry.lock
@@ -389,7 +389,7 @@ MarkupSafe = ">=2.0"
i18n = ["Babel (>=2.7)"]
[[package]]
-name = "judoscale-python"
+name = "judoscale"
version = "0.1.1"
description = "Official Python adapter for Judoscale — the advanced autoscaler for Heroku"
category = "main"
@@ -399,8 +399,15 @@ files = []
develop = true
[package.dependencies]
+celery = {version = ">=4.4.0,<6.0.0", extras = ["redis"], optional = true}
+flask = {version = ">=1.1.0,<3.0.0", optional = true}
requests = "<3.0.0"
+[package.extras]
+celery-redis = ["celery[redis] (>=4.4.0,<6.0.0)"]
+django = ["django (>=2.1.0,<5.0.0)"]
+flask = ["flask (>=1.1.0,<3.0.0)"]
+
[package.source]
type = "directory"
url = "../.."
@@ -526,14 +533,14 @@ files = [
[[package]]
name = "redis"
-version = "4.4.2"
+version = "4.5.1"
description = "Python client for Redis database and key-value store"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "redis-4.4.2-py3-none-any.whl", hash = "sha256:e6206448e2f8a432871d07d432c13ed6c2abcf6b74edb436c99752b1371be387"},
- {file = "redis-4.4.2.tar.gz", hash = "sha256:a010f6cb7378065040a02839c3f75c7e0fb37a87116fb4a95be82a95552776c7"},
+ {file = "redis-4.5.1-py3-none-any.whl", hash = "sha256:5deb072d26e67d2be1712603bfb7947ec3431fb0eec9c578994052e33035af6d"},
+ {file = "redis-4.5.1.tar.gz", hash = "sha256:1eec3741cda408d3a5f84b78d089c8b8d895f21b3b050988351e925faf202864"},
]
[package.dependencies]
@@ -567,14 +574,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "setuptools"
-version = "66.1.1"
+version = "67.2.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "setuptools-66.1.1-py3-none-any.whl", hash = "sha256:6f590d76b713d5de4e49fe4fbca24474469f53c83632d5d0fd056f7ff7e8112b"},
- {file = "setuptools-66.1.1.tar.gz", hash = "sha256:ac4008d396bc9cd983ea483cb7139c0240a07bbc74ffb6232fceffedc6cf03a8"},
+ {file = "setuptools-67.2.0-py3-none-any.whl", hash = "sha256:16ccf598aab3b506593c17378473978908a2734d7336755a8769b480906bec1c"},
+ {file = "setuptools-67.2.0.tar.gz", hash = "sha256:b440ee5f7e607bb8c9de15259dba2583dd41a38879a7abc1d43a71c59524da48"},
]
[package.extras]
@@ -655,21 +662,21 @@ watchdog = ["watchdog"]
[[package]]
name = "zipp"
-version = "3.11.0"
+version = "3.13.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"},
- {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"},
+ {file = "zipp-3.13.0-py3-none-any.whl", hash = "sha256:e8b2a36ea17df80ffe9e2c4fda3f693c3dad6df1697d3cd3af232db680950b0b"},
+ {file = "zipp-3.13.0.tar.gz", hash = "sha256:23f70e964bc11a34cef175bc90ba2914e1e4545ea1e3e2f67c079671883f9cb6"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
-content-hash = "ad4ec0dfff5a8631b17c5589c051deb5093a7709895aeafd57d85f140532d503"
+content-hash = "c2ea3756034a945b0c0e0082e5cbe881ff44e965b1c086e620e408773570766a"
diff --git a/sample-apps/celery_sample/pyproject.toml b/sample-apps/celery_sample/pyproject.toml
index 5cd3b6b..ebd7d8d 100644
--- a/sample-apps/celery_sample/pyproject.toml
+++ b/sample-apps/celery_sample/pyproject.toml
@@ -9,9 +9,9 @@ packages = [{include = "celery_sample"}]
[tool.poetry.dependencies]
python = "^3.8"
-Flask = "^2.2.2"
+Flask = "^2.0"
celery = {extras = ["redis"], version = "^5.2.7"}
-judoscale-python = { path = "../../", develop = true }
+judoscale = { path = "../../", extras=["celery-redis", "flask"], develop = true }
gunicorn = "^20.1.0"
diff --git a/sample-apps/django_2_2_21/django_2_2_21/settings.py b/sample-apps/django_2_2_21/django_2_2_21/settings.py
index aea0e53..85e6b12 100644
--- a/sample-apps/django_2_2_21/django_2_2_21/settings.py
+++ b/sample-apps/django_2_2_21/django_2_2_21/settings.py
@@ -43,7 +43,7 @@
JUDOSCALE = {
# This sample app is intended to be run locally, so Judoscale API requests are
# sent to a mock endpoint.
- "API_BASE_URL": "https://judoscale-python.requestcatcher.com",
+ "API_BASE_URL": "https://requestinspector.com/inspect/judoscale-django",
"LOG_LEVEL": "DEBUG",
"REPORT_INTERVAL_SECONDS": 2,
}
diff --git a/sample-apps/django_sample/README.md b/sample-apps/django_sample/README.md
index 9271716..cac78b7 100644
--- a/sample-apps/django_sample/README.md
+++ b/sample-apps/django_sample/README.md
@@ -1,6 +1,6 @@
# Django Sample App
-This is a minimal Django app to test the judoscale-python package.
+This is a minimal Django app to test the judoscale package.
## Prerequisites
@@ -14,7 +14,7 @@ This is a minimal Django app to test the judoscale-python package.
$ poetry install
```
-This will install the dependencies, including `judoscale-python` as a [path dependency](https://python-poetry.org/docs/dependency-specification/#path-dependencies).
+This will install the dependencies, including `judoscale` as a [path dependency](https://python-poetry.org/docs/dependency-specification/#path-dependencies).
## Run the app
@@ -25,11 +25,12 @@ Run `bin/dev` to run the app in development mode. This will run `heroku local`,
## How to use this sample app
-Open https://judoscale-django.requestcatcher.com in a browser. The sample app is configured to use this Request Catcher endpoint as a mock for the Judoscale Adapter API. This page will monitor all API requests sent from the adapter.
+Open https://requestinspector.com/p/judoscale-django in a browser. The sample app is configured to use this Request Inspector endpoint as a mock for the Judoscale Adapter API. This page will monitor all API requests sent from the adapter.
Start the app via `bin/dev`, then open http://localhost:5000. Continue to reload this page to collect and report more request metrics. You will see Judoscale Adapter API requests logged in Request Catcher.
## How to run the local django tests
+
```sh
$ ./bin/test
```
diff --git a/sample-apps/django_sample/bin/dev b/sample-apps/django_sample/bin/dev
index 9e31faa..1826a55 100755
--- a/sample-apps/django_sample/bin/dev
+++ b/sample-apps/django_sample/bin/dev
@@ -1,9 +1,9 @@
#!/bin/bash
-# Install the local judoscale-python package so we can test changes before publishing.
+# Install the local judoscale package so we can test changes before publishing.
# Heroku sets the DYNO environment variable on every dyno.
-# We're doing the same thing here so we can verify the judoscale-python
+# We're doing the same thing here so we can verify the judoscale
# package is using it as expected.
DYNO=web.1 heroku local
diff --git a/sample-apps/django_sample/poetry.lock b/sample-apps/django_sample/poetry.lock
index 051e34f..e89db87 100644
--- a/sample-apps/django_sample/poetry.lock
+++ b/sample-apps/django_sample/poetry.lock
@@ -156,14 +156,14 @@ files = [
[[package]]
name = "django"
-version = "4.1.5"
+version = "4.1.7"
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
category = "main"
optional = false
python-versions = ">=3.8"
files = [
- {file = "Django-4.1.5-py3-none-any.whl", hash = "sha256:4b214a05fe4c99476e99e2445c8b978c8369c18d4dea8e22ec412862715ad763"},
- {file = "Django-4.1.5.tar.gz", hash = "sha256:ff56ebd7ead0fd5dbe06fe157b0024a7aaea2e0593bb3785fb594cf94dad58ef"},
+ {file = "Django-4.1.7-py3-none-any.whl", hash = "sha256:f2f431e75adc40039ace496ad3b9f17227022e8b11566f4b363da44c7e44761e"},
+ {file = "Django-4.1.7.tar.gz", hash = "sha256:44f714b81c5f190d9d2ddad01a532fe502fa01c4cb8faf1d081f4264ed15dcd8"},
]
[package.dependencies]
@@ -210,7 +210,7 @@ files = [
]
[[package]]
-name = "judoscale-python"
+name = "judoscale"
version = "0.1.1"
description = "Official Python adapter for Judoscale — the advanced autoscaler for Heroku"
category = "main"
@@ -220,8 +220,14 @@ files = []
develop = true
[package.dependencies]
+django = {version = ">=2.1.0,<5.0.0", optional = true}
requests = "<3.0.0"
+[package.extras]
+celery-redis = ["celery[redis] (>=4.4.0,<6.0.0)"]
+django = ["django (>=2.1.0,<5.0.0)"]
+flask = ["flask (>=1.1.0,<3.0.0)"]
+
[package.source]
type = "directory"
url = "../.."
@@ -250,14 +256,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "setuptools"
-version = "66.0.0"
+version = "67.2.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "setuptools-66.0.0-py3-none-any.whl", hash = "sha256:a78d01d1e2c175c474884671dde039962c9d74c7223db7369771fcf6e29ceeab"},
- {file = "setuptools-66.0.0.tar.gz", hash = "sha256:bd6eb2d6722568de6d14b87c44a96fac54b2a45ff5e940e639979a3d1792adb6"},
+ {file = "setuptools-67.2.0-py3-none-any.whl", hash = "sha256:16ccf598aab3b506593c17378473978908a2734d7336755a8769b480906bec1c"},
+ {file = "setuptools-67.2.0.tar.gz", hash = "sha256:b440ee5f7e607bb8c9de15259dba2583dd41a38879a7abc1d43a71c59524da48"},
]
[package.extras]
@@ -309,4 +315,4 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
-content-hash = "455d907557eb9c9af5f238b4ab97531728a59dbc94b56436be7eeea9773f4535"
+content-hash = "648ebfec9cc1599180555eb403012fb5ed3b0b592b3440c101f5777d5075861a"
diff --git a/sample-apps/django_sample/pyproject.toml b/sample-apps/django_sample/pyproject.toml
index 19f242a..ab6d4c3 100644
--- a/sample-apps/django_sample/pyproject.toml
+++ b/sample-apps/django_sample/pyproject.toml
@@ -13,9 +13,9 @@ packages = [{include = "judoscale_django_sample"}]
[tool.poetry.dependencies]
python = "^3.8"
-Django = "^4.1.5"
+Django = "^4.0"
gunicorn = "^20.1.0"
-judoscale-python = { path = "../../", develop = true }
+judoscale = { path = "../../", extras=["django"], develop = true }
[build-system]
diff --git a/sample-apps/flask_sample/README.md b/sample-apps/flask_sample/README.md
index 1470a28..5b68ba8 100644
--- a/sample-apps/flask_sample/README.md
+++ b/sample-apps/flask_sample/README.md
@@ -1,6 +1,6 @@
# Flask Sample App
-This is a minimal Flask app to test the judoscale-python package.
+This is a minimal Flask app to test the judoscale package.
## Prerequisites
@@ -15,7 +15,7 @@ This is a minimal Flask app to test the judoscale-python package.
$ poetry install
```
-This will install the dependencies, including `judoscale-python` as a [path dependency](https://python-poetry.org/docs/dependency-specification/#path-dependencies).
+This will install the dependencies, including `judoscale` as a [path dependency](https://python-poetry.org/docs/dependency-specification/#path-dependencies).
## Run the app
@@ -26,7 +26,7 @@ Run `bin/dev` to run the app in development mode. This will run `heroku local`,
## How to use this sample app
-Open https://judoscale-flask.requestcatcher.com in a browser. The sample app is configured to use this Request Catcher endpoint as a mock for the Judoscale Adapter API. This page will monitor all API requests sent from the adapter.
+Open https://requestinspector.com/p/judoscale-flask in a browser. The sample app is configured to use this Request Catcher endpoint as a mock for the Judoscale Adapter API. This page will monitor all API requests sent from the adapter.
Start the app via `./bin/dev`, then open http://localhost:5000. Continue to reload this page to collect and report more request metrics. You will see Judoscale Adatper API requests logged in Request Catcher.
diff --git a/sample-apps/flask_sample/bin/dev b/sample-apps/flask_sample/bin/dev
index 3e0a7f7..66f4a94 100755
--- a/sample-apps/flask_sample/bin/dev
+++ b/sample-apps/flask_sample/bin/dev
@@ -1,7 +1,7 @@
#!/bin/bash
# Heroku sets the DYNO environment variable on every dyno.
-# We're doing the same thing here so we can verify the judoscale-python
+# We're doing the same thing here so we can verify the judoscale
# package is using it as expected.
DYNO=web.1 heroku local
diff --git a/sample-apps/flask_sample/poetry.lock b/sample-apps/flask_sample/poetry.lock
index 71399c5..20c2bb8 100644
--- a/sample-apps/flask_sample/poetry.lock
+++ b/sample-apps/flask_sample/poetry.lock
@@ -14,19 +14,102 @@ files = [
[[package]]
name = "charset-normalizer"
-version = "2.1.1"
+version = "3.0.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
-python-versions = ">=3.6.0"
+python-versions = "*"
files = [
- {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
- {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
+ {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"},
+ {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"},
+ {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"},
+ {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"},
+ {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"},
+ {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"},
+ {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"},
+ {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"},
]
-[package.extras]
-unicode-backport = ["unicodedata2"]
-
[[package]]
name = "click"
version = "8.1.3"
@@ -68,6 +151,7 @@ files = [
[package.dependencies]
click = ">=8.0"
+importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
itsdangerous = ">=2.0"
Jinja2 = ">=3.0"
Werkzeug = ">=2.2.2"
@@ -109,6 +193,26 @@ files = [
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
+[[package]]
+name = "importlib-metadata"
+version = "6.0.0"
+description = "Read metadata from Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"},
+ {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"},
+]
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
+
[[package]]
name = "itsdangerous"
version = "2.1.2"
@@ -140,87 +244,103 @@ MarkupSafe = ">=2.0"
i18n = ["Babel (>=2.7)"]
[[package]]
-name = "judoscale-python"
-version = "0.1.0"
+name = "judoscale"
+version = "0.1.1"
description = "Official Python adapter for Judoscale — the advanced autoscaler for Heroku"
category = "main"
optional = false
-python-versions = "^3.10"
+python-versions = "^3.8"
files = []
develop = true
[package.dependencies]
+flask = {version = ">=1.1.0,<3.0.0", optional = true}
requests = "<3.0.0"
+[package.extras]
+celery-redis = ["celery[redis] (>=4.4.0,<6.0.0)"]
+django = ["django (>=2.1.0,<5.0.0)"]
+flask = ["flask (>=1.1.0,<3.0.0)"]
+
[package.source]
type = "directory"
url = "../.."
[[package]]
name = "markupsafe"
-version = "2.1.1"
+version = "2.1.2"
description = "Safely add untrusted strings to HTML/XML markup."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"},
- {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"},
+ {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
]
[[package]]
name = "requests"
-version = "2.28.1"
+version = "2.28.2"
description = "Python HTTP for Humans."
category = "main"
optional = false
python-versions = ">=3.7, <4"
files = [
- {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
- {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
+ {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"},
+ {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"},
]
[package.dependencies]
certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<3"
+charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
@@ -230,31 +350,31 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "setuptools"
-version = "65.6.3"
+version = "67.2.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"},
- {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"},
+ {file = "setuptools-67.2.0-py3-none-any.whl", hash = "sha256:16ccf598aab3b506593c17378473978908a2734d7336755a8769b480906bec1c"},
+ {file = "setuptools-67.2.0.tar.gz", hash = "sha256:b440ee5f7e607bb8c9de15259dba2583dd41a38879a7abc1d43a71c59524da48"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "urllib3"
-version = "1.26.13"
+version = "1.26.14"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
- {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"},
- {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"},
+ {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"},
+ {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"},
]
[package.extras]
@@ -280,7 +400,23 @@ MarkupSafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog"]
+[[package]]
+name = "zipp"
+version = "3.13.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "zipp-3.13.0-py3-none-any.whl", hash = "sha256:e8b2a36ea17df80ffe9e2c4fda3f693c3dad6df1697d3cd3af232db680950b0b"},
+ {file = "zipp-3.13.0.tar.gz", hash = "sha256:23f70e964bc11a34cef175bc90ba2914e1e4545ea1e3e2f67c079671883f9cb6"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+
[metadata]
lock-version = "2.0"
-python-versions = "^3.10"
-content-hash = "119f6359103c1d88ae8de7f81e2b73720191af8b7814775c76239cfd174a8fc0"
+python-versions = "^3.8"
+content-hash = "4edd26c454c8f6b74c109cbb47f197faa38436e9b192e4f6b755d407a1b75d4a"
diff --git a/sample-apps/flask_sample/pyproject.toml b/sample-apps/flask_sample/pyproject.toml
index cb8529a..b6afede 100644
--- a/sample-apps/flask_sample/pyproject.toml
+++ b/sample-apps/flask_sample/pyproject.toml
@@ -13,9 +13,9 @@ packages = [{include = "flask_sample"}]
[tool.poetry.dependencies]
python = "^3.8"
-Flask = "^2.2.2"
+Flask = "^2.0"
gunicorn = "^20.1.0"
-judoscale-python = { path = "../../", develop = true }
+judoscale = { path = "../../", extras=["flask"], develop = true }
[build-system]
requires = ["poetry-core"]
| Include adapter metadata in report payload
This PR introduces a very thin abstraction called `Adapter`. An Adapter wraps metadata about the Judoscale library version and the language/framework version. It also wraps the metrics collector for each integration, as needed.
Adapters are registered with the Reporter, which sends metrics as before and now also includes adapter metadata under the `"adapters"` key.
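To make the shape of this abstraction concrete, below is a minimal sketch of what an Adapter and the payload packing could look like; the names (`AdapterInfo`, `build_adapters_payload`) are illustrative assumptions, not judoscale's actual API:

```python
from dataclasses import dataclass
from typing import Dict, List, Optional

@dataclass
class AdapterInfo:
    """Metadata about one integration: platform version and library version."""
    platform_version: str  # e.g. the Python, Celery, or Flask version
    adapter_version: str   # version of the judoscale package itself

@dataclass
class Adapter:
    identifier: str                              # e.g. "judoscale-celery"
    adapter_info: AdapterInfo
    metrics_collector: Optional[object] = None   # per-integration collector, if any

def build_adapters_payload(adapters: List[Adapter]) -> Dict[str, Dict[str, str]]:
    """Pack the registered adapters under the "adapters" key of the report."""
    return {
        a.identifier: {
            "platform_version": a.adapter_info.platform_version,
            "adapter_version": a.adapter_info.adapter_version,
        }
        for a in adapters
    }

# Example usage, with version numbers taken from the payload below:
celery = Adapter("judoscale-celery",
                 AdapterInfo(platform_version="5.2.7", adapter_version="0.1.1"))
print(build_adapters_payload([celery]))
# {'judoscale-celery': {'platform_version': '5.2.7', 'adapter_version': '0.1.1'}}
```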
Example payload from the Celery sample app:
```json
{
"dyno": "web.1",
"pid": 85868,
"config": {
"log_level": "DEBUG",
"report_interval_seconds": 15
},
"adapters": {
"judoscale-python": {
"platform_version": "3.10.4",
"adapter_version": "0.1.1"
},
"judoscale-celery": {
"platform_version": "5.2.7",
"adapter_version": "0.1.1"
},
"judoscale-flask": {
"platform_version": "2.2.2",
"adapter_version": "0.1.1"
}
},
"metrics": [
[
1676292771,
531063,
"queue_time",
"high"
],
[
1676292771,
532618,
"queue_time",
"low"
],
[
1676292757,
1,
"queue_time",
null
],
[
1676292757,
6,
"queue_time",
null
]
]
}
```
| 2023-02-14T09:16:03 | 0.0 | [] | [] |
|||
open-mmlab/mmpretrain | open-mmlab__mmpretrain-1499 | fec3da781f70bd25fd1edddde2fbc60b6c62a29e | diff --git a/mmpretrain/models/heads/multi_label_cls_head.py b/mmpretrain/models/heads/multi_label_cls_head.py
index e69b52778b4..ca36bfe06e7 100644
--- a/mmpretrain/models/heads/multi_label_cls_head.py
+++ b/mmpretrain/models/heads/multi_label_cls_head.py
@@ -93,7 +93,7 @@ def _get_loss(self, cls_score: torch.Tensor,
num_classes = cls_score.size()[-1]
# Unpack data samples and pack targets
if 'gt_score' in data_samples[0]:
- target = torch.stack([i.gt_score for i in data_samples])
+ target = torch.stack([i.gt_score.float() for i in data_samples])
else:
target = torch.stack([
label_to_onehot(i.gt_label, num_classes) for i in data_samples
| [Bug] RuntimeError: result type Float can't be cast to the desired output type Long
### Branch
main branch (mmpretrain version)
### Describe the bug
This problem happened when I tried to run a `multi-label` training task.
I implemented a new dataset type, with JSON-format data like:
```
"data_list": [
{
"img_path": "black_dress_0.jpg",
"img_label": [
0,
4
        ]
    }
]
```
Actually, I have no idea how to organize the data and the dataset type with `mmpretrain`, because there is little documentation about it. Could you please write more documentation on this, covering the `multi-label` task together with `multi-task`?
Thanks!
### Environment
Does not matter
### Other information
More documentation is needed about `multi-label` and `multi-task`
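For reference, the error named in the title can be reproduced outside mmpretrain with a minimal sketch: `F.binary_cross_entropy_with_logits` requires a floating-point target, so integer (Long) one-hot labels raise exactly this `RuntimeError`. The tensor names below are illustrative.

```python
import torch
import torch.nn.functional as F

pred = torch.randn(2, 5)                      # classifier logits
target = torch.zeros(2, 5, dtype=torch.long)  # integer one-hot multi-label target
target[0, [0, 4]] = 1                         # labels 0 and 4, as in the JSON above

# F.binary_cross_entropy_with_logits(pred, target)
# -> RuntimeError: result type Float can't be cast to the desired output type Long

loss = F.binary_cross_entropy_with_logits(pred, target.float())  # casting fixes it
print(loss)
```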
| Thank you very much for your feedback; we will start working on additional tutorials in this area right away.
Can you also provide the full error message of the reported error, so I can locate the problem faster?
Sure, below are the details:
```
Traceback (most recent call last):
File "D:\codeWorld\detection\mmpretrain\tools\train.py", line 159, in <module>
main()
File "D:\codeWorld\detection\mmpretrain\tools\train.py", line 155, in main
runner.train()
File "D:\codeMaker\conda\envs\detection\lib\site-packages\mmengine\runner\runner.py", line 1701, in train
model = self.train_loop.run() # type: ignore
File "D:\codeMaker\conda\envs\detection\lib\site-packages\mmengine\runner\loops.py", line 96, in run
self.run_epoch()
File "D:\codeMaker\conda\envs\detection\lib\site-packages\mmengine\runner\loops.py", line 112, in run_epoch
self.run_iter(idx, data_batch)
File "D:\codeMaker\conda\envs\detection\lib\site-packages\mmengine\runner\loops.py", line 128, in run_iter
outputs = self.runner.model.train_step(
File "D:\codeMaker\conda\envs\detection\lib\site-packages\mmengine\model\base_model\base_model.py", line 114, in train_step
losses = self._run_forward(data, mode='loss') # type: ignore
File "D:\codeMaker\conda\envs\detection\lib\site-packages\mmengine\model\base_model\base_model.py", line 326, in _run_forward
results = self(**data, mode=mode)
File "D:\codeMaker\conda\envs\detection\lib\site-packages\torch\nn\modules\module.py", line 1190, in _call_impl
return forward_call(*input, **kwargs)
File "D:\codeWorld\detection\mmpretrain\mmpretrain\models\classifiers\image.py", line 116, in forward
return self.loss(inputs, data_samples)
File "D:\codeWorld\detection\mmpretrain\mmpretrain\models\classifiers\image.py", line 226, in loss
return self.head.loss(feats, data_samples)
File "D:\codeWorld\detection\mmpretrain\mmpretrain\models\heads\multi_label_cls_head.py", line 87, in loss
losses = self._get_loss(cls_score, data_samples, **kwargs)
File "D:\codeWorld\detection\mmpretrain\mmpretrain\models\heads\multi_label_cls_head.py", line 104, in _get_loss
loss = self.loss_module(
File "D:\codeMaker\conda\envs\detection\lib\site-packages\torch\nn\modules\module.py", line 1190, in _call_impl
return forward_call(*input, **kwargs)
File "D:\codeWorld\detection\mmpretrain\mmpretrain\models\losses\cross_entropy_loss.py", line 201, in forward
loss_cls = self.loss_weight * self.cls_criterion(
File "D:\codeWorld\detection\mmpretrain\mmpretrain\models\losses\cross_entropy_loss.py", line 115, in binary_cross_entropy
loss = F.binary_cross_entropy_with_logits(
File "D:\codeMaker\conda\envs\detection\lib\site-packages\torch\nn\functional.py", line 3162, in binary_cross_entropy_with_logits
return torch.binary_cross_entropy_with_logits(input, target, weight, pos_weight, reduction_enum)
RuntimeError: result type Float can't be cast to the desired output type Long
Process finished with exit code 1
```
But I am not sure whether it's related to my dataset type.
Do you use LabelSmooth, MixUp, or CutMix? Disable them and then try again.
No, and I just changed the code from
```
loss = F.binary_cross_entropy_with_logits(
pred,
label,
weight=class_weight,
pos_weight=pos_weight,
reduction='none')
```
to
```
loss = F.binary_cross_entropy_with_logits(
pred,
label.float(),
weight=class_weight,
pos_weight=pos_weight,
reduction='none')
```
This change fixed the problem, but then another error arose. So I think it's better to wait for your tutorials. | 2023-04-19T02:18:08 | 0.0 | [] | [] |
||
open-mmlab/mmpretrain | open-mmlab__mmpretrain-1253 | 14dcb69092b4847d968cf8ec1423ff6c004f06b6 | diff --git a/README.md b/README.md
index 95f3c9f5106..00a028fe286 100644
--- a/README.md
+++ b/README.md
@@ -155,6 +155,7 @@ Results and models are available in the [model zoo](https://mmclassification.rea
- [x] [BEiT](https://github.com/open-mmlab/mmclassification/tree/1.x/configs/beit) / [BEiT v2](https://github.com/open-mmlab/mmclassification/tree/1.x/configs/beitv2)
- [x] [EVA](https://github.com/open-mmlab/mmclassification/tree/1.x/configs/eva)
- [x] [MixMIM](https://github.com/open-mmlab/mmclassification/tree/1.x/configs/mixmim)
+- [x] [EfficientNetV2](https://github.com/open-mmlab/mmclassification/tree/1.x/configs/efficientnet_v2)
</details>
diff --git a/configs/_base_/models/efficientnet_v2/efficientnet_v2_b0.py b/configs/_base_/models/efficientnet_v2/efficientnet_v2_b0.py
new file mode 100644
index 00000000000..d42e32905ed
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnet_v2_b0.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='b0'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnet_v2_b1.py b/configs/_base_/models/efficientnet_v2/efficientnet_v2_b1.py
new file mode 100644
index 00000000000..10736fc5046
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnet_v2_b1.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='b1'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnet_v2_b2.py b/configs/_base_/models/efficientnet_v2/efficientnet_v2_b2.py
new file mode 100644
index 00000000000..61f477120e0
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnet_v2_b2.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='b2'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1408,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnet_v2_b3.py b/configs/_base_/models/efficientnet_v2/efficientnet_v2_b3.py
new file mode 100644
index 00000000000..14e523fd2e4
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnet_v2_b3.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='b3'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1536,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnet_v2_l.py b/configs/_base_/models/efficientnet_v2/efficientnet_v2_l.py
new file mode 100644
index 00000000000..456467d6fa0
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnet_v2_l.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='l'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnet_v2_m.py b/configs/_base_/models/efficientnet_v2/efficientnet_v2_m.py
new file mode 100644
index 00000000000..8e4d303f624
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnet_v2_m.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='m'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnet_v2_s.py b/configs/_base_/models/efficientnet_v2/efficientnet_v2_s.py
new file mode 100644
index 00000000000..866648223c7
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnet_v2_s.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='s'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnet_v2_xl.py b/configs/_base_/models/efficientnet_v2/efficientnet_v2_xl.py
new file mode 100644
index 00000000000..2216c9daa7d
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnet_v2_xl.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='xl'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnetv2_b0.py b/configs/_base_/models/efficientnet_v2/efficientnetv2_b0.py
new file mode 100644
index 00000000000..d42e32905ed
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnetv2_b0.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='b0'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnetv2_b1.py b/configs/_base_/models/efficientnet_v2/efficientnetv2_b1.py
new file mode 100644
index 00000000000..10736fc5046
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnetv2_b1.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='b1'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnetv2_b2.py b/configs/_base_/models/efficientnet_v2/efficientnetv2_b2.py
new file mode 100644
index 00000000000..61f477120e0
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnetv2_b2.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='b2'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1408,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnetv2_b3.py b/configs/_base_/models/efficientnet_v2/efficientnetv2_b3.py
new file mode 100644
index 00000000000..14e523fd2e4
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnetv2_b3.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='b3'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1536,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnetv2_l.py b/configs/_base_/models/efficientnet_v2/efficientnetv2_l.py
new file mode 100644
index 00000000000..456467d6fa0
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnetv2_l.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='l'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnetv2_m.py b/configs/_base_/models/efficientnet_v2/efficientnetv2_m.py
new file mode 100644
index 00000000000..8e4d303f624
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnetv2_m.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='m'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnetv2_s.py b/configs/_base_/models/efficientnet_v2/efficientnetv2_s.py
new file mode 100644
index 00000000000..866648223c7
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnetv2_s.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='s'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/_base_/models/efficientnet_v2/efficientnetv2_xl.py b/configs/_base_/models/efficientnet_v2/efficientnetv2_xl.py
new file mode 100644
index 00000000000..2216c9daa7d
--- /dev/null
+++ b/configs/_base_/models/efficientnet_v2/efficientnetv2_xl.py
@@ -0,0 +1,12 @@
+# model settings
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(type='EfficientNetV2', arch='xl'),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1280,
+ loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
+ topk=(1, 5),
+ ))
diff --git a/configs/efficientnet_v2/README.md b/configs/efficientnet_v2/README.md
new file mode 100644
index 00000000000..b249ef56539
--- /dev/null
+++ b/configs/efficientnet_v2/README.md
@@ -0,0 +1,116 @@
+# EfficientNetV2
+
+> [EfficientNetV2: Smaller Models and Faster Training](https://arxiv.org/abs/2104.00298)
+
+<!-- [ALGORITHM] -->
+
+## Abstract
+
+This paper introduces EfficientNetV2, a new family of convolutional networks that have faster training speed and better parameter efficiency than previous models. To develop this family of models, we use a combination of training-aware neural architecture search and scaling, to jointly optimize training speed and parameter efficiency. The models were searched from the search space enriched with new ops such as Fused-MBConv. Our experiments show that EfficientNetV2 models train much faster than state-of-the-art models while being up to 6.8x smaller. Our training can be further sped up by progressively increasing the image size during training, but it often causes a drop in accuracy. To compensate for this accuracy drop, we propose to adaptively adjust regularization (e.g., dropout and data augmentation) as well, such that we can achieve both fast training and good accuracy. With progressive learning, our EfficientNetV2 significantly outperforms previous models on ImageNet and CIFAR/Cars/Flowers datasets. By pretraining on the same ImageNet21k, our EfficientNetV2 achieves 87.3% top-1 accuracy on ImageNet ILSVRC2012, outperforming the recent ViT by 2.0% accuracy while training 5x-11x faster using the same computing resources. Code will be available at https://github.com/google/automl/tree/master/efficientnetv2.
+
+<div align=center>
+<img src="https://user-images.githubusercontent.com/18586273/208616931-0c5107f1-f08c-48d3-8694-7a6eaf227dc2.png" width="50%"/>
+</div>
+
+## How to use it?
+
+<!-- [TABS-BEGIN] -->
+
+**Predict image**
+
+```python
+>>> import torch
+>>> from mmcls.apis import init_model, inference_model
+>>>
+>>> model = init_model('configs/efficientnet_v2/efficientnetv2-b0_8xb32_in1k.py', "https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-b0_8xb32_in1k_20221219-9689f21f.pth")
+>>> predict = inference_model(model, 'demo/demo.JPEG')
+>>> print(predict['pred_class'])
+sea snake
+>>> print(predict['pred_score'])
+0.3147328197956085
+```
+
+**Use the model**
+
+```python
+>>> import torch
+>>> from mmcls import get_model
+>>>
+>>> model = get_model("efficientnetv2-b0_3rdparty_in1k", pretrained=True)
+>>> model.eval()
+>>> inputs = torch.rand(1, 3, 224, 224).to(model.data_preprocessor.device)
+>>> # To get classification scores.
+>>> out = model(inputs)
+>>> print(out.shape)
+torch.Size([1, 1000])
+>>> # To extract features.
+>>> outs = model.extract_feat(inputs)
+>>> print(outs[0].shape)
+torch.Size([1, 1280])
+```
+
+**Train/Test Command**
+
+Place the ImageNet dataset in the `data/imagenet/` directory, or prepare datasets according to the [docs](https://mmclassification.readthedocs.io/en/1.x/user_guides/dataset_prepare.html#prepare-dataset).
+
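+For reference, a minimal sketch of the expected directory layout (following the dataset-preparation docs linked above; the exact `meta` annotation files may differ depending on how you obtained the dataset):
+
+```text
+data/imagenet
+├── meta
+│   ├── train.txt
+│   └── val.txt
+├── train
+└── val
+```
+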
+Train:
+
+```shell
+python tools/train.py configs/efficientnet_v2/efficientnetv2-b0_8xb32_in1k.py
+```
+
+Test:
+
+```shell
+python tools/test.py configs/efficientnet_v2/efficientnetv2-b0_8xb32_in1k.py https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-b0_8xb32_in1k_20221219-9689f21f.pth
+```
+
+<!-- [TABS-END] -->
+
+For more configurable parameters, please refer to the [API](https://mmclassification.readthedocs.io/en/1.x/api/generated/mmcls.models.backbones.EfficientNetV2.html#mmcls.models.backbones.EfficientNetV2).
+
+## Results and models
+
+### ImageNet-1k
+
+| Model | Pretrain | Params(M) | Flops(G) | Top-1 (%) | Top-5 (%) | Config | Download |
+| :------------------------------------------------: | :----------: | :-------: | :------: | :-------: | :-------: | :-----------------------------------------------: | :----------------------------------------------------: |
+| EfficientNetV2-b0\* (`efficientnetv2-b0_3rdparty_in1k`) | From scratch | 7.14 | 0.92 | 78.52 | 94.44 | [config](./efficientnetv2-b0_8xb32_in1k.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-b0_3rdparty_in1k_20221221-9ef6e736.pth) |
+| EfficientNetV2-b1\* (`efficientnetv2-b1_3rdparty_in1k`) | From scratch | 8.14 | 1.44 | 79.80 | 94.89 | [config](./efficientnetv2-b1_8xb32_in1k.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-b1_3rdparty_in1k_20221221-6955d9ce.pth) |
+| EfficientNetV2-b2\* (`efficientnetv2-b2_3rdparty_in1k`) | From scratch | 10.10 | 1.99 | 80.63 | 95.30 | [config](./efficientnetv2-b2_8xb32_in1k.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-b2_3rdparty_in1k_20221221-74f7d493.pth) |
+| EfficientNetV2-b3\* (`efficientnetv2-b3_3rdparty_in1k`) | From scratch | 14.36 | 3.50 | 82.03 | 95.88 | [config](./efficientnetv2-b3_8xb32_in1k.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-b3_3rdparty_in1k_20221221-b6f07a36.pth) |
+| EfficientNetV2-s\* (`efficientnetv2-s_3rdparty_in1k`) | From scratch | 21.46 | 9.72 | 83.82 | 96.67 | [config](./efficientnetv2-s_8xb32_in1k-384px.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-s_3rdparty_in1k_20221220-f0eaff9d.pth) |
+| EfficientNetV2-m\* (`efficientnetv2-m_3rdparty_in1k`) | From scratch | 54.14 | 26.88 | 85.01 | 97.26 | [config](./efficientnetv2-m_8xb32_in1k-480px.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-m_3rdparty_in1k_20221220-9dc0c729.pth) |
+| EfficientNetV2-l\* (`efficientnetv2-l_3rdparty_in1k`) | From scratch | 118.52 | 60.14 | 85.43 | 97.31 | [config](./efficientnetv2-l_8xb32_in1k-480px.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-l_3rdparty_in1k_20221220-5c3bac0f.pth) |
+| EfficientNetV2-s\* (`efficientnetv2-s_in21k-pre_3rdparty_in1k`) | ImageNet 21k | 21.46 | 9.72 | 84.29 | 97.26 | [config](./efficientnetv2-s_8xb32_in1k-384px.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-s_in21k-pre-3rdparty_in1k_20221220-7a7c8475.pth) |
+| EfficientNetV2-m\* (`efficientnetv2-m_in21k-pre_3rdparty_in1k`) | ImageNet 21k | 54.14 | 26.88 | 85.47 | 97.76 | [config](./efficientnetv2-m_8xb32_in1k-480px.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-m_in21k-pre-3rdparty_in1k_20221220-a1013a04.pth) |
+| EfficientNetV2-l\* (`efficientnetv2-l_in21k-pre_3rdparty_in1k`) | ImageNet 21k | 118.52 | 60.14 | 86.31 | 97.99 | [config](./efficientnetv2-l_8xb32_in1k-480px.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-l_in21k-pre-3rdparty_in1k_20221220-63df0efd.pth) |
+| EfficientNetV2-xl\* (`efficientnetv2-xl_in21k-pre_3rdparty_in1k`) | ImageNet 21k | 208.12 | 98.34 | 86.39 | 97.83 | [config](./efficientnetv2-xl_8xb32_in1k-512px.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-xl_in21k-pre-3rdparty_in1k_20221220-583ac18b.pth) |
+
+*Models with * are converted from the [timm repository](https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py). The config files of these models are only for inference. We have not reproduced the training accuracy with these configs and welcome you to contribute your reproduction results.*
+
+### Pre-trained Models on ImageNet-21k
+
+The pre-trained models below are intended only for fine-tuning; their configs cannot be used for training from scratch, and no evaluation results are reported.
+
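+As a rough sketch (not an official config), one of these checkpoints can be used for fine-tuning by initializing only the backbone from it; the checkpoint URL is taken from the table below:
+
+```python
+# Hypothetical fine-tuning config: reuse the ImageNet-1k pipeline and load
+# only the backbone weights from the ImageNet-21k checkpoint, so the
+# 21843-class head in the checkpoint is ignored.
+_base_ = ['./efficientnetv2-s_8xb32_in1k-384px.py']
+
+checkpoint = 'https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-s_3rdparty_in21k_20221220-c0572b56.pth'
+
+model = dict(
+    backbone=dict(
+        init_cfg=dict(
+            type='Pretrained', checkpoint=checkpoint, prefix='backbone')))
+```
+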
+| Model | Params(M) | Flops(G) | Config | Download |
+| :------------------------------------------------------: | :-------: | :------: | :-----------------------------------------------: | :----------------------------------------------------------------------------: |
+| EfficientNetV2-s\* (`efficientnetv2-s_3rdparty_in21k`) | 21.46 | 9.72 | [config](./efficientnetv2-s_8xb32_in1k-384px.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-s_3rdparty_in21k_20221220-c0572b56.pth) |
+| EfficientNetV2-m\* (`efficientnetv2-m_3rdparty_in21k`) | 54.14 | 26.88 | [config](./efficientnetv2-m_8xb32_in1k-480px.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-m_3rdparty_in21k_20221220-073e944c.pth) |
+| EfficientNetV2-l\* (`efficientnetv2-l_3rdparty_in21k`) | 118.52 | 60.14 | [config](./efficientnetv2-l_8xb32_in1k-480px.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-l_3rdparty_in21k_20221220-f28f91e1.pth) |
+| EfficientNetV2-xl\* (`efficientnetv2-xl_3rdparty_in21k`) | 208.12 | 98.34 | [config](./efficientnetv2-xl_8xb32_in1k-512px.py) | [model](https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-xl_3rdparty_in21k_20221220-b2c9329c.pth) |
+
+*Models with * are converted from the [timm repository](https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py).*
+
+## Citation
+
+```bibtex
+@inproceedings{tan2021efficientnetv2,
+ title={Efficientnetv2: Smaller models and faster training},
+ author={Tan, Mingxing and Le, Quoc},
+ booktitle={International Conference on Machine Learning},
+ pages={10096--10106},
+ year={2021},
+ organization={PMLR}
+}
+```
diff --git a/configs/efficientnet_v2/efficientnet_v2-b0_8xb32_in1k.py b/configs/efficientnet_v2/efficientnet_v2-b0_8xb32_in1k.py
new file mode 100644
index 00000000000..0ce48d43ae1
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnet_v2-b0_8xb32_in1k.py
@@ -0,0 +1,58 @@
+_base_ = [
+    '../_base_/models/efficientnet_v2/efficientnetv2_b0.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+# dataset settings
+dataset_type = 'ImageNet'
+data_preprocessor = dict(
+ num_classes=1000,
+ # RGB format normalization parameters
+ mean=[123.675, 116.28, 103.53],
+ std=[58.395, 57.12, 57.375],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+bgr_mean = data_preprocessor['mean'][::-1]
+bgr_std = data_preprocessor['std'][::-1]
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(
+ type='RandomResizedCrop',
+ scale=192,
+ backend='pillow',
+ interpolation='bicubic'),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(
+ type='RandAugment',
+ policies='timm_increasing',
+ num_policies=2,
+ total_level=10,
+ magnitude_level=9,
+ magnitude_std=0.5,
+ hparams=dict(
+ pad_val=[round(x) for x in bgr_mean], interpolation='bicubic')),
+ dict(
+ type='RandomErasing',
+ erase_prob=0.25,
+ mode='rand',
+ min_area_ratio=0.02,
+ max_area_ratio=1 / 3,
+ fill_color=bgr_mean,
+ fill_std=bgr_std),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=224, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnet_v2-b1_8xb32_in1k.py b/configs/efficientnet_v2/efficientnet_v2-b1_8xb32_in1k.py
new file mode 100644
index 00000000000..9d628d05981
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnet_v2-b1_8xb32_in1k.py
@@ -0,0 +1,23 @@
+_base_ = [
+    '../_base_/models/efficientnet_v2/efficientnetv2_b1.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=192),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=240),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnet_v2-b2_8xb32_in1k.py b/configs/efficientnet_v2/efficientnet_v2-b2_8xb32_in1k.py
new file mode 100644
index 00000000000..e15f7698e81
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnet_v2-b2_8xb32_in1k.py
@@ -0,0 +1,23 @@
+_base_ = [
+    '../_base_/models/efficientnet_v2/efficientnetv2_b2.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=208),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=260),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnet_v2-b3_8xb32_in1k.py b/configs/efficientnet_v2/efficientnet_v2-b3_8xb32_in1k.py
new file mode 100644
index 00000000000..2f4b664dfe2
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnet_v2-b3_8xb32_in1k.py
@@ -0,0 +1,23 @@
+_base_ = [
+    '../_base_/models/efficientnet_v2/efficientnetv2_b3.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=240),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=300),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnet_v2-l_8xb32_in1k.py b/configs/efficientnet_v2/efficientnet_v2-l_8xb32_in1k.py
new file mode 100644
index 00000000000..5b150138319
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnet_v2-l_8xb32_in1k.py
@@ -0,0 +1,34 @@
+_base_ = [
+    '../_base_/models/efficientnet_v2/efficientnetv2_l.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+# dataset settings
+dataset_type = 'ImageNet'
+data_preprocessor = dict(
+ num_classes=1000,
+ # RGB format normalization parameters
+ mean=[127.5, 127.5, 127.5],
+ std=[127.5, 127.5, 127.5],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=384, crop_padding=0),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=480, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnet_v2-l_8xb32_in21ft1k.py b/configs/efficientnet_v2/efficientnet_v2-l_8xb32_in21ft1k.py
new file mode 100644
index 00000000000..5b150138319
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnet_v2-l_8xb32_in21ft1k.py
@@ -0,0 +1,34 @@
+_base_ = [
+    '../_base_/models/efficientnet_v2/efficientnetv2_l.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+# dataset settings
+dataset_type = 'ImageNet'
+data_preprocessor = dict(
+ num_classes=1000,
+ # RGB format normalization parameters
+ mean=[127.5, 127.5, 127.5],
+ std=[127.5, 127.5, 127.5],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=384, crop_padding=0),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=480, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnet_v2-m_8xb32_in1k.py b/configs/efficientnet_v2/efficientnet_v2-m_8xb32_in1k.py
new file mode 100644
index 00000000000..80884286c81
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnet_v2-m_8xb32_in1k.py
@@ -0,0 +1,34 @@
+_base_ = [
+    '../_base_/models/efficientnet_v2/efficientnetv2_m.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+# dataset settings
+dataset_type = 'ImageNet'
+data_preprocessor = dict(
+ num_classes=1000,
+ # RGB format normalization parameters
+ mean=[127.5, 127.5, 127.5],
+ std=[127.5, 127.5, 127.5],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=384, crop_padding=0),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=480, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnet_v2-m_8xb32_in21ft1k.py b/configs/efficientnet_v2/efficientnet_v2-m_8xb32_in21ft1k.py
new file mode 100644
index 00000000000..80884286c81
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnet_v2-m_8xb32_in21ft1k.py
@@ -0,0 +1,34 @@
+_base_ = [
+    '../_base_/models/efficientnet_v2/efficientnetv2_m.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+# dataset settings
+dataset_type = 'ImageNet'
+data_preprocessor = dict(
+ num_classes=1000,
+ # RGB format normalization parameters
+ mean=[127.5, 127.5, 127.5],
+ std=[127.5, 127.5, 127.5],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=384, crop_padding=0),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=480, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnet_v2-s_8xb32_in1k.py b/configs/efficientnet_v2/efficientnet_v2-s_8xb32_in1k.py
new file mode 100644
index 00000000000..ee85b9f3336
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnet_v2-s_8xb32_in1k.py
@@ -0,0 +1,58 @@
+_base_ = [
+    '../_base_/models/efficientnet_v2/efficientnetv2_s.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+# dataset settings
+dataset_type = 'ImageNet'
+data_preprocessor = dict(
+ num_classes=1000,
+ # RGB format normalization parameters
+ mean=[127.5, 127.5, 127.5],
+ std=[127.5, 127.5, 127.5],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+bgr_mean = data_preprocessor['mean'][::-1]
+bgr_std = data_preprocessor['std'][::-1]
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(
+ type='RandomResizedCrop',
+ scale=300,
+ backend='pillow',
+ interpolation='bicubic'),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(
+ type='RandAugment',
+ policies='timm_increasing',
+ num_policies=2,
+ total_level=10,
+ magnitude_level=9,
+ magnitude_std=0.5,
+ hparams=dict(
+ pad_val=[round(x) for x in bgr_mean], interpolation='bicubic')),
+ dict(
+ type='RandomErasing',
+ erase_prob=0.25,
+ mode='rand',
+ min_area_ratio=0.02,
+ max_area_ratio=1 / 3,
+ fill_color=bgr_mean,
+ fill_std=bgr_std),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=384, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnet_v2-s_8xb32_in21ft1k.py b/configs/efficientnet_v2/efficientnet_v2-s_8xb32_in21ft1k.py
new file mode 100644
index 00000000000..2eed7b68890
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnet_v2-s_8xb32_in21ft1k.py
@@ -0,0 +1,34 @@
+_base_ = [
+    '../_base_/models/efficientnet_v2/efficientnetv2_s.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+# dataset settings
+dataset_type = 'ImageNet'
+data_preprocessor = dict(
+ num_classes=1000,
+ # RGB format normalization parameters
+ mean=[127.5, 127.5, 127.5],
+ std=[127.5, 127.5, 127.5],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=300, crop_padding=0),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=384, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnet_v2-xl_8xb32_in21ft1k.py b/configs/efficientnet_v2/efficientnet_v2-xl_8xb32_in21ft1k.py
new file mode 100644
index 00000000000..6309ee4bbf6
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnet_v2-xl_8xb32_in21ft1k.py
@@ -0,0 +1,34 @@
+_base_ = [
+    '../_base_/models/efficientnet_v2/efficientnetv2_xl.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+# dataset settings
+dataset_type = 'ImageNet'
+data_preprocessor = dict(
+ num_classes=1000,
+ # RGB format normalization parameters
+ mean=[127.5, 127.5, 127.5],
+ std=[127.5, 127.5, 127.5],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=384, crop_padding=0),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=512, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnetv2-b0_8xb32_in1k.py b/configs/efficientnet_v2/efficientnetv2-b0_8xb32_in1k.py
new file mode 100644
index 00000000000..c8a64f56029
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-b0_8xb32_in1k.py
@@ -0,0 +1,58 @@
+_base_ = [
+ '../_base_/models/efficientnet_v2/efficientnetv2_b0.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+# dataset settings
+dataset_type = 'ImageNet'
+data_preprocessor = dict(
+ num_classes=1000,
+ # RGB format normalization parameters
+ mean=[123.675, 116.28, 103.53],
+ std=[58.395, 57.12, 57.375],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+bgr_mean = data_preprocessor['mean'][::-1]
+bgr_std = data_preprocessor['std'][::-1]
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(
+ type='RandomResizedCrop',
+ scale=192,
+ backend='pillow',
+ interpolation='bicubic'),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(
+ type='RandAugment',
+ policies='timm_increasing',
+ num_policies=2,
+ total_level=10,
+ magnitude_level=9,
+ magnitude_std=0.5,
+ hparams=dict(
+ pad_val=[round(x) for x in bgr_mean], interpolation='bicubic')),
+ dict(
+ type='RandomErasing',
+ erase_prob=0.25,
+ mode='rand',
+ min_area_ratio=0.02,
+ max_area_ratio=1 / 3,
+ fill_color=bgr_mean,
+ fill_std=bgr_std),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=224, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnetv2-b1_8xb32_in1k.py b/configs/efficientnet_v2/efficientnetv2-b1_8xb32_in1k.py
new file mode 100644
index 00000000000..33f48dfd26a
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-b1_8xb32_in1k.py
@@ -0,0 +1,21 @@
+_base_ = ['./efficientnetv2-b0_8xb32_in1k.py']
+
+# model setting
+model = dict(backbone=dict(arch='b1'), head=dict(in_channels=1280, ))
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=192),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=240, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnetv2-b2_8xb32_in1k.py b/configs/efficientnet_v2/efficientnetv2-b2_8xb32_in1k.py
new file mode 100644
index 00000000000..497c2aa3727
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-b2_8xb32_in1k.py
@@ -0,0 +1,21 @@
+_base_ = ['./efficientnetv2-b0_8xb32_in1k.py']
+
+# model setting
+model = dict(backbone=dict(arch='b2'), head=dict(in_channels=1408, ))
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=208),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=260, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnetv2-b3_8xb32_in1k.py b/configs/efficientnet_v2/efficientnetv2-b3_8xb32_in1k.py
new file mode 100644
index 00000000000..16f82c3a512
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-b3_8xb32_in1k.py
@@ -0,0 +1,21 @@
+_base_ = ['./efficientnetv2-b0_8xb32_in1k.py']
+
+# model setting
+model = dict(backbone=dict(arch='b3'), head=dict(in_channels=1536, ))
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=240),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=300, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnetv2-l_8xb32_in1k-480px.py b/configs/efficientnet_v2/efficientnetv2-l_8xb32_in1k-480px.py
new file mode 100644
index 00000000000..2bef5591c87
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-l_8xb32_in1k-480px.py
@@ -0,0 +1,23 @@
+_base_ = [
+ 'efficientnetv2-s_8xb32_in1k-384px.py',
+]
+
+# model setting
+model = dict(backbone=dict(arch='l'), )
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=384, crop_padding=0),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=480, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnetv2-l_8xb32_in21k.py b/configs/efficientnet_v2/efficientnetv2-l_8xb32_in21k.py
new file mode 100644
index 00000000000..179c72075f6
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-l_8xb32_in21k.py
@@ -0,0 +1,4 @@
+_base_ = ['./efficientnetv2-s_8xb32_in21k.py']
+
+# model setting
+model = dict(backbone=dict(arch='l'), )
diff --git a/configs/efficientnet_v2/efficientnetv2-m_8xb32_in1k-480px.py b/configs/efficientnet_v2/efficientnetv2-m_8xb32_in1k-480px.py
new file mode 100644
index 00000000000..06f941e2eeb
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-m_8xb32_in1k-480px.py
@@ -0,0 +1,23 @@
+_base_ = [
+ 'efficientnetv2-s_8xb32_in1k-384px.py',
+]
+
+# model setting
+model = dict(backbone=dict(arch='m'), )
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=384, crop_padding=0),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=480, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnetv2-m_8xb32_in21k.py b/configs/efficientnet_v2/efficientnetv2-m_8xb32_in21k.py
new file mode 100644
index 00000000000..f04d616376a
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-m_8xb32_in21k.py
@@ -0,0 +1,4 @@
+_base_ = ['./efficientnetv2-s_8xb32_in21k.py']
+
+# model setting
+model = dict(backbone=dict(arch='m'), )
diff --git a/configs/efficientnet_v2/efficientnetv2-s_8xb32_in1k-384px.py b/configs/efficientnet_v2/efficientnetv2-s_8xb32_in1k-384px.py
new file mode 100644
index 00000000000..2d9b8e4f7fb
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-s_8xb32_in1k-384px.py
@@ -0,0 +1,34 @@
+_base_ = [
+ '../_base_/models/efficientnet_v2/efficientnetv2_s.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+# dataset settings
+dataset_type = 'ImageNet'
+data_preprocessor = dict(
+ num_classes=1000,
+ # RGB format normalization parameters
+ mean=[127.5, 127.5, 127.5],
+ std=[127.5, 127.5, 127.5],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=300, crop_padding=0),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=384, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnetv2-s_8xb32_in21k.py b/configs/efficientnet_v2/efficientnetv2-s_8xb32_in21k.py
new file mode 100644
index 00000000000..e45369463ac
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-s_8xb32_in21k.py
@@ -0,0 +1,43 @@
+_base_ = [
+ '../_base_/models/efficientnet_v2/efficientnetv2_s.py',
+ '../_base_/datasets/imagenet_bs32.py',
+ '../_base_/schedules/imagenet_bs256.py',
+ '../_base_/default_runtime.py',
+]
+
+# model setting
+model = dict(head=dict(num_classes=21843))
+
+# dataset settings
+dataset_type = 'ImageNet21k'
+data_preprocessor = dict(
+ num_classes=21843,
+ # RGB format normalization parameters
+ mean=[127.5, 127.5, 127.5],
+ std=[127.5, 127.5, 127.5],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=224),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=224, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+
+# schedule setting
+optim_wrapper = dict(
+ optimizer=dict(lr=4e-3),
+ clip_grad=dict(max_norm=5.0),
+)
diff --git a/configs/efficientnet_v2/efficientnetv2-xl_8xb32_in1k-512px.py b/configs/efficientnet_v2/efficientnetv2-xl_8xb32_in1k-512px.py
new file mode 100644
index 00000000000..ea161aa655a
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-xl_8xb32_in1k-512px.py
@@ -0,0 +1,23 @@
+_base_ = [
+ 'efficientnetv2-s_8xb32_in1k-384px.py',
+]
+
+# model setting
+model = dict(backbone=dict(arch='xl'), )
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetRandomCrop', scale=384, crop_padding=0),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(type='EfficientNetCenterCrop', crop_size=512, crop_padding=0),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(dataset=dict(pipeline=train_pipeline))
+val_dataloader = dict(dataset=dict(pipeline=test_pipeline))
+test_dataloader = dict(dataset=dict(pipeline=test_pipeline))
diff --git a/configs/efficientnet_v2/efficientnetv2-xl_8xb32_in21k.py b/configs/efficientnet_v2/efficientnetv2-xl_8xb32_in21k.py
new file mode 100644
index 00000000000..e2ee84cb32f
--- /dev/null
+++ b/configs/efficientnet_v2/efficientnetv2-xl_8xb32_in21k.py
@@ -0,0 +1,4 @@
+_base_ = ['./efficientnetv2-s_8xb32_in21k.py']
+
+# model setting
+model = dict(backbone=dict(arch='xl'), )
diff --git a/configs/efficientnet_v2/metafile.yml b/configs/efficientnet_v2/metafile.yml
new file mode 100644
index 00000000000..cfbdd5f3e8c
--- /dev/null
+++ b/configs/efficientnet_v2/metafile.yml
@@ -0,0 +1,255 @@
+Collections:
+ - Name: EfficientNetV2
+ Metadata:
+ Training Data: ImageNet-1k
+ Architecture:
+ - 1x1 Convolution
+ - Average Pooling
+ - Convolution
+ - Dense Connections
+ - Dropout
+ - Inverted Residual Block
+ - RMSProp
+ - Squeeze-and-Excitation Block
+ - Swish
+ Paper:
+ URL: https://arxiv.org/abs/2104.00298
+ Title: "EfficientNetV2: Smaller Models and Faster Training"
+ README: configs/efficientnet_v2/README.md
+ Code:
+      URL: https://github.com/open-mmlab/mmclassification/blob/dev-1.x/mmcls/models/backbones/efficientnet_v2.py
+ Version: v1.0.0rc4
+
+Models:
+ - Name: efficientnetv2-b0_3rdparty_in1k
+ Metadata:
+ FLOPs: 919843360
+ Parameters: 7139704
+ In Collection: EfficientNetV2
+ Results:
+ - Dataset: ImageNet-1k
+ Metrics:
+ Top 1 Accuracy: 78.52
+ Top 5 Accuracy: 94.44
+ Task: Image Classification
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-b0_3rdparty_in1k_20221221-9ef6e736.pth
+ Config: configs/efficientnet_v2/efficientnetv2-b0_8xb32_in1k.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b0-c7cc451f.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-b1_3rdparty_in1k
+ Metadata:
+ FLOPs: 1438287552
+ Parameters: 8141052
+ In Collection: EfficientNetV2
+ Results:
+ - Dataset: ImageNet-1k
+ Metrics:
+ Top 1 Accuracy: 79.80
+ Top 5 Accuracy: 94.89
+ Task: Image Classification
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-b1_3rdparty_in1k_20221221-6955d9ce.pth
+ Config: configs/efficientnet_v2/efficientnetv2-b1_8xb32_in1k.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b1-be6e41b0.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-b2_3rdparty_in1k
+ Metadata:
+ FLOPs: 1986433080
+ Parameters: 10096086
+ In Collection: EfficientNetV2
+ Results:
+ - Dataset: ImageNet-1k
+ Metrics:
+ Top 1 Accuracy: 80.63
+ Top 5 Accuracy: 95.30
+ Task: Image Classification
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-b2_3rdparty_in1k_20221221-74f7d493.pth
+ Config: configs/efficientnet_v2/efficientnetv2-b2_8xb32_in1k.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b2-847de54e.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-b3_3rdparty_in1k
+ Metadata:
+ FLOPs: 3498068400
+ Parameters: 14358406
+ In Collection: EfficientNetV2
+ Results:
+ - Dataset: ImageNet-1k
+ Metrics:
+ Top 1 Accuracy: 82.03
+ Top 5 Accuracy: 95.88
+ Task: Image Classification
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-b3_3rdparty_in1k_20221221-b6f07a36.pth
+ Config: configs/efficientnet_v2/efficientnetv2-b3_8xb32_in1k.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b3-57773f13.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-s_3rdparty_in1k
+ Metadata:
+ FLOPs: 9719420928
+ Parameters: 21458488
+ In Collection: EfficientNetV2
+ Results:
+ - Dataset: ImageNet-1k
+ Metrics:
+ Top 1 Accuracy: 83.82
+ Top 5 Accuracy: 96.67
+ Task: Image Classification
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-s_3rdparty_in1k_20221220-f0eaff9d.pth
+ Config: configs/efficientnet_v2/efficientnetv2-s_8xb32_in1k-384px.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s-eb54923e.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-m_3rdparty_in1k
+ Metadata:
+ FLOPs: 26880363584
+ Parameters: 54139356
+ In Collection: EfficientNetV2
+ Results:
+ - Dataset: ImageNet-1k
+ Metrics:
+ Top 1 Accuracy: 85.01
+ Top 5 Accuracy: 97.26
+ Task: Image Classification
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-m_3rdparty_in1k_20221220-9dc0c729.pth
+ Config: configs/efficientnet_v2/efficientnetv2-m_8xb32_in1k-480px.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m-cc09e0cd.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-l_3rdparty_in1k
+ Metadata:
+ FLOPs: 60142387008
+ Parameters: 118515272
+ In Collection: EfficientNetV2
+ Results:
+ - Dataset: ImageNet-1k
+ Metrics:
+ Top 1 Accuracy: 85.43
+ Top 5 Accuracy: 97.31
+ Task: Image Classification
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-l_3rdparty_in1k_20221220-5c3bac0f.pth
+ Config: configs/efficientnet_v2/efficientnetv2-l_8xb32_in1k-480px.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l-d664b728.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-s_in21k-pre_3rdparty_in1k
+ Metadata:
+ Training Data:
+ - ImageNet-21k
+ - ImageNet-1k
+ FLOPs: 9719420928
+ Parameters: 21458488
+ In Collection: EfficientNetV2
+ Results:
+ - Dataset: ImageNet-1k
+ Metrics:
+ Top 1 Accuracy: 84.29
+ Top 5 Accuracy: 97.26
+ Task: Image Classification
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-s_in21k-pre-3rdparty_in1k_20221220-7a7c8475.pth
+ Config: configs/efficientnet_v2/efficientnetv2-s_8xb32_in1k-384px.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s_21ft1k-d7dafa41.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-m_in21k-pre_3rdparty_in1k
+ Metadata:
+ Training Data:
+ - ImageNet-21k
+ - ImageNet-1k
+ FLOPs: 26880363584
+ Parameters: 54139356
+ In Collection: EfficientNetV2
+ Results:
+ - Dataset: ImageNet-1k
+ Metrics:
+ Top 1 Accuracy: 85.47
+ Top 5 Accuracy: 97.76
+ Task: Image Classification
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-m_in21k-pre-3rdparty_in1k_20221220-a1013a04.pth
+ Config: configs/efficientnet_v2/efficientnetv2-m_8xb32_in1k-480px.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m_21ft1k-bf41664a.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-l_in21k-pre_3rdparty_in1k
+ Metadata:
+ Training Data:
+ - ImageNet-21k
+ - ImageNet-1k
+ FLOPs: 60142387008
+ Parameters: 118515272
+ In Collection: EfficientNetV2
+ Results:
+ - Dataset: ImageNet-1k
+ Metrics:
+ Top 1 Accuracy: 86.31
+ Top 5 Accuracy: 97.99
+ Task: Image Classification
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-l_in21k-pre-3rdparty_in1k_20221220-63df0efd.pth
+ Config: configs/efficientnet_v2/efficientnetv2-l_8xb32_in1k-480px.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l_21ft1k-60127a9d.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-xl_in21k-pre_3rdparty_in1k
+ Metadata:
+ Training Data:
+ - ImageNet-21k
+ - ImageNet-1k
+ FLOPs: 98341230592
+ Parameters: 208119808
+ In Collection: EfficientNetV2
+ Results:
+ - Dataset: ImageNet-1k
+ Metrics:
+ Top 1 Accuracy: 86.39
+ Top 5 Accuracy: 97.83
+ Task: Image Classification
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-xl_in21k-pre-3rdparty_in1k_20221220-583ac18b.pth
+ Config: configs/efficientnet_v2/efficientnetv2-xl_8xb32_in1k-512px.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_xl_in21ft1k-06c35c48.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-s_3rdparty_in21k
+ Metadata:
+ FLOPs: 3309720768
+ Parameters: 48158371
+ In Collection: EfficientNetV2
+ Results: null
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-s_3rdparty_in21k_20221220-c0572b56.pth
+ Config: configs/efficientnet_v2/efficientnetv2-s_8xb32_in21k.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s_21k-6337ad01.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-m_3rdparty_in21k
+ Metadata:
+ FLOPs: 5861638208
+ Parameters: 80839239
+ In Collection: EfficientNetV2
+ Results: null
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-m_3rdparty_in21k_20221220-073e944c.pth
+ Config: configs/efficientnet_v2/efficientnetv2-m_8xb32_in21k.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m_21k-361418a2.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-l_3rdparty_in21k
+ Metadata:
+ FLOPs: 13114950464
+ Parameters: 145215155
+ In Collection: EfficientNetV2
+ Results: null
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-l_3rdparty_in21k_20221220-f28f91e1.pth
+ Config: configs/efficientnet_v2/efficientnetv2-l_8xb32_in21k.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l_21k-91a19ec9.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
+ - Name: efficientnetv2-xl_3rdparty_in21k
+ Metadata:
+ FLOPs: 18855244288
+ Parameters: 234819691
+ In Collection: EfficientNetV2
+ Results: null
+ Weights: https://download.openmmlab.com/mmclassification/v0/efficientnetv2/efficientnetv2-xl_3rdparty_in21k_20221220-b2c9329c.pth
+ Config: configs/efficientnet_v2/efficientnetv2-xl_8xb32_in21k.py
+ Converted From:
+ Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_xl_in21k-fd7e8abf.pth
+ Code: https://github.com/rwightman/pytorch-image-models/blob/main/timm/models/efficientnet.py
diff --git a/docs/en/api/models.rst b/docs/en/api/models.rst
index 24ce3f77411..36befae92fe 100644
--- a/docs/en/api/models.rst
+++ b/docs/en/api/models.rst
@@ -73,6 +73,7 @@ Backbones
EdgeNeXt
EfficientFormer
EfficientNet
+ EfficientNetV2
HRNet
HorNet
InceptionV3
diff --git a/mmcls/models/backbones/__init__.py b/mmcls/models/backbones/__init__.py
index b583d988dfe..1e22bb67e2c 100644
--- a/mmcls/models/backbones/__init__.py
+++ b/mmcls/models/backbones/__init__.py
@@ -12,6 +12,7 @@
from .edgenext import EdgeNeXt
from .efficientformer import EfficientFormer
from .efficientnet import EfficientNet
+from .efficientnet_v2 import EfficientNetV2
from .hornet import HorNet
from .hrnet import HRNet
from .inception_v3 import InceptionV3
@@ -78,6 +79,7 @@
'PCPVT',
'SVT',
'EfficientNet',
+ 'EfficientNetV2',
'ConvNeXt',
'HRNet',
'ResNetV1c',
diff --git a/mmcls/models/backbones/efficientnet.py b/mmcls/models/backbones/efficientnet.py
index be0b08a218d..b7ea5a82b2f 100644
--- a/mmcls/models/backbones/efficientnet.py
+++ b/mmcls/models/backbones/efficientnet.py
@@ -69,7 +69,7 @@ def __init__(self,
in_channels=in_channels,
out_channels=mid_channels,
kernel_size=kernel_size,
- stride=1,
+ stride=stride,
padding=kernel_size // 2,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg,
@@ -82,9 +82,9 @@ def __init__(self,
in_channels=mid_channels,
out_channels=out_channels,
kernel_size=1,
- stride=stride,
+ stride=1,
padding=0,
- conv_cfg=conv_cfg,
+ conv_cfg=None,
norm_cfg=norm_cfg,
act_cfg=None)
diff --git a/mmcls/models/backbones/efficientnet_v2.py b/mmcls/models/backbones/efficientnet_v2.py
new file mode 100644
index 00000000000..84539c49deb
--- /dev/null
+++ b/mmcls/models/backbones/efficientnet_v2.py
@@ -0,0 +1,343 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from typing import Sequence, Tuple
+
+import torch
+import torch.nn as nn
+from mmcv.cnn.bricks import ConvModule, DropPath
+from mmengine.model import Sequential
+from torch import Tensor
+
+from mmcls.models.backbones.base_backbone import BaseBackbone
+from mmcls.models.backbones.efficientnet import EdgeResidual as FusedMBConv
+from mmcls.models.utils import InvertedResidual as MBConv
+from mmcls.registry import MODELS
+
+
+class EnhancedConvModule(ConvModule):
+ """ConvModule with short-cut and droppath.
+
+ Args:
+ in_channels (int): Number of channels in the input feature map.
+ Same as that in ``nn._ConvNd``.
+ out_channels (int): Number of channels produced by the convolution.
+ Same as that in ``nn._ConvNd``.
+ kernel_size (int | tuple[int]): Size of the convolving kernel.
+ Same as that in ``nn._ConvNd``.
+ stride (int | tuple[int]): Stride of the convolution.
+ Same as that in ``nn._ConvNd``.
+ has_skip (bool): Whether there is short-cut. Defaults to False.
+        drop_path_rate (float): Stochastic depth rate. Defaults to 0.0.
+ padding (int | tuple[int]): Zero-padding added to both sides of
+ the input. Same as that in ``nn._ConvNd``.
+ dilation (int | tuple[int]): Spacing between kernel elements.
+ Same as that in ``nn._ConvNd``.
+ groups (int): Number of blocked connections from input channels to
+ output channels. Same as that in ``nn._ConvNd``.
+ bias (bool | str): If specified as `auto`, it will be decided by the
+ norm_cfg. Bias will be set as True if `norm_cfg` is None, otherwise
+ False. Default: "auto".
+ conv_cfg (dict): Config dict for convolution layer. Default: None,
+ which means using conv2d.
+ norm_cfg (dict): Config dict for normalization layer. Default: None.
+ act_cfg (dict): Config dict for activation layer.
+ Default: dict(type='ReLU').
+ inplace (bool): Whether to use inplace mode for activation.
+ Default: True.
+ with_spectral_norm (bool): Whether use spectral norm in conv module.
+ Default: False.
+ padding_mode (str): If the `padding_mode` has not been supported by
+ current `Conv2d` in PyTorch, we will use our own padding layer
+ instead. Currently, we support ['zeros', 'circular'] with official
+ implementation and ['reflect'] with our own implementation.
+ Default: 'zeros'.
+ order (tuple[str]): The order of conv/norm/activation layers. It is a
+ sequence of "conv", "norm" and "act". Common examples are
+ ("conv", "norm", "act") and ("act", "conv", "norm").
+ Default: ('conv', 'norm', 'act').
+ """
+
+ def __init__(self, *args, has_skip=False, drop_path_rate=0, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.has_skip = has_skip
+ if self.has_skip and (self.in_channels != self.out_channels
+ or self.stride != (1, 1)):
+ raise ValueError('the stride must be 1 and the `in_channels` and'
+                             ' `out_channels` must be the same when '
+                             '`has_skip` is True in `EnhancedConvModule`.')
+ self.drop_path = DropPath(
+ drop_path_rate) if drop_path_rate else nn.Identity()
+
+ def forward(self, x: torch.Tensor, **kwargs) -> torch.Tensor:
+ short_cut = x
+ x = super().forward(x, **kwargs)
+ if self.has_skip:
+ x = self.drop_path(x) + short_cut
+ return x
+
+
[email protected]_module()
+class EfficientNetV2(BaseBackbone):
+ """EfficientNetV2 backbone.
+
+ A PyTorch implementation of EfficientNetV2 introduced by:
+ `EfficientNetV2: Smaller Models and Faster Training
+ <https://arxiv.org/abs/2104.00298>`_
+
+ Args:
+ arch (str): Architecture of efficientnetv2. Defaults to s.
+ in_channels (int): Number of input image channels. Defaults to 3.
+ drop_path_rate (float): The ratio of the stochastic depth.
+ Defaults to 0.0.
+ out_indices (Sequence[int]): Output from which stages.
+ Defaults to (-1, ).
+ frozen_stages (int): Stages to be frozen (all param fixed).
+ Defaults to 0, which means not freezing any parameters.
+ conv_cfg (dict): Config dict for convolution layer.
+ Defaults to None, which means using conv2d.
+ norm_cfg (dict): Config dict for normalization layer.
+ Defaults to dict(type='BN').
+ act_cfg (dict): Config dict for activation layer.
+ Defaults to dict(type='Swish').
+ norm_eval (bool): Whether to set norm layers to eval mode, namely,
+ freeze running stats (mean and var). Note: Effect on Batch Norm
+ and its variants only. Defaults to False.
+ with_cp (bool): Use checkpoint or not. Using checkpoint will save some
+ memory while slowing down the training speed. Defaults to False.
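+
+    Example:
+        >>> import torch
+        >>> from mmcls.models import EfficientNetV2
+        >>> # The shapes below follow from the 'b0' setting above: the stem
+        >>> # and four of the stages downsample by 2, so 224px inputs give
+        >>> # 7x7 feature maps with 1280 channels from the last conv layer.
+        >>> model = EfficientNetV2(arch='b0')
+        >>> inputs = torch.rand(1, 3, 224, 224)
+        >>> outputs = model(inputs)
+        >>> print(outputs[-1].shape)
+        torch.Size([1, 1280, 7, 7])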
+ """
+
+ # Parameters to build layers. From left to right:
+ # - repeat (int): The repeat number of the block in the layer
+ # - kernel_size (int): The kernel size of the layer
+ # - stride (int): The stride of the first block of the layer
+ # - expand_ratio (int, float): The expand_ratio of the mid_channels
+ # - in_channel (int): The number of in_channels of the layer
+ # - out_channel (int): The number of out_channels of the layer
+    # - se_ratio (float): The squeeze ratio of SELayer.
+ # - block_type (int): -2: ConvModule, -1: EnhancedConvModule,
+ # 0: FusedMBConv, 1: MBConv
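+    # For example, [4, 3, 2, 4, 24, 48, 0.0, 0] in the 's' setting builds a
+    # stage of 4 FusedMBConv blocks with 3x3 kernels, stride 2 in the first
+    # block, an expand_ratio of 4, 24 -> 48 channels and no SE layer.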
+ arch_settings = {
+ **dict.fromkeys(['small', 's'], [[2, 3, 1, 1, 24, 24, 0.0, -1],
+ [4, 3, 2, 4, 24, 48, 0.0, 0],
+ [4, 3, 2, 4, 48, 64, 0.0, 0],
+ [6, 3, 2, 4, 64, 128, 0.25, 1],
+ [9, 3, 1, 6, 128, 160, 0.25, 1],
+ [15, 3, 2, 6, 160, 256, 0.25, 1],
+ [1, 1, 1, 1, 256, 1280, 0.0, -2]]),
+ **dict.fromkeys(['m', 'medium'], [[3, 3, 1, 1, 24, 24, 0.0, -1],
+ [5, 3, 2, 4, 24, 48, 0.0, 0],
+ [5, 3, 2, 4, 48, 80, 0.0, 0],
+ [7, 3, 2, 4, 80, 160, 0.25, 1],
+ [14, 3, 1, 6, 160, 176, 0.25, 1],
+ [18, 3, 2, 6, 176, 304, 0.25, 1],
+ [5, 3, 1, 6, 304, 512, 0.25, 1],
+ [1, 1, 1, 1, 512, 1280, 0.0, -2]]),
+ **dict.fromkeys(['l', 'large'], [[4, 3, 1, 1, 32, 32, 0.0, -1],
+ [7, 3, 2, 4, 32, 64, 0.0, 0],
+ [7, 3, 2, 4, 64, 96, 0.0, 0],
+ [10, 3, 2, 4, 96, 192, 0.25, 1],
+ [19, 3, 1, 6, 192, 224, 0.25, 1],
+ [25, 3, 2, 6, 224, 384, 0.25, 1],
+ [7, 3, 1, 6, 384, 640, 0.25, 1],
+ [1, 1, 1, 1, 640, 1280, 0.0, -2]]),
+ **dict.fromkeys(['xl'], [[4, 3, 1, 1, 32, 32, 0.0, -1],
+ [8, 3, 2, 4, 32, 64, 0.0, 0],
+ [8, 3, 2, 4, 64, 96, 0.0, 0],
+ [16, 3, 2, 4, 96, 192, 0.25, 1],
+ [24, 3, 1, 6, 192, 256, 0.25, 1],
+ [32, 3, 2, 6, 256, 512, 0.25, 1],
+ [8, 3, 1, 6, 512, 640, 0.25, 1],
+ [1, 1, 1, 1, 640, 1280, 0.0, -2]]),
+ **dict.fromkeys(['b0'], [[1, 3, 1, 1, 32, 16, 0.0, -1],
+ [2, 3, 2, 4, 16, 32, 0.0, 0],
+ [2, 3, 2, 4, 32, 48, 0.0, 0],
+ [3, 3, 2, 4, 48, 96, 0.25, 1],
+ [5, 3, 1, 6, 96, 112, 0.25, 1],
+ [8, 3, 2, 6, 112, 192, 0.25, 1],
+ [1, 1, 1, 1, 192, 1280, 0.0, -2]]),
+ **dict.fromkeys(['b1'], [[2, 3, 1, 1, 32, 16, 0.0, -1],
+ [3, 3, 2, 4, 16, 32, 0.0, 0],
+ [3, 3, 2, 4, 32, 48, 0.0, 0],
+ [4, 3, 2, 4, 48, 96, 0.25, 1],
+ [6, 3, 1, 6, 96, 112, 0.25, 1],
+ [9, 3, 2, 6, 112, 192, 0.25, 1],
+ [1, 1, 1, 1, 192, 1280, 0.0, -2]]),
+ **dict.fromkeys(['b2'], [[2, 3, 1, 1, 32, 16, 0.0, -1],
+ [3, 3, 2, 4, 16, 32, 0.0, 0],
+ [3, 3, 2, 4, 32, 56, 0.0, 0],
+ [4, 3, 2, 4, 56, 104, 0.25, 1],
+ [6, 3, 1, 6, 104, 120, 0.25, 1],
+ [10, 3, 2, 6, 120, 208, 0.25, 1],
+ [1, 1, 1, 1, 208, 1408, 0.0, -2]]),
+ **dict.fromkeys(['b3'], [[2, 3, 1, 1, 40, 16, 0.0, -1],
+ [3, 3, 2, 4, 16, 40, 0.0, 0],
+ [3, 3, 2, 4, 40, 56, 0.0, 0],
+ [5, 3, 2, 4, 56, 112, 0.25, 1],
+ [7, 3, 1, 6, 112, 136, 0.25, 1],
+ [12, 3, 2, 6, 136, 232, 0.25, 1],
+ [1, 1, 1, 1, 232, 1536, 0.0, -2]])
+ }
+
+ def __init__(self,
+ arch: str = 's',
+ in_channels: int = 3,
+ drop_path_rate: float = 0.,
+ out_indices: Sequence[int] = (-1, ),
+ frozen_stages: int = 0,
+ conv_cfg=dict(type='Conv2dAdaptivePadding'),
+ norm_cfg=dict(type='BN', eps=1e-3, momentum=0.1),
+ act_cfg=dict(type='Swish'),
+ norm_eval: bool = False,
+ with_cp: bool = False,
+ init_cfg=[
+ dict(type='Kaiming', layer='Conv2d'),
+ dict(
+ type='Constant',
+ layer=['_BatchNorm', 'GroupNorm'],
+ val=1)
+ ]):
+ super(EfficientNetV2, self).__init__(init_cfg)
+ assert arch in self.arch_settings, \
+ f'"{arch}" is not one of the arch_settings ' \
+ f'({", ".join(self.arch_settings.keys())})'
+ self.arch = self.arch_settings[arch]
+ if frozen_stages not in range(len(self.arch) + 1):
+            raise ValueError('frozen_stages must be in range(0, '
+                             f'{len(self.arch) + 1}), but got {frozen_stages}')
+ self.drop_path_rate = drop_path_rate
+ self.frozen_stages = frozen_stages
+ self.norm_eval = norm_eval
+ self.with_cp = with_cp
+
+ self.layers = nn.ModuleList()
+ assert self.arch[-1][-1] == -2, \
+            f'the last block_type of `arch_setting` must be -2, ' \
+            f'but got `{self.arch[-1][-1]}`'
+ self.in_channels = in_channels
+ self.out_channels = self.arch[-1][5]
+ self.conv_cfg = conv_cfg
+ self.norm_cfg = norm_cfg
+ self.act_cfg = act_cfg
+
+ self.make_layers()
+
+        # there are len(self.arch) + 1 layers in the backbone:
+        # the stem, len(self.arch) - 1 stages and the last conv layer
+ if isinstance(out_indices, int):
+ out_indices = [out_indices]
+ assert isinstance(out_indices, Sequence), \
+ f'"out_indices" must by a sequence or int, ' \
+ f'get {type(out_indices)} instead.'
+ out_indices = list(out_indices)
+ for i, index in enumerate(out_indices):
+ if index < 0:
+ out_indices[i] = len(self.layers) + index
+            assert 0 <= out_indices[i] < len(self.layers), \
+ f'Invalid out_indices {index}.'
+ self.out_indices = out_indices
+
+    def make_layers(self):
+ # make the first layer
+ self.layers.append(
+ ConvModule(
+ in_channels=self.in_channels,
+ out_channels=self.arch[0][4],
+ kernel_size=3,
+ stride=2,
+ conv_cfg=self.conv_cfg,
+ norm_cfg=self.norm_cfg,
+ act_cfg=self.act_cfg))
+
+ in_channels = self.arch[0][4]
+ layer_setting = self.arch[:-1]
+
+ total_num_blocks = sum([x[0] for x in layer_setting])
+ block_idx = 0
+ dpr = [
+ x.item()
+ for x in torch.linspace(0, self.drop_path_rate, total_num_blocks)
+ ] # stochastic depth decay rule
+
+ for layer_cfg in layer_setting:
+ layer = []
+ (repeat, kernel_size, stride, expand_ratio, _, out_channels,
+ se_ratio, block_type) = layer_cfg
+ for i in range(repeat):
+ stride = stride if i == 0 else 1
+ if block_type == -1:
+ has_skip = stride == 1 and in_channels == out_channels
+ droppath_rate = dpr[block_idx] if has_skip else 0.0
+ layer.append(
+ EnhancedConvModule(
+ in_channels=in_channels,
+ out_channels=out_channels,
+ kernel_size=kernel_size,
+ has_skip=has_skip,
+ drop_path_rate=droppath_rate,
+ stride=stride,
+ padding=1,
+ conv_cfg=None,
+ norm_cfg=self.norm_cfg,
+ act_cfg=self.act_cfg))
+ in_channels = out_channels
+ else:
+ mid_channels = int(in_channels * expand_ratio)
+ se_cfg = None
+ if block_type != 0 and se_ratio > 0:
+ se_cfg = dict(
+ channels=mid_channels,
+ ratio=expand_ratio * (1.0 / se_ratio),
+ divisor=1,
+ act_cfg=(self.act_cfg, dict(type='Sigmoid')))
+ block = FusedMBConv if block_type == 0 else MBConv
+ conv_cfg = self.conv_cfg if stride == 2 else None
+ layer.append(
+ block(
+ in_channels=in_channels,
+ out_channels=out_channels,
+ mid_channels=mid_channels,
+ kernel_size=kernel_size,
+ stride=stride,
+ se_cfg=se_cfg,
+ conv_cfg=conv_cfg,
+ norm_cfg=self.norm_cfg,
+ act_cfg=self.act_cfg,
+ drop_path_rate=dpr[block_idx],
+ with_cp=self.with_cp))
+ in_channels = out_channels
+ block_idx += 1
+ self.layers.append(Sequential(*layer))
+
+ # make the last layer
+ self.layers.append(
+ ConvModule(
+ in_channels=in_channels,
+ out_channels=self.out_channels,
+ kernel_size=self.arch[-1][1],
+ stride=self.arch[-1][2],
+ conv_cfg=self.conv_cfg,
+ norm_cfg=self.norm_cfg,
+ act_cfg=self.act_cfg))
+
+ def forward(self, x: Tensor) -> Tuple[Tensor]:
+ outs = []
+ for i, layer in enumerate(self.layers):
+ x = layer(x)
+ if i in self.out_indices:
+ outs.append(x)
+
+ return tuple(outs)
+
+ def _freeze_stages(self):
+ for i in range(self.frozen_stages):
+ m = self.layers[i]
+ m.eval()
+ for param in m.parameters():
+ param.requires_grad = False
+
+ def train(self, mode=True):
+ super(EfficientNetV2, self).train(mode)
+ self._freeze_stages()
+ if mode and self.norm_eval:
+ for m in self.modules():
+ if isinstance(m, nn.BatchNorm2d):
+ m.eval()
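
A quick smoke test can make the layer construction above concrete. The sketch below is illustrative only: it assumes the diff is applied and that `EfficientNetV2` is exported from `mmcls.models.backbones` (the export is not shown in this hunk).

```python
# Smoke-test sketch for the backbone above (assumes this diff is applied and
# the class is exported from mmcls.models.backbones, which this hunk omits).
import torch
from mmcls.models.backbones import EfficientNetV2

model = EfficientNetV2(arch='s', out_indices=(-1, ))
model.eval()
with torch.no_grad():
    outs = model(torch.randn(1, 3, 224, 224))
# One tensor per entry in out_indices; here the final 1x1-conv feature map.
print(tuple(o.shape for o in outs))
```
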
diff --git a/model-index.yml b/model-index.yml
index a761ab8a225..f248a852e0e 100644
--- a/model-index.yml
+++ b/model-index.yml
@@ -46,3 +46,4 @@ Import:
- configs/eva/metafile.yml
- configs/revvit/metafile.yml
- configs/mixmim/metafile.yml
+ - configs/efficientnet_v2/metafile.yml
diff --git a/tools/model_converters/efficientnetv2_to_mmcls.py b/tools/model_converters/efficientnetv2_to_mmcls.py
new file mode 100644
index 00000000000..b6ae4ec1c8f
--- /dev/null
+++ b/tools/model_converters/efficientnetv2_to_mmcls.py
@@ -0,0 +1,99 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+"""Convert the weights of EfficientNetV2 in
+timm (https://github.com/rwightman/pytorch-image-models) to the mmcls format."""
+import argparse
+import os.path as osp
+
+import mmengine
+import torch
+from mmengine.runner import CheckpointLoader
+
+
+def convert_from_efficientnetv2_timm(param):
+ # compute the index of the head conv layer from the last block's stage id
+ param_lst = list(param.keys())
+ op = str(int(param_lst[-9][7]) + 2)
+ new_key = dict()
+ for name in param_lst:
+ data = param[name]
+ if 'blocks' not in name:
+ if 'conv_stem' in name:
+ name = name.replace('conv_stem', 'backbone.layers.0.conv')
+ if 'bn1' in name:
+ name = name.replace('bn1', 'backbone.layers.0.bn')
+ if 'conv_head' in name:
+ # for efficientnet-v2 s/base/b1/b2/b3, op = 7;
+ # for m/l/xl, op = 8
+ name = name.replace('conv_head', f'backbone.layers.{op}.conv')
+ if 'bn2' in name:
+ name = name.replace('bn2', f'backbone.layers.{op}.bn')
+ if 'classifier' in name:
+ name = name.replace('classifier', 'head.fc')
+ else:
+ operator = int(name[7])
+ if operator == 0:
+ name = name[:7] + str(operator + 1) + name[8:]
+ name = name.replace('blocks', 'backbone.layers')
+ # timm's 'conv' key already matches the mmcls submodule name
+ if 'bn1' in name:
+ name = name.replace('bn1', 'bn')
+ elif operator < 3:
+ name = name[:7] + str(operator + 1) + name[8:]
+ name = name.replace('blocks', 'backbone.layers')
+ if 'conv_exp' in name:
+ name = name.replace('conv_exp', 'conv1.conv')
+ if 'conv_pwl' in name:
+ name = name.replace('conv_pwl', 'conv2.conv')
+ if 'bn1' in name:
+ name = name.replace('bn1', 'conv1.bn')
+ if 'bn2' in name:
+ name = name.replace('bn2', 'conv2.bn')
+ else:
+ name = name[:7] + str(operator + 1) + name[8:]
+ name = name.replace('blocks', 'backbone.layers')
+ if 'conv_pwl' in name:
+ name = name.replace('conv_pwl', 'linear_conv.conv')
+ if 'conv_pw' in name:
+ name = name.replace('conv_pw', 'expand_conv.conv')
+ if 'conv_dw' in name:
+ name = name.replace('conv_dw', 'depthwise_conv.conv')
+ if 'bn1' in name:
+ name = name.replace('bn1', 'expand_conv.bn')
+ if 'bn2' in name:
+ name = name.replace('bn2', 'depthwise_conv.bn')
+ if 'bn3' in name:
+ name = name.replace('bn3', 'linear_conv.bn')
+ if 'se.conv_reduce' in name:
+ name = name.replace('se.conv_reduce', 'se.conv1.conv')
+ if 'se.conv_expand' in name:
+ name = name.replace('se.conv_expand', 'se.conv2.conv')
+ new_key[name] = data
+ return new_key
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='Convert pretrained efficientnetv2 '
+ 'models in timm to mmcls style.')
+ parser.add_argument('src', help='src model path or url')
+ # The dst path must be a full path of the new checkpoint.
+ parser.add_argument('dst', help='save path')
+ args = parser.parse_args()
+
+ checkpoint = CheckpointLoader.load_checkpoint(args.src, map_location='cpu')
+
+ if 'state_dict' in checkpoint:
+ state_dict = checkpoint['state_dict']
+ else:
+ state_dict = checkpoint
+
+ weight = convert_from_efficientnetv2_timm(state_dict)
+ mmengine.mkdir_or_exist(osp.dirname(args.dst))
+ torch.save(weight, args.dst)
+
+ print('Done!!')
+
+
+if __name__ == '__main__':
+ main()
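
To make the renaming rules in `convert_from_efficientnetv2_timm` concrete, here are a few hypothetical timm parameter names together with the mmcls names the non-`blocks` branches above map them to (illustration only; these literals are not part of the script):

```python
# Illustrative key mappings for the non-`blocks` branches above
# (hypothetical parameter names; not executed by the script itself).
examples = {
    'conv_stem.weight': 'backbone.layers.0.conv.weight',
    'bn1.running_mean': 'backbone.layers.0.bn.running_mean',
    'conv_head.weight': 'backbone.layers.7.conv.weight',  # op = 7 for 's'
    'classifier.weight': 'head.fc.weight',
}
```
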
| Testing MobileNetV2 on ImageNet (val) gives an accuracy of only 0.02
Hi, I used the commands below to test on the ImageNet validation set, but I cannot reproduce the official results.
Attempt 1: run tools/test.py in PyCharm with the arguments configs/imagenet/mobilenet_v2_b32x8.py checkpoint/mobilenet_v2_batch256_20200708-3b2dc3af.pth
Attempt 2:
python tools/test.py configs/imagenet/mobilenet_v2_b32x8.py checkpoint/mobilenet_v2_batch256_20200708-3b2dc3af.pth
Attempt 3:
bash ./tools/dist_test.sh configs/imagenet/mobilenet_v2_b32x8.py checkpoint/mobilenet_v2_batch256_20200708-3b2dc3af.pth 1
All of them give an accuracy of 0.02, which is far from the official results.
[Feature] Support EfficientNetV2
### Describe the feature
paper: https://arxiv.org/abs/2104.00298
code: https://github.com/google/automl/tree/master/efficientnetv2
### Will you implement it?
- [ ] I would like to implement this feature and create a PR!
| Hello @SuanTangYu, thanks for reporting the issue. However, I have tested the model downloaded from the model zoo using the second and third commands you provided, and all the test results are consistent with the accuracies reported in https://github.com/open-mmlab/mmclassification/blob/master/docs/model_zoo.md.
I'm afraid your issue is caused by other reasons. Are there any errors or warnings when you test the models?
What changes did you make to the code?
Hi, I have found the cause: the labels of my test dataset did not match the images. After downloading a new val.txt everything works fine. Sorry for the trouble.
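For anyone debugging the same symptom, a quick sanity check of the annotation file catches such label mismatches early. This sketch assumes the conventional mmcls ImageNet layout (`data/imagenet/meta/val.txt`, one `filename label` pair per line):

```python
# Sanity-check sketch for an ImageNet val annotation file. The path and the
# "filename label" line format are assumptions based on the usual layout.
with open('data/imagenet/meta/val.txt') as f:
    pairs = [line.split() for line in f if line.strip()]
labels = [int(label) for _, label in pairs]
print(len(pairs), min(labels), max(labels))  # expect 50000 0 999
```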
Nice feature! PR is welcomed. | 2022-12-09T02:30:13 | 0.0 | [] | [] |
||
open-mmlab/mmpretrain | open-mmlab__mmpretrain-1181 | 743ca2d602631856a971510090c386712d0eac32 | diff --git a/docs/en/api/engine.rst b/docs/en/api/engine.rst
index a85760aa57f..d1fa82bde95 100644
--- a/docs/en/api/engine.rst
+++ b/docs/en/api/engine.rst
@@ -31,7 +31,8 @@ Hooks
ClassNumCheckHook
PreciseBNHook
VisualizationHook
- SwitchRecipeHook
+ PrepareProtoBeforeValLoopHook
+ SetAdaptiveMarginsHook
.. module:: mmcls.engine.optimizers
diff --git a/docs/en/api/models.rst b/docs/en/api/models.rst
index 2894f630926..8b0bfab2f3f 100644
--- a/docs/en/api/models.rst
+++ b/docs/en/api/models.rst
@@ -140,6 +140,7 @@ Heads
EfficientFormerClsHead
DeiTClsHead
ConformerHead
+ ArcFaceClsHead
MultiLabelClsHead
MultiLabelLinearClsHead
CSRAClsHead
diff --git a/mmcls/engine/hooks/__init__.py b/mmcls/engine/hooks/__init__.py
index 29d73fb462a..54343b7af19 100644
--- a/mmcls/engine/hooks/__init__.py
+++ b/mmcls/engine/hooks/__init__.py
@@ -1,5 +1,6 @@
# Copyright (c) OpenMMLab. All rights reserved.
from .class_num_check_hook import ClassNumCheckHook
+from .margin_head_hooks import SetAdaptiveMarginsHook
from .precise_bn_hook import PreciseBNHook
from .retriever_hooks import PrepareProtoBeforeValLoopHook
from .switch_recipe_hook import SwitchRecipeHook
@@ -7,5 +8,6 @@
__all__ = [
'ClassNumCheckHook', 'PreciseBNHook', 'VisualizationHook',
- 'SwitchRecipeHook', 'PrepareProtoBeforeValLoopHook'
+ 'SwitchRecipeHook', 'PrepareProtoBeforeValLoopHook',
+ 'SetAdaptiveMarginsHook'
]
diff --git a/mmcls/engine/hooks/margin_head_hooks.py b/mmcls/engine/hooks/margin_head_hooks.py
new file mode 100644
index 00000000000..7ca878433d2
--- /dev/null
+++ b/mmcls/engine/hooks/margin_head_hooks.py
@@ -0,0 +1,61 @@
+# Copyright (c) OpenMMLab. All rights reserved
+import numpy as np
+from mmengine.hooks import Hook
+from mmengine.model import is_model_wrapper
+
+from mmcls.models.heads import ArcFaceClsHead
+from mmcls.registry import HOOKS
+
+
[email protected]_module()
+class SetAdaptiveMarginsHook(Hook):
+ r"""Set adaptive-margins in ArcFaceClsHead based on the power of
+ category-wise count.
+
+ A PyTorch implementation of paper `Google Landmark Recognition 2020
+ Competition Third Place Solution <https://arxiv.org/abs/2010.05350>`_.
+ The margins will be
+ :math:`f(n) = (\text{margin}_{\max} - \text{margin}_{\min}) \cdot \text{norm}(n^p) + \text{margin}_{\min}`.
+ The `n` indicates the number of occurrences of a category.
+
+ Args:
+ margin_min (float): Lower bound of margins. Defaults to 0.05.
+ margin_max (float): Upper bound of margins. Defaults to 0.5.
+ power (float): The power applied to the category frequency. Defaults to -0.25.
+ """
+
+ def __init__(self, margin_min=0.05, margin_max=0.5, power=-0.25) -> None:
+ self.margin_min = margin_min
+ self.margin_max = margin_max
+ self.margin_range = margin_max - margin_min
+ self.p = power
+
+ def before_train(self, runner):
+ """change the margins in ArcFaceClsHead.
+
+ Args:
+ runner (obj: `Runner`): Runner.
+ """
+ model = runner.model
+ if is_model_wrapper(model):
+ model = model.module
+
+ if (hasattr(model, 'head')
+ and not isinstance(model.head, ArcFaceClsHead)):
+ raise ValueError(
+ 'Hook ``SetAdaptiveMarginsHook`` can only be used '
+ f'with ``ArcFaceClsHead``, but got {type(model.head)}')
+
+ # generate margins base on the dataset.
+ gt_labels = runner.train_dataloader.dataset.get_gt_labels()
+ label_count = np.bincount(gt_labels)
+ label_count[label_count == 0] = 1 # At least one occurrence
+ pow_freq = np.power(label_count, self.p)
+
+ min_f, max_f = pow_freq.min(), pow_freq.max()
+ normalized_pow_freq = (pow_freq - min_f) / (max_f - min_f)
+ margins = normalized_pow_freq * self.margin_range + self.margin_min
+
+ assert len(margins) == model.head.num_classes
+
+ model.head.set_margins(margins)
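
The margin rule in `before_train` above can be checked in isolation; a standalone sketch with hypothetical class counts (frequent classes land near `margin_min`, rare ones near `margin_max`):

```python
# Standalone sketch of the adaptive-margin computation above, using
# hypothetical per-class sample counts.
import numpy as np

margin_min, margin_max, power = 0.05, 0.5, -0.25
label_count = np.array([1000, 100, 10, 1], dtype=float)
pow_freq = np.power(label_count, power)
norm = (pow_freq - pow_freq.min()) / (pow_freq.max() - pow_freq.min())
margins = norm * (margin_max - margin_min) + margin_min
print(margins.round(3))  # most frequent class -> 0.05, rarest -> 0.5
```
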
diff --git a/mmcls/models/backbones/hornet.py b/mmcls/models/backbones/hornet.py
index aa98aa0a79b..e6d107045f5 100644
--- a/mmcls/models/backbones/hornet.py
+++ b/mmcls/models/backbones/hornet.py
@@ -250,13 +250,16 @@ def forward(self, x):
@MODELS.register_module()
class HorNet(BaseBackbone):
- """HorNet
- A PyTorch impl of : `HorNet: Efficient High-Order Spatial Interactions
- with Recursive Gated Convolutions`
- Inspiration from
- https://github.com/raoyongming/HorNet
+ """HorNet.
+
+ A PyTorch implementation of paper `HorNet: Efficient High-Order Spatial
+ Interactions with Recursive Gated Convolutions
+ <https://arxiv.org/abs/2207.14284>`_ .
+ Inspiration from https://github.com/raoyongming/HorNet
+
Args:
arch (str | dict): HorNet architecture.
+
If use string, choose from 'tiny', 'small', 'base' and 'large'.
If use dict, it should have below keys:
- **base_dim** (int): The base dimensions of embedding.
@@ -264,6 +267,7 @@ class HorNet(BaseBackbone):
- **orders** (List[int]): The number of order of gnConv in each
stage.
- **dw_cfg** (List[dict]): The Config for dw conv.
+
Defaults to 'tiny'.
in_channels (int): Number of input image channels. Defaults to 3.
drop_path_rate (float): Stochastic depth rate. Defaults to 0.
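
For illustration, the dict form of `arch` described above could look like the sketch below; the `dw_cfg` entries are an assumption here, so consult the built-in `arch_settings` in this file for the exact schema:

```python
# Hypothetical custom HorNet arch in the dict form documented above.
# The dw_cfg schema is an assumption for illustration purposes.
arch = dict(
    base_dim=64,
    depths=[2, 3, 18, 2],
    orders=[2, 3, 4, 5],
    dw_cfg=[dict(type='DW', kernel_size=7)] * 4,
)
```
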
diff --git a/mmcls/models/heads/__init__.py b/mmcls/models/heads/__init__.py
index 104f1c53588..3e359d37227 100644
--- a/mmcls/models/heads/__init__.py
+++ b/mmcls/models/heads/__init__.py
@@ -1,10 +1,10 @@
# Copyright (c) OpenMMLab. All rights reserved.
-from .arcface_head import ArcFaceClsHead
from .cls_head import ClsHead
from .conformer_head import ConformerHead
from .deit_head import DeiTClsHead
from .efficientformer_head import EfficientFormerClsHead
from .linear_head import LinearClsHead
+from .margin_head import ArcFaceClsHead
from .multi_label_cls_head import MultiLabelClsHead
from .multi_label_csra_head import CSRAClsHead
from .multi_label_linear_head import MultiLabelLinearClsHead
diff --git a/mmcls/models/heads/arcface_head.py b/mmcls/models/heads/arcface_head.py
deleted file mode 100644
index 23cb3d23024..00000000000
--- a/mmcls/models/heads/arcface_head.py
+++ /dev/null
@@ -1,176 +0,0 @@
-# Copyright (c) OpenMMLab. All rights reserved.
-import math
-from typing import List, Optional, Tuple
-
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-
-from mmcls.registry import MODELS
-from mmcls.structures import ClsDataSample
-from .cls_head import ClsHead
-
-
-class NormLinear(nn.Linear):
- """An enhanced linear layer, which could normalize the input and the linear
- weight.
-
- Args:
- in_features (int): size of each input sample.
- out_features (int): size of each output sample
- bias (bool): Whether there is bias. If set to ``False``, the
- layer will not learn an additive bias. Defaults to ``True``.
- feature_norm (bool): Whether to normalize the input feature.
- Defaults to ``True``.
- weight_norm (bool):Whether to normalize the weight.
- Defaults to ``True``.
- """
-
- def __init__(self,
- in_features: int,
- out_features: int,
- bias: bool = False,
- feature_norm: bool = True,
- weight_norm: bool = True):
-
- super().__init__(in_features, out_features, bias=bias)
- self.weight_norm = weight_norm
- self.feature_norm = feature_norm
-
- def forward(self, input: torch.Tensor) -> torch.Tensor:
- if self.feature_norm:
- input = F.normalize(input)
- if self.weight_norm:
- weight = F.normalize(self.weight)
- else:
- weight = self.weight
- return F.linear(input, weight, self.bias)
-
-
[email protected]_module()
-class ArcFaceClsHead(ClsHead):
- """ArcFace classifier head.
-
- Args:
- num_classes (int): Number of categories excluding the background
- category.
- in_channels (int): Number of channels in the input feature map.
- s (float): Norm of input feature. Defaults to 30.0.
- m (float): Margin. Defaults to 0.5.
- easy_margin (bool): Avoid theta + m >= PI. Defaults to False.
- ls_eps (float): Label smoothing. Defaults to 0.
- bias (bool): Whether to use bias in norm layer. Defaults to False.
- loss (dict): Config of classification loss. Defaults to
- ``dict(type='CrossEntropyLoss', loss_weight=1.0)``.
- init_cfg (dict, optional): the config to control the initialization.
- Defaults to None.
- """
-
- def __init__(self,
- num_classes: int,
- in_channels: int,
- s: float = 30.0,
- m: float = 0.50,
- easy_margin: bool = False,
- ls_eps: float = 0.0,
- bias: bool = False,
- loss: dict = dict(type='CrossEntropyLoss', loss_weight=1.0),
- init_cfg: Optional[dict] = None):
-
- super(ArcFaceClsHead, self).__init__(init_cfg=init_cfg)
- self.loss_module = MODELS.build(loss)
-
- self.in_channels = in_channels
- self.num_classes = num_classes
-
- if self.num_classes <= 0:
- raise ValueError(
- f'num_classes={num_classes} must be a positive integer')
-
- self.s = s
- self.m = m
- self.ls_eps = ls_eps
-
- self.norm_linear = NormLinear(in_channels, num_classes, bias=bias)
-
- self.easy_margin = easy_margin
- self.th = math.cos(math.pi - m)
- self.mm = math.sin(math.pi - m) * m
-
- def pre_logits(self, feats: Tuple[torch.Tensor]) -> torch.Tensor:
- """The process before the final classification head.
-
- The input ``feats`` is a tuple of tensor, and each tensor is the
- feature of a backbone stage. In ``ArcFaceHead``, we just obtain the
- feature of the last stage.
- """
- # The ArcFaceHead doesn't have other module, just return after
- # unpacking.
- return feats[-1]
-
- def forward(self,
- feats: Tuple[torch.Tensor],
- target: Optional[torch.Tensor] = None) -> torch.Tensor:
- """The forward process."""
-
- pre_logits = self.pre_logits(feats)
-
- # cos=(a*b)/(||a||*||b||)
- cosine = self.norm_linear(pre_logits)
-
- if target is None:
- return self.s * cosine
-
- phi = torch.cos(torch.acos(cosine) + self.m)
-
- if self.easy_margin:
- # when cosine>0, choose phi
- # when cosine<=0, choose cosine
- phi = torch.where(cosine > 0, phi, cosine)
- else:
- # when cos>th, choose phi
- # when cos<=th, choose cosine-mm
- phi = torch.where(cosine > self.th, phi, cosine - self.mm)
-
- one_hot = torch.zeros(cosine.size(), device=pre_logits.device)
- one_hot.scatter_(1, target.view(-1, 1).long(), 1)
- if self.ls_eps > 0:
- one_hot = (1 -
- self.ls_eps) * one_hot + self.ls_eps / self.num_classes
-
- output = (one_hot * phi) + ((1.0 - one_hot) * cosine)
- return output * self.s
-
- def loss(self, feats: Tuple[torch.Tensor],
- data_samples: List[ClsDataSample], **kwargs) -> dict:
- """Calculate losses from the classification score.
-
- Args:
- feats (tuple[Tensor]): The features extracted from the backbone.
- Multiple stage inputs are acceptable but only the last stage
- will be used to classify. The shape of every item should be
- ``(num_samples, num_classes)``.
- data_samples (List[ClsDataSample]): The annotation data of
- every samples.
- **kwargs: Other keyword arguments to forward the loss module.
-
- Returns:
- dict[str, Tensor]: a dictionary of loss components
- """
-
- if 'score' in data_samples[0].gt_label:
- # Batch augmentation may convert labels to one-hot format scores.
- target = torch.stack([i.gt_label.score for i in data_samples])
- else:
- target = torch.cat([i.gt_label.label for i in data_samples])
-
- # The part can be traced by torch.fx
- cls_score = self(feats, target)
-
- # compute loss
- losses = dict()
- loss = self.loss_module(
- cls_score, target, avg_factor=cls_score.size(0), **kwargs)
- losses['loss'] = loss
-
- return losses
diff --git a/mmcls/models/heads/margin_head.py b/mmcls/models/heads/margin_head.py
new file mode 100644
index 00000000000..ffd8ee8ae36
--- /dev/null
+++ b/mmcls/models/heads/margin_head.py
@@ -0,0 +1,299 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import math
+from typing import List, Optional, Sequence, Tuple, Union
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from mmengine.fileio import list_from_file
+from mmengine.runner import autocast
+from mmengine.utils import is_seq_of
+
+from mmcls.models.losses import convert_to_one_hot
+from mmcls.registry import MODELS
+from mmcls.structures import ClsDataSample
+from .cls_head import ClsHead
+
+
+class NormProduct(nn.Linear):
+ """An enhanced linear layer with k clustering centers to calculate product
+ between normalized input and linear weight.
+
+ Args:
+ in_features (int): size of each input sample.
+ out_features (int): size of each output sample
+ k (int): The number of clustering centers. Defaults to 1.
+ bias (bool): Whether there is bias. If set to ``False``, the
+ layer will not learn an additive bias. Defaults to ``True``.
+ feature_norm (bool): Whether to normalize the input feature.
+ Defaults to ``True``.
+ weight_norm (bool): Whether to normalize the weight.
+ Defaults to ``True``.
+ """
+
+ def __init__(self,
+ in_features: int,
+ out_features: int,
+ k=1,
+ bias: bool = False,
+ feature_norm: bool = True,
+ weight_norm: bool = True):
+
+ super().__init__(in_features, out_features * k, bias=bias)
+ self.weight_norm = weight_norm
+ self.feature_norm = feature_norm
+ self.out_features = out_features
+ self.k = k
+
+ def forward(self, input: torch.Tensor) -> torch.Tensor:
+ if self.feature_norm:
+ input = F.normalize(input)
+ if self.weight_norm:
+ weight = F.normalize(self.weight)
+ else:
+ weight = self.weight
+ cosine_all = F.linear(input, weight, self.bias)
+
+ if self.k == 1:
+ return cosine_all
+ else:
+ cosine_all = cosine_all.view(-1, self.out_features, self.k)
+ cosine, _ = torch.max(cosine_all, dim=2)
+ return cosine
+
+
[email protected]_module()
+class ArcFaceClsHead(ClsHead):
+ """ArcFace classifier head.
+
+ A PyTorch implementation of paper `ArcFace: Additive Angular Margin Loss
+ for Deep Face Recognition <https://arxiv.org/abs/1801.07698>`_ and
+ `Sub-center ArcFace: Boosting Face Recognition by Large-Scale Noisy Web
+ Faces <https://link.springer.com/chapter/10.1007/978-3-030-58621-8_43>`_
+
+ Example:
+ To use ArcFace in config files.
+
+ 1. use vanilla ArcFace
+
+ .. code:: python
+
+ model = dict(
+ backbone = xxx,
+ neck = xxxx,
+ head=dict(
+ type='ArcFaceClsHead',
+ num_classes=5000,
+ in_channels=1024,
+ loss = dict(type='CrossEntropyLoss', loss_weight=1.0),
+ init_cfg=None),
+ )
+
+ 2. use SubCenterArcFace with 3 sub-centers
+
+ .. code:: python
+
+ model = dict(
+ backbone = xxx,
+ neck = xxxx,
+ head=dict(
+ type='ArcFaceClsHead',
+ num_classes=5000,
+ in_channels=1024,
+ num_subcenters=3,
+ loss = dict(type='CrossEntropyLoss', loss_weight=1.0),
+ init_cfg=None),
+ )
+
+ 3. use SubCenterArcFace With CountPowerAdaptiveMargins
+
+ .. code:: python
+
+ model = dict(
+ backbone = xxx,
+ neck = xxxx,
+ head=dict(
+ type='ArcFaceClsHead',
+ num_classes=5000,
+ in_channels=1024,
+ num_subcenters=3,
+ loss = dict(type='CrossEntropyLoss', loss_weight=1.0),
+ init_cfg=None),
+ )
+
+ custom_hooks = [dict(type='SetAdaptiveMarginsHook')]
+
+
+ Args:
+ num_classes (int): Number of categories excluding the background
+ category.
+ in_channels (int): Number of channels in the input feature map.
+ num_subcenters (int): Number of subcenters. Defaults to 1.
+ scale (float): Scale factor of output logit. Defaults to 64.0.
+ margins (float | Sequence[float] | str): The penalty margin. Could be one of the following formats:
+
+ - float: The margin, would be same for all the categories.
+ - Sequence[float]: The category-based margins list.
+ - str: A '.txt' file path which contains a list. Each line
+ represents the margin of a category, and the number in the
+ i-th row indicates the margin of the i-th class.
+
+ Defaults to 0.5.
+ easy_margin (bool): Avoid theta + m >= PI. Defaults to False.
+ loss (dict): Config of classification loss. Defaults to
+ ``dict(type='CrossEntropyLoss', loss_weight=1.0)``.
+ init_cfg (dict, optional): the config to control the initialization.
+ Defaults to None.
+ """
+
+ def __init__(self,
+ num_classes: int,
+ in_channels: int,
+ num_subcenters: int = 1,
+ scale: float = 64.,
+ margins: Optional[Union[float, Sequence[float], str]] = 0.50,
+ easy_margin: bool = False,
+ loss: dict = dict(type='CrossEntropyLoss', loss_weight=1.0),
+ init_cfg: Optional[dict] = None):
+
+ super(ArcFaceClsHead, self).__init__(init_cfg=init_cfg)
+ self.loss_module = MODELS.build(loss)
+
+ assert num_subcenters >= 1 and num_classes >= 0
+ self.in_channels = in_channels
+ self.num_classes = num_classes
+ self.num_subcenters = num_subcenters
+ self.scale = scale
+ self.easy_margin = easy_margin
+
+ self.norm_product = NormProduct(in_channels, num_classes,
+ num_subcenters)
+
+ if isinstance(margins, float):
+ margins = [margins] * num_classes
+ elif isinstance(margins, str) and margins.endswith('.txt'):
+ margins = [float(item) for item in list_from_file(margins)]
+ else:
+ assert is_seq_of(list(margins), (float, int)), (
+ 'the attribute `margins` in ``ArcFaceClsHead`` should be '
+ 'a float, a Sequence of float, or a ".txt" file path.')
+
+ assert len(margins) == num_classes, \
+ 'The length of margins must be equal with num_classes.'
+
+ self.register_buffer(
+ 'margins', torch.tensor(margins).float(), persistent=False)
+ # To make `phi` monotonic decreasing, refers to
+ # https://github.com/deepinsight/insightface/issues/108
+ sinm_m = torch.sin(math.pi - self.margins) * self.margins
+ threshold = torch.cos(math.pi - self.margins)
+ self.register_buffer('sinm_m', sinm_m, persistent=False)
+ self.register_buffer('threshold', threshold, persistent=False)
+
+ def set_margins(self, margins: Union[Sequence[float], float]) -> None:
+ """Set the margins of the ArcFace head.
+
+ Args:
+ margins (Union[Sequence[float], float]): The margins.
+ """
+ if isinstance(margins, float):
+ margins = [margins] * self.num_classes
+ assert is_seq_of(
+ list(margins), float) and (len(margins) == self.num_classes), (
+ f'margins must be Sequence[Union[float, int]], got {margins}')
+
+ self.margins = torch.tensor(
+ margins, device=self.margins.device, dtype=torch.float32)
+ self.sinm_m = torch.sin(self.margins) * self.margins
+ self.threshold = -torch.cos(self.margins)
+
+ def pre_logits(self, feats: Tuple[torch.Tensor]) -> torch.Tensor:
+ """The process before the final classification head.
+
+ The input ``feats`` is a tuple of tensor, and each tensor is the
+ feature of a backbone stage. In ``ArcFaceHead``, we just obtain the
+ feature of the last stage.
+ """
+ # The ArcFaceHead doesn't have other module, just return after
+ # unpacking.
+ return feats[-1]
+
+ def _get_logit_with_margin(self, pre_logits, target):
+ """Add the arc margin to the cosine at the target indices.
+
+ The target must be in index format.
+ """
+ assert target.dim() == 1 or (
+ target.dim() == 2 and target.shape[1] == 1), \
+ 'The target must be in index format.'
+ cosine = self.norm_product(pre_logits)
+ phi = torch.cos(torch.acos(cosine) + self.margins)
+
+ if self.easy_margin:
+ # when cosine>0, choose phi
+ # when cosine<=0, choose cosine
+ phi = torch.where(cosine > 0, phi, cosine)
+ else:
+ # when cos>th, choose phi
+ # when cos<=th, choose cosine-mm
+ phi = torch.where(cosine > self.threshold, phi,
+ cosine - self.sinm_m)
+
+ target = convert_to_one_hot(target, self.num_classes)
+ output = target * phi + (1 - target) * cosine
+ return output
+
+ def forward(self,
+ feats: Tuple[torch.Tensor],
+ target: Optional[torch.Tensor] = None) -> torch.Tensor:
+ """The forward process."""
+ # Disable AMP
+ with autocast(enabled=False):
+ pre_logits = self.pre_logits(feats)
+
+ if target is None:
+ # when eval, logit is the cosine between W and pre_logits;
+ # cos(theta_yj) = (x/||x||) * (W/||W||)
+ logit = self.norm_product(pre_logits)
+ else:
+ # when training, add a margin to the pre_logits where target is
+ # True, then logit is the cosine between W and new pre_logits
+ logit = self._get_logit_with_margin(pre_logits, target)
+
+ return self.scale * logit
+
+ def loss(self, feats: Tuple[torch.Tensor],
+ data_samples: List[ClsDataSample], **kwargs) -> dict:
+ """Calculate losses from the classification score.
+
+ Args:
+ feats (tuple[Tensor]): The features extracted from the backbone.
+ Multiple stage inputs are acceptable but only the last stage
+ will be used to classify. The shape of every item should be
+ ``(num_samples, num_classes)``.
+ data_samples (List[ClsDataSample]): The annotation data of
+ every samples.
+ **kwargs: Other keyword arguments to forward the loss module.
+
+ Returns:
+ dict[str, Tensor]: a dictionary of loss components
+ """
+ # Unpack data samples and pack targets
+ label_target = torch.cat([i.gt_label.label for i in data_samples])
+ if 'score' in data_samples[0].gt_label:
+ # Batch augmentation may convert labels to one-hot format scores.
+ target = torch.stack([i.gt_label.score for i in data_samples])
+ else:
+ # otherwise, use the index format labels directly.
+ target = label_target
+
+ # the index format target would be used
+ cls_score = self(feats, label_target)
+
+ # compute loss
+ losses = dict()
+ loss = self.loss_module(
+ cls_score, target, avg_factor=cls_score.size(0), **kwargs)
+ losses['loss'] = loss
+
+ return losses
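
A standalone sketch of the sub-center mechanism in `NormProduct` above: with `k` sub-centers per class, the class logit is the maximum cosine over that class's `k` normalized centers.

```python
# Sketch of the sub-center max in NormProduct (self-contained).
import torch
import torch.nn.functional as F

batch, in_features, num_classes, k = 2, 8, 5, 3
x = F.normalize(torch.randn(batch, in_features))
w = F.normalize(torch.randn(num_classes * k, in_features))
cosine_all = F.linear(x, w).view(batch, num_classes, k)
cosine, _ = torch.max(cosine_all, dim=2)
print(cosine.shape)  # torch.Size([2, 5]): one logit per class
```
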
| [Feature] Support Sub-center ArcFace
### Branch
1.x branch (1.0.0rc2 or other 1.x version)
### Describe the feature
paper: https://www.ecva.net/papers/eccv_2020/papers_ECCV/papers/123560715.pdf
github: https://github.com/deepinsight/insightface
### Will you implement it?
- [x] I would like to implement this feature and create a PR!
| I have implemented a version based on the current ArcFace, but due to some recent matters I need to wait a few days before submitting it. I hope you can review it then. @okotaku
@Ezra-Yu Thank you, I got it! | 2022-11-09T08:51:20 | 0.0 | [] | [] |
||
open-mmlab/mmpretrain | open-mmlab__mmpretrain-1143 | 940a06f645a9b62632a411e7bff1f6dedcb0e9cb | diff --git a/docker/Dockerfile b/docker/Dockerfile
index 2f0b204dbc9..c8fe08bb241 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -4,6 +4,10 @@ ARG CUDNN="7"
FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel
+# fetch the keys; refer to https://forums.developer.nvidia.com/t/18-04-cuda-docker-image-is-broken/212892/9
+RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/3bf863cc.pub
+RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64/7fa2af80.pub
+
ENV TORCH_CUDA_ARCH_LIST="6.0 6.1 7.0+PTX"
ENV TORCH_NVCC_FLAGS="-Xfatbin -compress-all"
ENV CMAKE_PREFIX_PATH="$(dirname $(which conda))/../"
diff --git a/docker/serve/Dockerfile b/docker/serve/Dockerfile
index d6d7343e7f8..db0bf7081e7 100644
--- a/docker/serve/Dockerfile
+++ b/docker/serve/Dockerfile
@@ -3,6 +3,10 @@ ARG CUDA="10.2"
ARG CUDNN="7"
FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel
+# fetch the keys; refer to https://forums.developer.nvidia.com/t/18-04-cuda-docker-image-is-broken/212892/9
+RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/3bf863cc.pub
+RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64/7fa2af80.pub
+
ARG MMENGINE="0.2.0"
ARG MMCV="2.0.0rc1"
ARG MMCLS="1.0.0rc2"
@@ -23,6 +27,7 @@ RUN export FORCE_CUDA=1
# TORCHSERVE
RUN pip install torchserve torch-model-archiver
+RUN pip install nvgpu
# MMLAB
ARG PYTORCH
diff --git a/docs/en/index.rst b/docs/en/index.rst
index 7060cff485e..b0a3e6d3052 100644
--- a/docs/en/index.rst
+++ b/docs/en/index.rst
You can switch between Chinese and English documentation in the lower-left corner of the layout.
useful_tools/verify_dataset.md
useful_tools/log_result_analysis.md
useful_tools/complexity_analysis.md
+ useful_tools/model_serving.md
.. toctree::
:maxdepth: 1
diff --git a/docs/en/useful_tools/model_serving.md b/docs/en/useful_tools/model_serving.md
new file mode 100644
index 00000000000..acedfe21c64
--- /dev/null
+++ b/docs/en/useful_tools/model_serving.md
@@ -0,0 +1,87 @@
+# Torchserve Deployment
+
+In order to serve an `MMClassification` model with [`TorchServe`](https://pytorch.org/serve/), you can follow the steps:
+
+## 1. Convert model from MMClassification to TorchServe
+
+```shell
+python tools/torchserve/mmcls2torchserve.py ${CONFIG_FILE} ${CHECKPOINT_FILE} \
+--output-folder ${MODEL_STORE} \
+--model-name ${MODEL_NAME}
+```
+
+```{note}
+${MODEL_STORE} needs to be an absolute path to a folder.
+```
+
+Example:
+
+```shell
+python tools/torchserve/mmcls2torchserve.py \
+ configs/resnet/resnet18_8xb32_in1k.py \
+ checkpoints/resnet18_8xb32_in1k_20210831-fbbb1da6.pth \
+ --output-folder ./checkpoints \
+ --model-name resnet18_in1k
+```
+
+## 2. Build `mmcls-serve` docker image
+
+```shell
+docker build -t mmcls-serve:latest docker/serve/
+```
+
+## 3. Run `mmcls-serve`
+
+Check the official docs for [running TorchServe with docker](https://github.com/pytorch/serve/blob/master/docker/README.md#running-torchserve-in-a-production-docker-environment).
+
+In order to run on GPU, you need to install [nvidia-docker](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html). You can omit the `--gpus` argument in order to run on CPU.
+
+Example:
+
+```shell
+docker run --rm \
+--cpus 8 \
+--gpus device=0 \
+-p8080:8080 -p8081:8081 -p8082:8082 \
+--mount type=bind,source=`realpath ./checkpoints`,target=/home/model-server/model-store \
+mmcls-serve:latest
+```
+
+```{note}
+`realpath ./checkpoints` points to the absolute path of "./checkpoints", and you can replace it with the absolute path where you store torchserve models.
+```
+
+[Read the docs](https://github.com/pytorch/serve/blob/master/docs/rest_api.md) about the Inference (8080), Management (8081) and Metrics (8082) APIs.
+
+## 4. Test deployment
+
+```shell
+curl http://127.0.0.1:8080/predictions/${MODEL_NAME} -T demo/demo.JPEG
+```
+
+You should obtain a response similar to:
+
+```json
+{
+ "pred_label": 58,
+ "pred_score": 0.38102269172668457,
+ "pred_class": "water snake"
+}
+```
+
+And you can use `test_torchserver.py` to compare the results of TorchServe and PyTorch, and to visualize them.
+
+```shell
+python tools/torchserve/test_torchserver.py ${IMAGE_FILE} ${CONFIG_FILE} ${CHECKPOINT_FILE} ${MODEL_NAME}
+[--inference-addr ${INFERENCE_ADDR}] [--device ${DEVICE}]
+```
+
+Example:
+
+```shell
+python tools/torchserve/test_torchserver.py \
+ demo/demo.JPEG \
+ configs/resnet/resnet18_8xb32_in1k.py \
+ checkpoints/resnet18_8xb32_in1k_20210831-fbbb1da6.pth \
+ resnet18_in1k
+```
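
The same endpoint can also be queried from Python; a sketch (assuming the server from step 3 is running and `resnet18_in1k` was the name passed to `--model-name`):

```python
# Query the deployed TorchServe endpoint from Python (sketch; assumes the
# server from step 3 is up and the model name matches --model-name).
import requests

with open('demo/demo.JPEG', 'rb') as f:
    response = requests.post(
        'http://127.0.0.1:8080/predictions/resnet18_in1k', data=f)
print(response.json())
```
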
diff --git a/docs/zh_CN/index.rst b/docs/zh_CN/index.rst
index 2c4251e48d9..0b1da11af0a 100644
--- a/docs/zh_CN/index.rst
+++ b/docs/zh_CN/index.rst
You can switch between Chinese and English documentation in the lower-left corner of the layout.
useful_tools/verify_dataset.md
useful_tools/log_result_analysis.md
useful_tools/complexity_analysis.md
+ useful_tools/model_serving.md
.. toctree::
:maxdepth: 1
diff --git a/docs/zh_CN/useful_tools/model_serving.md b/docs/zh_CN/useful_tools/model_serving.md
new file mode 100644
index 00000000000..8ad63e7ba17
--- /dev/null
+++ b/docs/zh_CN/useful_tools/model_serving.md
@@ -0,0 +1,87 @@
+# TorchServe Deployment
+
+In order to deploy an `MMClassification` model with [`TorchServe`](https://pytorch.org/serve/), you need to follow these steps:
+
+## 1. Convert the MMClassification model to TorchServe
+
+```shell
+python tools/torchserve/mmcls2torchserve.py ${CONFIG_FILE} ${CHECKPOINT_FILE} \
+--output-folder ${MODEL_STORE} \
+--model-name ${MODEL_NAME}
+```
+
+```{note}
+${MODEL_STORE} needs to be the absolute path to a folder.
+```
+
+Example:
+
+```shell
+python tools/torchserve/mmcls2torchserve.py \
+ configs/resnet/resnet18_8xb32_in1k.py \
+ checkpoints/resnet18_8xb32_in1k_20210831-fbbb1da6.pth \
+ --output-folder ./checkpoints \
+ --model-name resnet18_in1k
+```
+
+## 2. Build the `mmcls-serve` docker image
+
+```shell
+docker build -t mmcls-serve:latest docker/serve/
+```
+
+## 3. Run the `mmcls-serve` image
+
+Please refer to the official docs on [running TorchServe with docker](https://github.com/pytorch/serve/blob/master/docker/README.md#running-torchserve-in-a-production-docker-environment).
+
+To allow the image to use GPU resources, you need to install [nvidia-docker](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html). After that, you can pass the `--gpus` argument to run on GPUs.
+
+Example:
+
+```shell
+docker run --rm \
+--cpus 8 \
+--gpus device=0 \
+-p8080:8080 -p8081:8081 -p8082:8082 \
+--mount type=bind,source=`realpath ./checkpoints`,target=/home/model-server/model-store \
+mmcls-serve:latest
+```
+
+```{note}
+`realpath ./checkpoints` is the absolute path of "./checkpoints"; you can replace it with the absolute path of the directory where your TorchServe models are stored.
+```
+
+Refer to [this document](https://github.com/pytorch/serve/blob/master/docs/rest_api.md) for information about the Inference (8080), Management (8081) and Metrics (8082) APIs.
+
+## 4. Test the deployment
+
+```shell
+curl http://127.0.0.1:8080/predictions/${MODEL_NAME} -T demo/demo.JPEG
+```
+
+You should obtain a response similar to:
+
+```json
+{
+ "pred_label": 58,
+ "pred_score": 0.38102269172668457,
+ "pred_class": "water snake"
+}
+```
+
+In addition, you can use `test_torchserver.py` to compare the results of TorchServe and PyTorch, and to visualize them.
+
+```shell
+python tools/torchserve/test_torchserver.py ${IMAGE_FILE} ${CONFIG_FILE} ${CHECKPOINT_FILE} ${MODEL_NAME}
+[--inference-addr ${INFERENCE_ADDR}] [--device ${DEVICE}]
+```
+
+Example:
+
+```shell
+python tools/torchserve/test_torchserver.py \
+ demo/demo.JPEG \
+ configs/resnet/resnet18_8xb32_in1k.py \
+ checkpoints/resnet18_8xb32_in1k_20210831-fbbb1da6.pth \
+ resnet18_in1k
+```
diff --git a/tools/deployment/onnx2tensorrt.py b/tools/deployment/onnx2tensorrt.py
deleted file mode 100644
index 8f71b6158d3..00000000000
--- a/tools/deployment/onnx2tensorrt.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# Copyright (c) OpenMMLab. All rights reserved.
-import argparse
-import os
-import os.path as osp
-import warnings
-
-import numpy as np
-
-
-def get_GiB(x: int):
- """return x GiB."""
- return x * (1 << 30)
-
-
-def onnx2tensorrt(onnx_file,
- trt_file,
- input_shape,
- max_batch_size,
- fp16_mode=False,
- verify=False,
- workspace_size=1):
- """Create tensorrt engine from onnx model.
-
- Args:
- onnx_file (str): Filename of the input ONNX model file.
- trt_file (str): Filename of the output TensorRT engine file.
- input_shape (list[int]): Input shape of the model.
- eg [1, 3, 224, 224].
- max_batch_size (int): Max batch size of the model.
- verify (bool, optional): Whether to verify the converted model.
- Defaults to False.
- workspace_size (int, optional): Maximum workspace of GPU.
- Defaults to 1.
- """
- import onnx
- from mmcv.tensorrt import TRTWraper, onnx2trt, save_trt_engine
-
- onnx_model = onnx.load(onnx_file)
- # create trt engine and wrapper
- assert max_batch_size >= 1
- max_shape = [max_batch_size] + list(input_shape[1:])
- opt_shape_dict = {'input': [input_shape, input_shape, max_shape]}
- max_workspace_size = get_GiB(workspace_size)
- trt_engine = onnx2trt(
- onnx_model,
- opt_shape_dict,
- fp16_mode=fp16_mode,
- max_workspace_size=max_workspace_size)
- save_dir, _ = osp.split(trt_file)
- if save_dir:
- os.makedirs(save_dir, exist_ok=True)
- save_trt_engine(trt_engine, trt_file)
- print(f'Successfully created TensorRT engine: {trt_file}')
-
- if verify:
- import onnxruntime as ort
- import torch
-
- input_img = torch.randn(*input_shape)
- input_img_cpu = input_img.detach().cpu().numpy()
- input_img_cuda = input_img.cuda()
-
- # Get results from ONNXRuntime
- session_options = ort.SessionOptions()
- sess = ort.InferenceSession(onnx_file, session_options)
-
- # get input and output names
- input_names = [_.name for _ in sess.get_inputs()]
- output_names = [_.name for _ in sess.get_outputs()]
-
- onnx_outputs = sess.run(None, {
- input_names[0]: input_img_cpu,
- })
-
- # Get results from TensorRT
- trt_model = TRTWraper(trt_file, input_names, output_names)
- with torch.no_grad():
- trt_outputs = trt_model({input_names[0]: input_img_cuda})
- trt_outputs = [
- trt_outputs[_].detach().cpu().numpy() for _ in output_names
- ]
-
- # Compare results
- np.testing.assert_allclose(
- onnx_outputs[0], trt_outputs[0], rtol=1e-05, atol=1e-05)
- print('The numerical values are the same ' +
- 'between ONNXRuntime and TensorRT')
-
-
-def parse_args():
- parser = argparse.ArgumentParser(
- description='Convert MMClassification models from ONNX to TensorRT')
- parser.add_argument('model', help='Filename of the input ONNX model')
- parser.add_argument(
- '--trt-file',
- type=str,
- default='tmp.trt',
- help='Filename of the output TensorRT engine')
- parser.add_argument(
- '--verify',
- action='store_true',
- help='Verify the outputs of ONNXRuntime and TensorRT')
- parser.add_argument(
- '--shape',
- type=int,
- nargs='+',
- default=[224, 224],
- help='Input size of the model')
- parser.add_argument(
- '--max-batch-size',
- type=int,
- default=1,
- help='Maximum batch size of TensorRT model.')
- parser.add_argument('--fp16', action='store_true', help='Enable fp16 mode')
- parser.add_argument(
- '--workspace-size',
- type=int,
- default=1,
- help='Max workspace size of GPU in GiB')
- args = parser.parse_args()
- return args
-
-
-if __name__ == '__main__':
-
- args = parse_args()
-
- if len(args.shape) == 1:
- input_shape = (1, 3, args.shape[0], args.shape[0])
- elif len(args.shape) == 2:
- input_shape = (1, 3) + tuple(args.shape)
- else:
- raise ValueError('invalid input shape')
-
- # Create TensorRT engine
- onnx2tensorrt(
- args.model,
- args.trt_file,
- input_shape,
- args.max_batch_size,
- fp16_mode=args.fp16,
- verify=args.verify,
- workspace_size=args.workspace_size)
-
- # Following strings of text style are from colorama package
- bright_style, reset_style = '\x1b[1m', '\x1b[0m'
- red_text, blue_text = '\x1b[31m', '\x1b[34m'
- white_background = '\x1b[107m'
-
- msg = white_background + bright_style + red_text
- msg += 'DeprecationWarning: This tool will be deprecated in future. '
- msg += blue_text + 'Welcome to use the unified model deployment toolbox '
- msg += 'MMDeploy: https://github.com/open-mmlab/mmdeploy'
- msg += reset_style
- warnings.warn(msg)
diff --git a/tools/deployment/pytorch2mlmodel.py b/tools/deployment/pytorch2mlmodel.py
deleted file mode 100644
index 814cbe94e75..00000000000
--- a/tools/deployment/pytorch2mlmodel.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# Copyright (c) OpenMMLab. All rights reserved.
-import argparse
-import os
-import os.path as osp
-import warnings
-from functools import partial
-
-import mmcv
-import numpy as np
-import torch
-from mmcv.runner import load_checkpoint
-from torch import nn
-
-from mmcls.models import build_classifier
-
-torch.manual_seed(3)
-
-try:
- import coremltools as ct
-except ImportError:
- raise ImportError('Please install coremltools to enable output file.')
-
-
-def _demo_mm_inputs(input_shape: tuple, num_classes: int):
- """Create a superset of inputs needed to run test or train batches.
-
- Args:
- input_shape (tuple):
- input batch dimensions
- num_classes (int):
- number of semantic classes
- """
- (N, C, H, W) = input_shape
- rng = np.random.RandomState(0)
- imgs = rng.rand(*input_shape)
- gt_labels = rng.randint(
- low=0, high=num_classes, size=(N, 1)).astype(np.uint8)
- mm_inputs = {
- 'imgs': torch.FloatTensor(imgs).requires_grad_(False),
- 'gt_labels': torch.LongTensor(gt_labels),
- }
- return mm_inputs
-
-
-def pytorch2mlmodel(model: nn.Module, input_shape: tuple, output_file: str,
- add_norm: bool, norm: dict):
- """Export Pytorch model to mlmodel format that can be deployed in apple
- devices through torch.jit.trace and the coremltools library.
-
- Optionally, embed the normalization step as a layer to the model.
-
- Args:
- model (nn.Module): Pytorch model we want to export.
- input_shape (tuple): Use this input shape to construct
- the corresponding dummy input and execute the model.
- show (bool): Whether print the computation graph. Default: False.
- output_file (string): The path to where we store the output
- TorchScript model.
- add_norm (bool): Whether to embed the normalization layer to the
- output model.
- norm (dict): image normalization config for embedding it as a layer
- to the output model.
- """
- model.cpu().eval()
-
- num_classes = model.head.num_classes
- mm_inputs = _demo_mm_inputs(input_shape, num_classes)
-
- imgs = mm_inputs.pop('imgs')
- img_list = [img[None, :] for img in imgs]
- model.forward = partial(model.forward, img_metas={}, return_loss=False)
-
- with torch.no_grad():
- trace_model = torch.jit.trace(model, img_list[0])
- save_dir, _ = osp.split(output_file)
- if save_dir:
- os.makedirs(save_dir, exist_ok=True)
-
- if add_norm:
- means, stds = norm.mean, norm.std
- if stds.count(stds[0]) != len(stds):
- warnings.warn(f'Image std from config is {stds}. However, '
- 'current version of coremltools (5.1) uses a '
- 'global std rather than the channel-specific '
- 'values that torchvision uses. A mean will be '
- 'taken but this might tamper with the resulting '
- 'model\'s predictions. For more details refer '
- 'to the coreml docs on ImageType pre-processing')
- scale = np.mean(stds)
- else:
- scale = stds[0]
-
- bias = [-mean / scale for mean in means]
- image_input = ct.ImageType(
- name='input_1',
- shape=input_shape,
- scale=1 / scale,
- bias=bias,
- color_layout='RGB',
- channel_first=True)
-
- coreml_model = ct.convert(trace_model, inputs=[image_input])
- coreml_model.save(output_file)
- else:
- coreml_model = ct.convert(
- trace_model, inputs=[ct.TensorType(shape=input_shape)])
- coreml_model.save(output_file)
-
- print(f'Successfully exported coreml model: {output_file}')
-
-
-def parse_args():
- parser = argparse.ArgumentParser(
- description='Convert MMCls to MlModel format for apple devices')
- parser.add_argument('config', help='test config file path')
- parser.add_argument('--checkpoint', help='checkpoint file', type=str)
- parser.add_argument('--output-file', type=str, default='model.mlmodel')
- parser.add_argument(
- '--shape',
- type=int,
- nargs='+',
- default=[224, 224],
- help='input image size')
- parser.add_argument(
- '--add-norm-layer',
- action='store_true',
- help='embed normalization layer to deployed model')
- args = parser.parse_args()
- return args
-
-
-if __name__ == '__main__':
- args = parse_args()
-
- if len(args.shape) == 1:
- input_shape = (1, 3, args.shape[0], args.shape[0])
- elif len(args.shape) == 2:
- input_shape = (
- 1,
- 3,
- ) + tuple(args.shape)
- else:
- raise ValueError('invalid input shape')
-
- cfg = mmcv.Config.fromfile(args.config)
- cfg.model.pretrained = None
-
- # build the model and load checkpoint
- classifier = build_classifier(cfg.model)
-
- if args.checkpoint:
- load_checkpoint(classifier, args.checkpoint, map_location='cpu')
-
- # convert model to mlmodel file
- pytorch2mlmodel(
- classifier,
- input_shape,
- output_file=args.output_file,
- add_norm=args.add_norm_layer,
- norm=cfg.img_norm_cfg)
diff --git a/tools/deployment/pytorch2onnx.py b/tools/deployment/pytorch2onnx.py
deleted file mode 100644
index 1da95946706..00000000000
--- a/tools/deployment/pytorch2onnx.py
+++ /dev/null
@@ -1,246 +0,0 @@
-# Copyright (c) OpenMMLab. All rights reserved.
-import argparse
-import warnings
-from functools import partial
-
-import mmcv
-import numpy as np
-import onnxruntime as rt
-import torch
-from mmcv.onnx import register_extra_symbolics
-from mmcv.runner import load_checkpoint
-
-from mmcls.models import build_classifier
-
-torch.manual_seed(3)
-
-
-def _demo_mm_inputs(input_shape, num_classes):
- """Create a superset of inputs needed to run test or train batches.
-
- Args:
- input_shape (tuple):
- input batch dimensions
- num_classes (int):
- number of semantic classes
- """
- (N, C, H, W) = input_shape
- rng = np.random.RandomState(0)
- imgs = rng.rand(*input_shape)
- gt_labels = rng.randint(
- low=0, high=num_classes, size=(N, 1)).astype(np.uint8)
- mm_inputs = {
- 'imgs': torch.FloatTensor(imgs).requires_grad_(True),
- 'gt_labels': torch.LongTensor(gt_labels),
- }
- return mm_inputs
-
-
-def pytorch2onnx(model,
- input_shape,
- opset_version=11,
- dynamic_export=False,
- show=False,
- output_file='tmp.onnx',
- do_simplify=False,
- verify=False):
- """Export Pytorch model to ONNX model and verify the outputs are same
- between Pytorch and ONNX.
-
- Args:
- model (nn.Module): Pytorch model we want to export.
- input_shape (tuple): Use this input shape to construct
- the corresponding dummy input and execute the model.
- opset_version (int): The onnx op version. Default: 11.
- show (bool): Whether print the computation graph. Default: False.
- output_file (string): The path to where we store the output ONNX model.
- Default: `tmp.onnx`.
- verify (bool): Whether compare the outputs between Pytorch and ONNX.
- Default: False.
- """
- model.cpu().eval()
-
- if hasattr(model.head, 'num_classes'):
- num_classes = model.head.num_classes
- # Some backbones use `num_classes=-1` to disable top classifier.
- elif getattr(model.backbone, 'num_classes', -1) > 0:
- num_classes = model.backbone.num_classes
- else:
- raise AttributeError('Cannot find "num_classes" in both head and '
- 'backbone, please check the config file.')
-
- mm_inputs = _demo_mm_inputs(input_shape, num_classes)
-
- imgs = mm_inputs.pop('imgs')
- img_list = [img[None, :] for img in imgs]
-
- # replace original forward function
- origin_forward = model.forward
- model.forward = partial(model.forward, img_metas={}, return_loss=False)
- register_extra_symbolics(opset_version)
-
- # support dynamic shape export
- if dynamic_export:
- dynamic_axes = {
- 'input': {
- 0: 'batch',
- 2: 'width',
- 3: 'height'
- },
- 'probs': {
- 0: 'batch'
- }
- }
- else:
- dynamic_axes = {}
-
- with torch.no_grad():
- torch.onnx.export(
- model, (img_list, ),
- output_file,
- input_names=['input'],
- output_names=['probs'],
- export_params=True,
- keep_initializers_as_inputs=True,
- dynamic_axes=dynamic_axes,
- verbose=show,
- opset_version=opset_version)
- print(f'Successfully exported ONNX model: {output_file}')
- model.forward = origin_forward
-
- if do_simplify:
- import onnx
- import onnxsim
- from mmcv import digit_version
-
- min_required_version = '0.3.0'
- assert digit_version(mmcv.__version__) >= digit_version(
- min_required_version
- ), f'Requires to install onnx-simplify>={min_required_version}'
-
- if dynamic_axes:
- input_shape = (input_shape[0], input_shape[1], input_shape[2] * 2,
- input_shape[3] * 2)
- else:
- input_shape = (input_shape[0], input_shape[1], input_shape[2],
- input_shape[3])
- imgs = _demo_mm_inputs(input_shape, model.head.num_classes).pop('imgs')
- input_dic = {'input': imgs.detach().cpu().numpy()}
- input_shape_dic = {'input': list(input_shape)}
-
- model_opt, check_ok = onnxsim.simplify(
- output_file,
- input_shapes=input_shape_dic,
- input_data=input_dic,
- dynamic_input_shape=dynamic_export)
- if check_ok:
- onnx.save(model_opt, output_file)
- print(f'Successfully simplified ONNX model: {output_file}')
- else:
- print('Failed to simplify ONNX model.')
- if verify:
- # check by onnx
- import onnx
- onnx_model = onnx.load(output_file)
- onnx.checker.check_model(onnx_model)
-
- # test the dynamic model
- if dynamic_export:
- dynamic_test_inputs = _demo_mm_inputs(
- (input_shape[0], input_shape[1], input_shape[2] * 2,
- input_shape[3] * 2), model.head.num_classes)
- imgs = dynamic_test_inputs.pop('imgs')
- img_list = [img[None, :] for img in imgs]
-
- # check the numerical value
- # get pytorch output
- pytorch_result = model(img_list, img_metas={}, return_loss=False)[0]
-
- # get onnx output
- input_all = [node.name for node in onnx_model.graph.input]
- input_initializer = [
- node.name for node in onnx_model.graph.initializer
- ]
- net_feed_input = list(set(input_all) - set(input_initializer))
- assert (len(net_feed_input) == 1)
- sess = rt.InferenceSession(output_file)
- onnx_result = sess.run(
- None, {net_feed_input[0]: img_list[0].detach().numpy()})[0]
- if not np.allclose(pytorch_result, onnx_result):
- raise ValueError(
- 'The outputs are different between Pytorch and ONNX')
- print('The outputs are same between Pytorch and ONNX')
-
-
-def parse_args():
- parser = argparse.ArgumentParser(description='Convert MMCls to ONNX')
- parser.add_argument('config', help='test config file path')
- parser.add_argument('--checkpoint', help='checkpoint file', default=None)
- parser.add_argument('--show', action='store_true', help='show onnx graph')
- parser.add_argument(
- '--verify', action='store_true', help='verify the onnx model')
- parser.add_argument('--output-file', type=str, default='tmp.onnx')
- parser.add_argument('--opset-version', type=int, default=11)
- parser.add_argument(
- '--simplify',
- action='store_true',
- help='Whether to simplify onnx model.')
- parser.add_argument(
- '--shape',
- type=int,
- nargs='+',
- default=[224, 224],
- help='input image size')
- parser.add_argument(
- '--dynamic-export',
- action='store_true',
- help='Whether to export ONNX with dynamic input shape. \
- Defaults to False.')
- args = parser.parse_args()
- return args
-
-
-if __name__ == '__main__':
- args = parse_args()
-
- if len(args.shape) == 1:
- input_shape = (1, 3, args.shape[0], args.shape[0])
- elif len(args.shape) == 2:
- input_shape = (
- 1,
- 3,
- ) + tuple(args.shape)
- else:
- raise ValueError('invalid input shape')
-
- cfg = mmcv.Config.fromfile(args.config)
- cfg.model.pretrained = None
-
- # build the model and load checkpoint
- classifier = build_classifier(cfg.model)
-
- if args.checkpoint:
- load_checkpoint(classifier, args.checkpoint, map_location='cpu')
-
- # convert model to onnx file
- pytorch2onnx(
- classifier,
- input_shape,
- opset_version=args.opset_version,
- show=args.show,
- dynamic_export=args.dynamic_export,
- output_file=args.output_file,
- do_simplify=args.simplify,
- verify=args.verify)
-
- # Following strings of text style are from colorama package
- bright_style, reset_style = '\x1b[1m', '\x1b[0m'
- red_text, blue_text = '\x1b[31m', '\x1b[34m'
- white_background = '\x1b[107m'
-
- msg = white_background + bright_style + red_text
- msg += 'DeprecationWarning: This tool will be deprecated in future. '
- msg += blue_text + 'Welcome to use the unified model deployment toolbox '
- msg += 'MMDeploy: https://github.com/open-mmlab/mmdeploy'
- msg += reset_style
- warnings.warn(msg)
diff --git a/tools/deployment/pytorch2torchscript.py b/tools/deployment/pytorch2torchscript.py
deleted file mode 100644
index f261b7c9526..00000000000
--- a/tools/deployment/pytorch2torchscript.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Copyright (c) OpenMMLab. All rights reserved.
-import argparse
-import os
-import os.path as osp
-from functools import partial
-
-import mmcv
-import numpy as np
-import torch
-from mmcv.runner import load_checkpoint
-from torch import nn
-
-from mmcls.models import build_classifier
-
-torch.manual_seed(3)
-
-
-def _demo_mm_inputs(input_shape: tuple, num_classes: int):
- """Create a superset of inputs needed to run test or train batches.
-
- Args:
- input_shape (tuple):
- input batch dimensions
- num_classes (int):
- number of semantic classes
- """
- (N, C, H, W) = input_shape
- rng = np.random.RandomState(0)
- imgs = rng.rand(*input_shape)
- gt_labels = rng.randint(
- low=0, high=num_classes, size=(N, 1)).astype(np.uint8)
- mm_inputs = {
- 'imgs': torch.FloatTensor(imgs).requires_grad_(False),
- 'gt_labels': torch.LongTensor(gt_labels),
- }
- return mm_inputs
-
-
-def pytorch2torchscript(model: nn.Module, input_shape: tuple, output_file: str,
- verify: bool):
- """Export Pytorch model to TorchScript model through torch.jit.trace and
- verify the outputs are same between Pytorch and TorchScript.
-
- Args:
- model (nn.Module): Pytorch model we want to export.
- input_shape (tuple): Use this input shape to construct
- the corresponding dummy input and execute the model.
- show (bool): Whether print the computation graph. Default: False.
- output_file (string): The path to where we store the output
- TorchScript model.
- verify (bool): Whether compare the outputs between Pytorch
- and TorchScript through loading generated output_file.
- """
- model.cpu().eval()
-
- num_classes = model.head.num_classes
- mm_inputs = _demo_mm_inputs(input_shape, num_classes)
-
- imgs = mm_inputs.pop('imgs')
- img_list = [img[None, :] for img in imgs]
-
- # replace original forward function
- origin_forward = model.forward
- model.forward = partial(model.forward, img_metas={}, return_loss=False)
-
- with torch.no_grad():
- trace_model = torch.jit.trace(model, img_list[0])
- save_dir, _ = osp.split(output_file)
- if save_dir:
- os.makedirs(save_dir, exist_ok=True)
- trace_model.save(output_file)
- print(f'Successfully exported TorchScript model: {output_file}')
- model.forward = origin_forward
-
- if verify:
- # load by torch.jit
- jit_model = torch.jit.load(output_file)
-
- # check the numerical value
- # get pytorch output
- pytorch_result = model(img_list, img_metas={}, return_loss=False)[0]
-
- # get jit output
- jit_result = jit_model(img_list[0])[0].detach().numpy()
- if not np.allclose(pytorch_result, jit_result):
- raise ValueError(
- 'The outputs are different between Pytorch and TorchScript')
- print('The outputs are same between Pytorch and TorchScript')
-
-
-def parse_args():
- parser = argparse.ArgumentParser(
- description='Convert MMCls to TorchScript')
- parser.add_argument('config', help='test config file path')
- parser.add_argument('--checkpoint', help='checkpoint file', type=str)
- parser.add_argument(
- '--verify',
- action='store_true',
- help='verify the TorchScript model',
- default=False)
- parser.add_argument('--output-file', type=str, default='tmp.pt')
- parser.add_argument(
- '--shape',
- type=int,
- nargs='+',
- default=[224, 224],
- help='input image size')
- args = parser.parse_args()
- return args
-
-
-if __name__ == '__main__':
- args = parse_args()
-
- if len(args.shape) == 1:
- input_shape = (1, 3, args.shape[0], args.shape[0])
- elif len(args.shape) == 2:
- input_shape = (
- 1,
- 3,
- ) + tuple(args.shape)
- else:
- raise ValueError('invalid input shape')
-
- cfg = mmcv.Config.fromfile(args.config)
- cfg.model.pretrained = None
-
- # build the model and load checkpoint
- classifier = build_classifier(cfg.model)
-
- if args.checkpoint:
- load_checkpoint(classifier, args.checkpoint, map_location='cpu')
-
- # convert model to TorchScript file
- pytorch2torchscript(
- classifier,
- input_shape,
- output_file=args.output_file,
- verify=args.verify)
diff --git a/tools/deployment/mmcls2torchserve.py b/tools/torchserve/mmcls2torchserve.py
similarity index 93%
rename from tools/deployment/mmcls2torchserve.py
rename to tools/torchserve/mmcls2torchserve.py
index dc4521452f2..a8a17bde88d 100644
--- a/tools/deployment/mmcls2torchserve.py
+++ b/tools/torchserve/mmcls2torchserve.py
@@ -3,14 +3,15 @@
from pathlib import Path
from tempfile import TemporaryDirectory
-from mmengine.config import Config
-from mmengine.utils import mkdir_or_exist
+import mmengine
try:
from model_archiver.model_packaging import package_model
from model_archiver.model_packaging_utils import ModelExportUtils
except ImportError:
- package_model = None
+ raise ImportError(
+ 'Please run `pip install torchserve torch-model-archiver` to '
+ 'install required third-party libraries.')
def mmcls2torchserve(
@@ -44,9 +45,9 @@ def mmcls2torchserve(
If True, if there is an existing `{model_name}.mar`
file under `output_folder` it will be overwritten.
"""
- mkdir_or_exist(output_folder)
+ mmengine.mkdir_or_exist(output_folder)
- config = Config.fromfile(config_file)
+ config = mmengine.Config.fromfile(config_file)
with TemporaryDirectory() as tmpdir:
config.dump(f'{tmpdir}/config.py')
diff --git a/tools/deployment/mmcls_handler.py b/tools/torchserve/mmcls_handler.py
similarity index 100%
rename from tools/deployment/mmcls_handler.py
rename to tools/torchserve/mmcls_handler.py
| [Bug] module 'mmcv' has no attribute 'mkdir_or_exist'
### Branch
1.x branch (1.0.0rc2 or other 1.x version)
### Describe the bug
I think you forgot to change `mmcv` to `mmengine` here: https://github.com/open-mmlab/mmclassification/blob/1.x/tools/deployment/mmcls2torchserve.py#L6
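For reference, the intended call after the fix — a minimal sketch assuming mmengine is installed (the patch above calls it the same way):

```python
import mmengine

# In mmcv 2.x this helper lives in mmengine, so calling it via mmengine
# avoids the AttributeError reported here.
mmengine.mkdir_or_exist('work_dirs/serve')  # creates the directory if missing
```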
### Environment
```
{'sys.platform': 'linux',
'Python': '3.8.13 (default, Mar 28 2022, 11:38:47) [GCC 7.5.0]',
'CUDA available': True,
'numpy_random_seed': 2147483648,
'GPU 0': 'NVIDIA GeForce RTX 2060 with Max-Q Design',
'CUDA_HOME': '/usr/local/cuda',
'NVCC': 'Cuda compilation tools, release 11.0, V11.0.194',
'GCC': 'gcc (Ubuntu 9.4.0-1ubuntu1~20.04.1) 9.4.0',
'PyTorch': '1.12.1+cu102',
'PyTorch compiling details': 'PyTorch built with:\n'
' - GCC 7.3\n'
' - C++ Version: 201402\n'
' - Intel(R) oneAPI Math Kernel Library Version '
'2021.4-Product Build 20210904 for Intel(R) 64 '
'architecture applications\n'
' - Intel(R) MKL-DNN v2.6.0 (Git Hash '
'52b5f107dd9cf10910aaa19cb47f3abf9b349815)\n'
' - OpenMP 201511 (a.k.a. OpenMP 4.5)\n'
' - LAPACK is enabled (usually provided by '
'MKL)\n'
' - NNPACK is enabled\n'
' - CPU capability usage: AVX2\n'
' - CUDA Runtime 10.2\n'
' - NVCC architecture flags: '
'-gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_70,code=sm_70\n'
' - CuDNN 7.6.5\n'
' - Magma 2.5.2\n'
' - Build settings: BLAS_INFO=mkl, '
'BUILD_TYPE=Release, CUDA_VERSION=10.2, '
'CUDNN_VERSION=7.6.5, '
'CXX_COMPILER=/opt/rh/devtoolset-7/root/usr/bin/c++, '
'CXX_FLAGS= -fabi-version=11 -Wno-deprecated '
'-fvisibility-inlines-hidden -DUSE_PTHREADPOOL '
'-fopenmp -DNDEBUG -DUSE_KINETO -DUSE_FBGEMM '
'-DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK '
'-DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE '
'-DEDGE_PROFILER_USE_KINETO -O2 -fPIC '
'-Wno-narrowing -Wall -Wextra '
'-Werror=return-type '
'-Wno-missing-field-initializers '
'-Wno-type-limits -Wno-array-bounds '
'-Wno-unknown-pragmas -Wno-unused-parameter '
'-Wno-unused-function -Wno-unused-result '
'-Wno-unused-local-typedefs -Wno-strict-overflow '
'-Wno-strict-aliasing '
'-Wno-error=deprecated-declarations '
'-Wno-stringop-overflow -Wno-psabi '
'-Wno-error=pedantic -Wno-error=redundant-decls '
'-Wno-error=old-style-cast '
'-fdiagnostics-color=always -faligned-new '
'-Wno-unused-but-set-variable '
'-Wno-maybe-uninitialized -fno-math-errno '
'-fno-trapping-math -Werror=format '
'-Wno-stringop-overflow, LAPACK_INFO=mkl, '
'PERF_WITH_AVX=1, PERF_WITH_AVX2=1, '
'PERF_WITH_AVX512=1, TORCH_VERSION=1.12.1, '
'USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, '
'USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, '
'USE_MKLDNN=OFF, USE_MPI=OFF, USE_NCCL=ON, '
'USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF, \n',
'TorchVision': '0.13.1+cu102',
'OpenCV': '4.6.0',
'MMEngine': '0.2.0',
'MMClassification': '1.0.0rc2+b855bc0'}
```
### Other information
_No response_
| Yes. Thank you for your report. Can you fix that and create a PR?
Yes, of course.
@Ezra-Yu here is the PR: https://github.com/open-mmlab/mmclassification/pull/1143
Good Job! I will test it.
@Ezra-Yu did you test the inference after deployment?
https://github.com/open-mmlab/mmclassification/pull/1143#issuecomment-1292230518
@marouaneamz Sorry for the late reply (I had to download and install Docker, then debug it while testing).
Yes, I have tested it after deployment, following [this tutorial](https://mmclassification.readthedocs.io/en/master/tools/model_serving.html#convert-model-from-mmclassification-to-torchserve). And there are errors besides the one you mentioned here.
> @Ezra-Yu In my understanding, the default_scope for the registry will be initialized in the runner. To use mmcls inference in deploy servers, it must be run with a runner or with a hardcoded default_scope.
You are right, there are some errors when testing. The PRs https://github.com/open-mmlab/mmclassification/pull/1139 and https://github.com/open-mmlab/mmclassification/pull/1118 are going to solve this problem.
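Until those PRs land, a minimal workaround sketch — `DefaultScope` is the mmengine 0.2.x API, and the instance name `'mmcls-serve'` is just an arbitrary label:

```python
from mmengine.registry import DefaultScope

# Hardcode the default scope so mmcls modules can be resolved
# outside of a Runner (e.g. inside a TorchServe handler).
DefaultScope.get_instance('mmcls-serve', scope_name='mmcls')
```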
If you really want to run the example now, you can modify the [`docker/serve/Dockerfile`](https://github.com/open-mmlab/mmclassification/blob/dev-1.x/docker/serve/Dockerfile) as follows:
```
ARG PYTORCH="1.8.1"
ARG CUDA="10.2"
ARG CUDNN="7"
FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel
# fetch the key refer to https://forums.developer.nvidia.com/t/18-04-cuda-docker-image-is-broken/212892/9
RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/3bf863cc.pub
RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64/7fa2af80.pub
ARG MMENGINE="0.2.0"
ARG MMCV="2.0.0rc1"
ARG MMCLS="1.0.0rc2"
ENV PYTHONUNBUFFERED TRUE
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
ca-certificates \
g++ \
openjdk-11-jre-headless \
    # MMCls Requirements
ffmpeg libsm6 libxext6 git ninja-build libglib2.0-0 libsm6 libxrender-dev libxext6 \
&& rm -rf /var/lib/apt/lists/*
ENV PATH="/opt/conda/bin:$PATH"
RUN export FORCE_CUDA=1
# TORCHSEVER
RUN pip install torchserve torch-model-archiver
RUN pip install nvgpu
# MMLAB
ARG PYTORCH
ARG CUDA
RUN pip install mmengine==${MMENGINE}
RUN ["/bin/bash", "-c", "pip install mmcv==${MMCV} -f https://download.openmmlab.com/mmcv/dist/cu${CUDA//./}/torch${PYTORCH}/index.html"]
RUN pip3 install git+https://github.com/mzr1996/mmclassification.git@1x-model-pages
# this branch has solved that
# RUN pip install mmcls==${MMCLS}
RUN useradd -m model-server \
&& mkdir -p /home/model-server/tmp
COPY entrypoint.sh /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh \
&& chown -R model-server /home/model-server
COPY config.properties /home/model-server/config.properties
RUN mkdir /home/model-server/model-store && chown -R model-server /home/model-server/model-store
EXPOSE 8080 8081 8082
USER model-server
WORKDIR /home/model-server
ENV TEMP=/home/model-server/tmp
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
CMD ["serve"]
```
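With that Dockerfile in place, building and serving follow the usual flow from the tutorial linked above, roughly `docker build -t mmcls-serve docker/serve/` and then `docker run -p 8080:8080 -p 8081:8081 -p 8082:8082 mmcls-serve` (the ports match the `EXPOSE` line).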
In my environment, it works fine:

We will fix it in `branch 1.x` in next version. | 2022-10-26T10:50:04 | 0.0 | [] | [] |
||
open-mmlab/mmpretrain | open-mmlab__mmpretrain-1105 | 992d13e772ba015abb34cc539e7a3562183b136c | diff --git a/README.md b/README.md
index 964dee2bc01..ad948bed410 100644
--- a/README.md
+++ b/README.md
@@ -150,6 +150,7 @@ Results and models are available in the [model zoo](https://mmclassification.rea
- [x] [MViT](https://github.com/open-mmlab/mmclassification/tree/1.x/configs/mvit)
- [x] [HorNet](https://github.com/open-mmlab/mmclassification/tree/master/configs/hornet)
- [x] [MobileViT](https://github.com/open-mmlab/mmclassification/tree/1.x/configs/mobilevit)
+- [x] [DaViT](https://github.com/open-mmlab/mmclassification/tree/1.x/configs/davit)
</details>
diff --git a/README_zh-CN.md b/README_zh-CN.md
index 8d15d7e0d5b..d6e9665ec03 100644
--- a/README_zh-CN.md
+++ b/README_zh-CN.md
@@ -149,6 +149,7 @@ mim install -e .
- [x] [MViT](https://github.com/open-mmlab/mmclassification/tree/1.x/configs/mvit)
- [x] [HorNet](https://github.com/open-mmlab/mmclassification/tree/master/configs/hornet)
- [x] [MobileViT](https://github.com/open-mmlab/mmclassification/tree/1.x/configs/mobilevit)
+- [x] [DaViT](https://github.com/open-mmlab/mmclassification/tree/1.x/configs/davit)
</details>
diff --git a/configs/_base_/datasets/imagenet_bs256_davit_224.py b/configs/_base_/datasets/imagenet_bs256_davit_224.py
new file mode 100644
index 00000000000..faf46523a84
--- /dev/null
+++ b/configs/_base_/datasets/imagenet_bs256_davit_224.py
@@ -0,0 +1,84 @@
+# dataset settings
+dataset_type = 'ImageNet'
+data_preprocessor = dict(
+ num_classes=1000,
+ # RGB format normalization parameters
+ mean=[123.675, 116.28, 103.53],
+ std=[58.395, 57.12, 57.375],
+ # convert image from BGR to RGB
+ to_rgb=True,
+)
+
+bgr_mean = data_preprocessor['mean'][::-1]
+bgr_std = data_preprocessor['std'][::-1]
+
+train_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(
+ type='RandomResizedCrop',
+ scale=224,
+ backend='pillow',
+ interpolation='bicubic'),
+ dict(type='RandomFlip', prob=0.5, direction='horizontal'),
+ dict(
+ type='RandAugment',
+ policies='timm_increasing',
+ num_policies=2,
+ total_level=10,
+ magnitude_level=9,
+ magnitude_std=0.5,
+ hparams=dict(
+ pad_val=[round(x) for x in bgr_mean], interpolation='bicubic')),
+ dict(
+ type='RandomErasing',
+ erase_prob=0.25,
+ mode='rand',
+ min_area_ratio=0.02,
+ max_area_ratio=1 / 3,
+ fill_color=bgr_mean,
+ fill_std=bgr_std),
+ dict(type='PackClsInputs'),
+]
+
+test_pipeline = [
+ dict(type='LoadImageFromFile'),
+ dict(
+ type='ResizeEdge',
+ scale=236,
+ edge='short',
+ backend='pillow',
+ interpolation='bicubic'),
+ dict(type='CenterCrop', crop_size=224),
+ dict(type='PackClsInputs'),
+]
+
+train_dataloader = dict(
+ batch_size=64,
+ num_workers=5,
+ dataset=dict(
+ type=dataset_type,
+ data_root='data/imagenet',
+ ann_file='meta/train.txt',
+ data_prefix='train',
+ pipeline=train_pipeline),
+ sampler=dict(type='DefaultSampler', shuffle=True),
+ persistent_workers=True,
+)
+
+val_dataloader = dict(
+ batch_size=64,
+ num_workers=5,
+ dataset=dict(
+ type=dataset_type,
+ data_root='data/imagenet',
+ ann_file='meta/val.txt',
+ data_prefix='val',
+ pipeline=test_pipeline),
+ sampler=dict(type='DefaultSampler', shuffle=False),
+ persistent_workers=True,
+)
+val_evaluator = dict(type='Accuracy', topk=(1, 5))
+
+# If you want standard test, please manually configure the test dataset
+test_dataloader = val_dataloader
+test_evaluator = val_evaluator
diff --git a/configs/_base_/models/davit/davit-base.py b/configs/_base_/models/davit/davit-base.py
new file mode 100644
index 00000000000..0dbf07739ec
--- /dev/null
+++ b/configs/_base_/models/davit/davit-base.py
@@ -0,0 +1,16 @@
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(
+ type='DaViT', arch='base', out_indices=(3, ), drop_path_rate=0.4),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=1024,
+ loss=dict(
+ type='LabelSmoothLoss', label_smooth_val=0.1, mode='original'),
+ ),
+ train_cfg=dict(augments=[
+ dict(type='Mixup', alpha=0.8),
+ dict(type='CutMix', alpha=1.0)
+ ]))
diff --git a/configs/_base_/models/davit/davit-small.py b/configs/_base_/models/davit/davit-small.py
new file mode 100644
index 00000000000..2fa0325552c
--- /dev/null
+++ b/configs/_base_/models/davit/davit-small.py
@@ -0,0 +1,16 @@
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(
+ type='DaViT', arch='small', out_indices=(3, ), drop_path_rate=0.2),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=768,
+ loss=dict(
+ type='LabelSmoothLoss', label_smooth_val=0.1, mode='original'),
+ ),
+ train_cfg=dict(augments=[
+ dict(type='Mixup', alpha=0.8),
+ dict(type='CutMix', alpha=1.0)
+ ]))
diff --git a/configs/_base_/models/davit/davit-tiny.py b/configs/_base_/models/davit/davit-tiny.py
new file mode 100644
index 00000000000..29432d28bd0
--- /dev/null
+++ b/configs/_base_/models/davit/davit-tiny.py
@@ -0,0 +1,16 @@
+model = dict(
+ type='ImageClassifier',
+ backbone=dict(
+ type='DaViT', arch='t', out_indices=(3, ), drop_path_rate=0.1),
+ neck=dict(type='GlobalAveragePooling'),
+ head=dict(
+ type='LinearClsHead',
+ num_classes=1000,
+ in_channels=768,
+ loss=dict(
+ type='LabelSmoothLoss', label_smooth_val=0.1, mode='original'),
+ ),
+ train_cfg=dict(augments=[
+ dict(type='Mixup', alpha=0.8),
+ dict(type='CutMix', alpha=1.0)
+ ]))
diff --git a/configs/davit/README.md b/configs/davit/README.md
new file mode 100644
index 00000000000..1d026f02ce9
--- /dev/null
+++ b/configs/davit/README.md
@@ -0,0 +1,38 @@
+# DaViT
+
+> [DaViT: Dual Attention Vision Transformers](https://arxiv.org/abs/2204.03645v1)
+
+<!-- [ALGORITHM] -->
+
+## Abstract
+
+In this work, we introduce Dual Attention Vision Transformers (DaViT), a simple yet effective vision transformer architecture that is able to capture global context while maintaining computational efficiency. We propose approaching the problem from an orthogonal angle: exploiting self-attention mechanisms with both "spatial tokens" and "channel tokens". With spatial tokens, the spatial dimension defines the token scope, and the channel dimension defines the token feature dimension. With channel tokens, we have the inverse: the channel dimension defines the token scope, and the spatial dimension defines the token feature dimension. We further group tokens along the sequence direction for both spatial and channel tokens to maintain the linear complexity of the entire model. We show that these two self-attentions complement each other: (i) since each channel token contains an abstract representation of the entire image, the channel attention naturally captures global interactions and representations by taking all spatial positions into account when computing attention scores between channels; (ii) the spatial attention refines the local representations by performing fine-grained interactions across spatial locations, which in turn helps the global information modeling in channel attention. Extensive experiments show our DaViT achieves state-of-the-art performance on four different tasks with efficient computations. Without extra data, DaViT-Tiny, DaViT-Small, and DaViT-Base achieve 82.8%, 84.2%, and 84.6% top-1 accuracy on ImageNet-1K with 28.3M, 49.7M, and 87.9M parameters, respectively. When we further scale up DaViT with 1.5B weakly supervised image and text pairs, DaViT-Giant reaches 90.4% top-1 accuracy on ImageNet-1K.
+
+<div align=center>
+<img src="https://user-images.githubusercontent.com/24734142/196125065-e232409b-f710-4729-b657-4e5f9158f2d1.png" width="90%"/>
+</div>
+
+## Results and models
+
+### ImageNet-1k
+
+| Model | Pretrain | resolution | Params(M) | Flops(G) | Top-1 (%) | Top-5 (%) | Config | Download |
+| :-------: | :----------: | :--------: | :-------: | :------: | :-------: | :-------: | :------------------------------------: | :----------------------------------------------------------------------------------------------: |
+| DaViT-T\* | From scratch | 224x224 | 28.36 | 4.54 | 82.24 | 96.13 | [config](./davit-tiny_4xb256_in1k.py) | [model](https://download.openmmlab.com/mmclassification/v0/davit/davit-tiny_3rdparty_in1k_20221116-700fdf7d.pth) |
+| DaViT-S\* | From scratch | 224x224 | 49.74 | 8.79 | 83.61 | 96.75 | [config](./davit-small_4xb256_in1k.py) | [model](https://download.openmmlab.com/mmclassification/v0/davit/davit-small_3rdparty_in1k_20221116-51a849a6.pth) |
+| DaViT-B\* | From scratch | 224x224 | 87.95 | 15.5 | 84.09 | 96.82 | [config](./davit-base_4xb256_in1k.py) | [model](https://download.openmmlab.com/mmclassification/v0/davit/davit-base_3rdparty_in1k_20221116-19e0d956.pth) |
+
+*Models with * are converted from the [official repo](https://github.com/dingmyu/davit). The config files of these models are only for validation. We don't ensure these config files' training accuracy and welcome you to contribute your reproduction results.*
+
+Note: Inference accuracy is slightly lower than the paper's result because the official repo doesn't provide classification inference code.
+
+## Citation
+
+```
+@inproceedings{ding2022davit,
+ title={DaViT: Dual Attention Vision Transformer},
+ author={Ding, Mingyu and Xiao, Bin and Codella, Noel and Luo, Ping and Wang, Jingdong and Yuan, Lu},
+ booktitle={ECCV},
+ year={2022},
+}
+```
diff --git a/configs/davit/davit-base_4xb256_in1k.py b/configs/davit/davit-base_4xb256_in1k.py
new file mode 100644
index 00000000000..071702fa7b6
--- /dev/null
+++ b/configs/davit/davit-base_4xb256_in1k.py
@@ -0,0 +1,9 @@
+_base_ = [
+ '../_base_/models/davit/davit-base.py',
+ '../_base_/datasets/imagenet_bs256_davit_224.py',
+ '../_base_/schedules/imagenet_bs1024_adamw_swin.py',
+ '../_base_/default_runtime.py'
+]
+
+# data settings
+train_dataloader = dict(batch_size=256)
diff --git a/configs/davit/davit-small_4xb256_in1k.py b/configs/davit/davit-small_4xb256_in1k.py
new file mode 100644
index 00000000000..e341031016c
--- /dev/null
+++ b/configs/davit/davit-small_4xb256_in1k.py
@@ -0,0 +1,9 @@
+_base_ = [
+ '../_base_/models/davit/davit-small.py',
+ '../_base_/datasets/imagenet_bs256_davit_224.py',
+ '../_base_/schedules/imagenet_bs1024_adamw_swin.py',
+ '../_base_/default_runtime.py'
+]
+
+# data settings
+train_dataloader = dict(batch_size=256)
diff --git a/configs/davit/davit-tiny_4xb256_in1k.py b/configs/davit/davit-tiny_4xb256_in1k.py
new file mode 100644
index 00000000000..a16d87f4630
--- /dev/null
+++ b/configs/davit/davit-tiny_4xb256_in1k.py
@@ -0,0 +1,9 @@
+_base_ = [
+ '../_base_/models/davit/davit-tiny.py',
+ '../_base_/datasets/imagenet_bs256_davit_224.py',
+ '../_base_/schedules/imagenet_bs1024_adamw_swin.py',
+ '../_base_/default_runtime.py'
+]
+
+# data settings
+train_dataloader = dict(batch_size=256)
diff --git a/configs/davit/metafile.yml b/configs/davit/metafile.yml
new file mode 100644
index 00000000000..02b4933dc59
--- /dev/null
+++ b/configs/davit/metafile.yml
@@ -0,0 +1,71 @@
+Collections:
+ - Name: DaViT
+ Metadata:
+ Architecture:
+ - GELU
+ - Layer Normalization
+ - Multi-Head Attention
+ - Scaled Dot-Product Attention
+ Paper:
+ URL: https://arxiv.org/abs/2204.03645v1
+ Title: 'DaViT: Dual Attention Vision Transformers'
+ README: configs/davit/README.md
+ Code:
+ URL: https://github.com/open-mmlab/mmclassification/blob/v1.0.0rc3/mmcls/models/backbones/davit.py
+ Version: v1.0.0rc3
+
+Models:
+ - Name: davit-tiny_3rdparty_in1k
+ In Collection: DaViT
+ Metadata:
+ FLOPs: 4539698688
+ Parameters: 28360168
+ Training Data:
+ - ImageNet-1k
+ Results:
+ - Dataset: ImageNet-1k
+ Task: Image Classification
+ Metrics:
+ Top 1 Accuracy: 82.24
+ Top 5 Accuracy: 96.13
+ Weights: https://download.openmmlab.com/mmclassification/v0/davit/davit-tiny_3rdparty_in1k_20221116-700fdf7d.pth
+ Converted From:
+ Weights: https://drive.google.com/file/d/1RSpi3lxKaloOL5-or20HuG975tbPwxRZ/view?usp=sharing
+ Code: https://github.com/dingmyu/davit/blob/main/mmdet/mmdet/models/backbones/davit.py#L355
+ Config: configs/davit/davit-tiny_4xb256_in1k.py
+ - Name: davit-small_3rdparty_in1k
+ In Collection: DaViT
+ Metadata:
+ FLOPs: 8799942144
+ Parameters: 49745896
+ Training Data:
+ - ImageNet-1k
+ Results:
+ - Dataset: ImageNet-1k
+ Task: Image Classification
+ Metrics:
+ Top 1 Accuracy: 83.61
+ Top 5 Accuracy: 96.75
+ Weights: https://download.openmmlab.com/mmclassification/v0/davit/davit-small_3rdparty_in1k_20221116-51a849a6.pth
+ Converted From:
+ Weights: https://drive.google.com/file/d/1q976ruj45mt0RhO9oxhOo6EP_cmj4ahQ/view?usp=sharing
+ Code: https://github.com/dingmyu/davit/blob/main/mmdet/mmdet/models/backbones/davit.py#L355
+ Config: configs/davit/davit-small_4xb256_in1k.py
+ - Name: davit-base_3rdparty_in1k
+ In Collection: DaViT
+ Metadata:
+ FLOPs: 15509702656
+ Parameters: 87954408
+ Training Data:
+ - ImageNet-1k
+ Results:
+ - Dataset: ImageNet-1k
+ Task: Image Classification
+ Metrics:
+ Top 1 Accuracy: 84.09
+ Top 5 Accuracy: 96.82
+ Weights: https://download.openmmlab.com/mmclassification/v0/davit/davit-base_3rdparty_in1k_20221116-19e0d956.pth
+ Converted From:
+ Weights: https://drive.google.com/file/d/1u9sDBEueB-YFuLigvcwf4b2YyA4MIVsZ/view?usp=sharing
+ Code: https://github.com/dingmyu/davit/blob/main/mmdet/mmdet/models/backbones/davit.py#L355
+ Config: configs/davit/davit-base_4xb256_in1k.py
diff --git a/docs/en/api/models.rst b/docs/en/api/models.rst
index 8a30f9485ad..8442b7a28a2 100644
--- a/docs/en/api/models.rst
+++ b/docs/en/api/models.rst
@@ -65,6 +65,7 @@ Backbones
Conformer
ConvMixer
ConvNeXt
+ DaViT
DeiT3
DenseNet
DistilledVisionTransformer
diff --git a/mmcls/models/backbones/__init__.py b/mmcls/models/backbones/__init__.py
index 97666124030..35f410b766e 100644
--- a/mmcls/models/backbones/__init__.py
+++ b/mmcls/models/backbones/__init__.py
@@ -4,6 +4,7 @@
from .convmixer import ConvMixer
from .convnext import ConvNeXt
from .cspnet import CSPDarkNet, CSPNet, CSPResNet, CSPResNeXt
+from .davit import DaViT
from .deit import DistilledVisionTransformer
from .deit3 import DeiT3
from .densenet import DenseNet
@@ -93,4 +94,5 @@
'DeiT3',
'HorNet',
'MobileViT',
+ 'DaViT',
]
diff --git a/mmcls/models/backbones/davit.py b/mmcls/models/backbones/davit.py
new file mode 100644
index 00000000000..2febe5da3d3
--- /dev/null
+++ b/mmcls/models/backbones/davit.py
@@ -0,0 +1,834 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from copy import deepcopy
+from typing import Sequence, Tuple
+
+import torch
+import torch.nn as nn
+import torch.utils.checkpoint as cp
+from mmcv.cnn import build_conv_layer, build_norm_layer
+from mmcv.cnn.bricks import Conv2d
+from mmcv.cnn.bricks.transformer import FFN, AdaptivePadding, PatchEmbed
+from mmengine.model import BaseModule, ModuleList
+from mmengine.utils import to_2tuple
+from mmengine.utils.dl_utils.parrots_wrapper import _BatchNorm
+
+from mmcls.models.backbones.base_backbone import BaseBackbone
+from mmcls.registry import MODELS
+from ..utils import ShiftWindowMSA
+
+
+class DaViTWindowMSA(BaseModule):
+ """Window based multi-head self-attention (W-MSA) module for DaViT.
+
+ The differences between DaViTWindowMSA & WindowMSA:
+ 1. Without relative position bias.
+
+ Args:
+ embed_dims (int): Number of input channels.
+ window_size (tuple[int]): The height and width of the window.
+ num_heads (int): Number of attention heads.
+ qkv_bias (bool, optional): If True, add a learnable bias to q, k, v.
+ Defaults to True.
+ qk_scale (float, optional): Override default qk scale of
+ ``head_dim ** -0.5`` if set. Defaults to None.
+ attn_drop (float, optional): Dropout ratio of attention weight.
+ Defaults to 0.
+ proj_drop (float, optional): Dropout ratio of output. Defaults to 0.
+ init_cfg (dict, optional): The extra config for initialization.
+ Defaults to None.
+ """
+
+ def __init__(self,
+ embed_dims,
+ window_size,
+ num_heads,
+ qkv_bias=True,
+ qk_scale=None,
+ attn_drop=0.,
+ proj_drop=0.,
+ init_cfg=None):
+
+ super().__init__(init_cfg)
+ self.embed_dims = embed_dims
+ self.window_size = window_size # Wh, Ww
+ self.num_heads = num_heads
+ head_embed_dims = embed_dims // num_heads
+ self.scale = qk_scale or head_embed_dims**-0.5
+
+ self.qkv = nn.Linear(embed_dims, embed_dims * 3, bias=qkv_bias)
+ self.attn_drop = nn.Dropout(attn_drop)
+ self.proj = nn.Linear(embed_dims, embed_dims)
+ self.proj_drop = nn.Dropout(proj_drop)
+
+ self.softmax = nn.Softmax(dim=-1)
+
+ def forward(self, x, mask=None):
+ """
+ Args:
+
+ x (tensor): input features with shape of (num_windows*B, N, C)
+ mask (tensor, Optional): mask with shape of (num_windows, Wh*Ww,
+ Wh*Ww), value should be between (-inf, 0].
+ """
+ B_, N, C = x.shape
+ qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads,
+ C // self.num_heads).permute(2, 0, 3, 1, 4)
+ q, k, v = qkv[0], qkv[1], qkv[
+ 2] # make torchscript happy (cannot use tensor as tuple)
+
+ q = q * self.scale
+ attn = (q @ k.transpose(-2, -1))
+
+ if mask is not None:
+ nW = mask.shape[0]
+ attn = attn.view(B_ // nW, nW, self.num_heads, N,
+ N) + mask.unsqueeze(1).unsqueeze(0)
+ attn = attn.view(-1, self.num_heads, N, N)
+ attn = self.softmax(attn)
+ else:
+ attn = self.softmax(attn)
+
+ attn = self.attn_drop(attn)
+
+ x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
+ x = self.proj(x)
+ x = self.proj_drop(x)
+ return x
+
+ @staticmethod
+ def double_step_seq(step1, len1, step2, len2):
+ seq1 = torch.arange(0, step1 * len1, step1)
+ seq2 = torch.arange(0, step2 * len2, step2)
+ return (seq1[:, None] + seq2[None, :]).reshape(1, -1)
+
+
+class ConvPosEnc(BaseModule):
+ """DaViT conv pos encode block.
+
+ Args:
+ embed_dims (int): Number of input channels.
+ kernel_size (int): The kernel size of the first convolution.
+ Defaults to 3.
+ init_cfg (dict, optional): The extra config for initialization.
+ Defaults to None.
+ """
+
+ def __init__(self, embed_dims, kernel_size=3, init_cfg=None):
+ super(ConvPosEnc, self).__init__(init_cfg)
+ self.proj = Conv2d(
+ embed_dims,
+ embed_dims,
+ kernel_size,
+ stride=1,
+ padding=kernel_size // 2,
+ groups=embed_dims)
+
+ def forward(self, x, size: Tuple[int, int]):
+ B, N, C = x.shape
+ H, W = size
+ assert N == H * W
+
+ feat = x.transpose(1, 2).view(B, C, H, W)
+ feat = self.proj(feat)
+ feat = feat.flatten(2).transpose(1, 2)
+ x = x + feat
+ return x
+
+
+class DaViTDownSample(BaseModule):
+    """DaViT downsample block.
+
+ Args:
+ in_channels (int): The number of input channels.
+ out_channels (int): The number of output channels.
+ conv_type (str): The type of convolution
+ to generate patch embedding. Default: "Conv2d".
+ kernel_size (int): The kernel size of the first convolution.
+ Defaults to 2.
+        stride (int): The stride of the convolution module.
+ Defaults to 2.
+        padding (int | tuple | string): The padding length of
+            embedding conv. When it is a string, it means the mode
+            of adaptive padding, supporting "same" and "corner" now.
+            Defaults to "same".
+ dilation (int): Dilation of the convolution layers. Defaults to 1.
+ bias (bool): Bias of embed conv. Default: True.
+ norm_cfg (dict, optional): Config dict for normalization layer.
+ Defaults to ``dict(type='LN')``.
+ init_cfg (dict, optional): The extra config for initialization.
+ Defaults to None.
+ """
+
+ def __init__(self,
+ in_channels,
+ out_channels,
+ conv_type='Conv2d',
+ kernel_size=2,
+ stride=2,
+ padding='same',
+ dilation=1,
+ bias=True,
+ norm_cfg=None,
+ init_cfg=None):
+ super().__init__(init_cfg=init_cfg)
+ self.out_channels = out_channels
+ if stride is None:
+ stride = kernel_size
+
+ kernel_size = to_2tuple(kernel_size)
+ stride = to_2tuple(stride)
+ dilation = to_2tuple(dilation)
+
+ if isinstance(padding, str):
+ self.adaptive_padding = AdaptivePadding(
+ kernel_size=kernel_size,
+ stride=stride,
+ dilation=dilation,
+ padding=padding)
+ # disable the padding of conv
+ padding = 0
+ else:
+ self.adaptive_padding = None
+ padding = to_2tuple(padding)
+
+ self.projection = build_conv_layer(
+ dict(type=conv_type),
+ in_channels=in_channels,
+ out_channels=out_channels,
+ kernel_size=kernel_size,
+ stride=stride,
+ padding=padding,
+ dilation=dilation,
+ bias=bias)
+
+ if norm_cfg is not None:
+ self.norm = build_norm_layer(norm_cfg, in_channels)[1]
+ else:
+ self.norm = None
+
+ def forward(self, x, input_size):
+ if self.adaptive_padding:
+ x = self.adaptive_padding(x)
+ H, W = input_size
+ B, L, C = x.shape
+ assert L == H * W, 'input feature has wrong size'
+
+ x = self.norm(x)
+ x = x.reshape(B, H, W, C).permute(0, 3, 1, 2).contiguous()
+
+ x = self.projection(x)
+ output_size = (x.size(2), x.size(3))
+ x = x.flatten(2).transpose(1, 2)
+ return x, output_size
+
+
+class ChannelAttention(BaseModule):
+ """DaViT channel attention.
+
+ Args:
+ embed_dims (int): Number of input channels.
+ num_heads (int): Number of attention heads.
+ qkv_bias (bool): enable bias for qkv if True. Defaults to True.
+ init_cfg (dict, optional): The extra config for initialization.
+ Defaults to None.
+ """
+
+ def __init__(self, embed_dims, num_heads=8, qkv_bias=False, init_cfg=None):
+ super().__init__(init_cfg)
+ self.embed_dims = embed_dims
+ self.num_heads = num_heads
+ self.head_dims = embed_dims // num_heads
+ self.scale = self.head_dims**-0.5
+
+ self.qkv = nn.Linear(embed_dims, embed_dims * 3, bias=qkv_bias)
+ self.proj = nn.Linear(embed_dims, embed_dims)
+
+ def forward(self, x):
+ B, N, _ = x.shape
+
+ qkv = self.qkv(x).reshape(B, N, 3, self.num_heads,
+ self.head_dims).permute(2, 0, 3, 1, 4)
+ q, k, v = qkv[0], qkv[1], qkv[2]
+
+ k = k * self.scale
+ attention = k.transpose(-1, -2) @ v
+ attention = attention.softmax(dim=-1)
+
+ x = (attention @ q.transpose(-1, -2)).transpose(-1, -2)
+ x = x.transpose(1, 2).reshape(B, N, self.embed_dims)
+ x = self.proj(x)
+ return x
+
+
+class ChannelBlock(BaseModule):
+ """DaViT channel attention block.
+
+ Args:
+ embed_dims (int): Number of input channels.
+ num_heads (int): Number of attention heads.
+ window_size (int): The height and width of the window. Defaults to 7.
+ ffn_ratio (float): The expansion ratio of feedforward network hidden
+ layer channels. Defaults to 4.
+ qkv_bias (bool): enable bias for qkv if True. Defaults to True.
+ drop_path (float): The drop path rate after attention and ffn.
+ Defaults to 0.
+ ffn_cfgs (dict): The extra config of FFN. Defaults to empty dict.
+ norm_cfg (dict): The config of norm layers.
+ Defaults to ``dict(type='LN')``.
+ with_cp (bool): Use checkpoint or not. Using checkpoint will save some
+ memory while slowing down the training speed. Defaults to False.
+ init_cfg (dict, optional): The extra config for initialization.
+ Defaults to None.
+ """
+
+ def __init__(self,
+ embed_dims,
+ num_heads,
+ ffn_ratio=4.,
+ qkv_bias=False,
+ drop_path=0.,
+ ffn_cfgs=dict(),
+ norm_cfg=dict(type='LN'),
+ with_cp=False,
+ init_cfg=None):
+ super().__init__(init_cfg)
+ self.with_cp = with_cp
+
+ self.cpe1 = ConvPosEnc(embed_dims=embed_dims, kernel_size=3)
+ self.norm1 = build_norm_layer(norm_cfg, embed_dims)[1]
+ self.attn = ChannelAttention(
+ embed_dims, num_heads=num_heads, qkv_bias=qkv_bias)
+ self.cpe2 = ConvPosEnc(embed_dims=embed_dims, kernel_size=3)
+
+ _ffn_cfgs = {
+ 'embed_dims': embed_dims,
+ 'feedforward_channels': int(embed_dims * ffn_ratio),
+ 'num_fcs': 2,
+ 'ffn_drop': 0,
+ 'dropout_layer': dict(type='DropPath', drop_prob=drop_path),
+ 'act_cfg': dict(type='GELU'),
+ **ffn_cfgs
+ }
+ self.norm2 = build_norm_layer(norm_cfg, embed_dims)[1]
+ self.ffn = FFN(**_ffn_cfgs)
+
+ def forward(self, x, hw_shape):
+
+ def _inner_forward(x):
+ x = self.cpe1(x, hw_shape)
+ identity = x
+ x = self.norm1(x)
+ x = self.attn(x)
+ x = x + identity
+
+ x = self.cpe2(x, hw_shape)
+ identity = x
+ x = self.norm2(x)
+ x = self.ffn(x, identity=identity)
+
+ return x
+
+ if self.with_cp and x.requires_grad:
+ x = cp.checkpoint(_inner_forward, x)
+ else:
+ x = _inner_forward(x)
+
+ return x
+
+
+class SpatialBlock(BaseModule):
+ """DaViT spatial attention block.
+
+ Args:
+ embed_dims (int): Number of input channels.
+ num_heads (int): Number of attention heads.
+ window_size (int): The height and width of the window. Defaults to 7.
+ ffn_ratio (float): The expansion ratio of feedforward network hidden
+ layer channels. Defaults to 4.
+ qkv_bias (bool): enable bias for qkv if True. Defaults to True.
+ drop_path (float): The drop path rate after attention and ffn.
+ Defaults to 0.
+ pad_small_map (bool): If True, pad the small feature map to the window
+ size, which is common used in detection and segmentation. If False,
+ avoid shifting window and shrink the window size to the size of
+ feature map, which is common used in classification.
+ Defaults to False.
+ attn_cfgs (dict): The extra config of Shift Window-MSA.
+ Defaults to empty dict.
+ ffn_cfgs (dict): The extra config of FFN. Defaults to empty dict.
+ norm_cfg (dict): The config of norm layers.
+ Defaults to ``dict(type='LN')``.
+ with_cp (bool): Use checkpoint or not. Using checkpoint will save some
+ memory while slowing down the training speed. Defaults to False.
+ init_cfg (dict, optional): The extra config for initialization.
+ Defaults to None.
+ """
+
+ def __init__(self,
+ embed_dims,
+ num_heads,
+ window_size=7,
+ ffn_ratio=4.,
+ qkv_bias=True,
+ drop_path=0.,
+ pad_small_map=False,
+ attn_cfgs=dict(),
+ ffn_cfgs=dict(),
+ norm_cfg=dict(type='LN'),
+ with_cp=False,
+ init_cfg=None):
+
+ super(SpatialBlock, self).__init__(init_cfg)
+ self.with_cp = with_cp
+
+ self.cpe1 = ConvPosEnc(embed_dims=embed_dims, kernel_size=3)
+ self.norm1 = build_norm_layer(norm_cfg, embed_dims)[1]
+ _attn_cfgs = {
+ 'embed_dims': embed_dims,
+ 'num_heads': num_heads,
+ 'shift_size': 0,
+ 'window_size': window_size,
+ 'dropout_layer': dict(type='DropPath', drop_prob=drop_path),
+ 'qkv_bias': qkv_bias,
+ 'pad_small_map': pad_small_map,
+ 'window_msa': DaViTWindowMSA,
+ **attn_cfgs
+ }
+ self.attn = ShiftWindowMSA(**_attn_cfgs)
+ self.cpe2 = ConvPosEnc(embed_dims=embed_dims, kernel_size=3)
+
+ _ffn_cfgs = {
+ 'embed_dims': embed_dims,
+ 'feedforward_channels': int(embed_dims * ffn_ratio),
+ 'num_fcs': 2,
+ 'ffn_drop': 0,
+ 'dropout_layer': dict(type='DropPath', drop_prob=drop_path),
+ 'act_cfg': dict(type='GELU'),
+ **ffn_cfgs
+ }
+ self.norm2 = build_norm_layer(norm_cfg, embed_dims)[1]
+ self.ffn = FFN(**_ffn_cfgs)
+
+ def forward(self, x, hw_shape):
+
+ def _inner_forward(x):
+ x = self.cpe1(x, hw_shape)
+ identity = x
+ x = self.norm1(x)
+ x = self.attn(x, hw_shape)
+ x = x + identity
+
+ x = self.cpe2(x, hw_shape)
+ identity = x
+ x = self.norm2(x)
+ x = self.ffn(x, identity=identity)
+
+ return x
+
+ if self.with_cp and x.requires_grad:
+ x = cp.checkpoint(_inner_forward, x)
+ else:
+ x = _inner_forward(x)
+
+ return x
+
+
+class DaViTBlock(BaseModule):
+ """DaViT block.
+
+ Args:
+ embed_dims (int): Number of input channels.
+ num_heads (int): Number of attention heads.
+ window_size (int): The height and width of the window. Defaults to 7.
+ ffn_ratio (float): The expansion ratio of feedforward network hidden
+ layer channels. Defaults to 4.
+ qkv_bias (bool): enable bias for qkv if True. Defaults to True.
+ drop_path (float): The drop path rate after attention and ffn.
+ Defaults to 0.
+ pad_small_map (bool): If True, pad the small feature map to the window
+ size, which is common used in detection and segmentation. If False,
+ avoid shifting window and shrink the window size to the size of
+ feature map, which is common used in classification.
+ Defaults to False.
+ attn_cfgs (dict): The extra config of Shift Window-MSA.
+ Defaults to empty dict.
+ ffn_cfgs (dict): The extra config of FFN. Defaults to empty dict.
+ norm_cfg (dict): The config of norm layers.
+ Defaults to ``dict(type='LN')``.
+ with_cp (bool): Use checkpoint or not. Using checkpoint will save some
+ memory while slowing down the training speed. Defaults to False.
+ init_cfg (dict, optional): The extra config for initialization.
+ Defaults to None.
+ """
+
+ def __init__(self,
+ embed_dims,
+ num_heads,
+ window_size=7,
+ ffn_ratio=4.,
+ qkv_bias=True,
+ drop_path=0.,
+ pad_small_map=False,
+ attn_cfgs=dict(),
+ ffn_cfgs=dict(),
+ norm_cfg=dict(type='LN'),
+ with_cp=False,
+ init_cfg=None):
+
+ super(DaViTBlock, self).__init__(init_cfg)
+ self.spatial_block = SpatialBlock(
+ embed_dims,
+ num_heads,
+ window_size=window_size,
+ ffn_ratio=ffn_ratio,
+ qkv_bias=qkv_bias,
+ drop_path=drop_path,
+ pad_small_map=pad_small_map,
+ attn_cfgs=attn_cfgs,
+ ffn_cfgs=ffn_cfgs,
+ norm_cfg=norm_cfg,
+ with_cp=with_cp)
+ self.channel_block = ChannelBlock(
+ embed_dims,
+ num_heads,
+ ffn_ratio=ffn_ratio,
+ qkv_bias=qkv_bias,
+ drop_path=drop_path,
+ ffn_cfgs=ffn_cfgs,
+ norm_cfg=norm_cfg,
+ with_cp=False)
+
+ def forward(self, x, hw_shape):
+ x = self.spatial_block(x, hw_shape)
+ x = self.channel_block(x, hw_shape)
+
+ return x
+
+
+class DaViTBlockSequence(BaseModule):
+ """Module with successive DaViT blocks and downsample layer.
+
+ Args:
+ embed_dims (int): Number of input channels.
+ depth (int): Number of successive DaViT blocks.
+ num_heads (int): Number of attention heads.
+ window_size (int): The height and width of the window. Defaults to 7.
+ ffn_ratio (float): The expansion ratio of feedforward network hidden
+ layer channels. Defaults to 4.
+ qkv_bias (bool): enable bias for qkv if True. Defaults to True.
+ downsample (bool): Downsample the output of blocks by patch merging.
+ Defaults to False.
+ downsample_cfg (dict): The extra config of the patch merging layer.
+ Defaults to empty dict.
+ drop_paths (Sequence[float] | float): The drop path rate in each block.
+ Defaults to 0.
+ block_cfgs (Sequence[dict] | dict): The extra config of each block.
+ Defaults to empty dicts.
+ with_cp (bool): Use checkpoint or not. Using checkpoint will save some
+ memory while slowing down the training speed. Defaults to False.
+ pad_small_map (bool): If True, pad the small feature map to the window
+ size, which is common used in detection and segmentation. If False,
+ avoid shifting window and shrink the window size to the size of
+ feature map, which is common used in classification.
+ Defaults to False.
+ init_cfg (dict, optional): The extra config for initialization.
+ Defaults to None.
+ """
+
+ def __init__(self,
+ embed_dims,
+ depth,
+ num_heads,
+ window_size=7,
+ ffn_ratio=4.,
+ qkv_bias=True,
+ downsample=False,
+ downsample_cfg=dict(),
+ drop_paths=0.,
+ block_cfgs=dict(),
+ with_cp=False,
+ pad_small_map=False,
+ init_cfg=None):
+ super().__init__(init_cfg)
+
+ if not isinstance(drop_paths, Sequence):
+ drop_paths = [drop_paths] * depth
+
+ if not isinstance(block_cfgs, Sequence):
+ block_cfgs = [deepcopy(block_cfgs) for _ in range(depth)]
+
+ self.embed_dims = embed_dims
+ self.blocks = ModuleList()
+ for i in range(depth):
+ _block_cfg = {
+ 'embed_dims': embed_dims,
+ 'num_heads': num_heads,
+ 'window_size': window_size,
+ 'ffn_ratio': ffn_ratio,
+ 'qkv_bias': qkv_bias,
+ 'drop_path': drop_paths[i],
+ 'with_cp': with_cp,
+ 'pad_small_map': pad_small_map,
+ **block_cfgs[i]
+ }
+ block = DaViTBlock(**_block_cfg)
+ self.blocks.append(block)
+
+ if downsample:
+ _downsample_cfg = {
+ 'in_channels': embed_dims,
+ 'out_channels': 2 * embed_dims,
+ 'norm_cfg': dict(type='LN'),
+ **downsample_cfg
+ }
+ self.downsample = DaViTDownSample(**_downsample_cfg)
+ else:
+ self.downsample = None
+
+ def forward(self, x, in_shape, do_downsample=True):
+ for block in self.blocks:
+ x = block(x, in_shape)
+
+ if self.downsample is not None and do_downsample:
+ x, out_shape = self.downsample(x, in_shape)
+ else:
+ out_shape = in_shape
+ return x, out_shape
+
+ @property
+ def out_channels(self):
+ if self.downsample:
+ return self.downsample.out_channels
+ else:
+ return self.embed_dims
+
+
[email protected]_module()
+class DaViT(BaseBackbone):
+ """DaViT.
+
+ A PyTorch implement of : `DaViT: Dual Attention Vision Transformers
+ <https://arxiv.org/abs/2204.03645v1>`_
+
+ Inspiration from
+ https://github.com/dingmyu/davit
+
+ Args:
+ arch (str | dict): DaViT architecture. If use string, choose from
+ 'tiny', 'small', 'base' and 'large', 'huge', 'giant'. If use dict,
+ it should have below keys:
+
+ - **embed_dims** (int): The dimensions of embedding.
+ - **depths** (List[int]): The number of blocks in each stage.
+ - **num_heads** (List[int]): The number of heads in attention
+ modules of each stage.
+
+ Defaults to 't'.
+ patch_size (int | tuple): The patch size in patch embedding.
+ Defaults to 4.
+ in_channels (int): The num of input channels. Defaults to 3.
+ window_size (int): The height and width of the window. Defaults to 7.
+ ffn_ratio (float): The expansion ratio of feedforward network hidden
+ layer channels. Defaults to 4.
+ qkv_bias (bool): Whether to add bias for qkv in attention modules.
+ Defaults to True.
+ drop_path_rate (float): Stochastic depth rate. Defaults to 0.1.
+ out_after_downsample (bool): Whether to output the feature map of a
+ stage after the following downsample layer. Defaults to False.
+ pad_small_map (bool): If True, pad the small feature map to the window
+ size, which is common used in detection and segmentation. If False,
+ avoid shifting window and shrink the window size to the size of
+ feature map, which is common used in classification.
+ Defaults to False.
+ norm_cfg (dict): Config dict for normalization layer for all output
+ features. Defaults to ``dict(type='LN')``
+ stage_cfgs (Sequence[dict] | dict): Extra config dict for each
+ stage. Defaults to an empty dict.
+ frozen_stages (int): Stages to be frozen (stop grad and set eval mode).
+ -1 means not freezing any parameters. Defaults to -1.
+ norm_eval (bool): Whether to set norm layers to eval mode, namely,
+ freeze running stats (mean and var). Note: Effect on Batch Norm
+ and its variants only. Defaults to False.
+ out_indices (Sequence | int): Output from which stages.
+ Defaults to -1, means the last stage.
+ with_cp (bool): Use checkpoint or not. Using checkpoint will save some
+ memory while slowing down the training speed. Defaults to False.
+ init_cfg (dict, optional): The Config for initialization.
+ Defaults to None.
+ """
+ arch_zoo = {
+ **dict.fromkeys(['t', 'tiny'], {
+ 'embed_dims': 96,
+ 'depths': [1, 1, 3, 1],
+ 'num_heads': [3, 6, 12, 24]
+ }),
+ **dict.fromkeys(['s', 'small'], {
+ 'embed_dims': 96,
+ 'depths': [1, 1, 9, 1],
+ 'num_heads': [3, 6, 12, 24]
+ }),
+ **dict.fromkeys(['b', 'base'], {
+ 'embed_dims': 128,
+ 'depths': [1, 1, 9, 1],
+ 'num_heads': [4, 8, 16, 32]
+ }),
+ **dict.fromkeys(
+ ['l', 'large'], {
+ 'embed_dims': 192,
+ 'depths': [1, 1, 9, 1],
+ 'num_heads': [6, 12, 24, 48]
+ }),
+ **dict.fromkeys(
+ ['h', 'huge'], {
+ 'embed_dims': 256,
+ 'depths': [1, 1, 9, 1],
+ 'num_heads': [8, 16, 32, 64]
+ }),
+ **dict.fromkeys(
+ ['g', 'giant'], {
+ 'embed_dims': 384,
+ 'depths': [1, 1, 12, 3],
+ 'num_heads': [12, 24, 48, 96]
+ }),
+ }
+
+ def __init__(self,
+ arch='t',
+ patch_size=4,
+ in_channels=3,
+ window_size=7,
+ ffn_ratio=4.,
+ qkv_bias=True,
+ drop_path_rate=0.1,
+ out_after_downsample=False,
+ pad_small_map=False,
+ norm_cfg=dict(type='LN'),
+ stage_cfgs=dict(),
+ frozen_stages=-1,
+ norm_eval=False,
+ out_indices=(3, ),
+ with_cp=False,
+ init_cfg=None):
+ super().__init__(init_cfg)
+
+ if isinstance(arch, str):
+ arch = arch.lower()
+ assert arch in set(self.arch_zoo), \
+ f'Arch {arch} is not in default archs {set(self.arch_zoo)}'
+ self.arch_settings = self.arch_zoo[arch]
+ else:
+ essential_keys = {'embed_dims', 'depths', 'num_heads'}
+ assert isinstance(arch, dict) and essential_keys <= set(arch), \
+ f'Custom arch needs a dict with keys {essential_keys}'
+ self.arch_settings = arch
+
+ self.embed_dims = self.arch_settings['embed_dims']
+ self.depths = self.arch_settings['depths']
+ self.num_heads = self.arch_settings['num_heads']
+ self.num_layers = len(self.depths)
+ self.out_indices = out_indices
+ self.out_after_downsample = out_after_downsample
+ self.frozen_stages = frozen_stages
+ self.norm_eval = norm_eval
+
+ # stochastic depth decay rule
+ total_depth = sum(self.depths)
+ dpr = [
+ x.item() for x in torch.linspace(0, drop_path_rate, total_depth)
+ ] # stochastic depth decay rule
+
+ _patch_cfg = dict(
+ in_channels=in_channels,
+ embed_dims=self.embed_dims,
+ conv_type='Conv2d',
+ kernel_size=7,
+ stride=patch_size,
+ padding='same',
+ norm_cfg=dict(type='LN'),
+ )
+ self.patch_embed = PatchEmbed(**_patch_cfg)
+
+ self.stages = ModuleList()
+ embed_dims = [self.embed_dims]
+ for i, (depth,
+ num_heads) in enumerate(zip(self.depths, self.num_heads)):
+ if isinstance(stage_cfgs, Sequence):
+ stage_cfg = stage_cfgs[i]
+ else:
+ stage_cfg = deepcopy(stage_cfgs)
+ downsample = True if i < self.num_layers - 1 else False
+ _stage_cfg = {
+ 'embed_dims': embed_dims[-1],
+ 'depth': depth,
+ 'num_heads': num_heads,
+ 'window_size': window_size,
+ 'ffn_ratio': ffn_ratio,
+ 'qkv_bias': qkv_bias,
+ 'downsample': downsample,
+ 'drop_paths': dpr[:depth],
+ 'with_cp': with_cp,
+ 'pad_small_map': pad_small_map,
+ **stage_cfg
+ }
+
+ stage = DaViTBlockSequence(**_stage_cfg)
+ self.stages.append(stage)
+
+ dpr = dpr[depth:]
+ embed_dims.append(stage.out_channels)
+
+ self.num_features = embed_dims[:-1]
+
+ # add a norm layer for each output
+ for i in out_indices:
+ if norm_cfg is not None:
+ norm_layer = build_norm_layer(norm_cfg,
+ self.num_features[i])[1]
+ else:
+ norm_layer = nn.Identity()
+
+ self.add_module(f'norm{i}', norm_layer)
+
+ def train(self, mode=True):
+ super().train(mode)
+ self._freeze_stages()
+ if mode and self.norm_eval:
+ for m in self.modules():
+ # trick: eval have effect on BatchNorm only
+ if isinstance(m, _BatchNorm):
+ m.eval()
+
+ def _freeze_stages(self):
+ if self.frozen_stages >= 0:
+ self.patch_embed.eval()
+ for param in self.patch_embed.parameters():
+ param.requires_grad = False
+
+ for i in range(0, self.frozen_stages + 1):
+ m = self.stages[i]
+ m.eval()
+ for param in m.parameters():
+ param.requires_grad = False
+ for i in self.out_indices:
+ if i <= self.frozen_stages:
+ for param in getattr(self, f'norm{i}').parameters():
+ param.requires_grad = False
+
+ def forward(self, x):
+ x, hw_shape = self.patch_embed(x)
+
+ outs = []
+ for i, stage in enumerate(self.stages):
+ x, hw_shape = stage(
+ x, hw_shape, do_downsample=self.out_after_downsample)
+ if i in self.out_indices:
+ norm_layer = getattr(self, f'norm{i}')
+ out = norm_layer(x)
+ out = out.view(-1, *hw_shape,
+ self.num_features[i]).permute(0, 3, 1,
+ 2).contiguous()
+ outs.append(out)
+ if stage.downsample is not None and not self.out_after_downsample:
+ x, hw_shape = stage.downsample(x, hw_shape)
+
+ return tuple(outs)
diff --git a/model-index.yml b/model-index.yml
index 7d0a60e9472..c190b5a8143 100644
--- a/model-index.yml
+++ b/model-index.yml
@@ -37,3 +37,4 @@ Import:
- configs/deit3/metafile.yml
- configs/hornet/metafile.yml
- configs/mobilevit/metafile.yml
+ - configs/davit/metafile.yml
diff --git a/tools/model_converters/davit_to_mmcls.py b/tools/model_converters/davit_to_mmcls.py
new file mode 100644
index 00000000000..acd76ed563d
--- /dev/null
+++ b/tools/model_converters/davit_to_mmcls.py
@@ -0,0 +1,86 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import argparse
+import os.path as osp
+from collections import OrderedDict
+
+import mmengine
+import torch
+from mmengine.runner import CheckpointLoader
+
+
+def convert_davit(ckpt):
+
+ new_ckpt = OrderedDict()
+
+ for k, v in list(ckpt.items()):
+ new_v = v
+ if k.startswith('patch_embeds.0'):
+ new_k = k.replace('patch_embeds.0', 'patch_embed')
+ new_k = new_k.replace('proj', 'projection')
+ elif k.startswith('patch_embeds'):
+ if k.startswith('patch_embeds.1'):
+ new_k = k.replace('patch_embeds.1', 'stages.0.downsample')
+ elif k.startswith('patch_embeds.2'):
+ new_k = k.replace('patch_embeds.2', 'stages.1.downsample')
+ elif k.startswith('patch_embeds.3'):
+ new_k = k.replace('patch_embeds.3', 'stages.2.downsample')
+ new_k = new_k.replace('proj', 'projection')
+ elif k.startswith('main_blocks'):
+ new_k = k.replace('main_blocks', 'stages')
+ for num_stages in range(4):
+ for num_blocks in range(9):
+ if f'{num_stages}.{num_blocks}.0' in k:
+ new_k = new_k.replace(
+ f'{num_stages}.{num_blocks}.0',
+ f'{num_stages}.blocks.{num_blocks}.spatial_block')
+ elif f'{num_stages}.{num_blocks}.1' in k:
+ new_k = new_k.replace(
+ f'{num_stages}.{num_blocks}.1',
+ f'{num_stages}.blocks.{num_blocks}.channel_block')
+ if 'cpe.0' in k:
+ new_k = new_k.replace('cpe.0', 'cpe1')
+ elif 'cpe.1' in k:
+ new_k = new_k.replace('cpe.1', 'cpe2')
+ if 'mlp' in k:
+ new_k = new_k.replace('mlp.fc1', 'ffn.layers.0.0')
+ new_k = new_k.replace('mlp.fc2', 'ffn.layers.1')
+ if 'spatial_block.attn' in new_k:
+ new_k = new_k.replace('spatial_block.attn',
+ 'spatial_block.attn.w_msa')
+ elif k.startswith('norms'):
+ new_k = k.replace('norms', 'norm3')
+ elif k.startswith('head'):
+ new_k = k.replace('head', 'head.fc')
+ else:
+ new_k = k
+
+ if not new_k.startswith('head'):
+ new_k = 'backbone.' + new_k
+ new_ckpt[new_k] = new_v
+ return new_ckpt
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='Convert keys in pretrained van models to mmcls style.')
+ parser.add_argument('src', help='src model path or url')
+ # The dst path must be a full path of the new checkpoint.
+ parser.add_argument('dst', help='save path')
+ args = parser.parse_args()
+
+ checkpoint = CheckpointLoader.load_checkpoint(args.src, map_location='cpu')
+
+ if 'state_dict' in checkpoint:
+ state_dict = checkpoint['state_dict']
+ else:
+ state_dict = checkpoint
+
+ weight = convert_davit(state_dict)
+ mmengine.mkdir_or_exist(osp.dirname(args.dst))
+ torch.save(weight, args.dst)
+
+ print('Done!!')
+
+
+if __name__ == '__main__':
+ main()
| [Feature] Support DaViT
### Describe the feature
paper: https://arxiv.org/pdf/2204.03645.pdf
code: https://github.com/dingmyu/davit
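For a sense of the intended usage, a minimal sketch — the `arch='t'` alias, the default `out_indices=(3, )`, and the 768-channel / stride-32 output mirror the patch above rather than any published API:

```python
import torch

import mmcls.models  # noqa: F401  # import triggers backbone registration
from mmcls.registry import MODELS

backbone = MODELS.build(dict(type='DaViT', arch='t', out_indices=(3, )))
backbone.eval()

# DaViT-T: patch stride 4 plus three 2x downsample layers gives overall
# stride 32, with 96 * 2**3 = 768 channels in the last stage.
with torch.no_grad():
    feat = backbone(torch.rand(1, 3, 224, 224))[0]
print(feat.shape)  # expected: torch.Size([1, 768, 7, 7])
```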
### Will you implement it?
- [X] I would like to implement this feature and create a PR!
| Thank you for your suggestion. We will consider adding this backbone. Community contributions are also welcome.
I plan to make a PR! | 2022-10-19T01:49:15 | 0.0 | [] | [] |
||
open-mmlab/mmpretrain | open-mmlab__mmpretrain-721 | c708770b427df933e63aff6be0cfe571a18e868a | diff --git a/mmcls/datasets/dataset_wrappers.py b/mmcls/datasets/dataset_wrappers.py
index 6aef6563849..4b471963b4b 100644
--- a/mmcls/datasets/dataset_wrappers.py
+++ b/mmcls/datasets/dataset_wrappers.py
@@ -311,6 +311,14 @@ def __init__(self,
else:
self.indices = indices[:test_start] + indices[test_end:]
+ def get_cat_ids(self, idx):
+ return self.dataset.get_cat_ids(self.indices[idx])
+
+ def get_gt_labels(self):
+ dataset_gt_labels = self.dataset.get_gt_labels()
+ gt_labels = np.array([dataset_gt_labels[idx] for idx in self.indices])
+ return gt_labels
+
def __getitem__(self, idx):
return self.dataset[self.indices[idx]]
| How can I test the accuracy of the model for each class
I want to know how accurate the model is for each class, and how I can get the confusion matrix. Looking forward to getting help.
| Hello, we haven't exposed a confusion matrix calculation API yet, but you can call it directly.
For example, first get the classification score of every sample:
```shell
python tools/test.py your_config_file your_checkpoint --out result.pkl --out-items class_scores
```
And then, calculate the confusion matrix:
```python
>>> import mmcv
>>> from mmcls.datasets import build_dataset
>>> from mmcls.core.evaluation import calculate_confusion_matrix
>>> cfg = mmcv.Config.fromfile("your_config_file")
>>> dataset = build_dataset(cfg.data.test)
>>> pred = mmcv.load("./result.pkl")['class_scores']
>>> matrix = calculate_confusion_matrix(pred, dataset.get_gt_labels())
>>> print(matrix)
tensor([[47., 0., 0., ..., 0., 0., 0.],
[ 0., 46., 0., ..., 0., 0., 0.],
[ 0., 0., 38., ..., 0., 0., 0.],
...,
[ 0., 0., 0., ..., 36., 0., 0.],
[ 0., 0., 0., ..., 0., 24., 0.],
[ 0., 0., 0., ..., 0., 0., 26.]])
>>> import matplotlib.pyplot as plt
>>> plt.imshow(matrix[:20, :20]) # Visualize the first twenty classes.
>>> plt.show()
```

Wow, thank you very much | 2022-03-03T07:12:54 | 0.0 | [] | [] |
||
open-mmlab/mmpretrain | open-mmlab__mmpretrain-694 | bca695b68471ed821261427ca3505d0c1e78ba3e | diff --git a/tools/convert_models/mlpmixer_to_mmcls.py b/tools/convert_models/mlpmixer_to_mmcls.py
index 5e16d6a51bc..74e25083c78 100644
--- a/tools/convert_models/mlpmixer_to_mmcls.py
+++ b/tools/convert_models/mlpmixer_to_mmcls.py
@@ -49,7 +49,7 @@ def convert_weights(weight):
dst = Path(args.dst)
if dst.suffix != '.pth':
print('The path should contain the name of the pth format file.')
- exit()
+ exit(1)
dst.parent.mkdir(parents=True, exist_ok=True)
original_model = torch.load(args.src, map_location='cpu')
diff --git a/tools/convert_models/reparameterize_repvgg.py b/tools/convert_models/reparameterize_repvgg.py
index 0eb7b203b42..4102398a5f0 100644
--- a/tools/convert_models/reparameterize_repvgg.py
+++ b/tools/convert_models/reparameterize_repvgg.py
@@ -35,7 +35,7 @@ def main():
save_path = Path(args.save_path)
if save_path.suffix != '.pth':
print('The path should contain the name of the pth format file.')
- exit()
+ exit(1)
save_path.parent.mkdir(parents=True, exist_ok=True)
convert_repvggblock_param(args.config_path, args.checkpoint_path,
diff --git a/tools/convert_models/repvgg_to_mmcls.py b/tools/convert_models/repvgg_to_mmcls.py
index fb281010839..c86decbf54d 100644
--- a/tools/convert_models/repvgg_to_mmcls.py
+++ b/tools/convert_models/repvgg_to_mmcls.py
@@ -49,7 +49,7 @@ def main():
dst = Path(args.dst)
if dst.suffix != '.pth':
print('The path should contain the name of the pth format file.')
- exit()
+ exit(1)
dst.parent.mkdir(parents=True, exist_ok=True)
convert(args.src, args.dst)
| The return code should not be zero if a script fails.
https://github.com/open-mmlab/mmclassification/blob/f9eb9b409b65361167446c7c28e240f64aa7cf18/tools/deployment/test.py#L83
https://github.com/open-mmlab/mmclassification/blob/fc8adbc1490efa158cce66f4d0ed3a981a8a8d3b/tools/convert_models/mlpmixer_to_mmcls.py#L52
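For context, a minimal sketch of the convention the scripts should follow — the suffix check mirrors the converters patched above:

```python
import sys
from pathlib import Path


def main() -> int:
    dst = Path('tmp.txt')  # deliberately wrong suffix for illustration
    if dst.suffix != '.pth':
        print('The path should contain the name of the pth format file.')
        return 1  # non-zero status tells the shell / CI that the script failed
    return 0


if __name__ == '__main__':
    sys.exit(main())
```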
| Is any number except zero OK? | 2022-02-15T05:34:37 | 0.0 | [] | [] |
||
open-mmlab/mmpretrain | open-mmlab__mmpretrain-686 | a7f8e96b31c10ab3e9c133293ca406e6e548475b | diff --git a/mmcls/models/backbones/conformer.py b/mmcls/models/backbones/conformer.py
index 0eab9c6a286..fc88620bb1f 100644
--- a/mmcls/models/backbones/conformer.py
+++ b/mmcls/models/backbones/conformer.py
@@ -5,6 +5,7 @@
import torch.nn.functional as F
from mmcv.cnn import build_activation_layer, build_norm_layer
from mmcv.cnn.bricks.drop import DropPath
+from mmcv.cnn.bricks.transformer import AdaptivePadding
from mmcv.cnn.utils.weight_init import trunc_normal_
from mmcls.utils import get_root_logger
@@ -438,9 +439,16 @@ def __init__(self,
self.maxpool = nn.MaxPool2d(
kernel_size=3, stride=2, padding=1) # 1 / 4 [56, 56]
+ assert patch_size % 16 == 0, 'The patch size of Conformer must ' \
+ 'be divisible by 16.'
+ trans_down_stride = patch_size // 4
+
+ # To solve the issue #680
+ # Auto pad the feature map to be divisible by trans_down_stride
+ self.auto_pad = AdaptivePadding(trans_down_stride, trans_down_stride)
+
# 1 stage
stage1_channels = int(base_channels * self.channel_ratio)
- trans_down_stride = patch_size // 4
self.conv_1 = ConvBlock(
in_channels=64,
out_channels=stage1_channels,
@@ -587,6 +595,7 @@ def forward(self, x):
# stem
x_base = self.maxpool(self.act1(self.bn1(self.conv1(x))))
+ x_base = self.auto_pad(x_base)
# 1 stage [N, 64, 56, 56] -> [N, 128, 56, 56]
x = self.conv_1(x_base, out_conv2=False)
| Conformer model does not support different input image sizes
The Conformer backbone raises an error for some input image sizes, such as 241x241 or 242x242, although it works fine with others. Changing the `patch_size` hyperparameter can also trigger tensor size mismatch errors. It would be better if the backbone accepted an `img_size` hyperparameter like other models.
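A minimal reproduction sketch — `arch='tiny'` is an assumed alias; the key point is that 241 is not divisible by the default patch size of 16:

```python
import torch
from mmcls.models.backbones import Conformer

model = Conformer(arch='tiny')
model.eval()

with torch.no_grad():
    model(torch.rand(1, 3, 224, 224))  # 224 is divisible by 16: works
    model(torch.rand(1, 3, 241, 241))  # 241 is not: raises a size-mismatch error
```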
| Sounds like a bug, we will fix it later
> Sounds like a bug, we will fix it later
Thanks | 2022-02-08T04:17:45 | 0.0 | [] | [] |
||
open-mmlab/mmpretrain | open-mmlab__mmpretrain-568 | 00700d570f21c6a4106f86596e45c06106f786b2 | diff --git a/mmcls/core/visualization/image.py b/mmcls/core/visualization/image.py
index 78146d048ec..b89003f1047 100644
--- a/mmcls/core/visualization/image.py
+++ b/mmcls/core/visualization/image.py
@@ -1,11 +1,7 @@
-from threading import Timer
-
-import matplotlib
import matplotlib.pyplot as plt
import mmcv
import numpy as np
from matplotlib.backend_bases import CloseEvent
-from matplotlib.blocking_input import BlockingInput
# A small value
EPS = 1e-2
@@ -41,7 +37,7 @@ class BaseFigureContextManager:
"""
def __init__(self, axis=False, fig_save_cfg={}, fig_show_cfg={}) -> None:
- self.is_inline = 'inline' in matplotlib.get_backend()
+ self.is_inline = 'inline' in plt.get_backend()
# Because save and show need different figure size
# We set two figure and axes to handle save and show
@@ -52,7 +48,6 @@ def __init__(self, axis=False, fig_save_cfg={}, fig_show_cfg={}) -> None:
self.fig_show: plt.Figure = None
self.fig_show_cfg = fig_show_cfg
self.ax_show: plt.Axes = None
- self.blocking_input: BlockingInput = None
self.axis = axis
@@ -83,8 +78,6 @@ def _initialize_fig_show(self):
fig.subplots_adjust(left=0, right=1, bottom=0, top=1)
self.fig_show, self.ax_show = fig, ax
- self.blocking_input = BlockingInput(
- self.fig_show, eventslist=('key_press_event', 'close_event'))
def __exit__(self, exc_type, exc_value, traceback):
if self.is_inline:
@@ -95,14 +88,6 @@ def __exit__(self, exc_type, exc_value, traceback):
plt.close(self.fig_save)
plt.close(self.fig_show)
- try:
- # In matplotlib>=3.4.0, with TkAgg, plt.close will destroy
- # window after idle, need to update manually.
- # Refers to https://github.com/matplotlib/matplotlib/blob/v3.4.x/lib/matplotlib/backends/_backend_tk.py#L470 # noqa: E501
- self.fig_show.canvas.manager.window.update()
- except AttributeError:
- pass
-
def prepare(self):
if self.is_inline:
# if use inline backend, just rebuild the fig_save.
@@ -121,29 +106,59 @@ def prepare(self):
self.ax_show.cla()
self.ax_show.axis(self.axis)
- def wait_continue(self, timeout=0):
+ def wait_continue(self, timeout=0, continue_key=' ') -> int:
+ """Show the image and wait for the user's input.
+
+ This implementation refers to
+ https://github.com/matplotlib/matplotlib/blob/v3.5.x/lib/matplotlib/_blocking_input.py
+
+ Args:
+ timeout (int): If positive, continue after ``timeout`` seconds.
+ Defaults to 0.
+ continue_key (str): The key for users to continue. Defaults to
+ the space key.
+
+ Returns:
+ int: If zero, means time out or the user pressed ``continue_key``,
+ and if one, means the user closed the show figure.
+ """ # noqa: E501
if self.is_inline:
# If use inline backend, interactive input and timeout is no use.
return
- # In matplotlib==3.4.x, with TkAgg, official timeout api of
- # start_event_loop cannot work properly. Use a Timer to directly stop
- # event loop.
- if timeout > 0:
- timer = Timer(timeout, self.fig_show.canvas.stop_event_loop)
- timer.start()
+ if self.fig_show.canvas.manager:
+ # Ensure that the figure is shown
+ self.fig_show.show()
+
while True:
- # Disable matplotlib default hotkey to close figure.
- with plt.rc_context({'keymap.quit': []}):
- key_press = self.blocking_input(n=1, timeout=0)
- # Timeout or figure is closed or press space or press 'q'
- if len(key_press) == 0 or isinstance(
- key_press[0],
- CloseEvent) or key_press[0].key in ['q', ' ']:
- break
- if timeout > 0:
- timer.cancel()
+ # Connect the events to the handler function call.
+ event = None
+
+ def handler(ev):
+ # Set external event variable
+ nonlocal event
+ # Qt backend may fire two events at the same time,
+ # use a condition to avoid missing close event.
+ event = ev if not isinstance(event, CloseEvent) else event
+ self.fig_show.canvas.stop_event_loop()
+
+ cids = [
+ self.fig_show.canvas.mpl_connect(name, handler)
+ for name in ('key_press_event', 'close_event')
+ ]
+
+ try:
+ self.fig_show.canvas.start_event_loop(timeout)
+ finally: # Run even on exception like ctrl-c.
+ # Disconnect the callbacks.
+ for cid in cids:
+ self.fig_show.canvas.mpl_disconnect(cid)
+
+ if isinstance(event, CloseEvent):
+ return 1 # Quit for close.
+ elif event is None or event.key == continue_key:
+ return 0 # Quit for continue.
class ImshowInfosContextManager(BaseFigureContextManager):
@@ -259,6 +274,7 @@ def put_img_infos(self,
if out_file is not None:
mmcv.imwrite(img_save, out_file)
+ ret = 0
if show and not self.is_inline:
# Reserve some space for the tip.
self.ax_show.set_title(win_name)
@@ -274,13 +290,13 @@ def put_img_infos(self,
# Refresh canvas, necessary for Qt5 backend.
self.fig_show.canvas.draw()
- self.wait_continue(timeout=wait_time)
+ ret = self.wait_continue(timeout=wait_time)
elif (not show) and self.is_inline:
# If use inline backend, we use fig_save to show the image
# So we need to close it if users don't want to show.
plt.close(self.fig_save)
- return img_save
+ return ret, img_save
def imshow_infos(img,
@@ -313,7 +329,7 @@ def imshow_infos(img,
np.ndarray: The image with extra infomations.
"""
with ImshowInfosContextManager(fig_size=fig_size) as manager:
- img = manager.put_img_infos(
+ _, img = manager.put_img_infos(
img,
infos,
text_color=text_color,
diff --git a/tools/visualizations/vis_pipeline.py b/tools/visualizations/vis_pipeline.py
index 59fc6efb4a8..e5deda7cbdb 100644
--- a/tools/visualizations/vis_pipeline.py
+++ b/tools/visualizations/vis_pipeline.py
@@ -238,7 +238,7 @@ def main():
infos = dict(label=CLASSES[item['gt_label']])
- manager.put_img_infos(
+ ret, _ = manager.put_img_infos(
image,
infos,
font_size=20,
@@ -248,6 +248,10 @@ def main():
progressBar.update()
+ if ret == 1:
+ print('\nMannualy interrupted.')
+ break
+
if __name__ == '__main__':
main()
| When I'm following the tutorial, after entering the command below, I received: 'BlockingInput' object has no attribute 'figure'
[here is the command]
python demo/image_demo.py demo/demo.JPEG configs/resnet/resnet50_b32x8_imagenet.py https://download.openmmlab.com/mmclassification/v0/resnet/resnet50_8xb32_in1k_20210831-ea4938fc.pth
[here is the error]
Traceback (most recent call last):
File "demo/image_demo.py", line 25, in <module>
main()
File "demo/image_demo.py", line 21, in main
show_result_pyplot(model, args.img, result)
File "/home/ubuntu/anaconda3/envs/mmclassification/lib/python3.7/site-packages/mmcls/apis/inference.py", line 119, in show_result_pyplot
wait_time=wait_time)
File "/home/ubuntu/anaconda3/envs/mmclassification/lib/python3.7/site-packages/mmcls/models/classifiers/base.py", line 213, in show_result
out_file=out_file)
File "/home/ubuntu/anaconda3/envs/mmclassification/lib/python3.7/site-packages/mmcls/core/visualization/image.py", line 325, in imshow_infos
out_file=out_file)
File "/home/ubuntu/anaconda3/envs/mmclassification/lib/python3.7/site-packages/mmcls/core/visualization/image.py", line 277, in put_img_infos
self.wait_continue(timeout=wait_time)
File "/home/ubuntu/anaconda3/envs/mmclassification/lib/python3.7/site-packages/mmcls/core/visualization/image.py", line 138, in wait_continue
key_press = self.blocking_input(n=1, timeout=0)
File "/home/ubuntu/anaconda3/envs/mmclassification/lib/python3.7/site-packages/matplotlib/blocking_input.py", line 86, in __call__
if self.figure.canvas.manager:
AttributeError: 'BlockingInput' object has no attribute 'figure'
That's a known issue; it's caused by a breaking change in newer versions of matplotlib, see https://github.com/matplotlib/matplotlib/pull/21791
I will fix it later; please roll back matplotlib temporarily.
```shell
pip install matplotlib==3.4.3
``` | 2021-11-30T08:43:32 | 0.0 | [] | [] |
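The patched `wait_continue` replaces matplotlib's private `BlockingInput` helper with plain event callbacks: connect handlers, run the canvas event loop, then inspect which event fired. Below is a stripped-down sketch of that same pattern on a bare pyplot figure; the `wait_for_key` name is mine, and an interactive backend such as TkAgg or Qt5Agg is assumed.

```python
import matplotlib.pyplot as plt
from matplotlib.backend_bases import CloseEvent


def wait_for_key(fig, timeout=0, continue_key=' '):
    """Block until `continue_key` is pressed, the figure is closed,
    or `timeout` seconds pass; return 1 on close, 0 otherwise."""
    while True:
        event = None

        def handler(ev):
            nonlocal event
            event = ev
            fig.canvas.stop_event_loop()

        # Public-API replacement for the removed BlockingInput helper.
        cids = [fig.canvas.mpl_connect(name, handler)
                for name in ('key_press_event', 'close_event')]
        try:
            # timeout <= 0 means "wait indefinitely" for this canvas call.
            fig.canvas.start_event_loop(timeout)
        finally:
            for cid in cids:
                fig.canvas.mpl_disconnect(cid)

        if isinstance(event, CloseEvent):
            return 1                       # window was closed
        if event is None or event.key == continue_key:
            return 0                       # timed out or continue key
        # any other key press: keep waiting


fig, ax = plt.subplots()
ax.set_title('press space to continue, or close the window')
fig.show()
print('wait_for_key returned:', wait_for_key(fig, timeout=30))
```

Because only public canvas APIs (`mpl_connect`, `start_event_loop`, `stop_event_loop`) are used, the pattern keeps working after matplotlib removed `matplotlib.blocking_input`.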
||
open-mmlab/mmpretrain | open-mmlab__mmpretrain-563 | 321ad09e6d2dd96af9ba39eaf58d84e76a9d9b33 | diff --git a/mmcls/datasets/__init__.py b/mmcls/datasets/__init__.py
index 64fd5ba5516..167fef5cf3d 100644
--- a/mmcls/datasets/__init__.py
+++ b/mmcls/datasets/__init__.py
@@ -4,7 +4,7 @@
build_dataset, build_sampler)
from .cifar import CIFAR10, CIFAR100
from .dataset_wrappers import (ClassBalancedDataset, ConcatDataset,
- RepeatDataset)
+ KFoldDataset, RepeatDataset)
from .imagenet import ImageNet
from .imagenet21k import ImageNet21k
from .mnist import MNIST, FashionMNIST
@@ -17,5 +17,5 @@
'VOC', 'MultiLabelDataset', 'build_dataloader', 'build_dataset',
'DistributedSampler', 'ConcatDataset', 'RepeatDataset',
'ClassBalancedDataset', 'DATASETS', 'PIPELINES', 'ImageNet21k', 'SAMPLERS',
- 'build_sampler', 'RepeatAugSampler'
+ 'build_sampler', 'RepeatAugSampler', 'KFoldDataset'
]
diff --git a/mmcls/datasets/base_dataset.py b/mmcls/datasets/base_dataset.py
index 3c9edf15b2e..7a2f310925a 100644
--- a/mmcls/datasets/base_dataset.py
+++ b/mmcls/datasets/base_dataset.py
@@ -118,6 +118,7 @@ def evaluate(self,
results,
metric='accuracy',
metric_options=None,
+ indices=None,
logger=None):
"""Evaluate the dataset.
@@ -128,6 +129,8 @@ def evaluate(self,
metric_options (dict, optional): Options for calculating metrics.
Allowed keys are 'topk', 'thrs' and 'average_mode'.
Defaults to None.
+ indices (list, optional): The indices of samples corresponding to
+ the results. Defaults to None.
logger (logging.Logger | str, optional): Logger used for printing
related information during evaluation. Defaults to None.
Returns:
@@ -145,6 +148,8 @@ def evaluate(self,
eval_results = {}
results = np.vstack(results)
gt_labels = self.get_gt_labels()
+ if indices is not None:
+ gt_labels = gt_labels[indices]
num_imgs = len(results)
assert len(gt_labels) == num_imgs, 'dataset testing results should '\
'be of the same length as gt_labels.'
diff --git a/mmcls/datasets/builder.py b/mmcls/datasets/builder.py
index cae66fa9937..544f64d7d8e 100644
--- a/mmcls/datasets/builder.py
+++ b/mmcls/datasets/builder.py
@@ -1,4 +1,5 @@
# Copyright (c) OpenMMLab. All rights reserved.
+import copy
import platform
import random
from functools import partial
@@ -25,7 +26,7 @@
def build_dataset(cfg, default_args=None):
from .dataset_wrappers import (ConcatDataset, RepeatDataset,
- ClassBalancedDataset)
+ ClassBalancedDataset, KFoldDataset)
if isinstance(cfg, (list, tuple)):
dataset = ConcatDataset([build_dataset(c, default_args) for c in cfg])
elif cfg['type'] == 'RepeatDataset':
@@ -34,6 +35,13 @@ def build_dataset(cfg, default_args=None):
elif cfg['type'] == 'ClassBalancedDataset':
dataset = ClassBalancedDataset(
build_dataset(cfg['dataset'], default_args), cfg['oversample_thr'])
+ elif cfg['type'] == 'KFoldDataset':
+ cp_cfg = copy.deepcopy(cfg)
+ if cp_cfg.get('test_mode', None) is None:
+ cp_cfg['test_mode'] = (default_args or {}).pop('test_mode', False)
+ cp_cfg['dataset'] = build_dataset(cp_cfg['dataset'], default_args)
+ cp_cfg.pop('type')
+ dataset = KFoldDataset(**cp_cfg)
else:
dataset = build_from_cfg(cfg, DATASETS, default_args)
diff --git a/mmcls/datasets/dataset_wrappers.py b/mmcls/datasets/dataset_wrappers.py
index 68c234e2f27..745c8f149af 100644
--- a/mmcls/datasets/dataset_wrappers.py
+++ b/mmcls/datasets/dataset_wrappers.py
@@ -170,3 +170,56 @@ def __getitem__(self, idx):
def __len__(self):
return len(self.repeat_indices)
+
+
[email protected]_module()
+class KFoldDataset:
+ """A wrapper of dataset for K-Fold cross-validation.
+
+ K-Fold cross-validation divides all the samples in groups of samples,
+ called folds, of almost equal sizes. And we use k-1 of folds to do training
+ and use the fold left to do validation.
+
+ Args:
+ dataset (:obj:`CustomDataset`): The dataset to be divided.
+ fold (int): The fold used to do validation. Defaults to 0.
+ num_splits (int): The number of all folds. Defaults to 5.
+ test_mode (bool): Use the training dataset or validation dataset.
+ Defaults to False.
+ seed (int, optional): The seed to shuffle the dataset before splitting.
+ If None, not shuffle the dataset. Defaults to None.
+ """
+
+ def __init__(self,
+ dataset,
+ fold=0,
+ num_splits=5,
+ test_mode=False,
+ seed=None):
+ self.dataset = dataset
+ self.CLASSES = dataset.CLASSES
+ self.test_mode = test_mode
+ self.num_splits = num_splits
+
+ length = len(dataset)
+ indices = list(range(length))
+ if isinstance(seed, int):
+ rng = np.random.default_rng(seed)
+ rng.shuffle(indices)
+
+ test_start = length * fold // num_splits
+ test_end = length * (fold + 1) // num_splits
+ if test_mode:
+ self.indices = indices[test_start:test_end]
+ else:
+ self.indices = indices[:test_start] + indices[test_end:]
+
+ def __getitem__(self, idx):
+ return self.dataset[self.indices[idx]]
+
+ def __len__(self):
+ return len(self.indices)
+
+ def evaluate(self, *args, **kwargs):
+ kwargs['indices'] = self.indices
+ return self.dataset.evaluate(*args, **kwargs)
diff --git a/mmcls/datasets/multi_label.py b/mmcls/datasets/multi_label.py
index 702493e3a48..7838ff5ad50 100644
--- a/mmcls/datasets/multi_label.py
+++ b/mmcls/datasets/multi_label.py
@@ -28,6 +28,7 @@ def evaluate(self,
results,
metric='mAP',
metric_options=None,
+ indices=None,
logger=None,
**deprecated_kwargs):
"""Evaluate the dataset.
@@ -62,6 +63,8 @@ def evaluate(self,
eval_results = {}
results = np.vstack(results)
gt_labels = self.get_gt_labels()
+ if indices is not None:
+ gt_labels = gt_labels[indices]
num_imgs = len(results)
assert len(gt_labels) == num_imgs, 'dataset testing results should '\
'be of the same length as gt_labels.'
diff --git a/tools/kfold-cross-valid.py b/tools/kfold-cross-valid.py
new file mode 100644
index 00000000000..a881316f566
--- /dev/null
+++ b/tools/kfold-cross-valid.py
@@ -0,0 +1,355 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import argparse
+import copy
+import os
+import os.path as osp
+import time
+from datetime import datetime
+from pathlib import Path
+
+import mmcv
+import torch
+from mmcv import Config, DictAction
+from mmcv.runner import get_dist_info, init_dist
+
+from mmcls import __version__
+from mmcls.apis import init_random_seed, set_random_seed, train_model
+from mmcls.datasets import build_dataset
+from mmcls.models import build_classifier
+from mmcls.utils import collect_env, get_root_logger, load_json_log
+
+TEST_METRICS = ('precision', 'recall', 'f1_score', 'support', 'mAP', 'CP',
+ 'CR', 'CF1', 'OP', 'OR', 'OF1', 'accuracy')
+
+prog_description = """K-Fold cross-validation.
+
+To start a 5-fold cross-validation experiment:
+ python tools/kfold-cross-valid.py $CONFIG --num-splits 5
+
+To resume a 5-fold cross-validation from an interrupted experiment:
+ python tools/kfold-cross-valid.py $CONFIG --num-splits 5 --resume-from work_dirs/fold2/latest.pth
+
+To summarize a 5-fold cross-validation:
+ python tools/kfold-cross-valid.py $CONFIG --num-splits 5 --summary
+""" # noqa: E501
+
+
+def parse_args():
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ description=prog_description)
+ parser.add_argument('config', help='train config file path')
+ parser.add_argument(
+ '--num-splits', type=int, help='The number of all folds.')
+ parser.add_argument(
+ '--fold',
+ type=int,
+ help='The fold used to do validation. '
+ 'If specify, only do an experiment of the specified fold.')
+ parser.add_argument(
+ '--summary',
+ action='store_true',
+ help='Summarize the k-fold cross-validation results.')
+ parser.add_argument('--work-dir', help='the dir to save logs and models')
+ parser.add_argument(
+ '--resume-from', help='the checkpoint file to resume from')
+ parser.add_argument(
+ '--no-validate',
+ action='store_true',
+ help='whether not to evaluate the checkpoint during training')
+ group_gpus = parser.add_mutually_exclusive_group()
+ group_gpus.add_argument('--device', help='device used for training')
+ group_gpus.add_argument(
+ '--gpus',
+ type=int,
+ help='number of gpus to use '
+ '(only applicable to non-distributed training)')
+ group_gpus.add_argument(
+ '--gpu-ids',
+ type=int,
+ nargs='+',
+ help='ids of gpus to use '
+ '(only applicable to non-distributed training)')
+ parser.add_argument('--seed', type=int, default=None, help='random seed')
+ parser.add_argument(
+ '--deterministic',
+ action='store_true',
+ help='whether to set deterministic options for CUDNN backend.')
+ parser.add_argument(
+ '--cfg-options',
+ nargs='+',
+ action=DictAction,
+ help='override some settings in the used config, the key-value pair '
+ 'in xxx=yyy format will be merged into config file. If the value to '
+ 'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+ 'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" '
+ 'Note that the quotation marks are necessary and that no white space '
+ 'is allowed.')
+ parser.add_argument(
+ '--launcher',
+ choices=['none', 'pytorch', 'slurm', 'mpi'],
+ default='none',
+ help='job launcher')
+ parser.add_argument('--local_rank', type=int, default=0)
+ args = parser.parse_args()
+ if 'LOCAL_RANK' not in os.environ:
+ os.environ['LOCAL_RANK'] = str(args.local_rank)
+
+ return args
+
+
+def copy_config(old_cfg):
+ """deepcopy a Config object."""
+ new_cfg = Config()
+ _cfg_dict = copy.deepcopy(old_cfg._cfg_dict)
+ _filename = copy.deepcopy(old_cfg._filename)
+ _text = copy.deepcopy(old_cfg._text)
+ super(Config, new_cfg).__setattr__('_cfg_dict', _cfg_dict)
+ super(Config, new_cfg).__setattr__('_filename', _filename)
+ super(Config, new_cfg).__setattr__('_text', _text)
+ return new_cfg
+
+
+def train_single_fold(args, cfg, fold, distributed, seed):
+ # create the work_dir for the fold
+ work_dir = osp.join(cfg.work_dir, f'fold{fold}')
+ cfg.work_dir = work_dir
+
+ # create work_dir
+ mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))
+
+ # wrap the dataset cfg
+ train_dataset = dict(
+ type='KFoldDataset',
+ fold=fold,
+ dataset=cfg.data.train,
+ num_splits=args.num_splits,
+ seed=seed,
+ )
+ val_dataset = dict(
+ type='KFoldDataset',
+ fold=fold,
+ # Use the same dataset with training.
+ dataset=copy.deepcopy(cfg.data.train),
+ num_splits=args.num_splits,
+ seed=seed,
+ test_mode=True,
+ )
+ val_dataset['dataset']['pipeline'] = cfg.data.val.pipeline
+ cfg.data.train = train_dataset
+ cfg.data.val = val_dataset
+ cfg.data.test = val_dataset
+
+ # dump config
+ stem, suffix = osp.basename(args.config).rsplit('.', 1)
+ cfg.dump(osp.join(cfg.work_dir, f'{stem}_fold{fold}.{suffix}'))
+ # init the logger before other steps
+ timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime())
+ log_file = osp.join(cfg.work_dir, f'{timestamp}.log')
+ logger = get_root_logger(log_file=log_file, log_level=cfg.log_level)
+
+ # init the meta dict to record some important information such as
+ # environment info and seed, which will be logged
+ meta = dict()
+ # log env info
+ env_info_dict = collect_env()
+ env_info = '\n'.join([(f'{k}: {v}') for k, v in env_info_dict.items()])
+ dash_line = '-' * 60 + '\n'
+ logger.info('Environment info:\n' + dash_line + env_info + '\n' +
+ dash_line)
+ meta['env_info'] = env_info
+
+ # log some basic info
+ logger.info(f'Distributed training: {distributed}')
+ logger.info(f'Config:\n{cfg.pretty_text}')
+ logger.info(
+ f'-------- Cross-validation: [{fold+1}/{args.num_splits}] -------- ')
+
+ # set random seeds
+ # Use different seed in different folds
+ logger.info(f'Set random seed to {seed + fold}, '
+ f'deterministic: {args.deterministic}')
+ set_random_seed(seed + fold, deterministic=args.deterministic)
+ cfg.seed = seed + fold
+ meta['seed'] = seed + fold
+
+ model = build_classifier(cfg.model)
+ model.init_weights()
+
+ datasets = [build_dataset(cfg.data.train)]
+ if len(cfg.workflow) == 2:
+ val_dataset = copy.deepcopy(cfg.data.val)
+ val_dataset.pipeline = cfg.data.train.pipeline
+ datasets.append(build_dataset(val_dataset))
+ meta.update(
+ dict(
+ mmcls_version=__version__,
+ config=cfg.pretty_text,
+ CLASSES=datasets[0].CLASSES,
+ kfold=dict(fold=fold, num_splits=args.num_splits)))
+ # add an attribute for visualization convenience
+ train_model(
+ model,
+ datasets,
+ cfg,
+ distributed=distributed,
+ validate=(not args.no_validate),
+ timestamp=timestamp,
+ device='cpu' if args.device == 'cpu' else 'cuda',
+ meta=meta)
+
+
+def summary(args, cfg):
+ summary = dict()
+ for fold in range(args.num_splits):
+ work_dir = Path(cfg.work_dir) / f'fold{fold}'
+
+ # Find the latest training log
+ log_files = list(work_dir.glob('*.log.json'))
+ if len(log_files) == 0:
+ continue
+ log_file = sorted(log_files)[-1]
+
+ date = datetime.fromtimestamp(log_file.lstat().st_mtime)
+ summary[fold] = {'date': date.strftime('%Y-%m-%d %H:%M:%S')}
+
+ # Find the latest eval log
+ json_log = load_json_log(log_file)
+ epochs = sorted(list(json_log.keys()))
+ eval_log = {}
+
+ def is_metric_key(key):
+ for metric in TEST_METRICS:
+ if metric in key:
+ return True
+ return False
+
+ for epoch in epochs[::-1]:
+ if any(is_metric_key(k) for k in json_log[epoch].keys()):
+ eval_log = json_log[epoch]
+ break
+
+ summary[fold]['epoch'] = epoch
+ summary[fold]['metric'] = {
+ k: v[0] # the value is a list with only one item.
+ for k, v in eval_log.items() if is_metric_key(k)
+ }
+ show_summary(args, summary)
+
+
+def show_summary(args, summary_data):
+ try:
+ from rich.console import Console
+ from rich.table import Table
+ except ImportError:
+ raise ImportError('Please run `pip install rich` to install '
+ 'package `rich` to draw the table.')
+
+ console = Console()
+ table = Table(title=f'{args.num_splits}-fold Cross-validation Summary')
+ table.add_column('Fold')
+ metrics = summary_data[0]['metric'].keys()
+ for metric in metrics:
+ table.add_column(metric)
+ table.add_column('Epoch')
+ table.add_column('Date')
+
+ for fold in range(args.num_splits):
+ row = [f'{fold+1}']
+ if fold not in summary_data:
+ table.add_row(*row)
+ continue
+ for metric in metrics:
+ metric_value = summary_data[fold]['metric'].get(metric, '')
+
+ def format_value(value):
+ if isinstance(value, float):
+ return f'{value:.2f}'
+ if isinstance(value, (list, tuple)):
+ return str([format_value(i) for i in value])
+ else:
+ return str(value)
+
+ row.append(format_value(metric_value))
+ row.append(str(summary_data[fold]['epoch']))
+ row.append(summary_data[fold]['date'])
+ table.add_row(*row)
+
+ console.print(table)
+
+
+def main():
+ args = parse_args()
+
+ cfg = Config.fromfile(args.config)
+ if args.cfg_options is not None:
+ cfg.merge_from_dict(args.cfg_options)
+ # set cudnn_benchmark
+ if cfg.get('cudnn_benchmark', False):
+ torch.backends.cudnn.benchmark = True
+
+ # work_dir is determined in this priority: CLI > segment in file > filename
+ if args.work_dir is not None:
+ # update configs according to CLI args if args.work_dir is not None
+ cfg.work_dir = args.work_dir
+ elif cfg.get('work_dir', None) is None:
+ # use config filename as default work_dir if cfg.work_dir is None
+ cfg.work_dir = osp.join('./work_dirs',
+ osp.splitext(osp.basename(args.config))[0])
+
+ if args.summary:
+ summary(args, cfg)
+ return
+
+ # resume from the previous experiment
+ if args.resume_from is not None:
+ cfg.resume_from = args.resume_from
+ resume_kfold = torch.load(cfg.resume_from).get('meta',
+ {}).get('kfold', None)
+ if resume_kfold is None:
+ raise RuntimeError(
+ 'No "meta" key in checkpoints or no "kfold" in the meta dict. '
+ 'Please check if the resume checkpoint from a k-fold '
+ 'cross-valid experiment.')
+ resume_fold = resume_kfold['fold']
+ assert args.num_splits == resume_kfold['num_splits']
+ else:
+ resume_fold = 0
+
+ if args.gpu_ids is not None:
+ cfg.gpu_ids = args.gpu_ids
+ else:
+ cfg.gpu_ids = range(1) if args.gpus is None else range(args.gpus)
+
+ # init distributed env first, since logger depends on the dist info.
+ if args.launcher == 'none':
+ distributed = False
+ else:
+ distributed = True
+ init_dist(args.launcher, **cfg.dist_params)
+ _, world_size = get_dist_info()
+ cfg.gpu_ids = range(world_size)
+
+ # init a unified random seed
+ seed = init_random_seed(args.seed)
+
+ # create work_dir
+ mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))
+
+ if args.fold is not None:
+ folds = [args.fold]
+ else:
+ folds = range(resume_fold, args.num_splits)
+
+ for fold in folds:
+ cfg_ = copy_config(cfg)
+ if fold != resume_fold:
+ cfg_.resume_from = None
+ train_single_fold(args, cfg_, fold, distributed, seed)
+
+ if args.fold is None:
+ summary(args, cfg)
+
+
+if __name__ == '__main__':
+ main()
| K-fold cross validation
How to do K-fold cross validation with MMCV?
| K-fold is not currently supported.
> K-fold is not currently supported.
Ok, thanks. And can you give me some advice on how implement the code by myself?
Hello, we don't have K-fold cross-validation for now.
I will create a draft for K-fold implementation, any discussion is welcomed to improve it.
> Hello, we don't have K-fold cross-validation for now. I will create a draft for K-fold implementation, any discussion is welcomed to improve it.
Nice! Thanks for your reply! | 2021-11-29T08:11:37 | 0.0 | [] | [] |
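For reference, the core of the `KFoldDataset` wrapper introduced by this PR is just integer index arithmetic: shuffle once with a fixed seed, then carve out one contiguous slice per fold. A standalone sketch of that split (the `kfold_indices` name is mine):

```python
import numpy as np


def kfold_indices(length, fold, num_splits=5, seed=None):
    """Return (train_indices, val_indices) for one fold, mirroring the
    boundary arithmetic in KFoldDataset above."""
    indices = list(range(length))
    if seed is not None:
        # Generator.shuffle works in place on any mutable sequence.
        np.random.default_rng(seed).shuffle(indices)
    start = length * fold // num_splits        # integer fold boundaries
    end = length * (fold + 1) // num_splits    # keep sizes within 1 sample
    return indices[:start] + indices[end:], indices[start:end]


# 10 samples, 5 folds: every sample lands in exactly one validation fold.
for fold in range(5):
    train, val = kfold_indices(10, fold, num_splits=5, seed=0)
    print(f'fold {fold}: val={val}')
```

The same seed has to be shared by the train and validation wrappers of a fold, which is why `tools/kfold-cross-valid.py` passes one `init_random_seed` result into both dataset configs.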
||
VlachosGroup/pMuTT | VlachosGroup__pMuTT-267 | 6a52786d164f62a4290d0aa7d2d171eb44883158 | diff --git a/docs/source/conf.py b/docs/source/conf.py
index 0368c997..11181306 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -26,9 +26,9 @@
author = 'Vlachos Research Group'
# The short X.Y version
-version = '1.4.13'
+version = '1.4.14'
# The full version, including alpha/beta/rc tags
-release = '1.4.13'
+release = '1.4.14'
sphinx_version = sphinx.__display_version__
diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst
index 0ef82c7a..6a61ff17 100644
--- a/docs/source/release_notes.rst
+++ b/docs/source/release_notes.rst
@@ -7,6 +7,13 @@ Development Branch
------------------
`Development Branch`_
+Version 1.4.14
+--------------
+
+February 8, 2024
+
+- Fix equilibrium unittest warning messages
+
Version 1.4.13
--------------
diff --git a/pmutt/__init__.py b/pmutt/__init__.py
index 06f5da70..97bb90db 100644
--- a/pmutt/__init__.py
+++ b/pmutt/__init__.py
@@ -10,7 +10,7 @@
# present, too:
#
name = 'pmutt'
-__version__ = '1.4.13'
+__version__ = '1.4.14'
import os
import inspect
diff --git a/pmutt/equilibrium/_equilibrium.py b/pmutt/equilibrium/_equilibrium.py
index 00ed200b..c945e6cb 100644
--- a/pmutt/equilibrium/_equilibrium.py
+++ b/pmutt/equilibrium/_equilibrium.py
@@ -1,12 +1,15 @@
from scipy.optimize import minimize
import numpy as np
import sys
+import warnings
from itertools import repeat
from pmutt.io.thermdat import read_thermdat
from pmutt import pmutt_list_to_dict
from pmutt import constants as c
from collections import namedtuple
+warnings.filterwarnings("ignore", "Values in x were outside bounds during a ")
+
class Equilibrium():
"""Reaction thermodynamic equilibrium.
@@ -142,7 +145,7 @@ def get_net_comp(self, T, P):
# Starting mole guesses = 1
self.guess = list(repeat(1.0, len(self.species)))
# Mole value bounds. Lower bound near zero
- b = [1e-20, sum(self.ele_feed)+0.1]
+ b = [1e-20, sum(self.ele_feed)]
# Upper bound is the total moles of elements
self.bounds = list(repeat(b, len(self.species)))
diff --git a/setup.py b/setup.py
index 47834e64..26fef443 100644
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@
setuptools_info = {
'name': 'pmutt',
- 'version': '1.4.13',
+ 'version': '1.4.14',
'author': 'Vlachos Research Group',
'author_email': '[email protected]',
'description': 'Python Multiscale Thermochemistry Toolbox (pmutt)',
| Fix to unittest equilibrium warnings and v1.4.14
| 2024-02-08T19:11:53 | 0.0 | [] | [] |
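The warning silenced by this patch is matched by message prefix, not by category: `warnings.filterwarnings` treats its second argument as a regex anchored at the start of the message, so only scipy's out-of-bounds notice is hidden while other warnings still surface. A small illustration, where the `warn()` calls are stand-ins for what `scipy.optimize.minimize` can emit:

```python
import warnings

# filterwarnings' second argument is a regex matched against the START of
# the warning message, so only this specific scipy notice is silenced.
warnings.filterwarnings("ignore",
                        "Values in x were outside bounds during a ")

warnings.warn("Values in x were outside bounds during a "
              "minimization step, clipping to bounds")   # suppressed
warnings.warn("an unrelated warning")                    # still shown
```

Tightening the upper bound from `sum(self.ele_feed)+0.1` to `sum(self.ele_feed)` removes the main source of that warning; the filter just keeps any residual clipping quiet during the unit tests.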
|||
VlachosGroup/pMuTT | VlachosGroup__pMuTT-261 | 40475c5d29c8593e3b9a4f7d0f0ebca208561ae4 | diff --git a/docs/source/conf.py b/docs/source/conf.py
index fdf24552..4ef6f3ee 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -26,9 +26,9 @@
author = 'Vlachos Research Group'
# The short X.Y version
-version = '1.4.10'
+version = '1.4.11'
# The full version, including alpha/beta/rc tags
-release = '1.4.10'
+release = '1.4.11'
sphinx_version = sphinx.__display_version__
diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst
index c1f0b0da..36e856c2 100644
--- a/docs/source/release_notes.rst
+++ b/docs/source/release_notes.rst
@@ -7,8 +7,16 @@ Development Branch
------------------
`Development Branch`_
+Version 1.4.11
+--------------
+
+February 3, 2024
+
+- Fix unittest for equilibrium class-Missing thermdat file
+- Fix unittest for equilibrium class-Lower bound solver violation
+
Version 1.4.10
--------------
+--------------
February 2, 2024
diff --git a/pmutt/__init__.py b/pmutt/__init__.py
index bed8d416..7a19269a 100644
--- a/pmutt/__init__.py
+++ b/pmutt/__init__.py
@@ -10,7 +10,7 @@
# present, too:
#
name = 'pmutt'
-__version__ = '1.4.10'
+__version__ = '1.4.11'
import os
import inspect
diff --git a/pmutt/equilibrium/_equilibrium.py b/pmutt/equilibrium/_equilibrium.py
index 248018db..31ccbaa2 100644
--- a/pmutt/equilibrium/_equilibrium.py
+++ b/pmutt/equilibrium/_equilibrium.py
@@ -142,7 +142,7 @@ def get_net_comp(self, T, P):
# Starting mole guesses = 1
self.guess = list(repeat(1.0, len(self.species)))
# Mole value bounds. Lower bound near zero
- b = [1e-16, sum(self.ele_feed)]
+ b = [1e-20, sum(self.ele_feed)]
# Upper bound is the total moles of elements
self.bounds = list(repeat(b, len(self.species)))
diff --git a/setup.py b/setup.py
index f9d0ff35..88eff4ac 100644
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@
setuptools_info = {
'name': 'pmutt',
- 'version': '1.4.10',
+ 'version': '1.4.11',
'author': 'Vlachos Research Group',
'author_email': '[email protected]',
'description': 'Python Multiscale Thermochemistry Toolbox (pmutt)',
| Misc fixes for unittest for equilibrium class
| 2024-02-03T22:47:57 | 0.0 | [] | [] |
|||
VlachosGroup/pMuTT | VlachosGroup__pMuTT-260 | aa411d8cbaab50e5c68cf1daa405c53b70bfc605 | diff --git a/docs/source/conf.py b/docs/source/conf.py
index fdf24552..4ef6f3ee 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -26,9 +26,9 @@
author = 'Vlachos Research Group'
# The short X.Y version
-version = '1.4.10'
+version = '1.4.11'
# The full version, including alpha/beta/rc tags
-release = '1.4.10'
+release = '1.4.11'
sphinx_version = sphinx.__display_version__
diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst
index c1f0b0da..36e856c2 100644
--- a/docs/source/release_notes.rst
+++ b/docs/source/release_notes.rst
@@ -7,8 +7,16 @@ Development Branch
------------------
`Development Branch`_
+Version 1.4.11
+--------------
+
+February 3, 2024
+
+- Fix unittest for equilibrium class-Missing thermdat file
+- Fix unittest for equilibrium class-Lower bound solver violation
+
Version 1.4.10
--------------
+--------------
February 2, 2024
diff --git a/pmutt/__init__.py b/pmutt/__init__.py
index bed8d416..7a19269a 100644
--- a/pmutt/__init__.py
+++ b/pmutt/__init__.py
@@ -10,7 +10,7 @@
# present, too:
#
name = 'pmutt'
-__version__ = '1.4.10'
+__version__ = '1.4.11'
import os
import inspect
diff --git a/pmutt/equilibrium/_equilibrium.py b/pmutt/equilibrium/_equilibrium.py
index 248018db..31ccbaa2 100644
--- a/pmutt/equilibrium/_equilibrium.py
+++ b/pmutt/equilibrium/_equilibrium.py
@@ -142,7 +142,7 @@ def get_net_comp(self, T, P):
# Starting mole guesses = 1
self.guess = list(repeat(1.0, len(self.species)))
# Mole value bounds. Lower bound near zero
- b = [1e-16, sum(self.ele_feed)]
+ b = [1e-20, sum(self.ele_feed)]
# Upper bound is the total moles of elements
self.bounds = list(repeat(b, len(self.species)))
diff --git a/setup.py b/setup.py
index f9d0ff35..88eff4ac 100644
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@
setuptools_info = {
'name': 'pmutt',
- 'version': '1.4.10',
+ 'version': '1.4.11',
'author': 'Vlachos Research Group',
'author_email': '[email protected]',
'description': 'Python Multiscale Thermochemistry Toolbox (pmutt)',
| Unused library call in excel.py
from PyMuTT.models.empirical import BaseThermo
| Fixed in commit 485efee | 2024-02-03T22:46:08 | 0.0 | [] | [] |
||
VlachosGroup/pMuTT | VlachosGroup__pMuTT-213 | eff354498748947a03603c2cb24885be24cc58b4 | diff --git a/docs/source/api/empirical/empirical.rst b/docs/source/api/empirical/empirical.rst
index 9169cd93..099f27ca 100644
--- a/docs/source/api/empirical/empirical.rst
+++ b/docs/source/api/empirical/empirical.rst
@@ -34,6 +34,7 @@ Nasa
:toctree: nasa
:nosignatures:
+
nasa.Nasa
nasa.SingleNasa9
nasa.Nasa9
diff --git a/docs/source/api/empirical/empirical_base/pmutt.empirical.EmpiricalBase.rst b/docs/source/api/empirical/empirical_base/pmutt.empirical.EmpiricalBase.rst
index 793b0ce6..c5454b5a 100644
--- a/docs/source/api/empirical/empirical_base/pmutt.empirical.EmpiricalBase.rst
+++ b/docs/source/api/empirical/empirical_base/pmutt.empirical.EmpiricalBase.rst
@@ -1,4 +1,4 @@
-pmutt.empirical.EmpiricalBase
+pmutt.empirical.EmpiricalBase
=============================
.. currentmodule:: pmutt.empirical
@@ -19,6 +19,21 @@ pmutt.empirical.EmpiricalBase
~EmpiricalBase.compare_HoRT
~EmpiricalBase.compare_SoR
~EmpiricalBase.from_dict
+ ~EmpiricalBase.get_Cp
+ ~EmpiricalBase.get_CpoR
+ ~EmpiricalBase.get_Cv
+ ~EmpiricalBase.get_CvoR
+ ~EmpiricalBase.get_F
+ ~EmpiricalBase.get_FoRT
+ ~EmpiricalBase.get_G
+ ~EmpiricalBase.get_GoRT
+ ~EmpiricalBase.get_H
+ ~EmpiricalBase.get_HoRT
+ ~EmpiricalBase.get_S
+ ~EmpiricalBase.get_SoR
+ ~EmpiricalBase.get_U
+ ~EmpiricalBase.get_UoRT
+ ~EmpiricalBase.get_q
~EmpiricalBase.plot_empirical
~EmpiricalBase.plot_statmech
~EmpiricalBase.plot_statmech_and_empirical
diff --git a/docs/source/api/empirical/misc/pmutt.empirical.GasPressureAdj.rst b/docs/source/api/empirical/misc/pmutt.empirical.GasPressureAdj.rst
index 44816535..34c59189 100644
--- a/docs/source/api/empirical/misc/pmutt.empirical.GasPressureAdj.rst
+++ b/docs/source/api/empirical/misc/pmutt.empirical.GasPressureAdj.rst
@@ -1,4 +1,4 @@
-pmutt.empirical.GasPressureAdj
+pmutt.empirical.GasPressureAdj
==============================
.. currentmodule:: pmutt.empirical
@@ -29,6 +29,7 @@ pmutt.empirical.GasPressureAdj
~GasPressureAdj.get_SoR
~GasPressureAdj.get_U
~GasPressureAdj.get_UoRT
+ ~GasPressureAdj.get_q
~GasPressureAdj.to_dict
diff --git a/docs/source/api/empirical/nasa/pmutt.empirical.nasa.Nasa.rst b/docs/source/api/empirical/nasa/pmutt.empirical.nasa.Nasa.rst
index 62ee002d..d767310a 100644
--- a/docs/source/api/empirical/nasa/pmutt.empirical.nasa.Nasa.rst
+++ b/docs/source/api/empirical/nasa/pmutt.empirical.nasa.Nasa.rst
@@ -1,4 +1,4 @@
-pmutt.empirical.nasa.Nasa
+pmutt.empirical.nasa.Nasa
=========================
.. currentmodule:: pmutt.empirical.nasa
@@ -24,18 +24,27 @@ pmutt.empirical.nasa.Nasa
~Nasa.from_statmech
~Nasa.get_Cp
~Nasa.get_CpoR
+ ~Nasa.get_Cv
+ ~Nasa.get_CvoR
+ ~Nasa.get_F
+ ~Nasa.get_FoRT
~Nasa.get_G
~Nasa.get_GoRT
~Nasa.get_H
~Nasa.get_HoRT
~Nasa.get_S
+ ~Nasa.get_Selements
~Nasa.get_SoR
+ ~Nasa.get_U
+ ~Nasa.get_UoRT
~Nasa.get_a
+ ~Nasa.get_q
~Nasa.plot_empirical
~Nasa.plot_statmech
~Nasa.plot_statmech_and_empirical
- ~Nasa.to_CTI
+ ~Nasa.to_cti
~Nasa.to_dict
+ ~Nasa.to_omkm_yaml
diff --git a/docs/source/api/empirical/nasa/pmutt.empirical.nasa.Nasa9.rst b/docs/source/api/empirical/nasa/pmutt.empirical.nasa.Nasa9.rst
index 6b6fb66c..4bdfac30 100644
--- a/docs/source/api/empirical/nasa/pmutt.empirical.nasa.Nasa9.rst
+++ b/docs/source/api/empirical/nasa/pmutt.empirical.nasa.Nasa9.rst
@@ -1,4 +1,4 @@
-pmutt.empirical.nasa.Nasa9
+pmutt.empirical.nasa.Nasa9
==========================
.. currentmodule:: pmutt.empirical.nasa
@@ -23,17 +23,26 @@ pmutt.empirical.nasa.Nasa9
~Nasa9.from_model
~Nasa9.get_Cp
~Nasa9.get_CpoR
+ ~Nasa9.get_Cv
+ ~Nasa9.get_CvoR
+ ~Nasa9.get_F
+ ~Nasa9.get_FoRT
~Nasa9.get_G
~Nasa9.get_GoRT
~Nasa9.get_H
~Nasa9.get_HoRT
~Nasa9.get_S
+ ~Nasa9.get_Selements
~Nasa9.get_SoR
+ ~Nasa9.get_U
+ ~Nasa9.get_UoRT
+ ~Nasa9.get_q
~Nasa9.plot_empirical
~Nasa9.plot_statmech
~Nasa9.plot_statmech_and_empirical
- ~Nasa9.to_CTI
+ ~Nasa9.to_cti
~Nasa9.to_dict
+ ~Nasa9.to_omkm_yaml
diff --git a/docs/source/api/empirical/nasa/pmutt.empirical.nasa.SingleNasa9.rst b/docs/source/api/empirical/nasa/pmutt.empirical.nasa.SingleNasa9.rst
index 71cf372f..12ce1fd5 100644
--- a/docs/source/api/empirical/nasa/pmutt.empirical.nasa.SingleNasa9.rst
+++ b/docs/source/api/empirical/nasa/pmutt.empirical.nasa.SingleNasa9.rst
@@ -1,4 +1,4 @@
-pmutt.empirical.nasa.SingleNasa9
+pmutt.empirical.nasa.SingleNasa9
================================
.. currentmodule:: pmutt.empirical.nasa
@@ -19,13 +19,25 @@ pmutt.empirical.nasa.SingleNasa9
~SingleNasa9.compare_HoRT
~SingleNasa9.compare_SoR
~SingleNasa9.from_dict
+ ~SingleNasa9.get_Cp
~SingleNasa9.get_CpoR
+ ~SingleNasa9.get_Cv
+ ~SingleNasa9.get_CvoR
+ ~SingleNasa9.get_F
+ ~SingleNasa9.get_FoRT
+ ~SingleNasa9.get_G
+ ~SingleNasa9.get_GoRT
+ ~SingleNasa9.get_H
~SingleNasa9.get_HoRT
+ ~SingleNasa9.get_S
~SingleNasa9.get_SoR
+ ~SingleNasa9.get_U
+ ~SingleNasa9.get_UoRT
+ ~SingleNasa9.get_q
~SingleNasa9.plot_empirical
~SingleNasa9.plot_statmech
~SingleNasa9.plot_statmech_and_empirical
- ~SingleNasa9.to_CTI
+ ~SingleNasa9.to_cti
~SingleNasa9.to_dict
diff --git a/docs/source/api/empirical/references/pmutt.empirical.references.Reference.rst b/docs/source/api/empirical/references/pmutt.empirical.references.Reference.rst
index 0af8a66d..030ae68d 100644
--- a/docs/source/api/empirical/references/pmutt.empirical.references.Reference.rst
+++ b/docs/source/api/empirical/references/pmutt.empirical.references.Reference.rst
@@ -1,4 +1,4 @@
-pmutt.empirical.references.Reference
+pmutt.empirical.references.Reference
====================================
.. currentmodule:: pmutt.empirical.references
@@ -19,6 +19,21 @@ pmutt.empirical.references.Reference
~Reference.compare_HoRT
~Reference.compare_SoR
~Reference.from_dict
+ ~Reference.get_Cp
+ ~Reference.get_CpoR
+ ~Reference.get_Cv
+ ~Reference.get_CvoR
+ ~Reference.get_F
+ ~Reference.get_FoRT
+ ~Reference.get_G
+ ~Reference.get_GoRT
+ ~Reference.get_H
+ ~Reference.get_HoRT
+ ~Reference.get_S
+ ~Reference.get_SoR
+ ~Reference.get_U
+ ~Reference.get_UoRT
+ ~Reference.get_q
~Reference.plot_empirical
~Reference.plot_statmech
~Reference.plot_statmech_and_empirical
diff --git a/docs/source/api/empirical/references/pmutt.empirical.references.References.rst b/docs/source/api/empirical/references/pmutt.empirical.references.References.rst
index 7d4477ad..4145240e 100644
--- a/docs/source/api/empirical/references/pmutt.empirical.references.References.rst
+++ b/docs/source/api/empirical/references/pmutt.empirical.references.References.rst
@@ -1,4 +1,4 @@
-pmutt.empirical.references.References
+pmutt.empirical.references.References
=====================================
.. currentmodule:: pmutt.empirical.references
@@ -36,6 +36,7 @@ pmutt.empirical.references.References
~References.get_UoRT
~References.get_descriptors
~References.get_descriptors_matrix
+ ~References.get_q
~References.index
~References.insert
~References.pop
diff --git a/docs/source/api/empirical/shomate/pmutt.empirical.shomate.Shomate.rst b/docs/source/api/empirical/shomate/pmutt.empirical.shomate.Shomate.rst
index 8ce89046..08b1a567 100644
--- a/docs/source/api/empirical/shomate/pmutt.empirical.shomate.Shomate.rst
+++ b/docs/source/api/empirical/shomate/pmutt.empirical.shomate.Shomate.rst
@@ -1,4 +1,4 @@
-pmutt.empirical.shomate.Shomate
+pmutt.empirical.shomate.Shomate
===============================
.. currentmodule:: pmutt.empirical.shomate
@@ -24,17 +24,26 @@ pmutt.empirical.shomate.Shomate
~Shomate.from_statmech
~Shomate.get_Cp
~Shomate.get_CpoR
+ ~Shomate.get_Cv
+ ~Shomate.get_CvoR
+ ~Shomate.get_F
+ ~Shomate.get_FoRT
~Shomate.get_G
~Shomate.get_GoRT
~Shomate.get_H
~Shomate.get_HoRT
~Shomate.get_S
+ ~Shomate.get_Selements
~Shomate.get_SoR
+ ~Shomate.get_U
+ ~Shomate.get_UoRT
+ ~Shomate.get_q
~Shomate.plot_empirical
~Shomate.plot_statmech
~Shomate.plot_statmech_and_empirical
- ~Shomate.to_CTI
+ ~Shomate.to_cti
~Shomate.to_dict
+ ~Shomate.to_omkm_yaml
diff --git a/docs/source/api/equilibrium/Equilibrium/pmutt.equilibrium.Equilibrium.rst b/docs/source/api/equilibrium/Equilibrium/pmutt.equilibrium.Equilibrium.rst
new file mode 100644
index 00000000..54da16cc
--- /dev/null
+++ b/docs/source/api/equilibrium/Equilibrium/pmutt.equilibrium.Equilibrium.rst
@@ -0,0 +1,24 @@
+pmutt.equilibrium.Equilibrium
+=============================
+
+.. currentmodule:: pmutt.equilibrium
+
+.. autoclass:: Equilibrium
+
+
+ .. automethod:: __init__
+
+
+ .. rubric:: Methods
+
+ .. autosummary::
+
+ ~Equilibrium.__init__
+ ~Equilibrium.from_thermdat
+ ~Equilibrium.get_net_comp
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/source/api/equilibrium/equilibrium.rst b/docs/source/api/equilibrium/equilibrium.rst
new file mode 100644
index 00000000..44368db8
--- /dev/null
+++ b/docs/source/api/equilibrium/equilibrium.rst
@@ -0,0 +1,24 @@
+.. equilibrium:
+
+Equilibrium Models
+******************
+
+Equilibrium models predicts the thermodynamic equilibrium concentration
+of a network of molecules at a temperature and pressure by minimizing
+the Gibbs free energy of the network constrained by conserving the atomic
+composition of the network.
+
+
+.. currentmodule:: pmutt.equilibrium
+
+--------------------------------------------------------------------------------
+
+Equilibrium
+-----------
+
+.. autosummary::
+ :toctree: Equilibrium
+ :nosignatures:
+
+ Equilibrium
+
diff --git a/docs/source/api/helper/helper.rst b/docs/source/api/helper/helper.rst
new file mode 100644
index 00000000..38544912
--- /dev/null
+++ b/docs/source/api/helper/helper.rst
@@ -0,0 +1,22 @@
+.. _helper:
+
+Helper Functions
+****************
+
+This page contains various helper functions useful when working with pMuTT
+
+get_molecular_weight
+====================
+
+.. autofunction:: pmutt.get_molecular_weight
+
+pmutt_list_to_dict
+==================
+
+.. autofunction:: pmutt.pmutt_list_to_dict
+
+get_geometry_from_atoms
+=======================
+
+.. autofunction:: pmutt.statmech.rot.get_geometry_from_atoms
+
diff --git a/docs/source/api/io/omkm/pmutt.io.omkm.get_interactions_phases.rst b/docs/source/api/io/omkm/pmutt.io.omkm.get_interactions_phases.rst
index 573f8e06..6005b108 100644
--- a/docs/source/api/io/omkm/pmutt.io.omkm.get_interactions_phases.rst
+++ b/docs/source/api/io/omkm/pmutt.io.omkm.get_interactions_phases.rst
@@ -1,7 +1,6 @@
-pmutt.io.omkm.get\_interactions\_phases
+pmutt.io.omkm.get\_interactions\_phases
=======================================
.. currentmodule:: pmutt.io.omkm
-.. autofunction:: get_interactions_phases
- :noindex:
\ No newline at end of file
+.. autofunction:: get_interactions_phases
\ No newline at end of file
diff --git a/docs/source/api/io/omkm/pmutt.io.omkm.get_reactions_phases.rst b/docs/source/api/io/omkm/pmutt.io.omkm.get_reactions_phases.rst
index f0621ee3..9744cc90 100644
--- a/docs/source/api/io/omkm/pmutt.io.omkm.get_reactions_phases.rst
+++ b/docs/source/api/io/omkm/pmutt.io.omkm.get_reactions_phases.rst
@@ -1,7 +1,6 @@
-pmutt.io.omkm.get\_reactions\_phases
+pmutt.io.omkm.get\_reactions\_phases
====================================
.. currentmodule:: pmutt.io.omkm
-.. autofunction:: get_reactions_phases
- :noindex:
\ No newline at end of file
+.. autofunction:: get_reactions_phases
\ No newline at end of file
diff --git a/docs/source/api/io/omkm/pmutt.io.omkm.get_species_phases.rst b/docs/source/api/io/omkm/pmutt.io.omkm.get_species_phases.rst
index ba6bcd2e..079047b4 100644
--- a/docs/source/api/io/omkm/pmutt.io.omkm.get_species_phases.rst
+++ b/docs/source/api/io/omkm/pmutt.io.omkm.get_species_phases.rst
@@ -1,7 +1,6 @@
-pmutt.io.omkm.get\_species\_phases
+pmutt.io.omkm.get\_species\_phases
==================================
.. currentmodule:: pmutt.io.omkm
-.. autofunction:: get_species_phases
- :noindex:
\ No newline at end of file
+.. autofunction:: get_species_phases
\ No newline at end of file
diff --git a/docs/source/api/io/omkm/pmutt.io.omkm.organize_phases.rst b/docs/source/api/io/omkm/pmutt.io.omkm.organize_phases.rst
index 6618581e..0131a02b 100644
--- a/docs/source/api/io/omkm/pmutt.io.omkm.organize_phases.rst
+++ b/docs/source/api/io/omkm/pmutt.io.omkm.organize_phases.rst
@@ -1,7 +1,6 @@
-pmutt.io.omkm.get\_species\_phases
-==================================
+pmutt.io.omkm.organize\_phases
+==============================
.. currentmodule:: pmutt.io.omkm
-.. autofunction:: organize_phases
- :noindex:
\ No newline at end of file
+.. autofunction:: organize_phases
\ No newline at end of file
diff --git a/docs/source/api/io/omkm/pmutt.io.omkm.write_cti.rst b/docs/source/api/io/omkm/pmutt.io.omkm.write_cti.rst
index 024084f6..89f9c7af 100644
--- a/docs/source/api/io/omkm/pmutt.io.omkm.write_cti.rst
+++ b/docs/source/api/io/omkm/pmutt.io.omkm.write_cti.rst
@@ -1,7 +1,6 @@
-pmutt.io.omkm.write\_cti
+pmutt.io.omkm.write\_cti
========================
.. currentmodule:: pmutt.io.omkm
-.. autofunction:: write_cti
- :noindex:
\ No newline at end of file
+.. autofunction:: write_cti
\ No newline at end of file
diff --git a/docs/source/api/io/omkm/pmutt.io.omkm.write_yaml.rst b/docs/source/api/io/omkm/pmutt.io.omkm.write_yaml.rst
index c71c3e4d..d892a808 100644
--- a/docs/source/api/io/omkm/pmutt.io.omkm.write_yaml.rst
+++ b/docs/source/api/io/omkm/pmutt.io.omkm.write_yaml.rst
@@ -1,7 +1,6 @@
-pmutt.io.omkm.write\_yaml
+pmutt.io.omkm.write\_yaml
=========================
.. currentmodule:: pmutt.io.omkm
-.. autofunction:: write_yaml\
- :noindex:
\ No newline at end of file
+.. autofunction:: write_yaml
\ No newline at end of file
diff --git a/docs/source/api/kinetic_models/cantera_phase/pmutt.cantera.phase.IdealGas.rst b/docs/source/api/kinetic_models/cantera_phase/pmutt.cantera.phase.IdealGas.rst
index b9c83ce4..7879cf22 100644
--- a/docs/source/api/kinetic_models/cantera_phase/pmutt.cantera.phase.IdealGas.rst
+++ b/docs/source/api/kinetic_models/cantera_phase/pmutt.cantera.phase.IdealGas.rst
@@ -1,4 +1,4 @@
-pmutt.cantera.phase.IdealGas
+pmutt.cantera.phase.IdealGas
============================
.. currentmodule:: pmutt.cantera.phase
@@ -21,7 +21,8 @@ pmutt.cantera.phase.IdealGas
~IdealGas.index_species
~IdealGas.pop_species
~IdealGas.remove_species
- ~IdealGas.to_CTI
+ ~IdealGas.to_cti
+ ~IdealGas.to_omkm_yaml
diff --git a/docs/source/api/kinetic_models/cantera_phase/pmutt.cantera.phase.StoichSolid.rst b/docs/source/api/kinetic_models/cantera_phase/pmutt.cantera.phase.StoichSolid.rst
index f1726b6a..95bb21bb 100644
--- a/docs/source/api/kinetic_models/cantera_phase/pmutt.cantera.phase.StoichSolid.rst
+++ b/docs/source/api/kinetic_models/cantera_phase/pmutt.cantera.phase.StoichSolid.rst
@@ -1,4 +1,4 @@
-pmutt.cantera.phase.StoichSolid
+pmutt.cantera.phase.StoichSolid
===============================
.. currentmodule:: pmutt.cantera.phase
@@ -21,7 +21,7 @@ pmutt.cantera.phase.StoichSolid
~StoichSolid.index_species
~StoichSolid.pop_species
~StoichSolid.remove_species
- ~StoichSolid.to_CTI
+ ~StoichSolid.to_cti
diff --git a/docs/source/api/kinetic_models/cantera_units/pmutt.cantera.units.Units.rst b/docs/source/api/kinetic_models/cantera_units/pmutt.cantera.units.Units.rst
index 70613cf7..6d28044f 100644
--- a/docs/source/api/kinetic_models/cantera_units/pmutt.cantera.units.Units.rst
+++ b/docs/source/api/kinetic_models/cantera_units/pmutt.cantera.units.Units.rst
@@ -1,4 +1,4 @@
-pmutt.cantera.units.Units
+pmutt.cantera.units.Units
=========================
.. currentmodule:: pmutt.cantera.units
@@ -14,8 +14,9 @@ pmutt.cantera.units.Units
.. autosummary::
~Units.__init__
- ~Units.to_CTI
- ~Units.to_CTI_dict
+ ~Units.to_cti
+ ~Units.to_cti_dict
+ ~Units.to_omkm_yaml
diff --git a/docs/source/api/kinetic_models/omkm/pmutt.io.omkm.organize_phases.rst b/docs/source/api/kinetic_models/omkm/pmutt.io.omkm.organize_phases.rst
index 2a7194d8..0131a02b 100644
--- a/docs/source/api/kinetic_models/omkm/pmutt.io.omkm.organize_phases.rst
+++ b/docs/source/api/kinetic_models/omkm/pmutt.io.omkm.organize_phases.rst
@@ -1,5 +1,5 @@
-pmutt.io.omkm.get\_species\_phases
-==================================
+pmutt.io.omkm.organize\_phases
+==============================
.. currentmodule:: pmutt.io.omkm
diff --git a/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.IdealGas.rst b/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.IdealGas.rst
index da3ec6df..10af5aed 100644
--- a/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.IdealGas.rst
+++ b/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.IdealGas.rst
@@ -1,4 +1,4 @@
-pmutt.omkm.phase.IdealGas
+pmutt.omkm.phase.IdealGas
=========================
.. currentmodule:: pmutt.omkm.phase
@@ -21,7 +21,8 @@ pmutt.omkm.phase.IdealGas
~IdealGas.index_species
~IdealGas.pop_species
~IdealGas.remove_species
- ~IdealGas.to_CTI
+ ~IdealGas.to_cti
+ ~IdealGas.to_omkm_yaml
diff --git a/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.InteractingInterface.rst b/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.InteractingInterface.rst
index 0f690170..d110bd0b 100644
--- a/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.InteractingInterface.rst
+++ b/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.InteractingInterface.rst
@@ -1,4 +1,4 @@
-pmutt.omkm.phase.InteractingInterface
+pmutt.omkm.phase.InteractingInterface
=====================================
.. currentmodule:: pmutt.omkm.phase
@@ -21,7 +21,8 @@ pmutt.omkm.phase.InteractingInterface
~InteractingInterface.index_species
~InteractingInterface.pop_species
~InteractingInterface.remove_species
- ~InteractingInterface.to_CTI
+ ~InteractingInterface.to_cti
+ ~InteractingInterface.to_omkm_yaml
diff --git a/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.StoichSolid.rst b/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.StoichSolid.rst
index a0344344..dcb3c36f 100644
--- a/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.StoichSolid.rst
+++ b/docs/source/api/kinetic_models/omkm_phases/pmutt.omkm.phase.StoichSolid.rst
@@ -1,4 +1,4 @@
-pmutt.omkm.phase.StoichSolid
+pmutt.omkm.phase.StoichSolid
============================
.. currentmodule:: pmutt.omkm.phase
@@ -21,7 +21,8 @@ pmutt.omkm.phase.StoichSolid
~StoichSolid.index_species
~StoichSolid.pop_species
~StoichSolid.remove_species
- ~StoichSolid.to_CTI
+ ~StoichSolid.to_cti
+ ~StoichSolid.to_omkm_yaml
diff --git a/docs/source/api/kinetic_models/omkm_reactions/pmutt.omkm.reaction.BEP.rst b/docs/source/api/kinetic_models/omkm_reactions/pmutt.omkm.reaction.BEP.rst
index 6a71ed81..b89b94e9 100644
--- a/docs/source/api/kinetic_models/omkm_reactions/pmutt.omkm.reaction.BEP.rst
+++ b/docs/source/api/kinetic_models/omkm_reactions/pmutt.omkm.reaction.BEP.rst
@@ -1,4 +1,4 @@
-pmutt.omkm.reaction.BEP
+pmutt.omkm.reaction.BEP
=======================
.. currentmodule:: pmutt.omkm.reaction
@@ -16,7 +16,9 @@ pmutt.omkm.reaction.BEP
~BEP.__init__
~BEP.from_dict
~BEP.get_Cp
+ ~BEP.get_CpoR
~BEP.get_Cv
+ ~BEP.get_CvoR
~BEP.get_E_act
~BEP.get_EoRT_act
~BEP.get_F
@@ -29,8 +31,10 @@ pmutt.omkm.reaction.BEP
~BEP.get_SoR
~BEP.get_U
~BEP.get_UoRT
- ~BEP.to_CTI
+ ~BEP.get_q
+ ~BEP.to_cti
~BEP.to_dict
+ ~BEP.to_omkm_yaml
diff --git a/docs/source/api/kinetic_models/omkm_reactions/pmutt.omkm.reaction.SurfaceReaction.rst b/docs/source/api/kinetic_models/omkm_reactions/pmutt.omkm.reaction.SurfaceReaction.rst
index a0988aad..3bfb285c 100644
--- a/docs/source/api/kinetic_models/omkm_reactions/pmutt.omkm.reaction.SurfaceReaction.rst
+++ b/docs/source/api/kinetic_models/omkm_reactions/pmutt.omkm.reaction.SurfaceReaction.rst
@@ -1,4 +1,4 @@
-pmutt.omkm.reaction.SurfaceReaction
+pmutt.omkm.reaction.SurfaceReaction
===================================
.. currentmodule:: pmutt.omkm.reaction
@@ -73,8 +73,9 @@ pmutt.omkm.reaction.SurfaceReaction
~SurfaceReaction.get_q_state
~SurfaceReaction.get_species
~SurfaceReaction.get_state_quantity
- ~SurfaceReaction.to_CTI
+ ~SurfaceReaction.to_cti
~SurfaceReaction.to_dict
+ ~SurfaceReaction.to_omkm_yaml
~SurfaceReaction.to_string
@@ -85,11 +86,13 @@ pmutt.omkm.reaction.SurfaceReaction
.. autosummary::
+ ~SurfaceReaction.beta
~SurfaceReaction.id
~SurfaceReaction.products
~SurfaceReaction.products_stoich
~SurfaceReaction.reactants
~SurfaceReaction.reactants_stoich
+ ~SurfaceReaction.sticking_coeff
~SurfaceReaction.transition_state
~SurfaceReaction.transition_state_stoich
diff --git a/docs/source/api/kinetic_models/omkm_units/pmutt.omkm.units.Units.rst b/docs/source/api/kinetic_models/omkm_units/pmutt.omkm.units.Units.rst
index 211d4e3a..136308a7 100644
--- a/docs/source/api/kinetic_models/omkm_units/pmutt.omkm.units.Units.rst
+++ b/docs/source/api/kinetic_models/omkm_units/pmutt.omkm.units.Units.rst
@@ -1,4 +1,4 @@
-pmutt.omkm.units.Units
+pmutt.omkm.units.Units
======================
.. currentmodule:: pmutt.omkm.units
@@ -14,8 +14,9 @@ pmutt.omkm.units.Units
.. autosummary::
~Units.__init__
- ~Units.to_CTI
- ~Units.to_CTI_dict
+ ~Units.to_cti
+ ~Units.to_cti_dict
+ ~Units.to_omkm_yaml
diff --git a/docs/source/api/kinetic_models/zacros/pmutt.empirical.zacros.Zacros.rst b/docs/source/api/kinetic_models/zacros/pmutt.empirical.zacros.Zacros.rst
index 3ce1b2b0..248ffe4e 100644
--- a/docs/source/api/kinetic_models/zacros/pmutt.empirical.zacros.Zacros.rst
+++ b/docs/source/api/kinetic_models/zacros/pmutt.empirical.zacros.Zacros.rst
@@ -1,4 +1,4 @@
-pmutt.empirical.zacros.Zacros
+pmutt.empirical.zacros.Zacros
=============================
.. currentmodule:: pmutt.empirical.zacros
@@ -19,6 +19,21 @@ pmutt.empirical.zacros.Zacros
~Zacros.compare_HoRT
~Zacros.compare_SoR
~Zacros.from_dict
+ ~Zacros.get_Cp
+ ~Zacros.get_CpoR
+ ~Zacros.get_Cv
+ ~Zacros.get_CvoR
+ ~Zacros.get_F
+ ~Zacros.get_FoRT
+ ~Zacros.get_G
+ ~Zacros.get_GoRT
+ ~Zacros.get_H
+ ~Zacros.get_HoRT
+ ~Zacros.get_S
+ ~Zacros.get_SoR
+ ~Zacros.get_U
+ ~Zacros.get_UoRT
+ ~Zacros.get_q
~Zacros.plot_empirical
~Zacros.plot_statmech
~Zacros.plot_statmech_and_empirical
diff --git a/docs/source/api/mixture/cov/pmutt.mixture.cov.PiecewiseCovEffect.rst b/docs/source/api/mixture/cov/pmutt.mixture.cov.PiecewiseCovEffect.rst
index 3fe7e210..54da1df0 100644
--- a/docs/source/api/mixture/cov/pmutt.mixture.cov.PiecewiseCovEffect.rst
+++ b/docs/source/api/mixture/cov/pmutt.mixture.cov.PiecewiseCovEffect.rst
@@ -1,4 +1,4 @@
-pmutt.mixture.cov.PiecewiseCovEffect
+pmutt.mixture.cov.PiecewiseCovEffect
====================================
.. currentmodule:: pmutt.mixture.cov
@@ -32,8 +32,9 @@ pmutt.mixture.cov.PiecewiseCovEffect
~PiecewiseCovEffect.get_q
~PiecewiseCovEffect.insert
~PiecewiseCovEffect.pop
- ~PiecewiseCovEffect.to_CTI
+ ~PiecewiseCovEffect.to_cti
~PiecewiseCovEffect.to_dict
+ ~PiecewiseCovEffect.to_omkm_yaml
diff --git a/docs/source/api/reactions/bep/pmutt.omkm.reaction.BEP.rst b/docs/source/api/reactions/bep/pmutt.omkm.reaction.BEP.rst
index 6a71ed81..b89b94e9 100644
--- a/docs/source/api/reactions/bep/pmutt.omkm.reaction.BEP.rst
+++ b/docs/source/api/reactions/bep/pmutt.omkm.reaction.BEP.rst
@@ -1,4 +1,4 @@
-pmutt.omkm.reaction.BEP
+pmutt.omkm.reaction.BEP
=======================
.. currentmodule:: pmutt.omkm.reaction
@@ -16,7 +16,9 @@ pmutt.omkm.reaction.BEP
~BEP.__init__
~BEP.from_dict
~BEP.get_Cp
+ ~BEP.get_CpoR
~BEP.get_Cv
+ ~BEP.get_CvoR
~BEP.get_E_act
~BEP.get_EoRT_act
~BEP.get_F
@@ -29,8 +31,10 @@ pmutt.omkm.reaction.BEP
~BEP.get_SoR
~BEP.get_U
~BEP.get_UoRT
- ~BEP.to_CTI
+ ~BEP.get_q
+ ~BEP.to_cti
~BEP.to_dict
+ ~BEP.to_omkm_yaml
diff --git a/docs/source/api/reactions/bep/pmutt.reaction.bep.BEP.rst b/docs/source/api/reactions/bep/pmutt.reaction.bep.BEP.rst
index e6642666..1bd37838 100644
--- a/docs/source/api/reactions/bep/pmutt.reaction.bep.BEP.rst
+++ b/docs/source/api/reactions/bep/pmutt.reaction.bep.BEP.rst
@@ -1,4 +1,4 @@
-pmutt.reaction.bep.BEP
+pmutt.reaction.bep.BEP
======================
.. currentmodule:: pmutt.reaction.bep
@@ -16,7 +16,9 @@ pmutt.reaction.bep.BEP
~BEP.__init__
~BEP.from_dict
~BEP.get_Cp
+ ~BEP.get_CpoR
~BEP.get_Cv
+ ~BEP.get_CvoR
~BEP.get_E_act
~BEP.get_EoRT_act
~BEP.get_F
@@ -29,6 +31,7 @@ pmutt.reaction.bep.BEP
~BEP.get_SoR
~BEP.get_U
~BEP.get_UoRT
+ ~BEP.get_q
~BEP.to_dict
diff --git a/docs/source/api/reactions/reaction/pmutt.omkm.reaction.SurfaceReaction.rst b/docs/source/api/reactions/reaction/pmutt.omkm.reaction.SurfaceReaction.rst
index a0988aad..3bfb285c 100644
--- a/docs/source/api/reactions/reaction/pmutt.omkm.reaction.SurfaceReaction.rst
+++ b/docs/source/api/reactions/reaction/pmutt.omkm.reaction.SurfaceReaction.rst
@@ -1,4 +1,4 @@
-pmutt.omkm.reaction.SurfaceReaction
+pmutt.omkm.reaction.SurfaceReaction
===================================
.. currentmodule:: pmutt.omkm.reaction
@@ -73,8 +73,9 @@ pmutt.omkm.reaction.SurfaceReaction
~SurfaceReaction.get_q_state
~SurfaceReaction.get_species
~SurfaceReaction.get_state_quantity
- ~SurfaceReaction.to_CTI
+ ~SurfaceReaction.to_cti
~SurfaceReaction.to_dict
+ ~SurfaceReaction.to_omkm_yaml
~SurfaceReaction.to_string
@@ -85,11 +86,13 @@ pmutt.omkm.reaction.SurfaceReaction
.. autosummary::
+ ~SurfaceReaction.beta
~SurfaceReaction.id
~SurfaceReaction.products
~SurfaceReaction.products_stoich
~SurfaceReaction.reactants
~SurfaceReaction.reactants_stoich
+ ~SurfaceReaction.sticking_coeff
~SurfaceReaction.transition_state
~SurfaceReaction.transition_state_stoich
diff --git a/docs/source/api/reactions/reactions/pmutt.omkm.reaction.SurfaceReaction.rst b/docs/source/api/reactions/reactions/pmutt.omkm.reaction.SurfaceReaction.rst
index a0988aad..3bfb285c 100644
--- a/docs/source/api/reactions/reactions/pmutt.omkm.reaction.SurfaceReaction.rst
+++ b/docs/source/api/reactions/reactions/pmutt.omkm.reaction.SurfaceReaction.rst
@@ -1,4 +1,4 @@
-pmutt.omkm.reaction.SurfaceReaction
+pmutt.omkm.reaction.SurfaceReaction
===================================
.. currentmodule:: pmutt.omkm.reaction
@@ -73,8 +73,9 @@ pmutt.omkm.reaction.SurfaceReaction
~SurfaceReaction.get_q_state
~SurfaceReaction.get_species
~SurfaceReaction.get_state_quantity
- ~SurfaceReaction.to_CTI
+ ~SurfaceReaction.to_cti
~SurfaceReaction.to_dict
+ ~SurfaceReaction.to_omkm_yaml
~SurfaceReaction.to_string
@@ -85,11 +86,13 @@ pmutt.omkm.reaction.SurfaceReaction
.. autosummary::
+ ~SurfaceReaction.beta
~SurfaceReaction.id
~SurfaceReaction.products
~SurfaceReaction.products_stoich
~SurfaceReaction.reactants
~SurfaceReaction.reactants_stoich
+ ~SurfaceReaction.sticking_coeff
~SurfaceReaction.transition_state
~SurfaceReaction.transition_state_stoich
diff --git a/docs/source/api/statmech/elec/pmutt.statmech.lsr.ExtendedLSR.rst b/docs/source/api/statmech/elec/pmutt.statmech.lsr.ExtendedLSR.rst
index 993cd4e1..336e7249 100644
--- a/docs/source/api/statmech/elec/pmutt.statmech.lsr.ExtendedLSR.rst
+++ b/docs/source/api/statmech/elec/pmutt.statmech.lsr.ExtendedLSR.rst
@@ -1,5 +1,5 @@
-pmutt.statmech.lsr.LSR
-======================
+pmutt.statmech.lsr.ExtendedLSR
+==============================
.. currentmodule:: pmutt.statmech.lsr
@@ -13,24 +13,35 @@ pmutt.statmech.lsr.LSR
.. autosummary::
- ~LSR.__init__
- ~LSR.from_dict
- ~LSR.get_Cp
- ~LSR.get_Cv
- ~LSR.get_F
- ~LSR.get_FoRT
- ~LSR.get_G
- ~LSR.get_GoRT
- ~LSR.get_H
- ~LSR.get_HoRT
- ~LSR.get_S
- ~LSR.get_SoR
- ~LSR.get_U
- ~LSR.get_UoRT
- ~LSR.to_dict
+ ~ExtendedLSR.__init__
+ ~ExtendedLSR.from_dict
+ ~ExtendedLSR.get_Cp
+ ~ExtendedLSR.get_CpoR
+ ~ExtendedLSR.get_Cv
+ ~ExtendedLSR.get_CvoR
+ ~ExtendedLSR.get_F
+ ~ExtendedLSR.get_FoRT
+ ~ExtendedLSR.get_G
+ ~ExtendedLSR.get_GoRT
+ ~ExtendedLSR.get_H
+ ~ExtendedLSR.get_HoRT
+ ~ExtendedLSR.get_S
+ ~ExtendedLSR.get_SoR
+ ~ExtendedLSR.get_U
+ ~ExtendedLSR.get_UoRT
+ ~ExtendedLSR.get_q
+ ~ExtendedLSR.to_dict
+ .. rubric:: Attributes
+
+ .. autosummary::
+
+ ~ExtendedLSR.gas_species
+ ~ExtendedLSR.reactions
+ ~ExtendedLSR.surf_species
+
\ No newline at end of file
diff --git a/docs/source/api/statmech/elec/pmutt.statmech.lsr.LSR.rst b/docs/source/api/statmech/elec/pmutt.statmech.lsr.LSR.rst
index d1629641..d66ab7e9 100644
--- a/docs/source/api/statmech/elec/pmutt.statmech.lsr.LSR.rst
+++ b/docs/source/api/statmech/elec/pmutt.statmech.lsr.LSR.rst
@@ -1,4 +1,4 @@
-pmutt.statmech.lsr.LSR
+pmutt.statmech.lsr.LSR
======================
.. currentmodule:: pmutt.statmech.lsr
@@ -16,7 +16,9 @@ pmutt.statmech.lsr.LSR
~LSR.__init__
~LSR.from_dict
~LSR.get_Cp
+ ~LSR.get_CpoR
~LSR.get_Cv
+ ~LSR.get_CvoR
~LSR.get_F
~LSR.get_FoRT
~LSR.get_G
@@ -27,10 +29,19 @@ pmutt.statmech.lsr.LSR
~LSR.get_SoR
~LSR.get_U
~LSR.get_UoRT
+ ~LSR.get_q
~LSR.to_dict
+ .. rubric:: Attributes
+
+ .. autosummary::
+
+ ~LSR.gas_species
+ ~LSR.reaction
+ ~LSR.surf_species
+
\ No newline at end of file
diff --git a/docs/source/api/statmech/statmech/pmutt.statmech.StatMech.rst b/docs/source/api/statmech/statmech/pmutt.statmech.StatMech.rst
index a1390d9c..52c0f867 100644
--- a/docs/source/api/statmech/statmech/pmutt.statmech.StatMech.rst
+++ b/docs/source/api/statmech/statmech/pmutt.statmech.StatMech.rst
@@ -1,4 +1,4 @@
-pmutt.statmech.StatMech
+pmutt.statmech.StatMech
=======================
.. currentmodule:: pmutt.statmech
@@ -28,6 +28,7 @@ pmutt.statmech.StatMech
~StatMech.get_H
~StatMech.get_HoRT
~StatMech.get_S
+ ~StatMech.get_Selements
~StatMech.get_SoR
~StatMech.get_U
~StatMech.get_UoRT
diff --git a/docs/source/conf.py b/docs/source/conf.py
index d449fa4a..1ff95577 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -14,6 +14,7 @@
#
import os
import sys
+import sphinx
pmutt_path = os.path.join(os.path.dirname(__file__), '../../../pmutt')
sys.path.insert(0, os.path.abspath(pmutt_path))
@@ -25,9 +26,10 @@
author = 'Vlachos Research Group'
# The short X.Y version
-version = ''
+version = '1.4.0'
# The full version, including alpha/beta/rc tags
-release = '1.3.1'
+release = '1.4.0'
+sphinx_version = sphinx.__display_version__
# -- General configuration ---------------------------------------------------
@@ -48,15 +50,18 @@
'sphinx.ext.ifconfig',
'sphinx.ext.githubpages',
'sphinx.ext.napoleon',
- #'nbsphinx',
+ 'sphinx_automodapi.automodapi'
]
-
-# Automatically generate summaries
autosummary_generate = True
-autodoc_default_flags = ['members',
- 'undoc-members',
- 'show-inheritance',
- 'inherited-members']
+autoclass_content = 'both'
+html_show_sourcelink = False
+autodoc_inherit_docstrings = True
+set_type_checking_flag = True
+autodoc_default_options = {'members': True,
+ 'undoc-members': False,
+ 'show-inheritance': True,
+ 'inherited-members': True
+ }
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -75,7 +80,7 @@
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
-language = None
+language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
@@ -102,7 +107,7 @@
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+# html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
@@ -193,4 +198,11 @@
# Logo
html_logo = './logos/pmutt_inverse_highres.png'
-html_favicon = './logos/p_icon.ico'
\ No newline at end of file
+html_favicon = './logos/p_icon.ico'
+
+variables_to_export = [
+ 'sphinx_version',
+]
+frozen_locals = dict(locals())
+rst_epilog = '\n'.join(map(lambda x: f".. |{x}| replace:: {frozen_locals[x]}", variables_to_export))
+del frozen_locals
\ No newline at end of file
diff --git a/docs/source/examples_jupyter/examples.rst b/docs/source/examples_jupyter/examples.rst
index 47547fd1..d0535b84 100644
--- a/docs/source/examples_jupyter/examples.rst
+++ b/docs/source/examples_jupyter/examples.rst
@@ -218,7 +218,7 @@ OpenMKM_IO
:target: https://raw.githubusercontent.com/VlachosGroup/pMuTT/master/docs/source/examples_jupyter/omkm_io/OpenMKM_IO.py
:width: 15%
.. image:: ./images/zip_logo.png
- :target: https://raw.githubusercontent.com/VlachosGroup/pMuTT/master/docs/source/examples_jupyter/omkm_io/omkm_io.zip
+ :target: https://raw.githubusercontent.com/VlachosGroup/pMuTT/master/docs/source/examples_jupyter/omkm_io/openmkm_io.zip
:width: 15%
Topics Covered
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 61ceddcd..c7f300bd 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -28,14 +28,17 @@ Welcome to pmutt's documentation!
api/mixture/mixture
api/kinetic_models/kinetic_models
api/reactions/reactions
+ api/equilibrium/equilibrium
api/phase_diagram/phase_diagram
api/eos/eos
api/visualization/visual
+ api/helper/helper
release_notes
.. toctree::
:caption: Useful Links
+
View Source Code (GitHub) <https://github.com/VlachosGroup/pmutt>
Report an Issue <https://github.com/VlachosGroup/pMuTT/issues>
@@ -43,6 +46,6 @@ Indices and tables
==================
* :ref:`genindex`
-* :ref:`modindex`
* :ref:`search`
+:Sphinx Version: |sphinx_version|
diff --git a/docs/source/release_notes.rst b/docs/source/release_notes.rst
index 9ff79944..632a9fd4 100644
--- a/docs/source/release_notes.rst
+++ b/docs/source/release_notes.rst
@@ -7,6 +7,31 @@ Development Branch
------------------
`Development Branch`_
+Version 1.4.0
+-------------
+
+Mar. 17, 2023
+
+- Added new :class:`~pmutt.equilibrium.Equilibrium` class that computes the equilibrium
+  concentration of a network of species by minimizing the network's
+  Gibbs free energy while maintaining the total atom balance of
+  the initial concentration of species
+- Major GitHub documentation update including the new functionality
+ from v1.3.2 and v1.4.0, corrections noted in the issues database,
+  fixes to broken links, addition of a new "Helper Functions" section,
+ and misc upgrades.
+
+Version 1.3.2
+-------------
+
+Jan. 26, 2023
+
+- Added ``S_elements`` parameter to entropy and Gibbs free energy
+ methods allowing you to compute an entropy and Gibbs free energy
+ of formation for a single species by including the entropy of the
+ elements in the species.
+- Misc bug fixes
+
Version 1.2.21
--------------
diff --git a/pmutt/__init__.py b/pmutt/__init__.py
index 732feac1..784af240 100644
--- a/pmutt/__init__.py
+++ b/pmutt/__init__.py
@@ -10,7 +10,7 @@
# present, too:
#
name = 'pmutt'
-__version__ = '1.3.1'
+__version__ = '1.3.3'
import os
import inspect
@@ -367,19 +367,20 @@ def plot_1D(obj,
Other variables to use in the calculation. Method specific
arguments can be passed by having a key that corresponds to
the method name
- ::
- e.g. kwargs = {'get_H_kwargs': {'units': 'kcal/mol'},
- 'get_S_kwargs': {'units': 'cal/mol/K'}}
+
+ e.g. kwargs = {'get_H_kwargs': {'units': 'kcal/mol'},
+ 'get_S_kwargs': {'units': 'cal/mol/K'}}
Returns
-------
figure : `matplotlib.figure.Figure`_
Figure
ax : (N,) list of `matplotlib.axes.Axes.axis`_
- Axes of the plots where N is the length of ``methods``
+ Axes of the plots.
.. _`matplotlib.figure.Figure`: https://matplotlib.org/api/_as_gen/matplotlib.figure.Figure.html
.. _`matplotlib.axes.Axes.axis`: https://matplotlib.org/api/_as_gen/matplotlib.axes.Axes.axis.html
"""
+
# Check if single method passed
if not _is_iterable(methods):
methods = (methods, )
@@ -473,9 +474,9 @@ def plot_2D(obj,
Other variables to use in the calculation. Method specific
arguments can be passed by having a key that corresponds to
the method name
-
+
e.g. kwargs = {'get_H': {'units': 'kcal/mol'},
- 'get_S': {'units': 'cal/mol/K'}}
+ 'get_S': {'units': 'cal/mol/K'}}
Returns
-------
figure : `matplotlib.figure.Figure`_
@@ -493,6 +494,7 @@ def plot_2D(obj,
.. _`matplotlib.colorbar.Colorbar`: https://matplotlib.org/3.1.0/api/_as_gen/matplotlib.pyplot.colorbar.html
"""
+
# Check if single method passed
if not _is_iterable(methods):
methods = (methods, )
diff --git a/pmutt/empirical/nasa.py b/pmutt/empirical/nasa.py
index c9274cd0..d80f3918 100644
--- a/pmutt/empirical/nasa.py
+++ b/pmutt/empirical/nasa.py
@@ -1476,6 +1476,7 @@ class SingleNasa9(EmpiricalBase):
High temperature bound (in K)
a : (9,) `numpy.ndarray`_
NASA9 polynomial to use between T_low and T_high
+ .. _`numpy.ndarray`: https://docs.scipy.org/doc/numpy/reference/generated/numpy.ndarray.html
"""
def __init__(self, T_low, T_high, a):
diff --git a/pmutt/empirical/shomate.py b/pmutt/empirical/shomate.py
index 8cb4285f..820248e2 100644
--- a/pmutt/empirical/shomate.py
+++ b/pmutt/empirical/shomate.py
@@ -48,6 +48,7 @@ class Shomate(EmpiricalBase):
Units used to fit the Shomate polynomial. Units should be supported
by :class:`~pmutt.constants.R` (e.g. J/mol/K, cal/mol/K, eV/K).
Default is J/mol/K.
+ .. _`numpy.ndarray`: https://docs.scipy.org/doc/numpy/reference/generated/numpy.ndarray.html
"""
def __init__(self,
diff --git a/pmutt/equilibrium/__init__.py b/pmutt/equilibrium/__init__.py
new file mode 100644
index 00000000..45fa2dc0
--- /dev/null
+++ b/pmutt/equilibrium/__init__.py
@@ -0,0 +1,2 @@
+# -*- coding: utf-8 -*-
+from ._equilibrium import *
diff --git a/pmutt/equilibrium/_equilibrium.py b/pmutt/equilibrium/_equilibrium.py
new file mode 100644
index 00000000..248018db
--- /dev/null
+++ b/pmutt/equilibrium/_equilibrium.py
@@ -0,0 +1,192 @@
+from scipy.optimize import minimize
+import numpy as np
+import sys
+from itertools import repeat
+from pmutt.io.thermdat import read_thermdat
+from pmutt import pmutt_list_to_dict
+from pmutt import constants as c
+from collections import namedtuple
+
+
+class Equilibrium():
+ """Reaction thermodynamic equilibrium.
+
+ Attributes
+ ----------
+ model : list or dictionary of :class:`~pmutt.empirical` or :class:`~pmutt.statmech` objects
+        Thermodynamic objects used to compute the Gibbs free energy for all
+        species in 'network'
+    network : dictionary object
+        Dictionary mapping each species to consider in the equilibrium
+        calculation to its initial moles. All species names must match and
+        be contained in 'model' data.
+ """
+
+ def __init__(self,
+ model,
+ network):
+ self.model = model
+ self.network = network
+ if type(self.model) is list:
+ self.model = pmutt_list_to_dict(self.model)
+ elif type(self.model) is dict:
+ pass
+ else:
+ sys.exit('model must be list or dict')
+
+ # Build molecule-element configuration matrix from species
+ self.elements = []
+ self.species = list(self.network.keys())
+ feed = np.array(list(network.values()))
+ self.mol_elem = np.zeros([len(self.species), 2])
+ for i, x in enumerate(self.species):
+ ele = self.model[x].elements
+ # Read elements in each species
+ for y in ele:
+ # Check if the current element is in the list
+ try:
+ self.elements.index(y)
+ except ValueError:
+ # If not add the element to the list
+ self.elements.append(y)
+ # Check if the molecule-element matrix has sufficient
+                    # columns to accommodate the additional element and add a
+ # column if necessary
+ if len(self.elements) > np.size(self.mol_elem, 1):
+ self.mol_elem = np.append(self.mol_elem,
+ np.zeros([len(self.species),
+ 1]), 1)
+ # Enter the quantity of the current element of the current
+ # molecule into the molecule-element matrix
+ self.mol_elem[i, self.elements.index(y)] =\
+ self.model[x].elements[y]
+
+        # Eliminate zero columns
+ self.elements = list(np.array(self.elements)
+ [sum(self.mol_elem, 0) > 0])
+ self.mol_elem = self.mol_elem[:, sum(self.mol_elem, 0) > 0]
+ # Determine the moles of each element in the feed
+ self.ele_feed = feed.dot(self.mol_elem)
+ self.species_mw = self.mol_elem.dot([c.atomic_weight[x]
+ for x in self.elements])
+
+    # Objective (Cost) Function: Sum of Gibbs Free Energies
+
+ def _objective(self, x, *args):
+ s = 0.0
+ nT = sum(x)
+ g = np.array(args[0])
+ p = args[1]
+ for i in range(len(x)):
+ s += x[i]*(g[i] + np.log(x[i]*p/nT))
+        # Return the sum of the Gibbs free energies
+ return s
+
+    # Objective (Cost) Function Jacobian: Sum of Gibbs Free Energies
+
+ def _objective_jac(self, x, *args):
+ s = np.zeros_like(x)
+ nT = sum(x)
+ g = np.array(args[0])
+ p = args[1]
+ for i in range(len(x)):
+ s[i] = g[i] + np.log(x[i]*p/nT)
+        # Return the gradient of the Gibbs free energy sum
+ return s
+
+ # Elemental Balance Equality Constraint. The returned value
+ # must be = 0
+
+ def _constraints1_eq(self, x):
+ s = x.dot(self.mol_elem) - self.ele_feed # Elemental balances
+ # Return slack variable for each element
+ return s
+
+ # Elemental Balance Equality Constraint Jacobian.
+
+ def _constraints1_eq_jac(self, x):
+        # Return the Jacobian of the elemental balance constraints
+ return self.mol_elem.T
+
+ def get_net_comp(self, T, P):
+ """Returns the equilibrium composition of the specified molecule
+ network.
+
+ Parameters
+ ----------
+ T : float
+ Temperature in K
+ P : float
+ Pressure in atm
+ Returns
+ -------
+ res : equilibrium._equilibrium.res
+
+ Important attributes are
+ .species : list of strings
+ list of species in network
+ .moles : `numpy.ndarray`_
+ Equilibrium moles of each species in the network
+ .mole_frac : `numpy.ndarray`_
+ Equilibrium mole fraction of each species in the network
+ .P : float
+ Pressure (atm) used in equilibrium calculation
+ .T : float
+ Temperature (K) used in equilibrium calculation
+
+ .. _`numpy.ndarray`: https://docs.scipy.org/doc/numpy/reference/generated/numpy.ndarray.html
+ """
+ self.P = P
+ self.T = T
+ # Model initialization parameters
+ # Starting mole guesses = 1
+ self.guess = list(repeat(1.0, len(self.species)))
+ # Mole value bounds. Lower bound near zero
+ b = [1e-16, sum(self.ele_feed)]
+ # Upper bound is the total moles of elements
+ self.bounds = list(repeat(b, len(self.species)))
+
+ # Designate each constraint as an equality or inequality constraint
+ self.con = {'type': 'eq', 'fun': self._constraints1_eq,
+ 'jac': self._constraints1_eq_jac}
+
+ self.maxiter = 5000 # Maximum iterations for minimize solver
+ # Initialize model results lists
+
+ # t0 = time.time()
+        # Calculate species Gibbs free energies at the current temperature
+ self.gibbs = []
+ for x in self.species:
+ self.gibbs.append(self.model[x].get_GoRT(T=T))
+
+ # Run solver once and collect data
+ sol = minimize(self._objective, self.guess,
+ args=(self.gibbs, self.P*1.01325),
+ jac=self._objective_jac,
+ method='SLSQP',
+ options={'ftol': 1e-14, 'maxiter': self.maxiter},
+ bounds=self.bounds,
+ constraints=self.con)
+
+ res = namedtuple("res", ["species", "moles", "mole_frac", "P", "T"])
+
+ return res(self.species, sol.x, sol.x/np.sum(sol.x), self.P, self.T)
+
+ @classmethod
+ def from_thermdat(cls,
+ thermdat,
+ network):
+ """Reaction thermodynamic equilibrium.
+
+        Parameters
+        ----------
+        thermdat : string, filepath to thermdat file
+            File in thermdat format containing NASA polynomials for
+            species in network to compute Gibbs free energy
+        network : dictionary object
+            Dictionary mapping each species to consider in the equilibrium
+            calculation to its initial moles. All species names
+            must match and be contained in 'model' data.
+ """
+ model = read_thermdat(thermdat, "dict")
+ return cls(model=model, network=network)
diff --git a/pmutt/io/chemkin.py b/pmutt/io/chemkin.py
index b613804d..13b60123 100644
--- a/pmutt/io/chemkin.py
+++ b/pmutt/io/chemkin.py
@@ -349,7 +349,7 @@ def write_surf(reactions,
Parameters
----------
- reactions : list of :class:`~pmutt.reaction.ChemkinReaction` objects
+ reactions : A :class:`~pmutt.reaction.Reaction` object containing
Chemkin reactions to write in surf.inp file. Purely gas-phase
reactions will be ignored
filename : str, optional
diff --git a/pmutt/io/excel.py b/pmutt/io/excel.py
index 3d15bb37..04f0423d 100644
--- a/pmutt/io/excel.py
+++ b/pmutt/io/excel.py
@@ -545,7 +545,7 @@ def set_dict_value(dict_name, key, value, output_structure):
key : str
Key corresponding to ``value``
value : float
- Value to assign to ``key
+ Value to assign to ``key``
output_structure : dict
Structure to assign value. Will assign to
output_structure[dict_name]
diff --git a/pmutt/io/omkm.py b/pmutt/io/omkm.py
index b559d84d..c22fdd98 100644
--- a/pmutt/io/omkm.py
+++ b/pmutt/io/omkm.py
@@ -1,5 +1,5 @@
from pathlib import Path
-from collections import namedtuple, defaultdict
+from collections import defaultdict
import yaml
@@ -13,6 +13,7 @@
from pmutt.omkm import phase as omkm_phases
from pmutt.omkm.units import Units
+
def write_cti(phases=None,
species=None,
reactions=None,
@@ -25,9 +26,9 @@ def write_cti(phases=None,
use_motz_wise=False,
ads_act_method='get_H_act',
write_xml=True):
- """Writes the units, phases, species, lateral interactions, reactions and
+ """Writes the units, phases, species, lateral interactions, reactions and
additional options in the CTI format for OpenMKM
-
+
Parameters
----------
phases : list of :class:`~pmutt.omkm.phase.Phase` objects
@@ -170,6 +171,7 @@ def write_cti(phases=None,
# Or return as string
return lines_out
+
def write_thermo_yaml(phases=None, species=None, reactions=None,
lateral_interactions=None, units=None,
filename=None, T=300., P=1., newline='\n',
@@ -177,9 +179,9 @@ def write_thermo_yaml(phases=None, species=None, reactions=None,
use_motz_wise='False',
yaml_options={'default_flow_style': None, 'indent': 2,
'sort_keys': False, 'width': 79}):
- """Writes the units, phases, species, lateral interactions, reactions and
+ """Writes the units, phases, species, lateral interactions, reactions and
additional options in the CTI format for OpenMKM
-
+
Parameters
----------
phases : list of :class:`~pmutt.omkm.phase.Phase` objects
@@ -204,7 +206,7 @@ def write_thermo_yaml(phases=None, species=None, reactions=None,
newline : str, optional
Type of newline to use. Default is Linux newline ('\\n')
ads_act_method : str, optional
- Activation method to use for adsorption reactions. Accepted
+ Activation method to use for adsorption reactions. Accepted
options include 'get_H_act' and 'get_G_act'. Default is
'get_H_act'.
Returns
@@ -386,13 +388,20 @@ def write_yaml(reactor_type=None,
- batch
Value written to ``reactor.type``.
- mode : str
+ temperature_mode : str
Operation of reactor. Supported options include:
- Isothermal
- Adiabatic
- Value written to ``reactor.mode``.
+ Value written to ``reactor.temperature_mode``.
+ pressure_mode : str
+ Operation of reactor. Supported options include:
+
+ - Isobaric
+ - Isochoric
+
+ Value written to ``reactor.pressure_mode``.
nodes : int
Number of nodes to use if ``reactor_type`` is 'pfr_0d'. Value
written to ``reactor.nodes``
diff --git a/setup.py b/setup.py
index 75ce3522..09d0a73b 100644
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@
setuptools_info = {
'name': 'pmutt',
- 'version': '1.3.2',
+ 'version': '1.4.0',
'author': 'Vlachos Research Group',
'author_email': '[email protected]',
'description': 'Python Multiscale Thermochemistry Toolbox (pmutt)',
@@ -36,7 +36,9 @@
'pygal>=3.0.0',
'xlrd>=2.0.1',
'more_itertools>=8.14.0',
- 'PyYAML>=6.0.0'],
+ 'PyYAML>=6.0.0',
+ 'python>=3.3'
+ ],
'classifiers': [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
| Import Errors due to renaming
Since the name was changed from Thermochemistry to PyMuTT, imports no longer work correctly. We need to change all Thermochemistry references to PyMuTT.
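As a hedged illustration (not the actual fix), a one-off rename of this kind could be scripted roughly like this; the checkout root and the plain text-replacement rule are assumptions:
```
import pathlib

OLD, NEW = 'Thermochemistry', 'PyMuTT'
repo_root = pathlib.Path('.')  # hypothetical checkout root

for py_file in repo_root.rglob('*.py'):
    text = py_file.read_text()
    if OLD in text:
        # rewrite stale imports and references to the new package name
        py_file.write_text(text.replace(OLD, NEW))
        print(f'updated {py_file}')
```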
| Solved issue in commit 73912f24b2b2f86229c72ae21d39d395a659ac91 | 2023-03-17T19:25:15 | 0.0 | [] | [] |
||
stac-utils/stac-fastapi | stac-utils__stac-fastapi-701 | 1c3546a5b449edebc7587dab289f73224847c63e | diff --git a/CHANGES.md b/CHANGES.md
index 46d284fe0..0229dcfcb 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -2,6 +2,12 @@
## [Unreleased] - TBD
+## [3.0.0a2] - 2024-05-31
+
+### Fixed
+
+* Fix missing default (`None`) for optional `query` attribute in `QueryExtensionPostRequest` model ([#701](https://github.com/stac-utils/stac-fastapi/pull/701))
+
## [3.0.0a1] - 2024-05-22
### Changed
diff --git a/stac_fastapi/extensions/stac_fastapi/extensions/core/query/request.py b/stac_fastapi/extensions/stac_fastapi/extensions/core/query/request.py
index 8b282884a..7f8425e70 100644
--- a/stac_fastapi/extensions/stac_fastapi/extensions/core/query/request.py
+++ b/stac_fastapi/extensions/stac_fastapi/extensions/core/query/request.py
@@ -18,4 +18,4 @@ class QueryExtensionGetRequest(APIRequest):
class QueryExtensionPostRequest(BaseModel):
"""Query Extension POST request model."""
- query: Optional[Dict[str, Dict[str, Any]]]
+ query: Optional[Dict[str, Dict[str, Any]]] = None
| Ensure Optional `query` Field Defaults to None for Pydantic V2 Compatibility
I've identified an issue with the Pydantic model definition in `QueryExtensionPostRequest` [stac_fastapi.extensions.core.query.request](https://github.com/stac-utils/stac-fastapi/blob/1c3546a5b449edebc7587dab289f73224847c63e/stac_fastapi/extensions/stac_fastapi/extensions/core/query/request.py#L21).
The query field lacks a default value, making it a required field under Pydantic V2's semantics. In Pydantic V2, fields declared as Optional must explicitly default to None to be considered optional and nullable. This change prevents fields from being unexpectedly required and aligns with Pydantic V2's [migration guidelines](https://docs.pydantic.dev/latest/migration/#required-optional-and-nullable-fields).
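A minimal, self-contained sketch of the behavior described above (the model names are illustrative, not from the codebase):
```
from typing import Any, Dict, Optional

from pydantic import BaseModel, ValidationError

class WithoutDefault(BaseModel):
    # In Pydantic V2, Optional alone no longer implies a default,
    # so this field is required.
    query: Optional[Dict[str, Dict[str, Any]]]

class WithDefault(BaseModel):
    # Explicitly defaulting to None makes the field truly optional.
    query: Optional[Dict[str, Dict[str, Any]]] = None

WithDefault()              # ok, query defaults to None
try:
    WithoutDefault()       # raises ValidationError under Pydantic V2
except ValidationError as exc:
    print(exc.errors()[0]['type'])  # 'missing'
```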
| 2024-05-31T13:06:07 | 0.0 | [] | [] |
|||
thelastpickle/cassandra-medusa | thelastpickle__cassandra-medusa-813 | ce4c3af9bc155764a393dd8cf56bf17fa4eea892 | diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index be8b66a4..01daf085 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -94,8 +94,8 @@ jobs:
exit $?
integration-tests:
- needs: build
continue-on-error: ${{ matrix.experimental }}
+ needs: build
strategy:
fail-fast: false
matrix:
@@ -104,6 +104,7 @@ jobs:
# IBM not included by default due to lite plan quota being easily exceeded
#it-backend: [local, s3, gcs, minio, ibm, azure]
cassandra-version: [2.2.19, 3.11.11, 4.0.0, 'github:apache/trunk']
+ java-version: [8.0.252, 11.0.25]
include:
# tweak the experimental flag for cassandra versions
- cassandra-version: 2.2.19
@@ -117,21 +118,35 @@ jobs:
cassandra-version: 4.0.0
it-backend: gcs
experimental: false
+ java-version: 11.0.25
# explicitly include tests against python 3.10 and one version of cassandra
- python-version: "3.10"
cassandra-version: 4.0.0
it-backend: gcs
experimental: false
+ java-version: 11.0.25
# explicitly include tests against python 3.8 and one version of cassandra
- python-version: 3.8
cassandra-version: 4.0.0
it-backend: gcs
experimental: false
+ java-version: 11.0.25
exclude:
# no tests against trunk
- cassandra-version: 'github:apache/trunk'
- # fewer tests against cassandra 3.11.11 (exclude all but local storage backends)
- - it-backend: s3
+ # no tests for C* 2.2 with java 11
+ - cassandra-version: 2.2.19
+ java-version: 11.0.25
+ # no tests for C* 3.11 with java 11
+ - cassandra-version: 3.11.11
+ java-version: 11.0.25
+ # no tests for C* 4.0 with java 8
+ - cassandra-version: 4.0.0
+ java-version: 8.0.252
+ # fewer tests against cassandra 3.11.11 (exclude all but s3 storage backends)
+ # we are not doing the local because it would run a scenario with mgmt-api which no longer supports 3.11
+ # but we still want some tests against 3.11.11, so we use s3 for at least some coverage
+ - it-backend: local
cassandra-version: "3.11.11"
- it-backend: gcs
cassandra-version: "3.11.11"
@@ -167,7 +182,7 @@ jobs:
- name: Setup Java Action
uses: actions/setup-java@v1
with:
- java-version: '8.0.252'
+ java-version: ${{ matrix.java-version}}
architecture: x64
- name: Setup Poetry
uses: snok/install-poetry@v1
@@ -211,7 +226,7 @@ jobs:
# Write GCS service account credentials to a file
mkdir ~/.aws
# This fake cluster needs to be created first so that the integration tests pass in GH actions. Don't ask me why...
- ccm create test_cluster -v binary:3.11.4 -n 1 --vnodes
+ ccm create test_cluster -v binary:${{ matrix.cassandra-version }} -n 1 --vnodes
ccm node1 updateconf 'storage_port: 7011'
ccm node1 updateconf 'concurrent_reads: 4'
ccm node1 updateconf 'concurrent_writes: 4'
@@ -219,6 +234,9 @@ jobs:
ccm node1 updateconf 'num_tokens: 4'
sed -i 's/#MAX_HEAP_SIZE="4G"/MAX_HEAP_SIZE="256m"/' ~/.ccm/test_cluster/node1/conf/cassandra-env.sh
sed -i 's/#HEAP_NEWSIZE="800M"/HEAP_NEWSIZE="200M"/' ~/.ccm/test_cluster/node1/conf/cassandra-env.sh
+ # remove the ThreadPriorityPolicy option for cases where we run with java 11
+ sed -i 's/-XX:ThreadPriorityPolicy=42//' ~/.ccm/test_cluster/node1/conf/jvm.options || true
+ sed -i 's/-XX:ThreadPriorityPolicy=42//' ~/.ccm/test_cluster/node1/conf/jvm8-server.options || true
ccm start -v
ccm showlastlog|tail -100
ccm stop
diff --git a/medusa-example.ini b/medusa-example.ini
index 21e020a9..52a8bdaa 100644
--- a/medusa-example.ini
+++ b/medusa-example.ini
@@ -130,7 +130,7 @@ use_sudo_for_restore = True
;aws_cli_path = <Location of the aws cli binary if not in PATH>
-; Read timeout in seconds for the storage provider.
+; Read timeout in seconds for the storage provider. Not set by default.
;read_timeout = 60
[monitoring]
diff --git a/medusa/config.py b/medusa/config.py
index 7aae5807..5355fe4b 100644
--- a/medusa/config.py
+++ b/medusa/config.py
@@ -117,7 +117,6 @@ def _build_default_config():
'region': 'default',
'backup_grace_period_in_days': 10,
'use_sudo_for_restore': 'True',
- 'read_timeout': 60
}
config['logging'] = {
diff --git a/medusa/storage/azure_storage.py b/medusa/storage/azure_storage.py
index 67d4778a..3662b6e2 100644
--- a/medusa/storage/azure_storage.py
+++ b/medusa/storage/azure_storage.py
@@ -56,7 +56,7 @@ def __init__(self, config):
logging.getLogger('azure.core.pipeline.policies.http_logging_policy').setLevel(logging.WARNING)
logging.getLogger('chardet.universaldetector').setLevel(logging.WARNING)
- self.read_timeout = int(config.read_timeout)
+ self.read_timeout = int(config.read_timeout) if 'read_timeout' in dir(config) and config.read_timeout else None
super().__init__(config)
diff --git a/medusa/storage/google_storage.py b/medusa/storage/google_storage.py
index 35e7bb15..c172579c 100644
--- a/medusa/storage/google_storage.py
+++ b/medusa/storage/google_storage.py
@@ -49,7 +49,7 @@ def __init__(self, config):
logging.getLogger('gcloud.aio.storage.storage').setLevel(logging.WARNING)
- self.read_timeout = int(config.read_timeout)
+ self.read_timeout = int(config.read_timeout) if 'read_timeout' in dir(config) and config.read_timeout else -1
super().__init__(config)
@@ -158,7 +158,7 @@ async def _download_blob(self, src: str, dest: str):
stream = await self.gcs_storage.download_stream(
bucket=self.bucket_name,
object_name=object_key,
- timeout=self.read_timeout if self.read_timeout is not None else -1,
+ timeout=self.read_timeout,
)
Path(file_path).parent.mkdir(parents=True, exist_ok=True)
with open(file_path, 'wb') as f:
@@ -243,7 +243,7 @@ async def _read_blob_as_bytes(self, blob: AbstractBlob) -> bytes:
bucket=self.bucket_name,
object_name=blob.name,
session=self.session,
- timeout=self.read_timeout if self.read_timeout is not None else -1,
+ timeout=self.read_timeout,
)
return content
diff --git a/medusa/storage/s3_base_storage.py b/medusa/storage/s3_base_storage.py
index 5733d5b9..837e27c1 100644
--- a/medusa/storage/s3_base_storage.py
+++ b/medusa/storage/s3_base_storage.py
@@ -119,6 +119,8 @@ def __init__(self, config):
self.executor = concurrent.futures.ThreadPoolExecutor(int(config.concurrent_transfers))
+ self.read_timeout = int(config.read_timeout) if 'read_timeout' in dir(config) and config.read_timeout else None
+
super().__init__(config)
def connect(self):
@@ -137,7 +139,7 @@ def connect(self):
signature_version='v4',
tcp_keepalive=True,
max_pool_connections=max_pool_size,
- read_timeout=int(self.config.read_timeout),
+ read_timeout=self.read_timeout,
)
if self.credentials.access_key_id is not None:
self.s3_client = boto3.client(
| The `read_timeout` should default to nothing instead of 60 seconds
[Project board link](https://github.com/orgs/k8ssandra/projects/8/views/1?pane=issue&itemId=82767638)
As we got notified [here](https://github.com/k8ssandra/k8ssandra-operator/issues/1353#issuecomment-2399309354), the way we introduced the read_timeout setting is not very compatible with existing installations. The 60-second timeout is not good enough for everybody.
We should default to no timeout, and only pass it in if it's actually configured.
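A hedged sketch of the guarded lookup this implies (mirroring the pattern in the patch above); the config object here is a stand-in, not Medusa's real config class:
```
import collections

StorageConfig = collections.namedtuple('StorageConfig', ['read_timeout'])

def resolve_read_timeout(config, default=None):
    # Only honor the setting when it is present and non-empty;
    # otherwise fall back to the storage provider's own default.
    raw = getattr(config, 'read_timeout', None)
    return int(raw) if raw else default

print(resolve_read_timeout(StorageConfig(read_timeout='60')))  # 60
print(resolve_read_timeout(StorageConfig(read_timeout=None)))  # None
```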
┆Issue is synchronized with this [Jira Story](https://datastax.jira.com/browse/MED-100) by [Unito](https://www.unito.io)
┆Reviewer: Alexander Dejanovski
┆Issue Number: MED-100
| 2024-10-09T13:35:27 | 0.0 | [] | [] |
|||
thelastpickle/cassandra-medusa | thelastpickle__cassandra-medusa-799 | 969e34f576cba0fbc00cdd1f9e6a2b00ed3557d0 | diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1c7b6281..d714fd4c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -71,12 +71,12 @@ jobs:
then
echo "Updating debian changelog..."
cd packaging/docker-build
- docker-compose build release && docker-compose run release
+ docker compose build release && docker compose run release
cd ../..
fi
cd packaging/docker-build
- docker-compose build "cassandra-medusa-builder-${{ matrix.suite }}" \
- && docker-compose run "cassandra-medusa-builder-${{ matrix.suite }}"
+ docker compose build "cassandra-medusa-builder-${{ matrix.suite }}" \
+ && docker compose run "cassandra-medusa-builder-${{ matrix.suite }}"
cd ../../packages
if [ -f "cassandra-medusa_${version}-0~${{ matrix.suite }}0_amd64.deb" ]; then
@@ -439,16 +439,16 @@ jobs:
- uses: actions/checkout@v3
- name: Install dependencies
run: |
- sudo curl -L "https://github.com/docker/compose/releases/download/1.25.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
- sudo chmod +x /usr/local/bin/docker-compose
- sudo ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose
+ sudo curl -L "https://github.com/docker/compose/releases/download/1.25.0/docker compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker compose
+ sudo chmod +x /usr/local/bin/docker compose
+ sudo ln -s /usr/local/bin/docker compose /usr/bin/docker compose
- name: Build Debian
run: |
version=$(cat VERSION)
echo "VERSION=$version" >> $GITHUB_ENV
cd packaging/docker-build
- docker-compose build "cassandra-medusa-builder-${{ matrix.suite }}" \
- && docker-compose run "cassandra-medusa-builder-${{ matrix.suite }}"
+ docker compose build "cassandra-medusa-builder-${{ matrix.suite }}" \
+ && docker compose run "cassandra-medusa-builder-${{ matrix.suite }}"
- name: Push Debian to Cloudsmith
id: push-deb
diff --git a/medusa/backup_manager.py b/medusa/backup_manager.py
index 7abdcca4..3b39d4ec 100644
--- a/medusa/backup_manager.py
+++ b/medusa/backup_manager.py
@@ -108,12 +108,13 @@ def register_backup(backup_name, is_async, overwrite_existing=True):
if not BackupMan.__instance:
BackupMan()
- if backup_name in BackupMan.__instance.__backups:
+ if backup_name in BackupMan.__instance.__backups.keys():
if overwrite_existing:
if not BackupMan.__clean(backup_name):
logging.error(f"Registered backup name {backup_name} cleanup failed prior to re-register.")
-
- BackupMan.__instance.__backups[backup_name] = [None, BackupMan.STATUS_UNKNOWN, is_async]
+ BackupMan.__instance.__backups[backup_name] = [None, BackupMan.STATUS_UNKNOWN, is_async]
+ else:
+ BackupMan.__instance.__backups[backup_name] = [None, BackupMan.STATUS_UNKNOWN, is_async]
logging.info("Registered backup id {}".format(backup_name))
# Caller can decide how long to wait for a result using the registered backup future returned.
diff --git a/medusa/service/grpc/server.py b/medusa/service/grpc/server.py
index c47d47e9..86b5a5ac 100644
--- a/medusa/service/grpc/server.py
+++ b/medusa/service/grpc/server.py
@@ -191,10 +191,38 @@ def BackupStatus(self, request, context):
response.finishTime = ""
BackupMan.register_backup(request.backupName, is_async=False, overwrite_existing=False)
status = BackupMan.STATUS_UNKNOWN
- if backup.started:
- status = BackupMan.STATUS_IN_PROGRESS
- if backup.finished:
- status = BackupMan.STATUS_SUCCESS
+ future = BackupMan.get_backup_future(request.backupName)
+ if future is None:
+ # No future exists or the future is finished already,
+ # if the backup isn't marked as finished in the backend then it failed
+ logging.info("Backup {} has no future".format(request.backupName))
+ if not backup.finished:
+ status = BackupMan.STATUS_FAILED
+ elif future.done():
+ try:
+ future.result()
+ logging.info("Backup {} has finished with no exception".format(request.backupName))
+ if not backup.finished:
+ status = BackupMan.STATUS_FAILED
+ except Exception as e:
+ # If the future failed, then log the exception
+ logging.error(f"Backup {request.backupName} has failed: {e}")
+ status = BackupMan.STATUS_FAILED
+ else:
+ logging.info("Backup {} is still running".format(request.backupName))
+ if status == BackupMan.STATUS_UNKNOWN:
+ if backup.started:
+ status = BackupMan.STATUS_IN_PROGRESS
+ if backup.finished:
+ status = BackupMan.STATUS_SUCCESS
+
+ if status == BackupMan.STATUS_FAILED and future is not None:
+ try:
+ future.result()
+ logging.info("Backup {} has failed with no exception".format(request.backupName))
+ except Exception as e:
+ # If the future failed, then log the exception
+ logging.error(f"Backup {request.backupName} has failed: {e}")
BackupMan.update_backup_status(request.backupName, status)
# record the status
record_status_in_response(response, request.backupName)
| Backup tracking is broken in the grpc server
[Project board link](https://github.com/orgs/k8ssandra/projects/8/views/1?pane=issue&itemId=74757756)
Backup tracking is failing to detect failed backups due to how the BackupStatus code does its check.
In the current version, the code will get the node backup from the storage bucket, and it will potentially register the backup in the BackupMan in case it doesn't exist. This allows to recreate the backups metadata in memory after a restart of the GRPC server.
It then computes the status solely based on the data gathered from storage, without checking whether there is a pending future.
If there's no pending future (backup crashed or medusa was restarted), then we're not resurfacing the failure and the operator will keep on polling indefinitely.
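For context, the future-state check this relies on is just standard `concurrent.futures` semantics; a minimal, standalone illustration (no Medusa code):
```
import concurrent.futures
import time

def failing_backup():
    time.sleep(0.1)
    raise RuntimeError('upload failed')

with concurrent.futures.ThreadPoolExecutor() as executor:
    future = executor.submit(failing_backup)
    print(future.done())      # False while the backup is still running
    try:
        future.result()       # blocks, then re-raises the worker's exception
    except RuntimeError as e:
        print(f'backup failed: {e}')
```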
What needs to be changed:
```
BackupMan.register_backup(request.backupName, is_async=False, overwrite_existing=False)
status = BackupMan.STATUS_UNKNOWN
if backup.started:
status = BackupMan.STATUS_IN_PROGRESS
if backup.finished:
status = BackupMan.STATUS_SUCCESS
```
this code needs to include a check for the existence of a future in the BackupMan and return accordingly:
```
BackupMan.register_backup(request.backupName, is_async=False, overwrite_existing=False)
status = BackupMan.STATUS_UNKNOWN
try:
future = BackupMan.get_backup_future(request.backupName)
except RuntimeError as e:
# No future exists, if the backup isn't marked as finished in the backend then it failed
if not backup.finished:
status = BackupMan.STATUS_FAILED
if status == BackupMan.STATUS_UNKNOWN:
# We don't have a pending future and need to compute status based on storage information
if backup.started:
status = BackupMan.STATUS_IN_PROGRESS
if backup.finished:
status = BackupMan.STATUS_SUCCESS
```
| 2024-08-14T11:39:36 | 0.0 | [] | [] |
|||
thelastpickle/cassandra-medusa | thelastpickle__cassandra-medusa-763 | 3c72aa56d411c6c02ffc4c231ae3e017d29770ff | diff --git a/medusa-example.ini b/medusa-example.ini
index f57182f6..235e5634 100644
--- a/medusa-example.ini
+++ b/medusa-example.ini
@@ -118,6 +118,9 @@ use_sudo_for_restore = True
;aws_cli_path = <Location of the aws cli binary if not in PATH>
+; Read timeout in seconds for the storage provider.
+;read_timeout = 60
+
[monitoring]
;monitoring_provider = <Provider used for sending metrics. Currently either of "ffwd" or "local">
diff --git a/medusa/config.py b/medusa/config.py
index b7f55d30..3e133cf0 100644
--- a/medusa/config.py
+++ b/medusa/config.py
@@ -31,7 +31,7 @@
['bucket_name', 'key_file', 'prefix', 'fqdn', 'host_file_separator', 'storage_provider',
'base_path', 'max_backup_age', 'max_backup_count', 'api_profile', 'transfer_max_bandwidth',
'concurrent_transfers', 'multi_part_upload_threshold', 'host', 'region', 'port', 'secure', 'ssl_verify',
- 'aws_cli_path', 'kms_id', 'backup_grace_period_in_days', 'use_sudo_for_restore', 'k8s_mode']
+ 'aws_cli_path', 'kms_id', 'backup_grace_period_in_days', 'use_sudo_for_restore', 'k8s_mode', 'read_timeout']
)
CassandraConfig = collections.namedtuple(
@@ -116,7 +116,8 @@ def _build_default_config():
'fqdn': socket.getfqdn(),
'region': 'default',
'backup_grace_period_in_days': 10,
- 'use_sudo_for_restore': 'True'
+ 'use_sudo_for_restore': 'True',
+ 'read_timeout': 60
}
config['logging'] = {
diff --git a/medusa/storage/azure_storage.py b/medusa/storage/azure_storage.py
index e929b339..209b3d55 100644
--- a/medusa/storage/azure_storage.py
+++ b/medusa/storage/azure_storage.py
@@ -56,6 +56,8 @@ def __init__(self, config):
logging.getLogger('azure.core.pipeline.policies.http_logging_policy').setLevel(logging.WARNING)
logging.getLogger('chardet.universaldetector').setLevel(logging.WARNING)
+ self.read_timeout = int(config.read_timeout)
+
super().__init__(config)
def _make_blob_service_url(self, account_name, config):
@@ -85,7 +87,10 @@ async def _disconnect(self):
async def _list_blobs(self, prefix=None) -> t.List[AbstractBlob]:
blobs = []
- async for b_props in self.azure_container_client.list_blobs(name_starts_with=str(prefix)):
+ async for b_props in self.azure_container_client.list_blobs(
+ name_starts_with=str(prefix),
+ timeout=self.read_timeout
+ ):
blobs.append(AbstractBlob(
b_props.name,
b_props.size,
@@ -150,6 +155,7 @@ async def _download_blob(self, src: str, dest: str):
downloader = await self.azure_container_client.download_blob(
blob=object_key,
max_concurrency=workers,
+ timeout=self.read_timeout,
)
Path(file_path).parent.mkdir(parents=True, exist_ok=True)
await downloader.readinto(open(file_path, "wb"))
@@ -206,6 +212,7 @@ async def _read_blob_as_bytes(self, blob: AbstractBlob) -> bytes:
downloader = await self.azure_container_client.download_blob(
blob=blob.name,
max_concurrency=1,
+ timeout=self.read_timeout,
)
return await downloader.readall()
diff --git a/medusa/storage/google_storage.py b/medusa/storage/google_storage.py
index 01fc2863..2fae757d 100644
--- a/medusa/storage/google_storage.py
+++ b/medusa/storage/google_storage.py
@@ -49,6 +49,8 @@ def __init__(self, config):
logging.getLogger('gcloud.aio.storage.storage').setLevel(logging.WARNING)
+ self.read_timeout = int(config.read_timeout)
+
super().__init__(config)
def connect(self):
@@ -94,7 +96,8 @@ async def _paginate_objects(self, prefix=None):
# fetch a page
page = await self.gcs_storage.list_objects(
bucket=self.bucket_name,
- params=params
+ params=params,
+ timeout=self.read_timeout,
)
# got nothing, return from the function
@@ -151,7 +154,7 @@ async def _download_blob(self, src: str, dest: str):
stream = await self.gcs_storage.download_stream(
bucket=self.bucket_name,
object_name=object_key,
- timeout=-1,
+ timeout=self.read_timeout if self.read_timeout is not None else -1,
)
Path(file_path).parent.mkdir(parents=True, exist_ok=True)
with open(file_path, 'wb') as f:
@@ -171,6 +174,7 @@ async def _stat_blob(self, object_key: str) -> AbstractBlob:
blob = await self.gcs_storage.download_metadata(
bucket=self.bucket_name,
object_name=object_key,
+ timeout=self.read_timeout,
)
return AbstractBlob(
blob['name'],
@@ -233,7 +237,7 @@ async def _read_blob_as_bytes(self, blob: AbstractBlob) -> bytes:
bucket=self.bucket_name,
object_name=blob.name,
session=self.session,
- timeout=-1
+ timeout=self.read_timeout if self.read_timeout is not None else -1,
)
return content
diff --git a/medusa/storage/s3_base_storage.py b/medusa/storage/s3_base_storage.py
index 2c250f85..24d51af7 100644
--- a/medusa/storage/s3_base_storage.py
+++ b/medusa/storage/s3_base_storage.py
@@ -136,7 +136,8 @@ def connect(self):
region_name=self.credentials.region,
signature_version='v4',
tcp_keepalive=True,
- max_pool_connections=max_pool_size
+ max_pool_connections=max_pool_size,
+ read_timeout=int(self.config.read_timeout),
)
if self.credentials.access_key_id is not None:
self.s3_client = boto3.client(
| Make S3 read timeout configurable
[Project board link](https://github.com/orgs/k8ssandra/projects/8/views/1?pane=issue&itemId=63118978)
This is a feature [requested](https://discord.com/channels/836217371453685760/1237249503296360488/1239907775816011777) by a user on Discord.
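A hedged usage sketch (values illustrative); the new knob mirrors the medusa-example.ini entry in the patch above:
```
import configparser

# Illustrative medusa.ini fragment; read_timeout is the new setting
# (in seconds) that gets passed through to the storage client.
ini_text = """
[storage]
storage_provider = s3
bucket_name = my-backups
read_timeout = 120
"""

parser = configparser.ConfigParser()
parser.read_string(ini_text)
print(int(parser['storage']['read_timeout']))  # 120
```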
| 2024-05-16T09:48:25 | 0.0 | [] | [] |
|||
thelastpickle/cassandra-medusa | thelastpickle__cassandra-medusa-761 | 12de97367720bcc0431bb2e628cef613a1ae9322 | diff --git a/medusa/restore_cluster.py b/medusa/restore_cluster.py
index 9a85b358..923fae27 100644
--- a/medusa/restore_cluster.py
+++ b/medusa/restore_cluster.py
@@ -330,7 +330,7 @@ def _populate_hostmap(self):
# Remove leading/trailing whitespace
_line = line.strip()
# Ignore comment lines
- if _line.startswith('#'):
+ if not _line or _line.startswith('#'):
continue
seed, target, source = _line.split(self.config.storage.host_file_separator)
# in python, bool('False') evaluates to True. Need to test the membership as below
| restore-cluster - host-list mapping file with an empty last line throws error
[Project board link](https://github.com/orgs/k8ssandra/projects/8/views/1?pane=issue&itemId=13952002)
Hi - While restoring data using restore-cluster, an error is observed if the last line in the host-list mapping file is empty. The error is given below. Can you please check it?
```
[2022-07-18 11:15:33,374] ERROR: This error happened during the cluster restore: not enough values to unpack (expected 3, got 1)
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/medusa/restore_cluster.py", line 71, in orchestrate
restore.execute()
File "/usr/local/lib/python3.6/site-packages/medusa/restore_cluster.py", line 153, in execute
self.prepare_restore()
File "/usr/local/lib/python3.6/site-packages/medusa/restore_cluster.py", line 148, in prepare_restore
self._populate_hostmap()
File "/usr/local/lib/python3.6/site-packages/medusa/restore_cluster.py", line 297, in _populate_hostmap
seed, target, source = _line.split(self.config.storage.host_file_separator)
ValueError: not enough values to unpack (expected 3, got 1)
```
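A hedged sketch of the tolerant parsing that would avoid this (the seed,target,source line format is taken from the traceback above; the separator is assumed to be a comma):
```
def parse_host_mapping(lines, separator=','):
    mappings = []
    for line in lines:
        stripped = line.strip()
        # skip blank lines and comments instead of failing to unpack
        if not stripped or stripped.startswith('#'):
            continue
        seed, target, source = stripped.split(separator)
        mappings.append((seed, target, source))
    return mappings

print(parse_host_mapping(['# comment', 'False,node1,node2', '']))
```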
Regards,
┆Issue is synchronized with this [Jira Task](https://k8ssandra.atlassian.net/browse/K8SSAND-1672) by [Unito](https://www.unito.io)
┆friendlyId: K8SSAND-1672
┆priority: Medium
| We should most probably ignore empty lines indeed.
I think the host list mapping technique is not something we should rely upon anymore as it was created back when Medusa couldn't compute mappings on its own. It can now, so using a seed host is most probably the best thing to do.
This, and the fact that there's a workaround (remove the blank line) make this work low priority. | 2024-05-06T18:07:13 | 0.0 | [] | [] |
||
thelastpickle/cassandra-medusa | thelastpickle__cassandra-medusa-716 | 49f88ea06406b42d9e8acdd18ef75e3bbf1e200c | diff --git a/medusa/backup_node.py b/medusa/backup_node.py
index 7c5d1a43..8c88a565 100644
--- a/medusa/backup_node.py
+++ b/medusa/backup_node.py
@@ -20,6 +20,7 @@
import pathlib
import time
import traceback
+import typing as t
import psutil
from retrying import retry
@@ -29,103 +30,10 @@
from medusa.cassandra_utils import Cassandra
from medusa.index import add_backup_start_to_index, add_backup_finish_to_index, set_latest_backup_in_index
from medusa.monitoring import Monitoring
-from medusa.storage import Storage, format_bytes_str
+from medusa.storage import Storage, format_bytes_str, NodeBackup
from medusa.storage.abstract_storage import ManifestObject
-class NodeBackupCache(object):
- NEVER_BACKED_UP = ['manifest.json', 'schema.cql']
-
- def __init__(self, *, node_backup, differential_mode, enable_md5_checks,
- storage_driver, storage_provider, storage_config):
- if node_backup:
- self._node_backup_cache_is_differential = node_backup.is_differential
- self._backup_name = node_backup.name
- self._bucket_name = node_backup.storage.config.bucket_name
- self._data_path = node_backup.data_path
- self._cached_objects = {
- (section['keyspace'], section['columnfamily']): {
- self._sanitize_file_path(pathlib.Path(object['path'])): object
- for object in section['objects']
- }
- for section in json.loads(node_backup.manifest)
- }
- self._differential_mode = differential_mode
- else:
- self._node_backup_cache_is_differential = False
- self._backup_name = None
- self._bucket_name = None
- self._data_path = ''
- self._cached_objects = {}
- self._differential_mode = False
- self._replaced = 0
- self._storage_driver = storage_driver
- self._storage_provider = storage_provider
- self._storage_config = storage_config
- self._enable_md5_checks = enable_md5_checks
-
- def _sanitize_file_path(self, path):
- # Secondary indexes are stored as subdirectories to the base table, starting with a dot.
- # In order to avoid mixing 2i sstables with the base table sstables, the file name isn't enough
- # to perform the comparison on differential backups. We need to retain the subdir name for 2i tables.
- if path.parts[-2].startswith('.'):
- return os.path.join(path.parts[-2], path.parts[-1])
- else:
- return path.name
-
- @property
- def replaced(self):
- return self._replaced
-
- @property
- def backup_name(self):
- return self._backup_name
-
- def replace_or_remove_if_cached(self, *, keyspace, columnfamily, srcs):
- retained = list()
- skipped = list()
- path_prefix = self._storage_driver.get_path_prefix(self._data_path)
- for src in srcs:
- if src.name in self.NEVER_BACKED_UP:
- pass
- else:
- fqtn = (keyspace, columnfamily)
- cached_item = None
- if self._storage_provider.lower() == 'google_storage' or self._differential_mode is True:
- cached_item = self._cached_objects.get(fqtn, {}).get(self._sanitize_file_path(src))
-
- threshold = self._storage_config.multi_part_upload_threshold
- if cached_item is None or not self._storage_driver.file_matches_cache(src,
- cached_item,
- threshold,
- self._enable_md5_checks):
- # We have no matching object in the cache matching the file
- retained.append(src)
- else:
- # File was already present in the previous backup
- # In case the backup isn't differential or the cache backup isn't differential, copy from cache
- if self._differential_mode is False and self._node_backup_cache_is_differential is False:
- prefixed_path = '{}{}'.format(path_prefix, cached_item['path'])
- cached_item_path = self._storage_driver.get_cache_path(prefixed_path)
- retained.append(cached_item_path)
- # This backup is differential, but the cached one wasn't
- # We must re-upload the files according to the differential format
- elif self._differential_mode is True and self._node_backup_cache_is_differential is False:
- retained.append(src)
- else:
- # in case the backup is differential, we want to rule out files, not copy them from cache
- manifest_object = self._make_manifest_object(path_prefix, cached_item)
- logging.debug("Skipping upload of {} which was already part of the previous backup"
- .format(cached_item['path']))
- skipped.append(manifest_object)
- self._replaced += 1
-
- return retained, skipped
-
- def _make_manifest_object(self, path_prefix, cached_item):
- return ManifestObject('{}{}'.format(path_prefix, cached_item['path']), cached_item['size'], cached_item['MD5'])
-
-
def throttle_backup():
"""
Makes sure to only use idle IO for backups
@@ -198,7 +106,7 @@ def handle_backup(config, backup_name_arg, stagger_time, enable_md5_checks_flag,
logging.debug("Done with backup, returning backup result information")
return (info["actual_backup_duration"], info["actual_start_time"], info["end_time"],
- info["node_backup"], info["node_backup_cache"], info["num_files"],
+ info["node_backup"], info["num_files"], info["num_replaced"], info["num_kept"],
info["start_time"], info["backup_name"])
except Exception as e:
@@ -250,20 +158,22 @@ def start_backup(storage, node_backup, cassandra, differential_mode, stagger_tim
# Perform the actual backup
actual_start = datetime.datetime.now()
enable_md5 = enable_md5_checks_flag or medusa.utils.evaluate_boolean(config.checks.enable_md5_checks)
- num_files, node_backup_cache = do_backup(
- cassandra, node_backup, storage, differential_mode, enable_md5, config, backup_name)
+ num_files, num_replaced, num_kept = do_backup(
+ cassandra, node_backup, storage, enable_md5, backup_name
+ )
end = datetime.datetime.now()
actual_backup_duration = end - actual_start
- print_backup_stats(actual_backup_duration, actual_start, end, node_backup, node_backup_cache, num_files, start)
+ print_backup_stats(actual_backup_duration, actual_start, end, node_backup, num_files, num_replaced, num_kept, start)
update_monitoring(actual_backup_duration, backup_name, monitoring, node_backup)
return {
"actual_backup_duration": actual_backup_duration,
"actual_start_time": actual_start,
"end_time": end,
"node_backup": node_backup,
- "node_backup_cache": node_backup_cache,
"num_files": num_files,
+ "num_replaced": num_replaced,
+ "num_kept": num_kept,
"start_time": start,
"backup_name": backup_name
}
@@ -286,17 +196,7 @@ def get_server_type_and_version(cassandra):
return server_type, release_version
-def do_backup(cassandra, node_backup, storage, differential_mode, enable_md5_checks,
- config, backup_name):
- # Load last backup as a cache
- node_backup_cache = NodeBackupCache(
- node_backup=storage.latest_node_backup(fqdn=config.storage.fqdn),
- differential_mode=differential_mode,
- enable_md5_checks=enable_md5_checks,
- storage_driver=storage.storage_driver,
- storage_provider=storage.storage_provider,
- storage_config=config.storage
- )
+def do_backup(cassandra, node_backup, storage, enable_md5_checks, backup_name):
# the cassandra snapshot we use defines __exit__ that cleans up the snapshot
# so even if exception is thrown, a new snapshot will be created on the next run
@@ -304,21 +204,30 @@ def do_backup(cassandra, node_backup, storage, differential_mode, enable_md5_che
logging.info('Creating snapshot')
with cassandra.create_snapshot(backup_name) as snapshot:
manifest = []
- num_files = backup_snapshots(storage, manifest, node_backup, node_backup_cache, snapshot)
+ num_files, num_replaced, num_kept = backup_snapshots(
+ storage, manifest, node_backup, snapshot, enable_md5_checks
+ )
if node_backup.is_dse:
logging.info('Creating DSE snapshot')
with cassandra.create_dse_snapshot(backup_name) as snapshot:
- num_files += backup_snapshots(storage, manifest, node_backup, node_backup_cache, snapshot)
+ dse_num_files, dse_replaced, dse_kept = backup_snapshots(
+ storage, manifest, node_backup, snapshot, enable_md5_checks
+ )
+ num_files += dse_num_files
+ num_replaced += dse_replaced
+ num_kept += dse_kept
logging.info('Updating backup index')
node_backup.manifest = json.dumps(manifest)
add_backup_finish_to_index(storage, node_backup)
set_latest_backup_in_index(storage, node_backup)
- return num_files, node_backup_cache
+ return num_files, num_replaced, num_kept
-def print_backup_stats(actual_backup_duration, actual_start, end, node_backup, node_backup_cache, num_files, start):
+def print_backup_stats(
+ actual_backup_duration, actual_start, end, node_backup, num_files, num_replaced, num_kept, start
+):
logging.info('Backup done')
logging.info("""- Started: {:%Y-%m-%d %H:%M:%S}
@@ -329,18 +238,18 @@ def print_backup_stats(actual_backup_duration, actual_start, end, node_backup, n
'for other nodes)'.format(actual_backup_duration))
logging.info('- {} files, {}'.format(
- num_files,
+ num_files + num_kept,
format_bytes_str(node_backup.size())
))
- logging.info('- {} files copied from host'.format(
- num_files - node_backup_cache.replaced
+ logging.info('- {} files copied from host ({} new, {} reuploaded)'.format(
+ num_files, num_files - num_replaced, num_replaced
))
- if node_backup_cache.backup_name is not None:
- logging.info('- {} copied from previous backup ({})'.format(
- node_backup_cache.replaced,
- node_backup_cache.backup_name
+ if node_backup.name is not None:
+ logging.info('- {} kept from previous backup ({})'.format(
+ num_kept,
+ node_backup.name
))
@@ -359,40 +268,113 @@ def update_monitoring(actual_backup_duration, backup_name, monitoring, node_back
logging.debug('Done emitting metrics')
-def backup_snapshots(storage, manifest, node_backup, node_backup_cache, snapshot):
+def backup_snapshots(storage, manifest, node_backup, snapshot, enable_md5_checks):
try:
num_files = 0
- for snapshot_path in snapshot.find_dirs():
- logging.debug("Backing up {}".format(snapshot_path))
+ replaced = 0
+ kept = 0
+ multipart_threshold = storage.config.multi_part_upload_threshold
+
+ if node_backup.is_differential:
+ logging.info(f'Listing already backed up files for node {node_backup.fqdn}')
+ files_in_storage = storage.list_files_per_table()
+ else:
+ files_in_storage = dict()
- (needs_backup, already_backed_up) = node_backup_cache.replace_or_remove_if_cached(
+ for snapshot_path in snapshot.find_dirs():
+ fqtn = f"{snapshot_path.keyspace}.{snapshot_path.columnfamily}"
+ logging.info(f"Backing up {fqtn}")
+
+ needs_backup, needs_reupload, already_backed_up = check_already_uploaded(
+ storage=storage,
+ node_backup=node_backup,
+ files_in_storage=files_in_storage,
+ multipart_threshold=multipart_threshold,
+ enable_md5_checks=enable_md5_checks,
keyspace=snapshot_path.keyspace,
- columnfamily=snapshot_path.columnfamily,
srcs=list(snapshot_path.list_files()))
- num_files += len(needs_backup) + len(already_backed_up)
+ replaced += len(needs_reupload)
+ kept += len(already_backed_up)
+ num_files += len(needs_backup) + len(needs_reupload)
- dst_path = str(node_backup.datapath(keyspace=snapshot_path.keyspace,
- columnfamily=snapshot_path.columnfamily))
+ dst_path = str(node_backup.datapath(
+ keyspace=snapshot_path.keyspace,
+ columnfamily=snapshot_path.columnfamily)
+ )
logging.debug("Snapshot destination path: {}".format(dst_path))
manifest_objects = list()
- if len(needs_backup) > 0:
- manifest_objects += storage.storage_driver.upload_blobs(needs_backup, dst_path)
+ needs_upload = needs_backup + needs_reupload
+ if len(needs_upload) > 0:
+ manifest_objects += storage.storage_driver.upload_blobs(needs_upload, dst_path)
+
+ # inform about fixing backups
+ if len(needs_reupload) > 0:
+ logging.info(
+ f"Re-uploaded {len(needs_reupload)} files in {fqtn} because they were not found in storage"
+ )
# Reintroducing already backed up objects in the manifest in differential
- for obj in already_backed_up:
- manifest_objects.append(obj)
+ if len(already_backed_up) > 0 and node_backup.is_differential:
+ logging.info(
+ f"Skipping upload of {len(already_backed_up)} files in {fqtn} because they are already in storage"
+ )
+ for obj in already_backed_up:
+ manifest_objects.append(obj)
manifest.append(make_manifest_object(node_backup.fqdn, snapshot_path, manifest_objects, storage))
- return num_files
+ return num_files, replaced, kept
except Exception as e:
logging.error('Error occurred during backup: {}'.format(str(e)))
traceback.print_exc()
raise e
+def check_already_uploaded(
+ storage: Storage,
+ node_backup: NodeBackup,
+ multipart_threshold: int,
+ enable_md5_checks: bool,
+ files_in_storage: t.Dict[str, t.Dict[str, t.Dict[str, ManifestObject]]],
+ keyspace: str,
+ srcs: t.List[pathlib.Path]
+) -> t.Tuple[t.List[pathlib.Path], t.List[pathlib.Path], t.List[ManifestObject]]:
+
+ NEVER_BACKED_UP = ['manifest.json', 'schema.cql']
+ needs_backup = list()
+ needs_reupload = list()
+ already_backed_up = list()
+
+    # in full mode we always upload everything
+ if node_backup.is_differential is False:
+ return [src for src in srcs if src.name not in NEVER_BACKED_UP], needs_reupload, already_backed_up
+
+ keyspace_files_in_storage = files_in_storage.get(keyspace, {})
+
+ for src in srcs:
+ if src.name in NEVER_BACKED_UP:
+ continue
+ else:
+ # safe_table_name is either a table, or a "table.2i_name"
+ _, safe_table_name = Storage.sanitize_keyspace_and_table_name(src)
+ item_in_storage = keyspace_files_in_storage.get(safe_table_name, {}).get(src.name, None)
+ # object is not in storage
+ if item_in_storage is None:
+ needs_backup.append(src)
+ continue
+ # object is in storage but with different size or digest
+ storage_driver = storage.storage_driver
+ if not storage_driver.file_matches_storage(src, item_in_storage, multipart_threshold, enable_md5_checks):
+ needs_reupload.append(src)
+ continue
+ # object is in storage with correct size and digest
+ already_backed_up.append(item_in_storage)
+
+ return needs_backup, needs_reupload, already_backed_up
+
+
def make_manifest_object(fqdn, snapshot_path, manifest_objects, storage):
return {
'keyspace': snapshot_path.keyspace,
diff --git a/medusa/storage/__init__.py b/medusa/storage/__init__.py
index 52833a86..73ffc1ce 100644
--- a/medusa/storage/__init__.py
+++ b/medusa/storage/__init__.py
@@ -18,6 +18,7 @@
import operator
import pathlib
import re
+import typing as t
from retrying import retry
@@ -25,6 +26,7 @@
from medusa.storage.cluster_backup import ClusterBackup
from medusa.storage.node_backup import NodeBackup
+from medusa.storage.abstract_storage import ManifestObject, AbstractBlob
from medusa.storage.google_storage import GoogleStorage
from medusa.storage.local_storage import LocalStorage
from medusa.storage.s3_storage import S3Storage
@@ -444,3 +446,61 @@ def remove_latest_backup_marker(self, fqdn):
def delete_objects(self, objects, concurrent_transfers=None):
self.storage_driver.delete_objects(objects, concurrent_transfers)
+
+ @staticmethod
+ def sanitize_keyspace_and_table_name(path: pathlib.Path) -> t.Tuple[str, str]:
+ """
+ This function makes sure that for a given path (be it a local or a storage one), we identify an SSTable file
+ together with its parent, which might sometimes be a secondary index.
+ Secondary indices live as hidden folders in the regular data folder, and contain regular SSTable files.
+ Similar logic applies to DSE (6.8) internal files.
+
+ When dealing with local paths, the path is a string like
+ /some/path/to/data/folder/keyspace/table-cfid/snapshots/snapshot-name/nb-5-big-CompressionInfo.db
+ or
+ /some/path/to/data/folder/keyspace/table-cfid/snapshots/snapshot-name/.index_name/nb-5-big-CompressionInfo.db
+ """
+ # 2i tables or the dse internal folder, we merge table and index name as a new table
+ # we're dealing with local path, which features a snapshot
+ if str(path).startswith('/'):
+ is_2i_or_dse = path.parent.name.startswith('.') or path.parent.name.endswith('nodes')
+ chunks = str(path).split('/')
+ if (len(chunks) < 7 and is_2i_or_dse) or len(chunks) < 6:
+ raise RuntimeError(f'Path {path} does not look like a correct SSTable location')
+ if is_2i_or_dse:
+ k, t, index_name = chunks[-6], chunks[-5], chunks[-2]
+ keyspace, table = k, f"{t}.{index_name}"
+ else:
+ keyspace, table = chunks[-5], chunks[-4]
+ # it's a path in a storage, without a snapshot
+ else:
+ if path.parent.name.startswith('.') or path.parent.name.endswith('nodes'):
+ keyspace, table = path.parent.parent.parent.name, f"{path.parent.parent.name}.{path.parent.name}"
+ else:
+ keyspace, table = path.parent.parent.name, path.parent.name
+ return keyspace, table
+
+ @staticmethod
+ def get_keyspace_and_table(manifest_object: ManifestObject) -> t.Tuple[str, str, ManifestObject]:
+ p = pathlib.Path(manifest_object.path)
+ # 2i tables or the dse internal folder, we merge table and index name as a new table
+ keyspace, table = Storage.sanitize_keyspace_and_table_name(p)
+ return keyspace, table, manifest_object
+
+ def list_files_per_table(self) -> t.Dict[str, t.Dict[str, t.Set[ManifestObject]]]:
+ if self.config.prefix != '':
+ prefix = f"{self.config.prefix}/"
+ else:
+ prefix = ""
+ fdns_data_prefix = f"{prefix}{self.config.fqdn}/data/"
+ all_blobs: t.List[AbstractBlob] = self.storage_driver.list_blobs(prefix=fdns_data_prefix)
+ all_files = [ManifestObject(blob.name, blob.size, blob.hash) for blob in all_blobs]
+ keyspace_table_mo_tuples = map(Storage.get_keyspace_and_table, all_files)
+
+ files_by_keyspace_and_table = dict()
+ for ks, ks_files in itertools.groupby(keyspace_table_mo_tuples, lambda t: t[0]):
+ files_by_keyspace_and_table[ks] = dict()
+ for tt, t_files in itertools.groupby(ks_files, lambda tf: tf[1]):
+ files_by_keyspace_and_table[ks][tt] = {pathlib.Path(tf[2].path).name: tf[2] for tf in t_files}
+
+ return files_by_keyspace_and_table
diff --git a/medusa/storage/abstract_storage.py b/medusa/storage/abstract_storage.py
index de5f1147..940d9297 100644
--- a/medusa/storage/abstract_storage.py
+++ b/medusa/storage/abstract_storage.py
@@ -21,6 +21,7 @@
import hashlib
import io
import logging
+import pathlib
import typing as t
from pathlib import Path
@@ -401,15 +402,15 @@ def blob_matches_manifest(blob, object_in_manifest, enable_md5_checks=False):
@staticmethod
@abc.abstractmethod
- def file_matches_cache(src, cached_item, threshold=None, enable_md5_checks=False):
+ def file_matches_storage(src: pathlib.Path, cached_item: ManifestObject, threshold=None, enable_md5_checks=False):
"""
- Compares a local file with its entry in the cache of backed up items. This happens when doing an actual backup.
+ Compares a local file with its version in the storage backend. This happens when doing an actual backup.
This method is expected to take care of actually computing the local hash, but leave the actual comparing to
_compare_blob_with_manifest().
- :param src: typically, local file that comes as a string/path
- :param cached_item: usually a reference to a item in the storage, mostly a dict. Likely a manifest object
+ :param src: typically, local file that comes as a Path
+ :param cached_item: a reference to the storage, should be via a manifest object
:param threshold: files bigger than this are digested by chunks
:param enable_md5_checks: boolean flag to enable md5 file generation and comparison to the md5
found in the manifest (only applicable to some cloud storage implementations that compare md5 hashes)
diff --git a/medusa/storage/azure_storage.py b/medusa/storage/azure_storage.py
index d44484b6..e929b339 100644
--- a/medusa/storage/azure_storage.py
+++ b/medusa/storage/azure_storage.py
@@ -20,6 +20,7 @@
import json
import logging
import os
+import pathlib
import typing as t
from azure.core.credentials import AzureNamedKeyCredential
@@ -233,12 +234,12 @@ def blob_matches_manifest(blob, object_in_manifest, enable_md5_checks=False):
)
@staticmethod
- def file_matches_cache(src, cached_item, threshold=None, enable_md5_checks=False):
+ def file_matches_storage(src: pathlib.Path, cached_item: ManifestObject, threshold=None, enable_md5_checks=False):
return AzureStorage.compare_with_manifest(
actual_size=src.stat().st_size,
- size_in_manifest=cached_item['size'],
+ size_in_manifest=cached_item.size,
actual_hash=AbstractStorage.generate_md5_hash(src) if enable_md5_checks else None,
- hash_in_manifest=cached_item['MD5'],
+ hash_in_manifest=cached_item.MD5,
)
@staticmethod
diff --git a/medusa/storage/google_storage.py b/medusa/storage/google_storage.py
index 5ea6df28..01fc2863 100644
--- a/medusa/storage/google_storage.py
+++ b/medusa/storage/google_storage.py
@@ -12,6 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+import pathlib
import aiohttp
import base64
@@ -254,12 +255,12 @@ def blob_matches_manifest(blob, object_in_manifest, enable_md5_checks=False):
)
@staticmethod
- def file_matches_cache(src, cached_item, threshold=None, enable_md5_checks=False):
+ def file_matches_storage(src: pathlib.Path, cached_item: ManifestObject, threshold=None, enable_md5_checks=False):
return GoogleStorage.compare_with_manifest(
actual_size=src.stat().st_size,
- size_in_manifest=cached_item['size'],
+ size_in_manifest=cached_item.size,
actual_hash=AbstractStorage.generate_md5_hash(src) if enable_md5_checks else None,
- hash_in_manifest=cached_item['MD5']
+ hash_in_manifest=cached_item.MD5
)
@staticmethod
diff --git a/medusa/storage/local_storage.py b/medusa/storage/local_storage.py
index bc2a4eed..41e0f7c7 100644
--- a/medusa/storage/local_storage.py
+++ b/medusa/storage/local_storage.py
@@ -18,6 +18,7 @@
import io
import logging
import os
+import pathlib
import typing as t
from pathlib import Path
@@ -186,10 +187,10 @@ def blob_matches_manifest(blob, object_in_manifest, enable_md5_checks=False):
)
@staticmethod
- def file_matches_cache(src, cached_item, threshold=None, enable_md5_checks=False):
+ def file_matches_storage(src: pathlib.Path, cached_item: ManifestObject, threshold=None, enable_md5_checks=False):
return LocalStorage.compare_with_manifest(
actual_size=src.stat().st_size,
- size_in_manifest=cached_item['size']
+ size_in_manifest=cached_item.size
)
@staticmethod
diff --git a/medusa/storage/s3_base_storage.py b/medusa/storage/s3_base_storage.py
index ad6648ef..2c250f85 100644
--- a/medusa/storage/s3_base_storage.py
+++ b/medusa/storage/s3_base_storage.py
@@ -15,6 +15,8 @@
# limitations under the License.
import asyncio
import base64
+import pathlib
+
import boto3
import botocore.session
import concurrent.futures
@@ -426,7 +428,7 @@ def blob_matches_manifest(blob: AbstractBlob, object_in_manifest: dict, enable_m
)
@staticmethod
- def file_matches_cache(src, cached_item, threshold=None, enable_md5_checks=False):
+ def file_matches_storage(src: pathlib.Path, cached_item: ManifestObject, threshold=None, enable_md5_checks=False):
threshold = int(threshold) if threshold else -1
@@ -440,9 +442,9 @@ def file_matches_cache(src, cached_item, threshold=None, enable_md5_checks=False
return S3BaseStorage.compare_with_manifest(
actual_size=src.stat().st_size,
- size_in_manifest=cached_item['size'],
+ size_in_manifest=cached_item.size,
actual_hash=md5_hash,
- hash_in_manifest=cached_item['MD5'],
+ hash_in_manifest=cached_item.MD5,
threshold=threshold
)
diff --git a/medusa/storage/s3_rgw.py b/medusa/storage/s3_rgw.py
index 78b9fff2..2ca39474 100644
--- a/medusa/storage/s3_rgw.py
+++ b/medusa/storage/s3_rgw.py
@@ -12,7 +12,9 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+import pathlib
+from medusa.storage.abstract_storage import ManifestObject
from medusa.storage.s3_base_storage import S3BaseStorage
from medusa.storage.s3_storage import S3Storage
@@ -28,9 +30,9 @@ def blob_matches_manifest(blob, object_in_manifest, enable_md5_checks=False):
return S3Storage.blob_matches_manifest(blob, object_in_manifest, enable_md5_checks)
@staticmethod
- def file_matches_cache(src, cached_item, threshold=None, enable_md5_checks=False):
+ def file_matches_storage(src: pathlib.Path, cached_item: ManifestObject, threshold=None, enable_md5_checks=False):
# for S3RGW, we never set threshold so the S3's multipart never happens
- return S3Storage.file_matches_cache(src, cached_item, None, enable_md5_checks)
+ return S3Storage.file_matches_storage(src, cached_item, None, enable_md5_checks)
@staticmethod
def compare_with_manifest(actual_size, size_in_manifest, actual_hash=None, hash_in_manifest=None, threshold=None):
diff --git a/packaging/docker-build/Dockerfile b/packaging/docker-build/Dockerfile
index a4f0b714..38d80aa2 100644
--- a/packaging/docker-build/Dockerfile
+++ b/packaging/docker-build/Dockerfile
@@ -58,7 +58,7 @@ RUN apt-get update \
RUN pip3 install --upgrade pip
RUN cd /tmp && \
- wget http://mirrors.kernel.org/ubuntu/pool/universe/d/dh-virtualenv/dh-virtualenv_1.2.2-1.4_all.deb && \
+ wget http://mirrors.kernel.org/ubuntu/pool/universe/d/dh-virtualenv/dh-virtualenv_1.2.2-1.4build1_all.deb && \
gdebi -n dh-virtualenv*.deb && \
rm dh-virtualenv_*.deb
| Check actual file presence when performing differential backups
Follow-up to #363.
Verify the presence of blobs that the new backup is going to "inherit" from the old ones instead of assuming that they are all present. If a blob referenced by a previous backup's manifest is missing and we have a local copy of the file with the same MD5 as listed in the manifest, just upload it again. This will patch the previous backups and make them whole again, as well as prevent the current backup from becoming corrupted.
This prevents a corrupted backup from breaking all subsequent backups.
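To make the intended decision concrete, here is a minimal, self-contained sketch of the three-way classification this calls for (the patch above implements it in `check_already_uploaded`). The `StoredObject` dataclass is a hypothetical stand-in for Medusa's `ManifestObject`, and the real code additionally honors a multipart threshold and an `enable_md5_checks` flag:

```python
import hashlib
import tempfile
from dataclasses import dataclass
from pathlib import Path
from typing import Optional


@dataclass
class StoredObject:
    """Hypothetical stand-in for what the storage backend reports about a blob."""
    size: int
    md5: str


def classify_file(src: Path, in_storage: Optional[StoredObject]) -> str:
    """Sort one local file into one of the three buckets described above."""
    if in_storage is None:
        # never uploaded: regular differential upload
        return "needs_backup"
    local_md5 = hashlib.md5(src.read_bytes()).hexdigest()
    if in_storage.size != src.stat().st_size or in_storage.md5 != local_md5:
        # referenced by an older manifest but missing/corrupt in storage:
        # re-uploading it heals the older backups too
        return "needs_reupload"
    # present and matching: the new manifest can just reference the existing blob
    return "already_backed_up"


if __name__ == "__main__":
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(b"sstable bytes")
    src = Path(f.name)
    good = StoredObject(size=src.stat().st_size,
                        md5=hashlib.md5(src.read_bytes()).hexdigest())
    print(classify_file(src, None))                           # needs_backup
    print(classify_file(src, StoredObject(size=0, md5="x")))  # needs_reupload
    print(classify_file(src, good))                           # already_backed_up
```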
| 2024-03-01T14:52:34 | 0.0 | [] | [] |
||
thelastpickle/cassandra-medusa | thelastpickle__cassandra-medusa-704 | 4ed1fd538f6138205cddecf77b16a73d0fe2fd9a | diff --git a/medusa/cassandra_utils.py b/medusa/cassandra_utils.py
index 04b348dca..0878c68a7 100644
--- a/medusa/cassandra_utils.py
+++ b/medusa/cassandra_utils.py
@@ -437,6 +437,15 @@ def rebuild_search_index(self):
logging.debug(f'Rebuilding search index for {fqtn}')
session.execute(f"REBUILD SEARCH INDEX ON {fqtn}")
+ @staticmethod
+ def _ignore_snapshots(folder, contents):
+ ignored = set()
+ if folder.endswith('metadata/snapshots'):
+ logging.info(f'Ignoring {contents} in folder {folder}')
+ for c in contents:
+ ignored.add(c)
+ return ignored
+
def create_dse_snapshot(self, backup_name):
"""
There is no good way of making snapshot of DSE files
@@ -445,11 +454,12 @@ def create_dse_snapshot(self, backup_name):
That folder is nested in the parent folder, just like for regular tables
This way, we can reuse a lot of code later on
"""
+
tag = "{}{}".format(self.SNAPSHOT_PREFIX, backup_name)
if not self.dse_snapshot_exists(tag):
src_path = self._dse_root / self._dse_metadata_folder
dst_path = self._dse_root / self._dse_metadata_folder / 'snapshots' / tag
- shutil.copytree(src_path, dst_path)
+ shutil.copytree(src_path, dst_path, ignore=Cassandra._ignore_snapshots)
return Cassandra.DseSnapshot(self, tag)
diff --git a/medusa/service/snapshot/nodetool_snapshot_service.py b/medusa/service/snapshot/nodetool_snapshot_service.py
index 024202831..0936e4ffe 100644
--- a/medusa/service/snapshot/nodetool_snapshot_service.py
+++ b/medusa/service/snapshot/nodetool_snapshot_service.py
@@ -29,7 +29,11 @@ def create_snapshot(self, *, tag):
# create the Nodetool command
cmd = self._nodetool.nodetool + ['snapshot', '-t', tag]
logging.debug('Executing: {}'.format(' '.join(cmd)))
- subprocess.check_call(cmd, stdout=subprocess.DEVNULL, universal_newlines=True)
+ try:
+ subprocess.check_output(cmd, universal_newlines=True)
+ except subprocess.CalledProcessError as e:
+ logging.error('nodetool output: {}'.format(e.output))
+ logging.error('Creating snapshot failed and without a snapshot we cannot do a backup')
def delete_snapshot(self, *, tag):
# create the Nodetool command
| DSE snapshots are recursive and therefore broken
The way Medusa creates DSE snapshots is by copying the DSE metadata folder into itself, under a `snapshots` folder. If this happens repeatedly, the `shutil.copytree` recursion explodes in depth, producing a tree so deeply nested that things break (`[Errno 63] File name too long:`).
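A minimal, runnable sketch of the shape of the fix the patch above applies (using throwaway temp paths rather than Medusa's real layout): an `ignore` callback handed to `shutil.copytree` empties out the metadata folder's own `snapshots/` directory, so repeated snapshots stop nesting inside one another:

```python
import shutil
import tempfile
from pathlib import Path


def ignore_snapshots(folder, contents):
    # when copytree reaches the metadata folder's own snapshots/ directory,
    # skip all of its entries so previous snapshots are never re-copied
    # (mirrors the patch's Cassandra._ignore_snapshots)
    if folder.endswith("metadata/snapshots"):
        return set(contents)
    return set()


root = Path(tempfile.mkdtemp())
metadata = root / "metadata"
(metadata / "nodes").mkdir(parents=True)
(metadata / "nodes" / "some-file").write_text("dse metadata")

# without ignore=..., snapshot N would contain copies of snapshots 1..N-1
for tag in ("medusa-b1", "medusa-b2", "medusa-b3"):
    shutil.copytree(metadata, metadata / "snapshots" / tag,
                    ignore=ignore_snapshots)

# each snapshot now holds only 'nodes' plus an empty snapshots/ directory
print(sorted(str(p.relative_to(metadata))
             for p in (metadata / "snapshots" / "medusa-b3").rglob("*")))
```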
| 2024-01-12T15:35:37 | 0.0 | [] | [] |
|||
thelastpickle/cassandra-medusa | thelastpickle__cassandra-medusa-693 | 4ddaeeb41d4d875e360bac047da569b0f86b7f21 | diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 7e66169a..22db4083 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -304,7 +304,7 @@ jobs:
- uses: codecov/codecov-action@v1
name: Report code coverage
-
+
k8ssandra-e2e-tests:
needs: [build]
runs-on: ubuntu-latest
@@ -312,6 +312,7 @@ jobs:
matrix:
e2e_test:
- CreateSingleMedusaJob
+ - CreateSingleDseSearchDatacenterCluster
fail-fast: false
name: k8ssandra-${{ matrix.e2e_test }}
env:
@@ -343,7 +344,7 @@ jobs:
- uses: actions/checkout@v3
with:
repository: k8ssandra/k8ssandra-operator
- ref: main
+ ref: radovan/ec2-dse-medusa
path: k8ssandra-operator
- name: Set up Go
uses: actions/setup-go@v3
diff --git a/.gitignore b/.gitignore
index 3fdb4a0d..460e06d9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -29,4 +29,5 @@ debian/cassandra-medusa/
.coverage
coverage.xml
pytest.ini
-.python-version
\ No newline at end of file
+.python-version
+tests/resources/dse/dse-*
diff --git a/medusa/backup_node.py b/medusa/backup_node.py
index 7c180f11..7c5d1a43 100644
--- a/medusa/backup_node.py
+++ b/medusa/backup_node.py
@@ -224,11 +224,16 @@ def start_backup(storage, node_backup, cassandra, differential_mode, stagger_tim
logging.info('Saving tokenmap and schema')
schema, tokenmap = get_schema_and_tokenmap(cassandra)
-
node_backup.schema = schema
node_backup.tokenmap = json.dumps(tokenmap)
+
+ logging.info('Saving server version')
+ server_type, release_version = get_server_type_and_version(cassandra)
+ node_backup.server_version = json.dumps({'server_type': server_type, 'release_version': release_version})
+
if differential_mode is True:
node_backup.differential = mode
+
add_backup_start_to_index(storage, node_backup)
if stagger_time:
@@ -274,6 +279,13 @@ def get_schema_and_tokenmap(cassandra):
return schema, tokenmap
+@retry(stop_max_attempt_number=7, wait_exponential_multiplier=10000, wait_exponential_max=120000)
+def get_server_type_and_version(cassandra):
+ with cassandra.new_session() as cql_session:
+ server_type, release_version = cql_session.get_server_type_and_release_version()
+ return server_type, release_version
+
+
def do_backup(cassandra, node_backup, storage, differential_mode, enable_md5_checks,
config, backup_name):
# Load last backup as a cache
@@ -294,6 +306,11 @@ def do_backup(cassandra, node_backup, storage, differential_mode, enable_md5_che
manifest = []
num_files = backup_snapshots(storage, manifest, node_backup, node_backup_cache, snapshot)
+ if node_backup.is_dse:
+ logging.info('Creating DSE snapshot')
+ with cassandra.create_dse_snapshot(backup_name) as snapshot:
+ num_files += backup_snapshots(storage, manifest, node_backup, node_backup_cache, snapshot)
+
logging.info('Updating backup index')
node_backup.manifest = json.dumps(manifest)
add_backup_finish_to_index(storage, node_backup)
diff --git a/medusa/cassandra_utils.py b/medusa/cassandra_utils.py
index 82497d5b..04b348dc 100644
--- a/medusa/cassandra_utils.py
+++ b/medusa/cassandra_utils.py
@@ -21,6 +21,7 @@
import logging
import os
import pathlib
+import shutil
import shlex
import socket
import subprocess
@@ -218,6 +219,17 @@ def schema_path_mapping(self):
def execute(self, query):
return self.session.execute(query)
+ def get_server_type_and_release_version(self):
+ server_type = 'cassandra'
+ release_version = 'None'
+ rows = self.session.execute("SELECT * FROM system.local")
+ for row in rows:
+ if hasattr(row, 'dse_version'):
+ server_type = 'dse'
+ if hasattr(row, 'release_version'):
+ release_version = row.release_version
+ return server_type, release_version
+
class CassandraConfigReader(object):
DEFAULT_CASSANDRA_CONFIG = '/etc/cassandra/cassandra.yaml'
@@ -329,6 +341,7 @@ def seeds(self):
class Cassandra(object):
SNAPSHOT_PATTERN = '*/*/snapshots/{}'
+ DSE_SNAPSHOT_PATTERN = '*/snapshots/{}'
SNAPSHOT_PREFIX = 'medusa-'
def __init__(self, config, contact_point=None, release_version=None):
@@ -342,6 +355,8 @@ def __init__(self, config, contact_point=None, release_version=None):
config_reader = CassandraConfigReader(cassandra_config.config_file, release_version)
self._cassandra_config_file = cassandra_config.config_file
self._root = config_reader.root
+ self._dse_root = self._root.parent
+ self._dse_metadata_folder = 'metadata'
self._commitlog_path = config_reader.commitlog_directory
self._saved_caches_path = config_reader.saved_caches_directory
self._hostname = contact_point if contact_point is not None else config_reader.listen_address
@@ -404,6 +419,69 @@ def rpc_port(self):
def release_version(self):
return self._release_version
+ @property
+ def dse_metadata_path(self):
+ return self._dse_root / self._dse_metadata_folder
+
+ @property
+ def dse_search_path(self):
+ # the DSE Search files are next to regular keyspace folders, but are not a real keyspace
+ return self._root / 'solr.data'
+
+ def rebuild_search_index(self):
+ logging.debug('Opening new session to restore DSE indexes')
+ with self._cql_session_provider.new_session() as session:
+ rows = session.execute("SELECT core_name FROM solr_admin.solr_resources")
+ fqtns_with_index = {r.core_name for r in rows}
+ for fqtn in fqtns_with_index:
+ logging.debug(f'Rebuilding search index for {fqtn}')
+ session.execute(f"REBUILD SEARCH INDEX ON {fqtn}")
+
+ def create_dse_snapshot(self, backup_name):
+ """
+ There is no good way of making snapshot of DSE files
+ They are not SSTables, so we cannot just hard-link them and get immutable files to work with
+ So we have a poor man's alternative of just copying them into a folder
+ That folder is nested in the parent folder, just like for regular tables
+ This way, we can reuse a lot of code later on
+ """
+ tag = "{}{}".format(self.SNAPSHOT_PREFIX, backup_name)
+ if not self.dse_snapshot_exists(tag):
+ src_path = self._dse_root / self._dse_metadata_folder
+ dst_path = self._dse_root / self._dse_metadata_folder / 'snapshots' / tag
+ shutil.copytree(src_path, dst_path)
+
+ return Cassandra.DseSnapshot(self, tag)
+
+ class DseSnapshot(object):
+
+ def __init__(self, parent, tag):
+ self._parent = parent
+ self._tag = tag
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ logging.debug('Cleaning up DSE snapshot')
+ self.delete()
+
+ def find_dirs(self):
+ dse_folder = self._parent._dse_metadata_folder
+ return [
+ SnapshotPath(
+ pathlib.Path(self._parent._dse_root) / dse_folder / 'snapshots', 'dse', dse_folder
+ )
+ ]
+
+ def delete(self):
+ dse_folder = self._parent._dse_metadata_folder
+ dse_folder_path = self._parent._dse_root / dse_folder / 'snapshots' / self._tag
+ shutil.rmtree(dse_folder_path)
+
+ def __repr__(self):
+ return '{}<{}>'.format(self.__class__.__qualname__, self._tag)
+
class Snapshot(object):
def __init__(self, parent, tag):
self._parent = parent
@@ -413,7 +491,7 @@ def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
- logging.debug('Cleaning up snapshot')
+ logging.debug('Cleaning up Cassandra snapshot')
self.delete()
@property
@@ -477,6 +555,14 @@ def snapshot_exists(self, tag):
return True
return False
+ def dse_snapshot_exists(self, tag):
+ # dse files live one directory up from the data folder
+ # the root field should point to the data directory as defined in the cassandra.yaml
+ for snapshot in self._dse_root.glob(self.DSE_SNAPSHOT_PATTERN.format('*')):
+ if snapshot.is_dir() and snapshot.name == tag:
+ return True
+ return False
+
def create_snapshot_command(self, backup_name):
"""
:param backup_name: string name of the medusa backup
diff --git a/medusa/config.py b/medusa/config.py
index 94235eb5..d8ec0e73 100644
--- a/medusa/config.py
+++ b/medusa/config.py
@@ -38,9 +38,9 @@
'CassandraConfig',
['start_cmd', 'stop_cmd', 'config_file', 'cql_username', 'cql_password', 'check_running', 'is_ccm',
'sstableloader_bin', 'nodetool_username', 'nodetool_password', 'nodetool_password_file_path', 'nodetool_host',
- 'nodetool_port', 'certfile', 'usercert', 'userkey', 'sstableloader_ts', 'sstableloader_tspw',
- 'sstableloader_ks', 'sstableloader_kspw', 'nodetool_ssl', 'resolve_ip_addresses', 'use_sudo', 'nodetool_flags',
- 'cql_k8s_secrets_path', 'nodetool_k8s_secrets_path']
+ 'nodetool_executable', 'nodetool_port', 'certfile', 'usercert', 'userkey', 'sstableloader_ts',
+ 'sstableloader_tspw', 'sstableloader_ks', 'sstableloader_kspw', 'nodetool_ssl', 'resolve_ip_addresses', 'use_sudo',
+ 'nodetool_flags', 'cql_k8s_secrets_path', 'nodetool_k8s_secrets_path']
)
SSHConfig = collections.namedtuple(
@@ -136,6 +136,7 @@ def _build_default_config():
'sstableloader_bin': 'sstableloader',
'resolve_ip_addresses': 'True',
'use_sudo': 'True',
+ 'nodetool_executable': 'nodetool',
'nodetool_flags': '-Dcom.sun.jndi.rmiURLParsing=legacy'
}
diff --git a/medusa/nodetool.py b/medusa/nodetool.py
index 78101ee3..3b2810f0 100644
--- a/medusa/nodetool.py
+++ b/medusa/nodetool.py
@@ -17,8 +17,9 @@
class Nodetool(object):
def __init__(self, cassandra_config):
+ nodetool_executable = cassandra_config.nodetool_executable
nodetool_flags = cassandra_config.nodetool_flags.split(" ") if cassandra_config.nodetool_flags else []
- self._nodetool = ['nodetool'] + nodetool_flags
+ self._nodetool = [nodetool_executable] + nodetool_flags
if cassandra_config.nodetool_ssl == "true":
self._nodetool += ['--ssl']
if cassandra_config.nodetool_username is not None:
diff --git a/medusa/restore_node.py b/medusa/restore_node.py
index 979d620f..e0a3a0b5 100644
--- a/medusa/restore_node.py
+++ b/medusa/restore_node.py
@@ -98,7 +98,10 @@ def restore_node_locally(config, temp_dir, backup_name, in_place, keep_auth, see
# especially around system tables.
use_sudo = medusa.utils.evaluate_boolean(config.storage.use_sudo_for_restore)
clean_path(cassandra.commit_logs_path, use_sudo, keep_folder=True)
- clean_path(cassandra.saved_caches_path, use_sudo, keep_folder=True)
+
+ if node_backup.is_dse:
+ clean_path(cassandra.dse_metadata_path, use_sudo, keep_folder=True)
+ clean_path(cassandra.dse_search_path, use_sudo, keep_folder=True)
# move backup data to Cassandra data directory according to system table
logging.info('Moving backup data to Cassandra data directory')
@@ -134,6 +137,12 @@ def restore_node_locally(config, temp_dir, backup_name, in_place, keep_auth, see
cassandra.start_with_implicit_token()
else:
cassandra.start(tokens)
+
+ # if we're restoring DSE, we need to explicitly trigger Search index rebuild
+ if node_backup.is_dse:
+ logging.info('Triggering DSE Search index rebuild')
+ cassandra.rebuild_search_index()
+
elif not in_place:
# Kubernetes will manage the lifecycle, but we still need to modify the tokens
cassandra.replace_tokens_in_cassandra_yaml_and_disable_bootstrap(tokens)
@@ -299,9 +308,15 @@ def maybe_restore_section(section, download_dir, cassandra_data_dir, in_place, k
logging.info('Keeping section {}.{} untouched'.format(section['keyspace'], section['columnfamily']))
return
- src = download_dir / section['keyspace'] / section['columnfamily']
- # not appending the column family name because mv later on copies the whole folder
- dst = cassandra_data_dir / section['keyspace'] / section['columnfamily']
+ # the 'dse' is an arbitrary name we gave to folders that don't sit in the regular place for keyspaces
+ # this is mostly DSE internal files
+ if section['keyspace'] != 'dse':
+ src = download_dir / section['keyspace'] / section['columnfamily']
+ # not appending the column family name because mv later on copies the whole folder
+ dst = cassandra_data_dir / section['keyspace'] / section['columnfamily']
+ else:
+ src = download_dir / section['keyspace'] / section['columnfamily']
+ dst = cassandra_data_dir.parent / section['columnfamily']
# prepare the destination folder
if dst.exists():
diff --git a/medusa/storage/abstract_storage.py b/medusa/storage/abstract_storage.py
index 3894e301..de5f1147 100644
--- a/medusa/storage/abstract_storage.py
+++ b/medusa/storage/abstract_storage.py
@@ -240,6 +240,28 @@ def get_object_datetime(self, blob: AbstractBlob) -> datetime.datetime:
def hashes_match(manifest_hash, object_hash):
return base64.b64decode(manifest_hash).hex() == str(object_hash) or manifest_hash == str(object_hash)
+ @staticmethod
+ def path_maybe_with_parent(dest: str, src_path: Path) -> str:
+ """
+ Works out which path to download or upload a file into.
+ @param dest : path to a directory where we'll be placing the file into
+ @param src_path : full path of the file (or object) we read
+ @returns : full path of the file or object we write
+
+ Medusa generally expects SSTables which reside in .../keyspace/table/ (this is where dest points to)
+ But in some cases, we have exceptions:
+ - secondary indexes are stored in whatever/data_folder/keyspace/table/.index_name/,
+ so we need to include the index name in the destination path
+ - DSE metadata resides in whatever/metadata where there is a `nodes` folder only (DSE 6.8)
+ in principle, this is just like a 2i file structure, so we reuse all the other logic
+ """
+ if src_path.parent.name.startswith(".") or src_path.parent.name.endswith('nodes'):
+ # secondary index file or a DSE metadata file
+ return "{}/{}/{}".format(dest, src_path.parent.name, src_path.name)
+ else:
+ # regular SSTable
+ return "{}/{}".format(dest, src_path.name)
+
def get_path_prefix(self, path=None) -> str:
return ""
diff --git a/medusa/storage/azure_storage.py b/medusa/storage/azure_storage.py
index 9d25b792..5f3c2a9c 100644
--- a/medusa/storage/azure_storage.py
+++ b/medusa/storage/azure_storage.py
@@ -132,11 +132,7 @@ async def _download_blob(self, src: str, dest: str):
# we must make sure the blob gets stored under sub-folder (if there is any)
# the dest variable only points to the table folder, so we need to add the sub-folder
src_path = Path(src)
- file_path = (
- "{}/{}/{}".format(dest, src_path.parent.name, src_path.name)
- if src_path.parent.name.startswith(".")
- else "{}/{}".format(dest, src_path.name)
- )
+ file_path = AbstractStorage.path_maybe_with_parent(dest, src_path)
if blob.size < int(self.config.multi_part_upload_threshold):
workers = 1
@@ -153,6 +149,7 @@ async def _download_blob(self, src: str, dest: str):
blob=object_key,
max_concurrency=workers,
)
+ Path(file_path).parent.mkdir(parents=True, exist_ok=True)
await downloader.readinto(open(file_path, "wb"))
async def _stat_blob(self, object_key: str) -> AbstractBlob:
@@ -172,15 +169,10 @@ async def _stat_blob(self, object_key: str) -> AbstractBlob:
@retry(stop_max_attempt_number=MAX_UP_DOWN_LOAD_RETRIES, wait_fixed=5000)
async def _upload_blob(self, src: str, dest: str) -> ManifestObject:
- src_chunks = src.split('/')
- parent_name, file_name = src_chunks[-2], src_chunks[-1]
+ src_path = Path(src)
# check if objects resides in a sub-folder (e.g. secondary index). if it does, use the sub-folder in object path
- object_key = (
- "{}/{}/{}".format(dest, parent_name, file_name)
- if parent_name.startswith(".")
- else "{}/{}".format(dest, file_name)
- )
+ object_key = AbstractStorage.path_maybe_with_parent(dest, src_path)
file_size = os.stat(src).st_size
logging.debug(
diff --git a/medusa/storage/google_storage.py b/medusa/storage/google_storage.py
index 5d41d56f..5ea6df28 100644
--- a/medusa/storage/google_storage.py
+++ b/medusa/storage/google_storage.py
@@ -138,11 +138,7 @@ async def _download_blob(self, src: str, dest: str):
# we must make sure the blob gets stored under sub-folder (if there is any)
# the dest variable only points to the table folder, so we need to add the sub-folder
src_path = Path(src)
- file_path = (
- "{}/{}/{}".format(dest, src_path.parent.name, src_path.name)
- if src_path.parent.name.startswith(".")
- else "{}/{}".format(dest, src_path.name)
- )
+ file_path = AbstractStorage.path_maybe_with_parent(dest, src_path)
logging.debug(
'[Storage] Downloading gcs://{}/{} -> {}'.format(
@@ -156,6 +152,7 @@ async def _download_blob(self, src: str, dest: str):
object_name=object_key,
timeout=-1,
)
+ Path(file_path).parent.mkdir(parents=True, exist_ok=True)
with open(file_path, 'wb') as f:
while True:
chunk = await stream.read(DOWNLOAD_STREAM_CONSUMPTION_CHUNK_SIZE)
@@ -184,15 +181,10 @@ async def _stat_blob(self, object_key: str) -> AbstractBlob:
@retry(stop_max_attempt_number=MAX_UP_DOWN_LOAD_RETRIES, wait_fixed=5000)
async def _upload_blob(self, src: str, dest: str) -> ManifestObject:
- src_chunks = src.split('/')
- parent_name, file_name = src_chunks[-2], src_chunks[-1]
+ src_path = Path(src)
# check if objects resides in a sub-folder (e.g. secondary index). if it does, use the sub-folder in object path
- object_key = (
- "{}/{}/{}".format(dest, parent_name, file_name)
- if parent_name.startswith(".")
- else "{}/{}".format(dest, file_name)
- )
+ object_key = AbstractStorage.path_maybe_with_parent(dest, src_path)
if src.startswith("gs"):
logging.debug(
diff --git a/medusa/storage/local_storage.py b/medusa/storage/local_storage.py
index cc329ff5..bc2a4eed 100644
--- a/medusa/storage/local_storage.py
+++ b/medusa/storage/local_storage.py
@@ -98,11 +98,7 @@ async def _download_blob(self, src: str, dest: str):
src_file = self.root_dir / src
src_path = Path(src)
- dest_file = (
- "{}/{}/{}".format(dest, src_path.parent.name, src_path.name)
- if src_path.parent.name.startswith(".")
- else "{}/{}".format(dest, src_path.name)
- )
+ dest_file = AbstractStorage.path_maybe_with_parent(dest, src_path)
logging.debug(
'[Local Storage] Downloading {} -> {}'.format(
@@ -111,6 +107,7 @@ async def _download_blob(self, src: str, dest: str):
)
with open(src_file, 'rb') as f:
+ Path(dest_file).parent.mkdir(parents=True, exist_ok=True)
with open(dest_file, 'wb') as d:
while True:
data = f.read(BUFFER_SIZE)
@@ -120,21 +117,15 @@ async def _download_blob(self, src: str, dest: str):
async def _upload_blob(self, src: str, dest: str) -> ManifestObject:
- src_file = src
- src_chunks = src.split('/')
- parent_name, file_name = src_chunks[-2], src_chunks[-1]
+ src_path = Path(src)
# check if objects resides in a sub-folder (e.g. secondary index). if it does, use the sub-folder in object path
- dest_file = (
- self.root_dir / dest / parent_name / file_name
- if parent_name.startswith(".")
- else self.root_dir / dest / file_name
- )
+ dest_file = self.root_dir / AbstractStorage.path_maybe_with_parent(dest, src_path)
- file_size = os.stat(src_file).st_size
+ file_size = os.stat(src_path).st_size
logging.debug(
'[Local Storage] Uploading {} ({}) -> {}'.format(
- src_file, self.human_readable_size(file_size), dest_file
+ src_path, self.human_readable_size(file_size), dest_file
)
)
# remove root_dir from dest_file name
@@ -142,7 +133,7 @@ async def _upload_blob(self, src: str, dest: str) -> ManifestObject:
md5 = hashlib.md5()
- with open(src_file, 'rb') as f:
+ with open(src_path, 'rb') as f:
dest_file.parent.mkdir(parents=True, exist_ok=True)
with open(dest_file, 'wb') as d:
while True:
diff --git a/medusa/storage/node_backup.py b/medusa/storage/node_backup.py
index 5a131637..d68bf204 100644
--- a/medusa/storage/node_backup.py
+++ b/medusa/storage/node_backup.py
@@ -34,7 +34,8 @@ def __init__(self,
finished_timestamp=None,
finished_blob=None,
differential_blob=None,
- differential_mode=False):
+ differential_mode=False,
+ server_version_blob=None):
self._storage = storage
self._fqdn = fqdn
@@ -62,6 +63,7 @@ def __init__(self,
self._incremental_path = self._meta_path / 'incremental'
self._differential_path = self._meta_path / 'differential'
self._restore_verify_query_path = self._meta_path / 'restore_verify_query.json'
+ self._server_version_path = self._meta_path / 'server_version.json'
if preloaded_blobs is None:
preloaded_blobs = []
@@ -74,6 +76,7 @@ def __init__(self,
self.cached_manifest_blob = manifest_blob
self.cached_schema_blob = schema_blob
self.cached_tokenmap_blob = tokenmap_blob
+ self.cached_server_version_blob = server_version_blob
self.started_blob = started_blob
self.finished_blob = finished_blob
self._started = started_timestamp
@@ -124,6 +127,36 @@ def tokenmap(self):
def tokenmap(self, tokenmap):
self._storage.storage_driver.upload_blob_from_string(self.tokenmap_path, tokenmap)
+ @property
+ def server_version_path(self):
+ return self._server_version_path
+
+ @property
+ def server_version(self):
+ try:
+ if self.cached_server_version_blob is None:
+ self.cached_server_version_blob = self._blob(self.server_version_path)
+ return self._storage.storage_driver.read_blob_as_string(self.cached_server_version_blob)
+ except Exception:
+ # old versions of Medusa do not write the server_version.json file, so we return a default thing
+ return json.dumps({"server_type": "cassandra", "release_version": "unknown"})
+
+ @server_version.setter
+ def server_version(self, version):
+ self._storage.storage_driver.upload_blob_from_string(self.server_version_path, version)
+
+ @property
+ def server_type(self):
+ return json.loads(self.server_version)["server_type"]
+
+ @property
+ def release_version(self):
+ return json.loads(self.server_version)["release_version"]
+
+ @property
+ def is_dse(self):
+ return self.server_type == "dse"
+
@property
def schema_path(self):
return self._schema_path
diff --git a/medusa/storage/s3_base_storage.py b/medusa/storage/s3_base_storage.py
index 1c22542b..4ee96de2 100644
--- a/medusa/storage/s3_base_storage.py
+++ b/medusa/storage/s3_base_storage.py
@@ -284,20 +284,17 @@ def __download_blob(self, src: str, dest: str):
# we must make sure the blob gets stored under sub-folder (if there is any)
# the dest variable only points to the table folder, so we need to add the sub-folder
src_path = Path(src)
- file_path = (
- "{}/{}/{}".format(dest, src_path.parent.name, src_path.name)
- if src_path.parent.name.startswith(".")
- else "{}/{}".format(dest, src_path.name)
- )
+ file_path = AbstractStorage.path_maybe_with_parent(dest, src_path)
# print also object size
logging.debug(
- '[S3 Storage] Downloading {} -> {}/{}'.format(
- object_key, self.bucket_name, object_key
+ '[S3 Storage] Downloading s3://{}/{} -> {}'.format(
+ self.bucket_name, object_key, file_path
)
)
try:
+ Path(file_path).parent.mkdir(parents=True, exist_ok=True)
self.s3_client.download_file(
Bucket=self.bucket_name,
Key=object_key,
@@ -329,15 +326,10 @@ def __stat_blob(self, key):
@retry(stop_max_attempt_number=MAX_UP_DOWN_LOAD_RETRIES, wait_fixed=5000)
async def _upload_blob(self, src: str, dest: str) -> ManifestObject:
- src_chunks = src.split('/')
- parent_name, file_name = src_chunks[-2], src_chunks[-1]
+ src_path = Path(src)
# check if objects resides in a sub-folder (e.g. secondary index). if it does, use the sub-folder in object path
- object_key = (
- "{}/{}/{}".format(dest, parent_name, file_name)
- if parent_name.startswith(".")
- else "{}/{}".format(dest, file_name)
- )
+ object_key = AbstractStorage.path_maybe_with_parent(dest, src_path)
kms_args = {}
if self.kms_id is not None:
| Handle DSE folders correctly
DSE comes with a few extra folders in `/var/lib/cassandra` (and probably elsewhere), and Medusa currently cannot handle those.
One specific example is `/var/lib/cassandra/metadata/`, which we should probably treat in a similar way to `system.local` and `system.peers`.
When working on #659, I already noticed there's a whole bunch of these folders, and I have a `sed` command to modify them. That command is a good starting point for identifying these folders and then treating them correctly.
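For orientation, a small sketch of where these folders sit relative to the configured data directory, matching what the patch above derives (`/var/lib/cassandra/data` stands in for the `data_file_directories` value from cassandra.yaml):

```python
from pathlib import Path

# placeholder for the data directory read from cassandra.yaml
data_dir = Path("/var/lib/cassandra/data")

# DSE keeps its internal metadata *next to* the data folder
dse_metadata_path = data_dir.parent / "metadata"   # /var/lib/cassandra/metadata

# DSE Search keeps its index files *inside* the data folder, although
# solr.data is not a real keyspace and must not be restored as one
dse_search_path = data_dir / "solr.data"           # /var/lib/cassandra/data/solr.data

print(dse_metadata_path, dse_search_path)
```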
## Definition of done
- [x] Identify all the extra folders of DSE
- [ ] Backup and (not) restore those folders as requested
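The patch above centralizes the resulting upload/download path handling, previously duplicated across the storage backends, in `AbstractStorage.path_maybe_with_parent`; here is that decision rule in isolation, with made-up example inputs:

```python
from pathlib import Path


def path_maybe_with_parent(dest: str, src_path: Path) -> str:
    # secondary-index folders (".index_name") and the DSE "nodes" folder
    # must keep their parent directory in the destination key
    if src_path.parent.name.startswith(".") or src_path.parent.name.endswith("nodes"):
        return "{}/{}/{}".format(dest, src_path.parent.name, src_path.name)
    return "{}/{}".format(dest, src_path.name)


# regular SSTable: file name only
print(path_maybe_with_parent("ks/table", Path("/data/ks/table/nb-1-big-Data.db")))
# secondary index: the hidden .index folder is preserved
print(path_maybe_with_parent("ks/table", Path("/data/ks/table/.idx/nb-1-big-Data.db")))
# DSE metadata: files under the 'nodes' folder behave like a 2i folder
print(path_maybe_with_parent("dse/metadata", Path("/var/lib/cassandra/metadata/nodes/some-file")))
```

Treating the DSE `nodes` folder like a hidden secondary-index folder is what lets the rest of the backup/restore machinery stay unchanged.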
| 2023-12-07T13:20:17 | 0.0 | [] | [] |
|||
thelastpickle/cassandra-medusa | thelastpickle__cassandra-medusa-679 | b0cc9586e2095ed9d45c4b41871f4dbe31dfc10f | diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 36b62c2c8..e46d24273 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -16,4 +16,4 @@ values =
[bumpversion:file:VERSION]
-[bumpversion:file:setup.py]
+[bumpversion:file:pyproject.toml]
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 717c4a6c6..05470204b 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -30,27 +30,23 @@ jobs:
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
+ - name: Set up Poetry
+ uses: snok/install-poetry@v1
- name: Install dependencies
run: |
- python -m venv venv
- . venv/bin/activate
- python -m pip install --upgrade pip
- pip install -r requirements.txt
- pip install tox
+ poetry install
- name: Lint with flake8
run: |
- pip install flake8
# stop the build if there are Python syntax errors or undefined names
- flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+ poetry run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
- flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --ignore=W503
+ poetry run flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --ignore=W503
- name: Run tox
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
- . venv/bin/activate
- tox -e ${{ matrix.tox-py }}
+ poetry run tox -e ${{ matrix.tox-py }}
- uses: codecov/codecov-action@v1
name: Report code coverage
@@ -173,23 +169,13 @@ jobs:
with:
java-version: '8.0.252'
architecture: x64
+ - name: Setup Poetry
+ uses: snok/install-poetry@v1
- name: Install dependencies
run: |
- python -m venv venv
- . venv/bin/activate
- sudo apt-get remove azure-cli
- python -m pip install --upgrade pip
- pip install -r requirements.txt
- pip install -r requirements-test.txt
+ poetry install
+ sudo apt-get remove azure-cli || true
pip install ccm
- case '${{ matrix.it-backend }}' in
- 'azure'|'azure-hierarchical')
- echo "No extra requirements for now."
- ;;
- 'ibm'|'minio'|'s3')
- echo "No extra requirements for now."
- ;;
- esac
- name: Check if integration tests can run
env:
@@ -216,7 +202,6 @@ jobs:
AWS_SECRET_ACCESS_KEY: ${{ secrets.BUCKET_SECRET }}
run: |
set -e
- . venv/bin/activate
if [[ "${{ matrix.it-backend }}" == "ibm" ]];
then
# Prevent awscli from using AWS secrets in case we're running against IBM cloud
@@ -292,7 +277,8 @@ jobs:
# Move and convert the coverage analysis file to XML
mv tests/integration/.coverage .
- coverage xml
+ poetry run coverage xml
+
- uses: codecov/codecov-action@v1
name: Report code coverage
@@ -329,6 +315,8 @@ jobs:
with:
username: ${{ secrets.K8SSANDRA_DOCKER_HUB_USERNAME }}
password: ${{ secrets.K8SSANDRA_DOCKER_HUB_PASSWORD }}
+ - name: Set up Poetry
+ uses: snok/install-poetry@v1
- uses: actions/checkout@v3
with:
path: cassandra-medusa
@@ -354,7 +342,7 @@ jobs:
working-directory: cassandra-medusa
run: |
echo "short_sha=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
- python setup.py build
+ poetry install && poetry build
- name: Build Medusa image
id: docker_build
uses: docker/build-push-action@v4
@@ -417,10 +405,12 @@ jobs:
shell: bash
run: |
echo "sha_short=$(git rev-parse --short=8 ${{ github.sha }})" >> $GITHUB_OUTPUT
+ - name: Set up Poetry
+ uses: snok/install-poetry@v1
- name: Build Medusa
run: |
echo "Publishing release $(git rev-parse --short HEAD) in Docker Hub"
- python setup.py build
+ poetry install && poetry build
- name: Build image and push
id: docker_build
uses: docker/build-push-action@v4
@@ -496,10 +486,12 @@ jobs:
shell: bash
run: |
echo "version=$(cat VERSION)" >> $GITHUB_OUTPUT
+ - name: Set up Poetry
+ uses: snok/install-poetry@v1
- name: Build Medusa
run: |
echo "Publishing release ${{ steps.vars.outputs.version}} in Docker Hub"
- python setup.py build
+ poetry install && poetry build
- name: Build image and push
id: docker_build
uses: docker/build-push-action@v4
@@ -535,5 +527,5 @@ jobs:
run: |
. venv/bin/activate
pip install setuptools wheel twine
- python setup.py sdist bdist_wheel
+ poetry install && poetry build
twine upload dist/*
diff --git a/README.md b/README.md
index c6f2d6fbe..ec801f483 100644
--- a/README.md
+++ b/README.md
@@ -21,6 +21,8 @@
[](https://codecov.io/gh/thelastpickle/cassandra-medusa)
+
+
Medusa for Apache Cassandra™
==================================
@@ -58,4 +60,6 @@ You can find the Docker images for Cassandra Medusa at [https://hub.docker.com/r
Dependencies
------------
+Medusa requires Python 3.8 or newer.
+
For information on the packaged dependencies of Medusa for Apache Cassandra® and their licenses, check out our [open source report](https://app.fossa.com/reports/cac72e73-1214-4e6d-8476-76567e08db21).
diff --git a/debian/cassandra-medusa.links b/debian/cassandra-medusa.links
index d22780470..0cce9e6a3 100644
--- a/debian/cassandra-medusa.links
+++ b/debian/cassandra-medusa.links
@@ -1,2 +1,2 @@
usr/share/cassandra-medusa/bin/medusa usr/bin/medusa
-usr/share/cassandra-medusa/bin/medusa-wrapper usr/bin/medusa-wrapper
+usr/share/cassandra-medusa/bin/medusa-wrapper usr/bin/medusa-wrapper
\ No newline at end of file
diff --git a/debian/rules b/debian/rules
index 58eebecf3..6ff2e05c9 100755
--- a/debian/rules
+++ b/debian/rules
@@ -21,6 +21,7 @@ PACKAGEVERSION = $(VERSION)-0~$(DISTRIBUTION)0
PY3VER = $(shell py3versions -d)
SSH2_LIBS_SUFFIX = debian/cassandra-medusa/usr/share/cassandra-medusa/lib/$(PY3VER)/site-packages/ssh2_python.libs/
SSH_LIBS_SUFFIX = debian/cassandra-medusa/usr/share/cassandra-medusa/lib/$(PY3VER)/site-packages/ssh_python.libs/
+POETRY_VIRTUALENVS_IN_PROJECT = true
export DH_ALWAYS_EXCLUDE = .git
export DH_VIRTUALENV_INSTALL_ROOT = /usr/share
@@ -29,8 +30,8 @@ export DH_VIRTUALENV_INSTALL_ROOT = /usr/share
override_dh_virtualenv:
dh_virtualenv \
- --extra-pip-arg "--no-cache-dir" \
- --python /usr/bin/python3 --preinstall=setuptools==40.3.0 --preinstall=pip==21.3.1 --preinstall=wheel --builtin-venv
+ --python /usr/bin/python3 --preinstall=setuptools==40.3.0 --preinstall=pip==21.3.1 --preinstall=wheel --builtin-venv \
+ --preinstall=poetry==1.6.1 --preinstall=dh-poetry==0.2.0 --pip-tool=dh-poetry
override_dh_strip:
dh_strip --no-automatic-dbgsym -X libssh2 -X libssh -X libgssapi_krb5 -X libcrypto -X libkrb5 -X libk5crypto
@@ -40,3 +41,7 @@ override_dh_shlibdeps:
override_dh_gencontrol:
dh_gencontrol -- -v$(PACKAGEVERSION)
+
+override_dh_auto_configure:
+ # overriding with a no-op to keep dh from looking for setup.py
+ true
\ No newline at end of file
diff --git a/docs/Installation.md b/docs/Installation.md
index e5f8dc608..deb8b18a6 100644
--- a/docs/Installation.md
+++ b/docs/Installation.md
@@ -26,23 +26,27 @@ Running the installation using `sudo` is necessary to have the `/usr/local/bin/m
If your Cassandra servers do not have internet access:
-- on a machine with the same target os and python version, clone the cassandra-medusa repo and cd into the root directory
-- run `mkdir pip_dependencies && pip download -r requirements.txt -d medusa_dependencies` to download the dependencies into a sub directory
-- run `cp requirements.txt medusa_dependencies/`
-- run `tar -zcf medusa_dependencies.tar.gz medusa_dependencies` to compress the dependencies
-- Upload the archive to all Cassandra nodes and decompress it
-- run `pip install -r medusa_dependencies/requirements.txt --no-index --find-links` to install the dependencies on the nodes
-- install Medusa using `python setup.py install` from the cassandra-medusa source directory
+- on a machine with the same target OS and Python version, clone the cassandra-medusa repo and cd into the root directory
+- run `POETRY_VIRTUALENVS_IN_PROJECT=true poetry install` to install Medusa's dependencies
+- run `poetry run pip freeze | grep -v cassandra-medusa > requirements.txt` to identify the dependencies
+- run `mkdir -p medusa_dependencies && pip download -r requirements.txt -d medusa_dependencies` to download the dependencies into a sub directory
+- run `tar --exclude=".venv" -zcf medusa.tar.gz .` to compress Medusa together with its dependencies but without Poetry's venv
+- upload the archive to all Cassandra nodes and decompress it (`mkdir -p cassandra-medusa && tar xf medusa.tar.gz -C cassandra-medusa`)
+- install Medusa's dependencies using `pip install --no-index --find-links medusa_dependencies -r requirements.txt`
+- install medusa itself using a command similar to `poetry build && pip install dist/cassandra_medusa-<version>-py3-none-any.whl`
+- test if the installation was successful by running `medusa`
#### Example of Offline installation using pipenv on RHEL, centos 7
- install RPM pre-requisites `sudo yum install -y python3-pip python3-devel`
- install pipenv `sudo pip3 install pipenv`
-- unpack your archive built using the procedure from previous section `tar zxvf my-archive-of-cassandra-medusa.tar.gz`
-- create your python env in the directory previously created `cd cassandra-medusa-0.7.1 && pipenv --python 3`
+- unpack your archive built using the procedure from the previous section
+- create your python env in the directory previously created `cd cassandra-medusa && pipenv --python 3`
- install python dependencies of medusa `pipenv run pip3 install -r requirements.txt --no-index --find-links medusa_dependencies/`
- prepare an installation directory with appropriate privileges `sudo mkdir /opt/cassandra-medusa ; sudo chmod go+rwX /opt/cassandra-medusa`
-- install medusa as non root user `pipenv run python3 setup.py install --prefix=. --root=/opt/cassandra-medusa`
+- build Medusa using `pipenv run pip3 install poetry && poetry build`
+- install medusa as non-root user `pipenv run pip3 install dist/cassandra_medusa-0.20.0.dev0-py3-none-any.whl --no-index --find-links medusa_dependencies`
+- test if the installation was successful by running `pipenv run medusa`
## Debian packages
### Using apt-get
diff --git a/k8s/Dockerfile b/k8s/Dockerfile
index 0ad19fc4d..f596e8003 100644
--- a/k8s/Dockerfile
+++ b/k8s/Dockerfile
@@ -27,12 +27,11 @@ ENV PATH=/root/.local/bin:$PATH
COPY . /build/
# General requirements
-RUN python3 -m pip install -U pip && pip3 install --ignore-installed --user \
- -r /build/requirements.txt \
- -r /build/requirements-grpc-runtime.txt
+ENV POETRY_VIRTUALENVS_IN_PROJECT=true
+RUN python3 -m pip install -U pip && pip3 install --ignore-installed --user poetry
# Build medusa itself so we can add the executables in the final image
-RUN pip3 install --ignore-installed --user /build
+RUN cd /build && poetry install
# Could be python:slim, but we have a .sh entrypoint
FROM ubuntu:22.04
@@ -40,8 +39,6 @@ FROM ubuntu:22.04
## add user
RUN groupadd -r cassandra --gid=999 && useradd -r -g cassandra --uid=999 --create-home cassandra
-COPY --from=base /install /usr/local
-
# wget could happen in the build-phase
RUN apt-get update && apt-get install -y python3 python3-setuptools wget \
&& rm -rf /var/lib/apt/lists/*
@@ -59,6 +56,8 @@ ENV PATH=/home/cassandra/.local/bin:/home/cassandra/google-cloud-sdk/bin:$PATH
COPY --from=base --chown=cassandra:cassandra /root/.local /home/cassandra/.local
+COPY --from=base --chown=cassandra:cassandra /build/.venv /home/cassandra/.venv
+COPY --from=base --chown=cassandra:cassandra /build/pyproject.toml /home/cassandra/pyproject.toml
COPY --chown=cassandra:cassandra medusa /home/cassandra/medusa
COPY --chown=cassandra:cassandra k8s/docker-entrypoint.sh /home/cassandra
diff --git a/k8s/README.md b/k8s/README.md
index c4839b77c..f81add3bc 100644
--- a/k8s/README.md
+++ b/k8s/README.md
@@ -37,7 +37,6 @@ $ cd medusa/service/grpc
$ python -m grpc_tools.protoc -I. --python_out=. --grpc_python_out=. medusa.proto
```
-These steps should be integrated into the build or setup.py at some point. I am just not sure where the appropriate integrations points are yet.
# Kubernetes Configuration
There is a Kubernetes section of the Medusa configuration that will need to be set for all this to work in a K8s environment. Currently, the Cassandra image you use will need to support one of two APIs in order for the Medusa image to be able to perform backup and restore functions: JMX (via Jolokia) or the [Management API](https://github.com/datastax/management-api-for-apache-cassandra).
@@ -86,7 +85,7 @@ If you made any changes to `medusa.proto`, then you first need to run the protob
Run the following from the project root:
```
-$ python setup.py build
+$ poetry install && poetry build
$ docker build -t <tag name> -f k8s/Dockerfile .
```
\ No newline at end of file
diff --git a/k8s/docker-entrypoint.sh b/k8s/docker-entrypoint.sh
index 646f7d76a..e11ed3b4f 100755
--- a/k8s/docker-entrypoint.sh
+++ b/k8s/docker-entrypoint.sh
@@ -41,14 +41,14 @@ restore() {
echo "Skipping restore operation"
else
echo "Restoring backup $BACKUP_NAME"
- python3 -m medusa.service.grpc.restore -- "/etc/medusa/medusa.ini" $RESTORE_KEY
+ poetry run python -m medusa.service.grpc.restore -- "/etc/medusa/medusa.ini" $RESTORE_KEY
echo $RESTORE_KEY > $last_restore_file
fi
}
grpc() {
echo "Starting Medusa gRPC service"
- exec python3 -m medusa.service.grpc.server server.py
+ exec poetry run python -m medusa.service.grpc.server server.py
}
echo "sleeping for $DEBUG_SLEEP sec"
diff --git a/medusa/service/grpc/restore.py b/medusa/service/grpc/restore.py
index 62ba03199..8629f6992 100644
--- a/medusa/service/grpc/restore.py
+++ b/medusa/service/grpc/restore.py
@@ -104,9 +104,9 @@ def restore_backup(in_place, config):
if __name__ == '__main__':
- if len(sys.argv) > 3:
- config_file_path = sys.argv[2]
- restore_key = sys.argv[3]
+ if len(sys.argv) > 2:
+ config_file_path = sys.argv[1]
+ restore_key = sys.argv[2]
else:
logging.error("Usage: {} <config_file_path> <restore_key>".format(sys.argv[0]))
sys.exit(1)
diff --git a/packaging/docker-build/Dockerfile b/packaging/docker-build/Dockerfile
index 507420737..a4f0b714e 100644
--- a/packaging/docker-build/Dockerfile
+++ b/packaging/docker-build/Dockerfile
@@ -33,8 +33,8 @@ RUN apt-get update && \
# install dependencies
RUN apt-get update \
&& apt-get install -y \
- libffi-dev \
- libssl-dev \
+ libffi-dev \
+ libssl-dev \
debhelper \
gdebi-core \
gcc \
@@ -58,13 +58,14 @@ RUN apt-get update \
RUN pip3 install --upgrade pip
RUN cd /tmp && \
- wget http://mirrors.kernel.org/ubuntu/pool/universe/d/dh-virtualenv/dh-virtualenv_1.0-1_all.deb && \
+ wget http://mirrors.kernel.org/ubuntu/pool/universe/d/dh-virtualenv/dh-virtualenv_1.2.2-1.4_all.deb && \
gdebi -n dh-virtualenv*.deb && \
rm dh-virtualenv_*.deb
RUN pip3 install greenlet
RUN pip3 install gevent
+RUN pip3 install poetry
# Add entrypoint script
COPY packaging/docker-build/docker-entrypoint.sh ${WORKDIR}
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 000000000..d8aa32caf
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,2321 @@
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
+
+[[package]]
+name = "aiofiles"
+version = "23.2.1"
+description = "File support for asyncio."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "aiofiles-23.2.1-py3-none-any.whl", hash = "sha256:19297512c647d4b27a2cf7c34caa7e405c0d60b5560618a29a9fe027b18b0107"},
+ {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"},
+]
+
+[[package]]
+name = "aiohttp"
+version = "3.8.5"
+description = "Async http client/server framework (asyncio)"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"},
+ {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"},
+ {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"},
+ {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"},
+ {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"},
+ {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"},
+ {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"},
+ {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"},
+ {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"},
+ {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"},
+ {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"},
+ {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"},
+ {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"},
+ {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"},
+ {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"},
+ {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"},
+ {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"},
+ {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"},
+ {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"},
+ {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"},
+ {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"},
+]
+
+[package.dependencies]
+aiosignal = ">=1.1.2"
+async-timeout = ">=4.0.0a3,<5.0"
+attrs = ">=17.3.0"
+charset-normalizer = ">=2.0,<4.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+yarl = ">=1.0,<2.0"
+
+[package.extras]
+speedups = ["Brotli", "aiodns", "cchardet"]
+
+[[package]]
+name = "aiosignal"
+version = "1.3.1"
+description = "aiosignal: a list of registered asynchronous callbacks"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
+ {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
+]
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
+
+[[package]]
+name = "async-timeout"
+version = "4.0.3"
+description = "Timeout context manager for asyncio programs"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
+ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
+]
+
+[[package]]
+name = "asyncio"
+version = "3.4.3"
+description = "reference implementation of PEP 3156"
+optional = false
+python-versions = "*"
+files = [
+ {file = "asyncio-3.4.3-cp33-none-win32.whl", hash = "sha256:b62c9157d36187eca799c378e572c969f0da87cd5fc42ca372d92cdb06e7e1de"},
+ {file = "asyncio-3.4.3-cp33-none-win_amd64.whl", hash = "sha256:c46a87b48213d7464f22d9a497b9eef8c1928b68320a2fa94240f969f6fec08c"},
+ {file = "asyncio-3.4.3-py3-none-any.whl", hash = "sha256:c4d18b22701821de07bd6aea8b53d21449ec0ec5680645e5317062ea21817d2d"},
+ {file = "asyncio-3.4.3.tar.gz", hash = "sha256:83360ff8bc97980e4ff25c964c7bd3923d333d177aa4f7fb736b019f26c7cb41"},
+]
+
+[[package]]
+name = "attrs"
+version = "23.1.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
+ {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
+]
+
+[package.extras]
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
+dev = ["attrs[docs,tests]", "pre-commit"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
+tests = ["attrs[tests-no-zope]", "zope-interface"]
+tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+
+[[package]]
+name = "azure-core"
+version = "1.29.4"
+description = "Microsoft Azure Core Library for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "azure-core-1.29.4.tar.gz", hash = "sha256:500b3aa9bf2e90c5ccc88bb105d056114ca0ce7d0ce73afb8bc4d714b2fc7568"},
+ {file = "azure_core-1.29.4-py3-none-any.whl", hash = "sha256:b03261bcba22c0b9290faf9999cedd23e849ed2577feee90515694cea6bc74bf"},
+]
+
+[package.dependencies]
+requests = ">=2.18.4"
+six = ">=1.11.0"
+typing-extensions = ">=4.6.0"
+
+[package.extras]
+aio = ["aiohttp (>=3.0)"]
+
+[[package]]
+name = "azure-identity"
+version = "1.14.0"
+description = "Microsoft Azure Identity Library for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "azure-identity-1.14.0.zip", hash = "sha256:72441799f8c5c89bfe21026965e266672a7c5d050c2c65119ef899dd5362e2b1"},
+ {file = "azure_identity-1.14.0-py3-none-any.whl", hash = "sha256:edabf0e010eb85760e1dd19424d5e8f97ba2c9caff73a16e7b30ccbdbcce369b"},
+]
+
+[package.dependencies]
+azure-core = ">=1.11.0,<2.0.0"
+cryptography = ">=2.5"
+msal = ">=1.20.0,<2.0.0"
+msal-extensions = ">=0.3.0,<2.0.0"
+
+[[package]]
+name = "azure-storage-blob"
+version = "12.17.0"
+description = "Microsoft Azure Blob Storage Client Library for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "azure-storage-blob-12.17.0.zip", hash = "sha256:c14b785a17050b30fc326a315bdae6bc4a078855f4f94a4c303ad74a48dc8c63"},
+ {file = "azure_storage_blob-12.17.0-py3-none-any.whl", hash = "sha256:0016e0c549a80282d7b4920c03f2f4ba35c53e6e3c7dbcd2a4a8c8eb3882c1e7"},
+]
+
+[package.dependencies]
+azure-core = ">=1.28.0,<2.0.0"
+cryptography = ">=2.1.4"
+isodate = ">=0.6.1"
+typing-extensions = ">=4.3.0"
+
+[package.extras]
+aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"]
+
+[[package]]
+name = "backoff"
+version = "2.2.1"
+description = "Function decoration for backoff and retry"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
+ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
+]
+
+[[package]]
+name = "behave"
+version = "1.2.6"
+description = "behave is behaviour-driven development, Python style"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "behave-1.2.6-py2.py3-none-any.whl", hash = "sha256:ebda1a6c9e5bfe95c5f9f0a2794e01c7098b3dde86c10a95d8621c5907ff6f1c"},
+ {file = "behave-1.2.6.tar.gz", hash = "sha256:b9662327aa53294c1351b0a9c369093ccec1d21026f050c3bd9b3e5cccf81a86"},
+]
+
+[package.dependencies]
+parse = ">=1.8.2"
+parse-type = ">=0.4.2"
+six = ">=1.11"
+
+[package.extras]
+develop = ["coverage", "invoke (>=0.21.0)", "modernize (>=0.5)", "path.py (>=8.1.2)", "pathlib", "pycmd", "pylint", "pytest (>=3.0)", "pytest-cov", "tox"]
+docs = ["sphinx (>=1.6)", "sphinx-bootstrap-theme (>=0.6)"]
+
+[[package]]
+name = "boto3"
+version = "1.28.71"
+description = "The AWS SDK for Python"
+optional = false
+python-versions = ">= 3.7"
+files = [
+ {file = "boto3-1.28.71-py3-none-any.whl", hash = "sha256:cf54d6da6d348317da896235e9850c7e550dbc18bd5cb16cce1b035e90d0166f"},
+ {file = "boto3-1.28.71.tar.gz", hash = "sha256:8d1b50127b20b817fdcec3ce6a625c5057b5a722acf1cfa64cf3824ff40b1e75"},
+]
+
+[package.dependencies]
+botocore = ">=1.31.71,<1.32.0"
+jmespath = ">=0.7.1,<2.0.0"
+s3transfer = ">=0.7.0,<0.8.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
+
+[[package]]
+name = "botocore"
+version = "1.31.75"
+description = "Low-level, data-driven core of boto 3."
+optional = false
+python-versions = ">= 3.7"
+files = [
+ {file = "botocore-1.31.75-py3-none-any.whl", hash = "sha256:fa078c4aa9a5777b3ede756540e62fec551e13d39cf7abf9a37bb81981496d68"},
+ {file = "botocore-1.31.75.tar.gz", hash = "sha256:d704ea9867b2227de0350bc2a5ca2543349e164ecb5d15edbfacbb05f2056482"},
+]
+
+[package.dependencies]
+jmespath = ">=0.7.1,<2.0.0"
+python-dateutil = ">=2.1,<3.0.0"
+urllib3 = [
+ {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""},
+ {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""},
+]
+
+[package.extras]
+crt = ["awscrt (==0.16.26)"]
+
+[[package]]
+name = "cachetools"
+version = "5.3.2"
+description = "Extensible memoizing collections and decorators"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"},
+ {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"},
+]
+
+[[package]]
+name = "cassandra-driver"
+version = "3.28.0"
+description = "DataStax Driver for Apache Cassandra"
+optional = false
+python-versions = "*"
+files = [
+ {file = "cassandra-driver-3.28.0.tar.gz", hash = "sha256:64ff130d19f994b80997c14343a8306be52a0e7ab92520a534eed944c88d70df"},
+ {file = "cassandra_driver-3.28.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8cceb2cc658b3ebf28873f84aab4f28bbd5df23a6528a5b38ecf89a45232509"},
+ {file = "cassandra_driver-3.28.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35aef74e2a593a969b77a3fcf02d27e9b82a078d9aa66caa3bd2d2583c46a82c"},
+ {file = "cassandra_driver-3.28.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48f20e0d21b6c7406dfd8a4d9e07fddc3c7c3d6ad7d5b5d480bf82aac7068739"},
+ {file = "cassandra_driver-3.28.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3820a421fb7e4cf215718dc35522869c5f933d4fd4c50fd43307d3ce5d9dd138"},
+ {file = "cassandra_driver-3.28.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd9511fe5b85010e92199f6589e0733ab14ed3d2279dcc6ae504c0cef11d652"},
+ {file = "cassandra_driver-3.28.0-cp310-cp310-win32.whl", hash = "sha256:887f7e3df9b34b41de6dfdd5f2ef8804c2d9782bbc39202eda9d3b67a3c8fe37"},
+ {file = "cassandra_driver-3.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:28c636239b15944103df18a12ef95e6401ceadd7b9aca2d59f4beccf9ca21e2d"},
+ {file = "cassandra_driver-3.28.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9232434459303b0e1a26fa65006fd8438475037aef4e6204a32dfaeb10e7f739"},
+ {file = "cassandra_driver-3.28.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:634a553a5309a9faa08c3256fe0237ff0308152210211f3b8eab0664335560e0"},
+ {file = "cassandra_driver-3.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4a101114a7d93505ee79272edc82dba0cfc706172ad7948a6e4fb3dc1eb8b59c"},
+ {file = "cassandra_driver-3.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36d844ba0089111858fad3c53897b0fea7c91cedd8bd205eeb82fe22fd60e748"},
+ {file = "cassandra_driver-3.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3bf6bacb60dc8d1b8ba5ddd7d35772e3b98da951aed6bb148827aa9c38cd009"},
+ {file = "cassandra_driver-3.28.0-cp311-cp311-win32.whl", hash = "sha256:212eb39ca99ab5960eb5c31ce279b61e075df02ac7a6209415982a3f8cfe1126"},
+ {file = "cassandra_driver-3.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:777f60ed821ec43d5b3f7a65eaf02decbd9cbc11e32f2099bfe9d7a6bfe33da9"},
+ {file = "cassandra_driver-3.28.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b867c49c3c9efa21923845456cfb3e81ad13a33e40eb20279f58b3642d54614f"},
+ {file = "cassandra_driver-3.28.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1dc54edf3b664dc8e45a9c8fed163dacbad8bc92c788c84a371ccb700e18638"},
+ {file = "cassandra_driver-3.28.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e383aff200b7194d0d5625bf162bbc8471d05db7163c546341e5f27b36b53134"},
+ {file = "cassandra_driver-3.28.0-cp37-cp37m-win32.whl", hash = "sha256:a5e8b066f816868b344c108f34acc04b53c44caed2cdbcfe08ebdcbc1fd35046"},
+ {file = "cassandra_driver-3.28.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ae8c8e9a46e1b0174ace1e836d4ea97292aa6de509db0def0f816322468fb430"},
+ {file = "cassandra_driver-3.28.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d5e8cf7db955b113f51274f166be9db0f0a06620c894abc41159828f0aeda259"},
+ {file = "cassandra_driver-3.28.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26cbdb0d04f749b78bf7de17fd6a713b90430d1c70d8aa442845d51db823b9eb"},
+ {file = "cassandra_driver-3.28.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fe302940780932d83414ad5282c8a6bd72b248f3b1fceff995f28c77a6ebc925"},
+ {file = "cassandra_driver-3.28.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3694c1e19d310668f5a60c16511fb12c3ad4c387d089a8080b74239a916620fb"},
+ {file = "cassandra_driver-3.28.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5690b7b121e82c4365d298bd49dc574ecd8eed3ec0bafdf43fce708f2f992b"},
+ {file = "cassandra_driver-3.28.0-cp38-cp38-win32.whl", hash = "sha256:d09c8b0b392064054656050448dece04e4fa890af3c677a2f2034af14983ceb5"},
+ {file = "cassandra_driver-3.28.0-cp38-cp38-win_amd64.whl", hash = "sha256:e2342420bae4f80587e2ddebb38ade448c9ab1d210787a8030c1c04f54ef4a84"},
+ {file = "cassandra_driver-3.28.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c8d934cb7eac6586823a7eb69d40019154fd8e7d640bfaed49ac7edc373578df"},
+ {file = "cassandra_driver-3.28.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8b51805d57ff6ed73a95c83c25d0479391da28c765c2bf019ee1370d8ca64cd0"},
+ {file = "cassandra_driver-3.28.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5f05495ccabe5be046bb9f1c2cc3e3ff696a94fd4f2f2b1004c951e56b1ea38d"},
+ {file = "cassandra_driver-3.28.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59050666423c4ffdda9626676c18cce83a71c8331dd3d99f6b9306e0941348cf"},
+ {file = "cassandra_driver-3.28.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a665841c15f2fade6b00a8404d3424fed8757971b75e791b69bfedacc4753f7c"},
+ {file = "cassandra_driver-3.28.0-cp39-cp39-win32.whl", hash = "sha256:46433de332b8ef59ad44140f287b584303b90111cf6f355ec8c990830135dd21"},
+ {file = "cassandra_driver-3.28.0-cp39-cp39-win_amd64.whl", hash = "sha256:5e6213f10d58b05a6120bcff4f479d89c152d3f4ba43b3bda3283ee67c3abe23"},
+]
+
+[package.dependencies]
+geomet = ">=0.1,<0.3"
+six = ">=1.9"
+
+[package.extras]
+cle = ["cryptography (>=35.0)"]
+graph = ["gremlinpython (==3.4.6)"]
+
+[[package]]
+name = "certifi"
+version = "2023.7.22"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
+ {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
+]
+
+[[package]]
+name = "cffi"
+version = "1.16.0"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
+ {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
+ {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
+ {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
+ {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
+ {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
+ {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
+ {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
+ {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
+ {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
+ {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
+ {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
+ {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
+ {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
+ {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
+ {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
+ {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
+ {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
+ {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
+ {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
+ {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
+ {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
+ {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
+ {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
+ {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
+ {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
+ {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
+]
+
+[package.dependencies]
+pycparser = "*"
+
+[[package]]
+name = "chardet"
+version = "5.2.0"
+description = "Universal encoding detector for Python 3"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"},
+ {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.3.2"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
+ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "click-aliases"
+version = "1.0.1"
+description = "Enable aliases for Click"
+optional = false
+python-versions = "*"
+files = [
+ {file = "click-aliases-1.0.1.tar.gz", hash = "sha256:f48012077e0788eb02f4f8ee458fef3601873fec6c998e9ea8b4554394e705a3"},
+ {file = "click_aliases-1.0.1-py2.py3-none-any.whl", hash = "sha256:229ecab12a97d1d5ce3f1fd7ce16da0e4333a24ebe3b34d8b7a6d0a1d2cfab90"},
+]
+
+[package.dependencies]
+click = "*"
+
+[package.extras]
+dev = ["coveralls", "flake8", "flake8-import-order", "pytest", "pytest-cov", "tox-travis", "wheel"]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.3.2"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"},
+ {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"},
+ {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"},
+ {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"},
+ {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"},
+ {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"},
+ {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"},
+ {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"},
+ {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"},
+ {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"},
+ {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"},
+ {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"},
+ {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"},
+ {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"},
+ {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"},
+ {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"},
+ {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"},
+ {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"},
+ {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"},
+ {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"},
+ {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"},
+ {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"},
+ {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"},
+ {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"},
+ {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"},
+ {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"},
+ {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"},
+ {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"},
+ {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"},
+ {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"},
+ {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"},
+ {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"},
+ {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"},
+ {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"},
+ {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"},
+ {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"},
+ {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"},
+ {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"},
+ {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"},
+ {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"},
+ {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"},
+ {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"},
+ {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"},
+ {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"},
+ {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"},
+ {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"},
+ {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"},
+ {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"},
+ {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"},
+ {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"},
+ {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"},
+ {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"},
+]
+
+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
+
+[package.extras]
+toml = ["tomli"]
+
+[[package]]
+name = "cryptography"
+version = "35.0.0"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "cryptography-35.0.0-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:d57e0cdc1b44b6cdf8af1d01807db06886f10177469312fbde8f44ccbb284bc9"},
+ {file = "cryptography-35.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:ced40344e811d6abba00295ced98c01aecf0c2de39481792d87af4fa58b7b4d6"},
+ {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:54b2605e5475944e2213258e0ab8696f4f357a31371e538ef21e8d61c843c28d"},
+ {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7b7ceeff114c31f285528ba8b390d3e9cfa2da17b56f11d366769a807f17cbaa"},
+ {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d69645f535f4b2c722cfb07a8eab916265545b3475fdb34e0be2f4ee8b0b15e"},
+ {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2d0e0acc20ede0f06ef7aa58546eee96d2592c00f450c9acb89c5879b61992"},
+ {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:07bb7fbfb5de0980590ddfc7f13081520def06dc9ed214000ad4372fb4e3c7f6"},
+ {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7eba2cebca600a7806b893cb1d541a6e910afa87e97acf2021a22b32da1df52d"},
+ {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:18d90f4711bf63e2fb21e8c8e51ed8189438e6b35a6d996201ebd98a26abbbe6"},
+ {file = "cryptography-35.0.0-cp36-abi3-win32.whl", hash = "sha256:c10c797ac89c746e488d2ee92bd4abd593615694ee17b2500578b63cad6b93a8"},
+ {file = "cryptography-35.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:7075b304cd567694dc692ffc9747f3e9cb393cc4aa4fb7b9f3abd6f5c4e43588"},
+ {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a688ebcd08250eab5bb5bca318cc05a8c66de5e4171a65ca51db6bd753ff8953"},
+ {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99915d6ab265c22873f1b4d6ea5ef462ef797b4140be4c9d8b179915e0985c6"},
+ {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:928185a6d1ccdb816e883f56ebe92e975a262d31cc536429041921f8cb5a62fd"},
+ {file = "cryptography-35.0.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ebeddd119f526bcf323a89f853afb12e225902a24d29b55fe18dd6fcb2838a76"},
+ {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22a38e96118a4ce3b97509443feace1d1011d0571fae81fc3ad35f25ba3ea999"},
+ {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb80e8a1f91e4b7ef8b33041591e6d89b2b8e122d787e87eeb2b08da71bb16ad"},
+ {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:abb5a361d2585bb95012a19ed9b2c8f412c5d723a9836418fab7aaa0243e67d2"},
+ {file = "cryptography-35.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1ed82abf16df40a60942a8c211251ae72858b25b7421ce2497c2eb7a1cee817c"},
+ {file = "cryptography-35.0.0.tar.gz", hash = "sha256:9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d"},
+]
+
+[package.dependencies]
+cffi = ">=1.12"
+
+[package.extras]
+docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
+docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
+pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
+sdist = ["setuptools-rust (>=0.11.4)"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"]
+
+[[package]]
+name = "datadog"
+version = "0.47.0"
+description = "The Datadog Python library"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "datadog-0.47.0-py2.py3-none-any.whl", hash = "sha256:a45ec997ab554208837e8c44d81d0e1456539dc14da5743687250e028bc809b7"},
+ {file = "datadog-0.47.0.tar.gz", hash = "sha256:47be3b2c3d709a7f5b709eb126ed4fe6cc7977d618fe5c158dd89c2a9f7d9916"},
+]
+
+[package.dependencies]
+requests = ">=2.6.0"
+
+[[package]]
+name = "distlib"
+version = "0.3.7"
+description = "Distribution utilities"
+optional = false
+python-versions = "*"
+files = [
+ {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"},
+ {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"},
+]
+
+[[package]]
+name = "dnspython"
+version = "2.4.2"
+description = "DNS toolkit"
+optional = false
+python-versions = ">=3.8,<4.0"
+files = [
+ {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"},
+ {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"},
+]
+
+[package.extras]
+dnssec = ["cryptography (>=2.6,<42.0)"]
+doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"]
+doq = ["aioquic (>=0.9.20)"]
+idna = ["idna (>=2.1,<4.0)"]
+trio = ["trio (>=0.14,<0.23)"]
+wmi = ["wmi (>=1.5.1,<2.0.0)"]
+
+[[package]]
+name = "entrypoints"
+version = "0.3"
+description = "Discover and load entry points from installed packages."
+optional = false
+python-versions = ">=2.7"
+files = [
+ {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"},
+ {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"},
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.1.3"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"},
+ {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "fasteners"
+version = "0.16"
+description = "A python package that provides useful locks."
+optional = false
+python-versions = "*"
+files = [
+ {file = "fasteners-0.16-py2.py3-none-any.whl", hash = "sha256:74b6847e0a6bb3b56c8511af8e24c40e4cf7a774dfff5b251c260ed338096a4b"},
+ {file = "fasteners-0.16.tar.gz", hash = "sha256:c995d8c26b017c5d6a6de9ad29a0f9cdd57de61ae1113d28fac26622b06a0933"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "ffwd"
+version = "0.0.2"
+description = "A Python client for FFWD"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ffwd-0.0.2.tar.gz", hash = "sha256:1167e791b21524bfdf8fe47345d586dfd8fae07dc5ffb855aa2e43bb76e7e2ad"},
+]
+
+[[package]]
+name = "filelock"
+version = "3.13.1"
+description = "A platform independent file lock."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
+ {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
+typing = ["typing-extensions (>=4.8)"]
+
+[[package]]
+name = "flake8"
+version = "3.7.9"
+description = "the modular source code checker: pep8, pyflakes and co"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "flake8-3.7.9-py2.py3-none-any.whl", hash = "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca"},
+ {file = "flake8-3.7.9.tar.gz", hash = "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb"},
+]
+
+[package.dependencies]
+entrypoints = ">=0.3.0,<0.4.0"
+mccabe = ">=0.6.0,<0.7.0"
+pycodestyle = ">=2.5.0,<2.6.0"
+pyflakes = ">=2.1.0,<2.2.0"
+
+[[package]]
+name = "frozenlist"
+version = "1.4.0"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"},
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"},
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"},
+ {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"},
+ {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"},
+ {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"},
+ {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"},
+ {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"},
+ {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"},
+ {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"},
+ {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"},
+]
+
+[[package]]
+name = "gcloud-aio-auth"
+version = "4.2.3"
+description = "Python Client for Google Cloud Auth"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "gcloud_aio_auth-4.2.3-py3-none-any.whl", hash = "sha256:e4adadd36e35eeeb8537b926840372c3080c2f5a6909d44aa1bacbced2260bb1"},
+ {file = "gcloud_aio_auth-4.2.3.tar.gz", hash = "sha256:8e12297c5b45cfc20d629b83e1233f83a1c7d5f830f24f31bc5bb8816c0cda1b"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.3.0,<4.0.0"
+backoff = ">=1.0.0,<3.0.0"
+chardet = ">=2.0,<6.0"
+cryptography = ">=2.0.0,<42.0.0"
+pyjwt = ">=1.5.3,<3.0.0"
+setuptools = ">=66.0.0,<67.0.0"
+
+[[package]]
+name = "gcloud-aio-storage"
+version = "8.3.0"
+description = "Python Client for Google Cloud Storage"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "gcloud_aio_storage-8.3.0-py3-none-any.whl", hash = "sha256:6154eb9fe7e254f1ee3312cf3cc464109da8fce979815b183727b60347daec59"},
+ {file = "gcloud_aio_storage-8.3.0.tar.gz", hash = "sha256:222ccd4cfbbd7d4a5e5bfa8010235f70b65697e68f25bf68fb3e570dceac78e8"},
+]
+
+[package.dependencies]
+aiofiles = ">=0.6.0,<24.0.0"
+gcloud-aio-auth = ">=3.6.0,<5.0.0"
+pyasn1-modules = ">=0.2.1,<0.3.0"
+rsa = ">=3.1.4,<5.0.0"
+setuptools = ">=66.0.0,<67.0.0"
+
+[[package]]
+name = "geomet"
+version = "0.2.1.post1"
+description = "GeoJSON <-> WKT/WKB conversion utilities"
+optional = false
+python-versions = ">2.6, !=3.3.*, <4"
+files = [
+ {file = "geomet-0.2.1.post1-py3-none-any.whl", hash = "sha256:a41a1e336b381416d6cbed7f1745c848e91defaa4d4c1bdc1312732e46ffad2b"},
+ {file = "geomet-0.2.1.post1.tar.gz", hash = "sha256:91d754f7c298cbfcabd3befdb69c641c27fe75e808b27aa55028605761d17e95"},
+]
+
+[package.dependencies]
+click = "*"
+six = "*"
+
+[[package]]
+name = "gevent"
+version = "23.9.1"
+description = "Coroutine-based network library"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "gevent-23.9.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:a3c5e9b1f766a7a64833334a18539a362fb563f6c4682f9634dea72cbe24f771"},
+ {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b101086f109168b23fa3586fccd1133494bdb97f86920a24dc0b23984dc30b69"},
+ {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36a549d632c14684bcbbd3014a6ce2666c5f2a500f34d58d32df6c9ea38b6535"},
+ {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:272cffdf535978d59c38ed837916dfd2b5d193be1e9e5dcc60a5f4d5025dd98a"},
+ {file = "gevent-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcb8612787a7f4626aa881ff15ff25439561a429f5b303048f0fca8a1c781c39"},
+ {file = "gevent-23.9.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:d57737860bfc332b9b5aa438963986afe90f49645f6e053140cfa0fa1bdae1ae"},
+ {file = "gevent-23.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5f3c781c84794926d853d6fb58554dc0dcc800ba25c41d42f6959c344b4db5a6"},
+ {file = "gevent-23.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dbb22a9bbd6a13e925815ce70b940d1578dbe5d4013f20d23e8a11eddf8d14a7"},
+ {file = "gevent-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:707904027d7130ff3e59ea387dddceedb133cc742b00b3ffe696d567147a9c9e"},
+ {file = "gevent-23.9.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:45792c45d60f6ce3d19651d7fde0bc13e01b56bb4db60d3f32ab7d9ec467374c"},
+ {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e24c2af9638d6c989caffc691a039d7c7022a31c0363da367c0d32ceb4a0648"},
+ {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e1ead6863e596a8cc2a03e26a7a0981f84b6b3e956101135ff6d02df4d9a6b07"},
+ {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65883ac026731ac112184680d1f0f1e39fa6f4389fd1fc0bf46cc1388e2599f9"},
+ {file = "gevent-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7af500da05363e66f122896012acb6e101a552682f2352b618e541c941a011"},
+ {file = "gevent-23.9.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c3e5d2fa532e4d3450595244de8ccf51f5721a05088813c1abd93ad274fe15e7"},
+ {file = "gevent-23.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c84d34256c243b0a53d4335ef0bc76c735873986d478c53073861a92566a8d71"},
+ {file = "gevent-23.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ada07076b380918829250201df1d016bdafb3acf352f35e5693b59dceee8dd2e"},
+ {file = "gevent-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:921dda1c0b84e3d3b1778efa362d61ed29e2b215b90f81d498eb4d8eafcd0b7a"},
+ {file = "gevent-23.9.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ed7a048d3e526a5c1d55c44cb3bc06cfdc1947d06d45006cc4cf60dedc628904"},
+ {file = "gevent-23.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c1abc6f25f475adc33e5fc2dbcc26a732608ac5375d0d306228738a9ae14d3b"},
+ {file = "gevent-23.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4368f341a5f51611411ec3fc62426f52ac3d6d42eaee9ed0f9eebe715c80184e"},
+ {file = "gevent-23.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52b4abf28e837f1865a9bdeef58ff6afd07d1d888b70b6804557e7908032e599"},
+ {file = "gevent-23.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52e9f12cd1cda96603ce6b113d934f1aafb873e2c13182cf8e86d2c5c41982ea"},
+ {file = "gevent-23.9.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:de350fde10efa87ea60d742901e1053eb2127ebd8b59a7d3b90597eb4e586599"},
+ {file = "gevent-23.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fde6402c5432b835fbb7698f1c7f2809c8d6b2bd9d047ac1f5a7c1d5aa569303"},
+ {file = "gevent-23.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dd6c32ab977ecf7c7b8c2611ed95fa4aaebd69b74bf08f4b4960ad516861517d"},
+ {file = "gevent-23.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:455e5ee8103f722b503fa45dedb04f3ffdec978c1524647f8ba72b4f08490af1"},
+ {file = "gevent-23.9.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:7ccf0fd378257cb77d91c116e15c99e533374a8153632c48a3ecae7f7f4f09fe"},
+ {file = "gevent-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d163d59f1be5a4c4efcdd13c2177baaf24aadf721fdf2e1af9ee54a998d160f5"},
+ {file = "gevent-23.9.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7532c17bc6c1cbac265e751b95000961715adef35a25d2b0b1813aa7263fb397"},
+ {file = "gevent-23.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:78eebaf5e73ff91d34df48f4e35581ab4c84e22dd5338ef32714264063c57507"},
+ {file = "gevent-23.9.1-cp38-cp38-win32.whl", hash = "sha256:f632487c87866094546a74eefbca2c74c1d03638b715b6feb12e80120960185a"},
+ {file = "gevent-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:62d121344f7465e3739989ad6b91f53a6ca9110518231553fe5846dbe1b4518f"},
+ {file = "gevent-23.9.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:bf456bd6b992eb0e1e869e2fd0caf817f0253e55ca7977fd0e72d0336a8c1c6a"},
+ {file = "gevent-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43daf68496c03a35287b8b617f9f91e0e7c0d042aebcc060cadc3f049aadd653"},
+ {file = "gevent-23.9.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7c28e38dcde327c217fdafb9d5d17d3e772f636f35df15ffae2d933a5587addd"},
+ {file = "gevent-23.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fae8d5b5b8fa2a8f63b39f5447168b02db10c888a3e387ed7af2bd1b8612e543"},
+ {file = "gevent-23.9.1-cp39-cp39-win32.whl", hash = "sha256:2c7b5c9912378e5f5ccf180d1fdb1e83f42b71823483066eddbe10ef1a2fcaa2"},
+ {file = "gevent-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:a2898b7048771917d85a1d548fd378e8a7b2ca963db8e17c6d90c76b495e0e2b"},
+ {file = "gevent-23.9.1.tar.gz", hash = "sha256:72c002235390d46f94938a96920d8856d4ffd9ddf62a303a0d7c118894097e34"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.12.2", markers = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""}
+greenlet = [
+ {version = ">=2.0.0", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""},
+ {version = ">=3.0rc3", markers = "platform_python_implementation == \"CPython\" and python_version >= \"3.11\""},
+]
+"zope.event" = "*"
+"zope.interface" = "*"
+
+[package.extras]
+dnspython = ["dnspython (>=1.16.0,<2.0)", "idna"]
+docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"]
+monitor = ["psutil (>=5.7.0)"]
+recommended = ["cffi (>=1.12.2)", "dnspython (>=1.16.0,<2.0)", "idna", "psutil (>=5.7.0)"]
+test = ["cffi (>=1.12.2)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idna", "objgraph", "psutil (>=5.7.0)", "requests", "setuptools"]
+
+[[package]]
+name = "greenlet"
+version = "3.0.1"
+description = "Lightweight in-process concurrent programming"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"},
+ {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"},
+ {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"},
+ {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"},
+ {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"},
+ {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"},
+ {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"},
+ {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"},
+ {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"},
+ {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"},
+ {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"},
+ {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"},
+ {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"},
+ {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"},
+ {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"},
+ {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"},
+ {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"},
+ {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"},
+ {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"},
+ {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"},
+ {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"},
+ {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"},
+ {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"},
+ {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"},
+ {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"},
+ {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"},
+ {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"},
+ {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"},
+ {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"},
+ {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"},
+ {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"},
+ {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"},
+ {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"},
+ {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"},
+ {file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"},
+ {file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"},
+ {file = "greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"},
+ {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"},
+ {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"},
+ {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"},
+ {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"},
+ {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"},
+ {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"},
+ {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"},
+ {file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"},
+ {file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"},
+ {file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"},
+ {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"},
+ {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"},
+ {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"},
+ {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"},
+ {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"},
+ {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"},
+ {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"},
+ {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"},
+ {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"},
+ {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"},
+]
+
+[package.extras]
+docs = ["Sphinx"]
+test = ["objgraph", "psutil"]
+
+[[package]]
+name = "grpcio"
+version = "1.58.0"
+description = "HTTP/2-based RPC framework"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "grpcio-1.58.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:3e6bebf1dfdbeb22afd95650e4f019219fef3ab86d3fca8ebade52e4bc39389a"},
+ {file = "grpcio-1.58.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:cde11577d5b6fd73a00e6bfa3cf5f428f3f33c2d2878982369b5372bbc4acc60"},
+ {file = "grpcio-1.58.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a2d67ff99e70e86b2be46c1017ae40b4840d09467d5455b2708de6d4c127e143"},
+ {file = "grpcio-1.58.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ed979b273a81de36fc9c6716d9fb09dd3443efa18dcc8652501df11da9583e9"},
+ {file = "grpcio-1.58.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:458899d2ebd55d5ca2350fd3826dfd8fcb11fe0f79828ae75e2b1e6051d50a29"},
+ {file = "grpcio-1.58.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc7ffef430b80345729ff0a6825e9d96ac87efe39216e87ac58c6c4ef400de93"},
+ {file = "grpcio-1.58.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5b23d75e5173faa3d1296a7bedffb25afd2fddb607ef292dfc651490c7b53c3d"},
+ {file = "grpcio-1.58.0-cp310-cp310-win32.whl", hash = "sha256:fad9295fe02455d4f158ad72c90ef8b4bcaadfdb5efb5795f7ab0786ad67dd58"},
+ {file = "grpcio-1.58.0-cp310-cp310-win_amd64.whl", hash = "sha256:bc325fed4d074367bebd465a20763586e5e1ed5b943e9d8bc7c162b1f44fd602"},
+ {file = "grpcio-1.58.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:652978551af02373a5a313e07bfef368f406b5929cf2d50fa7e4027f913dbdb4"},
+ {file = "grpcio-1.58.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:9f13a171281ebb4d7b1ba9f06574bce2455dcd3f2f6d1fbe0fd0d84615c74045"},
+ {file = "grpcio-1.58.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8774219e21b05f750eef8adc416e9431cf31b98f6ce9def288e4cea1548cbd22"},
+ {file = "grpcio-1.58.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09206106848462763f7f273ca93d2d2d4d26cab475089e0de830bb76be04e9e8"},
+ {file = "grpcio-1.58.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62831d5e251dd7561d9d9e83a0b8655084b2a1f8ea91e4bd6b3cedfefd32c9d2"},
+ {file = "grpcio-1.58.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:212f38c6a156862098f6bdc9a79bf850760a751d259d8f8f249fc6d645105855"},
+ {file = "grpcio-1.58.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4b12754af201bb993e6e2efd7812085ddaaef21d0a6f0ff128b97de1ef55aa4a"},
+ {file = "grpcio-1.58.0-cp311-cp311-win32.whl", hash = "sha256:3886b4d56bd4afeac518dbc05933926198aa967a7d1d237a318e6fbc47141577"},
+ {file = "grpcio-1.58.0-cp311-cp311-win_amd64.whl", hash = "sha256:002f228d197fea12797a14e152447044e14fb4fdb2eb5d6cfa496f29ddbf79ef"},
+ {file = "grpcio-1.58.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:b5e8db0aff0a4819946215f156bd722b6f6c8320eb8419567ffc74850c9fd205"},
+ {file = "grpcio-1.58.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:201e550b7e2ede113b63e718e7ece93cef5b0fbf3c45e8fe4541a5a4305acd15"},
+ {file = "grpcio-1.58.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:d79b660681eb9bc66cc7cbf78d1b1b9e335ee56f6ea1755d34a31108b80bd3c8"},
+ {file = "grpcio-1.58.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ef8d4a76d2c7d8065aba829f8d0bc0055495c998dce1964ca5b302d02514fb3"},
+ {file = "grpcio-1.58.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cba491c638c76d3dc6c191d9c75041ca5b8f5c6de4b8327ecdcab527f130bb4"},
+ {file = "grpcio-1.58.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6801ff6652ecd2aae08ef994a3e49ff53de29e69e9cd0fd604a79ae4e545a95c"},
+ {file = "grpcio-1.58.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:24edec346e69e672daf12b2c88e95c6f737f3792d08866101d8c5f34370c54fd"},
+ {file = "grpcio-1.58.0-cp37-cp37m-win_amd64.whl", hash = "sha256:7e473a7abad9af48e3ab5f3b5d237d18208024d28ead65a459bd720401bd2f8f"},
+ {file = "grpcio-1.58.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:4891bbb4bba58acd1d620759b3be11245bfe715eb67a4864c8937b855b7ed7fa"},
+ {file = "grpcio-1.58.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:e9f995a8a421405958ff30599b4d0eec244f28edc760de82f0412c71c61763d2"},
+ {file = "grpcio-1.58.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2f85f87e2f087d9f632c085b37440a3169fda9cdde80cb84057c2fc292f8cbdf"},
+ {file = "grpcio-1.58.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb6b92036ff312d5b4182fa72e8735d17aceca74d0d908a7f08e375456f03e07"},
+ {file = "grpcio-1.58.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d81c2b2b24c32139dd2536972f1060678c6b9fbd106842a9fcdecf07b233eccd"},
+ {file = "grpcio-1.58.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fbcecb6aedd5c1891db1d70efbfbdc126c986645b5dd616a045c07d6bd2dfa86"},
+ {file = "grpcio-1.58.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92ae871a902cf19833328bd6498ec007b265aabf2fda845ab5bd10abcaf4c8c6"},
+ {file = "grpcio-1.58.0-cp38-cp38-win32.whl", hash = "sha256:dc72e04620d49d3007771c0e0348deb23ca341c0245d610605dddb4ac65a37cb"},
+ {file = "grpcio-1.58.0-cp38-cp38-win_amd64.whl", hash = "sha256:1c1c5238c6072470c7f1614bf7c774ffde6b346a100521de9ce791d1e4453afe"},
+ {file = "grpcio-1.58.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fe643af248442221db027da43ed43e53b73e11f40c9043738de9a2b4b6ca7697"},
+ {file = "grpcio-1.58.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:128eb1f8e70676d05b1b0c8e6600320fc222b3f8c985a92224248b1367122188"},
+ {file = "grpcio-1.58.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:039003a5e0ae7d41c86c768ef8b3ee2c558aa0a23cf04bf3c23567f37befa092"},
+ {file = "grpcio-1.58.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f061722cad3f9aabb3fbb27f3484ec9d4667b7328d1a7800c3c691a98f16bb0"},
+ {file = "grpcio-1.58.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0af11938acf8cd4cf815c46156bcde36fa5850518120920d52620cc3ec1830"},
+ {file = "grpcio-1.58.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d4cef77ad2fed42b1ba9143465856d7e737279854e444925d5ba45fc1f3ba727"},
+ {file = "grpcio-1.58.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24765a627eb4d9288ace32d5104161c3654128fe27f2808ecd6e9b0cfa7fc8b9"},
+ {file = "grpcio-1.58.0-cp39-cp39-win32.whl", hash = "sha256:f0241f7eb0d2303a545136c59bc565a35c4fc3b924ccbd69cb482f4828d6f31c"},
+ {file = "grpcio-1.58.0-cp39-cp39-win_amd64.whl", hash = "sha256:dcfba7befe3a55dab6fe1eb7fc9359dc0c7f7272b30a70ae0af5d5b063842f28"},
+ {file = "grpcio-1.58.0.tar.gz", hash = "sha256:532410c51ccd851b706d1fbc00a87be0f5312bd6f8e5dbf89d4e99c7f79d7499"},
+]
+
+[package.extras]
+protobuf = ["grpcio-tools (>=1.58.0)"]
+
+[[package]]
+name = "grpcio-health-checking"
+version = "1.58.0"
+description = "Standard Health Checking Service for gRPC"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "grpcio-health-checking-1.58.0.tar.gz", hash = "sha256:07d58623f27becf186c862cbae7dfd7a54bd63f4b37594c77c8b8fc933f11c2f"},
+ {file = "grpcio_health_checking-1.58.0-py3-none-any.whl", hash = "sha256:ac7c268654df114ab32be5395fdbfd0d2c4510e7b7ef50189777b39de979249b"},
+]
+
+[package.dependencies]
+grpcio = ">=1.58.0"
+protobuf = ">=4.21.6"
+
+[[package]]
+name = "grpcio-tools"
+version = "1.58.0"
+description = "Protobuf code generator for gRPC"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "grpcio-tools-1.58.0.tar.gz", hash = "sha256:6f4d80ceb591e31ca4dceec747dbe56132e1392a0a9bb1c8fe001d1b5cac898a"},
+ {file = "grpcio_tools-1.58.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:60c874908f3b40f32f1bb0221f7b3ab65ecb53a4d0a9f0a394f031f1b292c177"},
+ {file = "grpcio_tools-1.58.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:1852e798f31e5437ca7b37abc910e028b34732fb19364862cedb87b1dab66fad"},
+ {file = "grpcio_tools-1.58.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:149fb48f53cb691a6328f68bed8e4036c730f7106b7f98e92c2c0403f0b9e93c"},
+ {file = "grpcio_tools-1.58.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba3d383e5ca93826038b70f326fce8e8d12dd9b2f64d363a3d612f7475f12dd2"},
+ {file = "grpcio_tools-1.58.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6997511e9d2979f7a2389479682dbb06823f21a904e8fb0a5c6baaf1b4b4a863"},
+ {file = "grpcio_tools-1.58.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8de0b701da479643f71fad71fe66885cddd89441ae16e2c724939b47742dc72e"},
+ {file = "grpcio_tools-1.58.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43cc23908b63fcaefe690b10f68a2d8652c994b5b36ab77d2271d9608c895320"},
+ {file = "grpcio_tools-1.58.0-cp310-cp310-win32.whl", hash = "sha256:2c2221123d010dc6231799e63a37f2f4786bf614ef65b23009c387cd20d8b193"},
+ {file = "grpcio_tools-1.58.0-cp310-cp310-win_amd64.whl", hash = "sha256:df2788736bdf58abe7b0e4d6b1ff806f7686c98c5ad900da312252e3322d91c4"},
+ {file = "grpcio_tools-1.58.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:b6ea5578712cdb29b0ff60bfc6405bf0e8d681b9c71d106dd1cda54fe7fe4e55"},
+ {file = "grpcio_tools-1.58.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c29880f491581c83181c0a84a4d11402af2b13166a5266f64e246adf1da7aa66"},
+ {file = "grpcio_tools-1.58.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:32d51e933c3565414dd0835f930bb28a1cdeba435d9d2c87fa3cf8b1d284db3c"},
+ {file = "grpcio_tools-1.58.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ad9d77f25514584b1ddc981d70c9e50dfcfc388aa5ba943eee67520c5267ed9"},
+ {file = "grpcio_tools-1.58.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4882382631e6352819059278a5c878ce0b067008dd490911d16d5616e8a36d85"},
+ {file = "grpcio_tools-1.58.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d84091a189d848d94645b7c48b61734c12ec03b0d46e5fc0049343a26989ac5c"},
+ {file = "grpcio_tools-1.58.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:85ac28a9621e9b92a3fc416288c4ce45542db0b4c31b3e23031dd8e0a0ec5590"},
+ {file = "grpcio_tools-1.58.0-cp311-cp311-win32.whl", hash = "sha256:7371d8ea80234b29affec145e25569523f549520ed7e53b2aa92bed412cdecfd"},
+ {file = "grpcio_tools-1.58.0-cp311-cp311-win_amd64.whl", hash = "sha256:6997df6e7c5cf4d3ddc764240c1ff6a04b45d70ec28913b38fbc6396ef743e12"},
+ {file = "grpcio_tools-1.58.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:ac65b8d6e3acaf88b815edf9af88ff844b6600ff3d2591c05ba4f655b45d5fb4"},
+ {file = "grpcio_tools-1.58.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:88e8191d0dd789bebf42533808728f5ce75d2c51e2a72bdf20abe5b5e3fbec42"},
+ {file = "grpcio_tools-1.58.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a3dbece2a121761499a659b799979d4b738586d1065439053de553773eee11ca"},
+ {file = "grpcio_tools-1.58.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1086fe240c4c879b9721952b47d46996deb283c2d9355a8dc24a804811aacf70"},
+ {file = "grpcio_tools-1.58.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ae3dca059d5b358dd03fb63277428fa7d771605d4074a019138dd38d70719a"},
+ {file = "grpcio_tools-1.58.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3f8904ac7fc3da2e874f00b3a986e8b7e004f499344a8e7eb213c26dfb025041"},
+ {file = "grpcio_tools-1.58.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aadbd8393ae332e49731adb31e741f2e689989150569b7acc939f5ea43124e2d"},
+ {file = "grpcio_tools-1.58.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1cb6e24194786687d4f23c64de1f0ce553af51de22746911bc37340f85f9783e"},
+ {file = "grpcio_tools-1.58.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:6ec43909095c630df3e479e77469bdad367067431f4af602f6ccb978a3b78afd"},
+ {file = "grpcio_tools-1.58.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:4be49ed320b0ebcbc21d19ef555fbf229c1c452105522b728e1171ee2052078e"},
+ {file = "grpcio_tools-1.58.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:28eefebddec3d3adf19baca78f8b82a2287d358e1b1575ae018cdca8eacc6269"},
+ {file = "grpcio_tools-1.58.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ef8c696e9d78676cc3f583a92bbbf2c84e94e350f7ad22f150a52559f4599d1"},
+ {file = "grpcio_tools-1.58.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aeb5949e46558d21c51fd3ec3eeecc59c94dbca76c67c0a80d3da6b7437930c"},
+ {file = "grpcio_tools-1.58.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f7144aad9396d35fb1b80429600a970b559c2ad4d07020eeb180fe83cea2bee"},
+ {file = "grpcio_tools-1.58.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ee26e9253a721fff355737649678535f76cf5d642aa3ac0cd937832559b90af"},
+ {file = "grpcio_tools-1.58.0-cp38-cp38-win32.whl", hash = "sha256:343f572312039059a8797d6e29a7fc62196e73131ab01755660a9d48202267c1"},
+ {file = "grpcio_tools-1.58.0-cp38-cp38-win_amd64.whl", hash = "sha256:cd7acfbb43b7338a78cf4a67528d05530d574d92b7c829d185b78dfc451d158f"},
+ {file = "grpcio_tools-1.58.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:46628247fbce86d18232eead24bd22ed0826c79f3fe2fc2fbdbde45971361049"},
+ {file = "grpcio_tools-1.58.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:51587842a54e025a3d0d37afcf4ef2b7ac1def9a5d17448665cb424b53d6c287"},
+ {file = "grpcio_tools-1.58.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:a062ae3072a2a39a3c057f4d68b57b021f1dd2956cd09aab39709f6af494e1de"},
+ {file = "grpcio_tools-1.58.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eec3c93a08df11c80ef1c29a616bcbb0d83dbc6ea41b48306fcacc720416dfa7"},
+ {file = "grpcio_tools-1.58.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b63f823ac991ff77104da614d2a2485a59d37d57830eb2e387a6e2a3edc7fa2b"},
+ {file = "grpcio_tools-1.58.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:579c11a9f198847ed48dbc4f211c67fe96a73320b87c81f01b044b72e24a7d77"},
+ {file = "grpcio_tools-1.58.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2fc1dd8049d417a5034d944c9df05cee76f855b3e431627ab4292e7c01c47"},
+ {file = "grpcio_tools-1.58.0-cp39-cp39-win32.whl", hash = "sha256:453023120114c35d3d9d6717ea0820e5d5c140f51f9d0b621de4397ff854471b"},
+ {file = "grpcio_tools-1.58.0-cp39-cp39-win_amd64.whl", hash = "sha256:b6c896f1df99c35cf062d4803c15663ff00a33ff09add28baa6e475cf6b5e258"},
+]
+
+[package.dependencies]
+grpcio = ">=1.58.0"
+protobuf = ">=4.21.6,<5.0dev"
+setuptools = "*"
+
+[[package]]
+name = "idna"
+version = "3.4"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "isodate"
+version = "0.6.1"
+description = "An ISO 8601 date/time/duration parser and formatter"
+optional = false
+python-versions = "*"
+files = [
+ {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"},
+ {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "jmespath"
+version = "1.0.1"
+description = "JSON Matching Expressions"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
+ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
+]
+
+[[package]]
+name = "lockfile"
+version = "0.12.2"
+description = "Platform-independent file locking module"
+optional = false
+python-versions = "*"
+files = [
+ {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"},
+ {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"},
+]
+
+[[package]]
+name = "mccabe"
+version = "0.6.1"
+description = "McCabe checker, plugin for flake8"
+optional = false
+python-versions = "*"
+files = [
+ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
+ {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
+]
+
+[[package]]
+name = "msal"
+version = "1.24.1"
+description = "The Microsoft Authentication Library (MSAL) for Python library"
+optional = false
+python-versions = ">=2.7"
+files = [
+ {file = "msal-1.24.1-py2.py3-none-any.whl", hash = "sha256:ce4320688f95c301ee74a4d0e9dbcfe029a63663a8cc61756f40d0d0d36574ad"},
+ {file = "msal-1.24.1.tar.gz", hash = "sha256:aa0972884b3c6fdec53d9a0bd15c12e5bd7b71ac1b66d746f54d128709f3f8f8"},
+]
+
+[package.dependencies]
+cryptography = ">=0.6,<44"
+PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]}
+requests = ">=2.0.0,<3"
+
+[package.extras]
+broker = ["pymsalruntime (>=0.13.2,<0.14)"]
+
+[[package]]
+name = "msal-extensions"
+version = "1.0.0"
+description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism."
+optional = false
+python-versions = "*"
+files = [
+ {file = "msal-extensions-1.0.0.tar.gz", hash = "sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354"},
+ {file = "msal_extensions-1.0.0-py2.py3-none-any.whl", hash = "sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee"},
+]
+
+[package.dependencies]
+msal = ">=0.4.1,<2.0.0"
+portalocker = [
+ {version = ">=1.0,<3", markers = "python_version >= \"3.5\" and platform_system != \"Windows\""},
+ {version = ">=1.6,<3", markers = "python_version >= \"3.5\" and platform_system == \"Windows\""},
+]
+
+[[package]]
+name = "multidict"
+version = "6.0.4"
+description = "multidict implementation"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
+ {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"},
+ {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"},
+ {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"},
+ {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"},
+ {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"},
+ {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"},
+ {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"},
+ {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"},
+ {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"},
+ {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"},
+ {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"},
+ {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
+]
+
+[[package]]
+name = "nose"
+version = "1.3.7"
+description = "nose extends unittest to make testing easier"
+optional = false
+python-versions = "*"
+files = [
+ {file = "nose-1.3.7-py2-none-any.whl", hash = "sha256:dadcddc0aefbf99eea214e0f1232b94f2fa9bd98fa8353711dacb112bfcbbb2a"},
+ {file = "nose-1.3.7-py3-none-any.whl", hash = "sha256:9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac"},
+ {file = "nose-1.3.7.tar.gz", hash = "sha256:f1bffef9cbc82628f6e7d7b40d7e255aefaa1adb6a1b1d26c69a8b79e6208a98"},
+]
+
+[[package]]
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+]
+
+[[package]]
+name = "parallel-ssh"
+version = "2.2.0"
+description = "Asynchronous parallel SSH library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "parallel-ssh-2.2.0.tar.gz", hash = "sha256:0713093ec1903af698ef3682d801d07321d6ea5de3d7a3cd61752b803b043278"},
+ {file = "parallel_ssh-2.2.0-py2.py3-none-any.whl", hash = "sha256:c1dc6d0c326ff2612e147483b1fd3b5e660c4255e01469bdfa7d24444ea09e81"},
+]
+
+[package.dependencies]
+gevent = ">=1.1"
+ssh-python = ">=0.8.0"
+ssh2-python = ">=0.22.0"
+
+[[package]]
+name = "parse"
+version = "1.19.1"
+description = "parse() is the opposite of format()"
+optional = false
+python-versions = "*"
+files = [
+ {file = "parse-1.19.1-py2.py3-none-any.whl", hash = "sha256:371ed3800dc63983832159cc9373156613947707bc448b5215473a219dbd4362"},
+ {file = "parse-1.19.1.tar.gz", hash = "sha256:cc3a47236ff05da377617ddefa867b7ba983819c664e1afe46249e5b469be464"},
+]
+
+[[package]]
+name = "parse-type"
+version = "0.6.2"
+description = "Simplifies to build parse types based on the parse module"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*"
+files = [
+ {file = "parse_type-0.6.2-py2.py3-none-any.whl", hash = "sha256:06d39a8b70fde873eb2a131141a0e79bb34a432941fb3d66fad247abafc9766c"},
+ {file = "parse_type-0.6.2.tar.gz", hash = "sha256:79b1f2497060d0928bc46016793f1fca1057c4aacdf15ef876aa48d75a73a355"},
+]
+
+[package.dependencies]
+parse = {version = ">=1.18.0", markers = "python_version >= \"3.0\""}
+six = ">=1.15"
+
+[package.extras]
+develop = ["build (>=0.5.1)", "coverage (>=4.4)", "pylint", "pytest (<5.0)", "pytest (>=5.0)", "pytest-cov", "pytest-html (>=1.19.0)", "ruff", "tox (>=2.8,<4.0)", "twine (>=1.13.0)", "virtualenv (<20.22.0)", "virtualenv (>=20.0.0)"]
+docs = ["Sphinx (>=1.6)", "sphinx-bootstrap-theme (>=0.6.0)"]
+testing = ["pytest (<5.0)", "pytest (>=5.0)", "pytest-html (>=1.19.0)"]
+
+[[package]]
+name = "platformdirs"
+version = "3.11.0"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"},
+ {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
+
+[[package]]
+name = "pluggy"
+version = "1.3.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
+ {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "portalocker"
+version = "2.8.2"
+description = "Wraps the portalocker recipe for easy usage"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "portalocker-2.8.2-py3-none-any.whl", hash = "sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e"},
+ {file = "portalocker-2.8.2.tar.gz", hash = "sha256:2b035aa7828e46c58e9b31390ee1f169b98e1066ab10b9a6a861fe7e25ee4f33"},
+]
+
+[package.dependencies]
+pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+docs = ["sphinx (>=1.7.1)"]
+redis = ["redis"]
+tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"]
+
+[[package]]
+name = "protobuf"
+version = "4.24.3"
+description = ""
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"},
+ {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"},
+ {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"},
+ {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"},
+ {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"},
+ {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"},
+ {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"},
+ {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"},
+ {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"},
+ {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"},
+ {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"},
+ {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"},
+ {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"},
+]
+
+[[package]]
+name = "psutil"
+version = "5.9.6"
+description = "Cross-platform lib for process and system monitoring in Python."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+ {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"},
+ {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"},
+ {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"},
+ {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"},
+ {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"},
+ {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"},
+ {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"},
+ {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"},
+ {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"},
+ {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"},
+ {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"},
+ {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"},
+ {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"},
+ {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"},
+ {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"},
+ {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"},
+]
+
+[package.extras]
+test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+
+[[package]]
+name = "pyasn1"
+version = "0.4.8"
+description = "ASN.1 types and codecs"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
+ {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
+]
+
+[[package]]
+name = "pyasn1-modules"
+version = "0.2.8"
+description = "A collection of ASN.1-based protocols modules."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"},
+ {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"},
+]
+
+[package.dependencies]
+pyasn1 = ">=0.4.6,<0.5.0"
+
+[[package]]
+name = "pycodestyle"
+version = "2.5.0"
+description = "Python style guide checker"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pycodestyle-2.5.0-py2.py3-none-any.whl", hash = "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56"},
+ {file = "pycodestyle-2.5.0.tar.gz", hash = "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"},
+]
+
+[[package]]
+name = "pycparser"
+version = "2.21"
+description = "C parser in Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
+ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+]
+
+[[package]]
+name = "pycryptodome"
+version = "3.19.0"
+description = "Cryptographic library for Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "pycryptodome-3.19.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3006c44c4946583b6de24fe0632091c2653d6256b99a02a3db71ca06472ea1e4"},
+ {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:7c760c8a0479a4042111a8dd2f067d3ae4573da286c53f13cf6f5c53a5c1f631"},
+ {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:08ce3558af5106c632baf6d331d261f02367a6bc3733086ae43c0f988fe042db"},
+ {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45430dfaf1f421cf462c0dd824984378bef32b22669f2635cb809357dbaab405"},
+ {file = "pycryptodome-3.19.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:a9bcd5f3794879e91970f2bbd7d899780541d3ff439d8f2112441769c9f2ccea"},
+ {file = "pycryptodome-3.19.0-cp27-cp27m-win32.whl", hash = "sha256:190c53f51e988dceb60472baddce3f289fa52b0ec38fbe5fd20dd1d0f795c551"},
+ {file = "pycryptodome-3.19.0-cp27-cp27m-win_amd64.whl", hash = "sha256:22e0ae7c3a7f87dcdcf302db06ab76f20e83f09a6993c160b248d58274473bfa"},
+ {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7822f36d683f9ad7bc2145b2c2045014afdbbd1d9922a6d4ce1cbd6add79a01e"},
+ {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:05e33267394aad6db6595c0ce9d427fe21552f5425e116a925455e099fdf759a"},
+ {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:829b813b8ee00d9c8aba417621b94bc0b5efd18c928923802ad5ba4cf1ec709c"},
+ {file = "pycryptodome-3.19.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:fc7a79590e2b5d08530175823a242de6790abc73638cc6dc9d2684e7be2f5e49"},
+ {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:542f99d5026ac5f0ef391ba0602f3d11beef8e65aae135fa5b762f5ebd9d3bfb"},
+ {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:61bb3ccbf4bf32ad9af32da8badc24e888ae5231c617947e0f5401077f8b091f"},
+ {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d49a6c715d8cceffedabb6adb7e0cbf41ae1a2ff4adaeec9432074a80627dea1"},
+ {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e249a784cc98a29c77cea9df54284a44b40cafbfae57636dd2f8775b48af2434"},
+ {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d033947e7fd3e2ba9a031cb2d267251620964705a013c5a461fa5233cc025270"},
+ {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:84c3e4fffad0c4988aef0d5591be3cad4e10aa7db264c65fadbc633318d20bde"},
+ {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:139ae2c6161b9dd5d829c9645d781509a810ef50ea8b657e2257c25ca20efe33"},
+ {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5b1986c761258a5b4332a7f94a83f631c1ffca8747d75ab8395bf2e1b93283d9"},
+ {file = "pycryptodome-3.19.0-cp35-abi3-win32.whl", hash = "sha256:536f676963662603f1f2e6ab01080c54d8cd20f34ec333dcb195306fa7826997"},
+ {file = "pycryptodome-3.19.0-cp35-abi3-win_amd64.whl", hash = "sha256:04dd31d3b33a6b22ac4d432b3274588917dcf850cc0c51c84eca1d8ed6933810"},
+ {file = "pycryptodome-3.19.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:8999316e57abcbd8085c91bc0ef75292c8618f41ca6d2b6132250a863a77d1e7"},
+ {file = "pycryptodome-3.19.0-pp27-pypy_73-win32.whl", hash = "sha256:a0ab84755f4539db086db9ba9e9f3868d2e3610a3948cbd2a55e332ad83b01b0"},
+ {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0101f647d11a1aae5a8ce4f5fad6644ae1b22bb65d05accc7d322943c69a74a6"},
+ {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1601e04d32087591d78e0b81e1e520e57a92796089864b20e5f18c9564b3fa"},
+ {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:506c686a1eee6c00df70010be3b8e9e78f406af4f21b23162bbb6e9bdf5427bc"},
+ {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7919ccd096584b911f2a303c593280869ce1af9bf5d36214511f5e5a1bed8c34"},
+ {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:560591c0777f74a5da86718f70dfc8d781734cf559773b64072bbdda44b3fc3e"},
+ {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cc2f2ae451a676def1a73c1ae9120cd31af25db3f381893d45f75e77be2400"},
+ {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17940dcf274fcae4a54ec6117a9ecfe52907ed5e2e438fe712fe7ca502672ed5"},
+ {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d04f5f623a280fbd0ab1c1d8ecbd753193ab7154f09b6161b0f857a1a676c15f"},
+ {file = "pycryptodome-3.19.0.tar.gz", hash = "sha256:bc35d463222cdb4dbebd35e0784155c81e161b9284e567e7e933d722e533331e"},
+]
+
+[[package]]
+name = "pyflakes"
+version = "2.1.1"
+description = "passive checker of Python programs"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pyflakes-2.1.1-py2.py3-none-any.whl", hash = "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0"},
+ {file = "pyflakes-2.1.1.tar.gz", hash = "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"},
+]
+
+[[package]]
+name = "pyjwt"
+version = "2.8.0"
+description = "JSON Web Token implementation in Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"},
+ {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"},
+]
+
+[package.dependencies]
+cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""}
+
+[package.extras]
+crypto = ["cryptography (>=3.4.0)"]
+dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
+docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
+tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
+
+[[package]]
+name = "pyopenssl"
+version = "22.0.0"
+description = "Python wrapper module around the OpenSSL library"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pyOpenSSL-22.0.0-py2.py3-none-any.whl", hash = "sha256:ea252b38c87425b64116f808355e8da644ef9b07e429398bfece610f893ee2e0"},
+ {file = "pyOpenSSL-22.0.0.tar.gz", hash = "sha256:660b1b1425aac4a1bea1d94168a85d99f0b3144c869dd4390d27629d0087f1bf"},
+]
+
+[package.dependencies]
+cryptography = ">=35.0"
+
+[package.extras]
+docs = ["sphinx", "sphinx-rtd-theme"]
+test = ["flaky", "pretend", "pytest (>=3.0.1)"]
+
+[[package]]
+name = "pyproject-api"
+version = "1.6.1"
+description = "API to interact with the python pyproject.toml based projects"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"},
+ {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"},
+]
+
+[package.dependencies]
+packaging = ">=23.1"
+tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"]
+testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "setuptools (>=68.1.2)", "wheel (>=0.41.2)"]
+
+[[package]]
+name = "pytest"
+version = "7.4.3"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"},
+ {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytest-cov"
+version = "4.1.0"
+description = "Pytest plugin for measuring coverage."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
+ {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
+]
+
+[package.dependencies]
+coverage = {version = ">=5.2.1", extras = ["toml"]}
+pytest = ">=4.6"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
+
+[[package]]
+name = "python-dateutil"
+version = "2.8.1"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+ {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"},
+ {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "pywin32"
+version = "306"
+description = "Python for Window Extensions"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"},
+ {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"},
+ {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"},
+ {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"},
+ {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"},
+ {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"},
+ {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"},
+ {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"},
+ {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"},
+ {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"},
+ {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"},
+ {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"},
+ {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"},
+ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"},
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.1"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
+ {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
+ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+ {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
+ {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
+ {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
+ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+ {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
+ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+ {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
+ {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
+ {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
+ {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
+ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+ {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
+ {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
+ {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
+ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
+ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+]
+
+[[package]]
+name = "requests"
+version = "2.31.0"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
+ {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "retrying"
+version = "1.3.4"
+description = "Retrying"
+optional = false
+python-versions = "*"
+files = [
+ {file = "retrying-1.3.4-py3-none-any.whl", hash = "sha256:8cc4d43cb8e1125e0ff3344e9de678fefd85db3b750b81b2240dc0183af37b35"},
+ {file = "retrying-1.3.4.tar.gz", hash = "sha256:345da8c5765bd982b1d1915deb9102fd3d1f7ad16bd84a9700b85f64d24e8f3e"},
+]
+
+[package.dependencies]
+six = ">=1.7.0"
+
+[[package]]
+name = "rsa"
+version = "4.9"
+description = "Pure-Python RSA implementation"
+optional = false
+python-versions = ">=3.6,<4"
+files = [
+ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
+ {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
+]
+
+[package.dependencies]
+pyasn1 = ">=0.1.3"
+
+[[package]]
+name = "s3transfer"
+version = "0.7.0"
+description = "An Amazon S3 Transfer Manager"
+optional = false
+python-versions = ">= 3.7"
+files = [
+ {file = "s3transfer-0.7.0-py3-none-any.whl", hash = "sha256:10d6923c6359175f264811ef4bf6161a3156ce8e350e705396a7557d6293c33a"},
+ {file = "s3transfer-0.7.0.tar.gz", hash = "sha256:fd3889a66f5fe17299fe75b82eae6cf722554edca744ca5d5fe308b104883d2e"},
+]
+
+[package.dependencies]
+botocore = ">=1.12.36,<2.0a.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
+
+[[package]]
+name = "setuptools"
+version = "66.1.1"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "setuptools-66.1.1-py3-none-any.whl", hash = "sha256:6f590d76b713d5de4e49fe4fbca24474469f53c83632d5d0fd056f7ff7e8112b"},
+ {file = "setuptools-66.1.1.tar.gz", hash = "sha256:ac4008d396bc9cd983ea483cb7139c0240a07bbc74ffb6232fceffedc6cf03a8"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "ssh-python"
+version = "1.0.0"
+description = "libssh C library bindings for Python."
+optional = false
+python-versions = "*"
+files = [
+ {file = "ssh-python-1.0.0.tar.gz", hash = "sha256:68bee3f5a657e657d6dcc94e1b6c9ec332fd5d59bf4976a54150b997a800bf68"},
+ {file = "ssh_python-1.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:ab7e01059b5a40c3de08a094b51a57ee23b41b1b0c52dc8f4a07e1524932778d"},
+ {file = "ssh_python-1.0.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c51998b2ea7de00ef5ff557893df28d8b07c4392a31bccbb60e1142451b949df"},
+ {file = "ssh_python-1.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6ee326dc3767f28f58691d92ad4543ac8a2ebbac51aa49d6f6f448593cb6bf7"},
+ {file = "ssh_python-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e73f5ea92e82c2a928ef9d01dee8e684713e1b08a97f9c7d583b34a4720658cb"},
+ {file = "ssh_python-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:43da4a16e88dccd54db4e2dac50e09ae424811a7d973a1a1f1c081ebec2a52bb"},
+ {file = "ssh_python-1.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5985e71fee63d34ae15eb660cdf21c3ed4f712b2326c48d10be3c5a2bd1fa51"},
+ {file = "ssh_python-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92b95293395924953f537b732a3d8fa1aba7ddb26b2137be80f53111852fa289"},
+ {file = "ssh_python-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:82f77cd5d9ddedcc312c37a2848b9d76a0a6162c81f52a8051cc18b644b18b4f"},
+ {file = "ssh_python-1.0.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6cb934aaa0d8a89ecdc4d37f06d009a7bad50eea18fa3e9763193e85633669be"},
+ {file = "ssh_python-1.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3259ce8cf7b2169abdaab117db466c06c6185db1a65b8e97fa5e475f1ec399fb"},
+ {file = "ssh_python-1.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d176c778f02b3066c608c119e94d5b8c3d0d211e83b363bee0bd0d78e0eaa56"},
+ {file = "ssh_python-1.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:58f2fe1899add41d3954ae655caf09119491a468cc93b4575b199f29ecb4c49f"},
+ {file = "ssh_python-1.0.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd56baaef157052755cfdbef718911a062e008cc59b267deb89952b269313995"},
+ {file = "ssh_python-1.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44c1509fa26ad699de921e53347a86cef18796b5c8582d9a2cdd3e21cbd667a"},
+ {file = "ssh_python-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e438710ffbf34f800e2c051091da478191c02a429abd49f565f82a0b149d01f"},
+ {file = "ssh_python-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:364d3b075001d9a1e7d83a545e43cb28420171ddd779a286d4e267e52b9df2b7"},
+ {file = "ssh_python-1.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d8e386c137414d7ef3bbac939be37f5e2bd11d1f83c78274c1e68efe184bc5d2"},
+ {file = "ssh_python-1.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ebd8ebfad53f47c938634645c8b4b5870e444e190de90a0d5ceb5422a48fe1"},
+ {file = "ssh_python-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3d0fca290cb88d73c12d7fad162ad36d79beb32b4fb3e39de87cd37330404f5"},
+ {file = "ssh_python-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:3af87fd53e314cef8cc65f00cd69a2b1aca84863028b161721800bb7e89526f2"},
+ {file = "ssh_python-1.0.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a50af7f32517cd4067b3e68b723ec5c4efbbde2fb92263cbf11d7da5eacd65b4"},
+ {file = "ssh_python-1.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c83f5f7f3ded106d1088f33bc8aba03ae60126baa767d91a9bb9dc0372d73bda"},
+ {file = "ssh_python-1.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2566ffb2725a05e0b650ccf63b5d4f46fd5e47328d22d65f8111183e7dc4a62d"},
+ {file = "ssh_python-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1bfa28c5b159ab804bc5b496819e5af8ebb7e5f6b442e8a87136296cebb172c"},
+ {file = "ssh_python-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:c755afbe8ce3e926c80d8301f7bf6941781a2a1d497d3cb2692ea73bdf064f97"},
+ {file = "ssh_python-1.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6190ae4afedf0c35f84a1a5cd52c2b68935cd5c4b1330b491ca381006fa8fae6"},
+ {file = "ssh_python-1.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422140695d14796e9eb6f608365c37d5b780092ca6dbfedde9e6ffd15722f2b0"},
+ {file = "ssh_python-1.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e36540aba6f3a7bc18cdb8321808f9b7ea3d551fad53069b0806189c9eab989"},
+ {file = "ssh_python-1.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b695ff0561f16746b0d47f4a51cca205088a06a06c6d0ab29127b6f21bdf286"},
+ {file = "ssh_python-1.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c56298bc06afa7986f76b1a5add539f462bc040c9c8bd9df1308a5b7b62af090"},
+ {file = "ssh_python-1.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1abed03383d3c40e3a3687dfade03b881b055f29baeb3d3a4d60eb80580b01a8"},
+ {file = "ssh_python-1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26380e2f06ea8b4f79c6ada4b2016079c0f6d47752785dd129c914f9aeda23f4"},
+ {file = "ssh_python-1.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54731a4c3c06968ff41fd72a62ac40c1aa163d4fb51c82a4367e74398c9968ae"},
+]
+
+[[package]]
+name = "ssh2-python"
+version = "1.0.0"
+description = "Bindings for libssh2 C library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ssh2-python-1.0.0.tar.gz", hash = "sha256:af89e80c3203e7829b24eea688eaf5c5e279071aed1882238b4f44ec2144e7c5"},
+ {file = "ssh2_python-1.0.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:743df09165bbfcf754c6faf8a5feba414605ed73a6eee79e323d525c60f0cb41"},
+ {file = "ssh2_python-1.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c3c2910976da579c337dbd2134601d462b7bd861772dcb6e6dbb298749d1fd9"},
+ {file = "ssh2_python-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:631375d2338aa373b9d2f2fe2af7ffc7c552845661a2c7456757b4c39c9ae15b"},
+ {file = "ssh2_python-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5fca1744fe977eec920d7fdfeb1a79dd3fe4747f9455fb0e76aaa02b2e2dee7a"},
+ {file = "ssh2_python-1.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:016c2fb027bb8a1720b71cf144c423f8d1acfdfef4238ada47c8452c7c5273fd"},
+ {file = "ssh2_python-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbbdaaedc094364ee5027942bc40d68d1c5f9da7ec4cccb615036309b980bd8b"},
+ {file = "ssh2_python-1.0.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c9abd88161ce5e40f08a3b0af2173acafc7b19dd3dd140ccbd7c0b40094e558f"},
+ {file = "ssh2_python-1.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ca21a64c1aa77763691de70ed29143b457fafc323a108bfeb4d249e1cca84f"},
+ {file = "ssh2_python-1.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1ba1b05256277c78f4d4e158c4b024427bda94761426b9b37e77a551cc85158"},
+ {file = "ssh2_python-1.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3270c77d24a35b1b52c312c42f14f0c27b013ab00cb8fdb67d4eb55e999b715a"},
+ {file = "ssh2_python-1.0.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8582b2965116435c35f9f2f3da4297eae8a5daea0fcebfdfd9388a9dfc5cd922"},
+ {file = "ssh2_python-1.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:114b27c130144f6fad0fbc5d35e71d0a841059efd3f868b86b5443de89b208af"},
+ {file = "ssh2_python-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddcba606818f28719d25c4a14789cc9b88714385e26e52d45ce967068ed7ad83"},
+ {file = "ssh2_python-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:aca4967a67a8fe4a4cabdac710065cca426e8200e5d426f6daae4c6a3a7e1921"},
+ {file = "ssh2_python-1.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:190b1c1389206aed43b40a1351732c1d854578480a566d5f6b899587ff989c74"},
+ {file = "ssh2_python-1.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f288aef1ab4d1e8bbc8a38bcebc1b21849fa8448fb68e851b9ff136eab46da6"},
+ {file = "ssh2_python-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:161eebd09258fe7fe47652561cefca9116c0df3b1128e1514d10d3d97e4f1521"},
+ {file = "ssh2_python-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:b315b754392e3b3c3ac546f4d35acdedb174d104212242a6f4778695beda9bb7"},
+ {file = "ssh2_python-1.0.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:c5635f196a96bbe6d12b6cdb2f9d5fd464a0e82edca58f56391cc25aa61a4379"},
+ {file = "ssh2_python-1.0.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:fc8f46d0d883d77462122a4cdb1dcff9a890923bd3525f061a2eb3563c7c5612"},
+ {file = "ssh2_python-1.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0f80a96dbffb34f09d84532178d7d24e404b0b313c76037a8b87187990ddddbd"},
+ {file = "ssh2_python-1.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc837e44189c9092af9af5ff25138a8efd05ad6fb10623c49039aa8f2f6ce03e"},
+ {file = "ssh2_python-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:092c7f63c7ab8291748c7af8eb7809c81c53a761e9f4a8eecfc8705c92c636cb"},
+ {file = "ssh2_python-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:6d52034fdb35872fb7082d0a325cb90e0dca41050bc1d7f65cf90b951d5f7caa"},
+ {file = "ssh2_python-1.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:489ae221149a1049bdda48f12bb99e20d51769ac67f7f0c65468f298f81c10c0"},
+ {file = "ssh2_python-1.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2593b1659f8551a06730579ad70390939f50094b71dc8acd9b86cba0d95ab5c0"},
+ {file = "ssh2_python-1.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b304bcda607935375eeb5caa8f798021174893de7ea1b72416ad28709793695"},
+ {file = "ssh2_python-1.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b139b8b47e230750b5b3b2f9977a7af996e614e334565bce5bf70e018d6536c7"},
+ {file = "ssh2_python-1.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93d9d7c48f7b729e9e0250b09dff5e712aab7df10718764f6ec00f88521b8a12"},
+ {file = "ssh2_python-1.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c1e9cdcf416e72934bf2af8faf977ea3836c98e248d4966606410b2a71f5de"},
+ {file = "ssh2_python-1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3771b58ae9944aff2aa86aadadc4f2dea174e3a30c8503bed022587bedd7e1c5"},
+ {file = "ssh2_python-1.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99ee7e9aaaba60fc91ecb21d6233272bfa8acb7aec28b599eee9f374327e5f9c"},
+]
+
+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+
+[[package]]
+name = "tox"
+version = "4.11.3"
+description = "tox is a generic virtualenv management and test command line tool"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "tox-4.11.3-py3-none-any.whl", hash = "sha256:599af5e5bb0cad0148ac1558a0b66f8fff219ef88363483b8d92a81e4246f28f"},
+ {file = "tox-4.11.3.tar.gz", hash = "sha256:5039f68276461fae6a9452a3b2c7295798f00a0e92edcd9a3b78ba1a73577951"},
+]
+
+[package.dependencies]
+cachetools = ">=5.3.1"
+chardet = ">=5.2"
+colorama = ">=0.4.6"
+filelock = ">=3.12.3"
+packaging = ">=23.1"
+platformdirs = ">=3.10"
+pluggy = ">=1.3"
+pyproject-api = ">=1.6.1"
+tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
+virtualenv = ">=20.24.3"
+
+[package.extras]
+docs = ["furo (>=2023.8.19)", "sphinx (>=7.2.4)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.24)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.1.1)", "devpi-process (>=1)", "diff-cover (>=7.7)", "distlib (>=0.3.7)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.18)", "psutil (>=5.9.5)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-xdist (>=3.3.1)", "re-assert (>=1.1)", "time-machine (>=2.12)", "wheel (>=0.41.2)"]
+
+[[package]]
+name = "typing-extensions"
+version = "4.8.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"},
+ {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
+]
+
+[[package]]
+name = "urllib3"
+version = "1.26.18"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+ {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
+ {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
+]
+
+[package.extras]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+
+[[package]]
+name = "urllib3"
+version = "2.0.7"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"},
+ {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "virtualenv"
+version = "20.24.6"
+description = "Virtual Python Environment builder"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "virtualenv-20.24.6-py3-none-any.whl", hash = "sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381"},
+ {file = "virtualenv-20.24.6.tar.gz", hash = "sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af"},
+]
+
+[package.dependencies]
+distlib = ">=0.3.7,<1"
+filelock = ">=3.12.2,<4"
+platformdirs = ">=3.9.1,<4"
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
+
+[[package]]
+name = "wheel"
+version = "0.32.3"
+description = "A built-package format for Python."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "wheel-0.32.3-py2.py3-none-any.whl", hash = "sha256:1e53cdb3f808d5ccd0df57f964263752aa74ea7359526d3da6c02114ec1e1d44"},
+ {file = "wheel-0.32.3.tar.gz", hash = "sha256:029703bf514e16c8271c3821806a1c171220cc5bdd325cbf4e7da1e056a01db6"},
+]
+
+[package.extras]
+test = ["pytest (>=3.0.0)", "pytest-cov"]
+
+[[package]]
+name = "yarl"
+version = "1.9.2"
+description = "Yet another URL library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"},
+ {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"},
+ {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"},
+ {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"},
+ {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"},
+ {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"},
+ {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"},
+ {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"},
+ {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"},
+ {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"},
+ {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"},
+ {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"},
+ {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+
+[[package]]
+name = "zope-event"
+version = "5.0"
+description = "Very basic event publishing system"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"},
+ {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
+[package.extras]
+docs = ["Sphinx"]
+test = ["zope.testrunner"]
+
+[[package]]
+name = "zope-interface"
+version = "6.1"
+description = "Interfaces for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "zope.interface-6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:43b576c34ef0c1f5a4981163b551a8781896f2a37f71b8655fd20b5af0386abb"},
+ {file = "zope.interface-6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:67be3ca75012c6e9b109860820a8b6c9a84bfb036fbd1076246b98e56951ca92"},
+ {file = "zope.interface-6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b9bc671626281f6045ad61d93a60f52fd5e8209b1610972cf0ef1bbe6d808e3"},
+ {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe81def9cf3e46f16ce01d9bfd8bea595e06505e51b7baf45115c77352675fd"},
+ {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dc998f6de015723196a904045e5a2217f3590b62ea31990672e31fbc5370b41"},
+ {file = "zope.interface-6.1-cp310-cp310-win_amd64.whl", hash = "sha256:239a4a08525c080ff833560171d23b249f7f4d17fcbf9316ef4159f44997616f"},
+ {file = "zope.interface-6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ffdaa5290422ac0f1688cb8adb1b94ca56cee3ad11f29f2ae301df8aecba7d1"},
+ {file = "zope.interface-6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34c15ca9248f2e095ef2e93af2d633358c5f048c49fbfddf5fdfc47d5e263736"},
+ {file = "zope.interface-6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b012d023b4fb59183909b45d7f97fb493ef7a46d2838a5e716e3155081894605"},
+ {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97806e9ca3651588c1baaebb8d0c5ee3db95430b612db354c199b57378312ee8"},
+ {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddbab55a2473f1d3b8833ec6b7ac31e8211b0aa608df5ab09ce07f3727326de"},
+ {file = "zope.interface-6.1-cp311-cp311-win_amd64.whl", hash = "sha256:a0da79117952a9a41253696ed3e8b560a425197d4e41634a23b1507efe3273f1"},
+ {file = "zope.interface-6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8bb9c990ca9027b4214fa543fd4025818dc95f8b7abce79d61dc8a2112b561a"},
+ {file = "zope.interface-6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b51b64432eed4c0744241e9ce5c70dcfecac866dff720e746d0a9c82f371dfa7"},
+ {file = "zope.interface-6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa6fd016e9644406d0a61313e50348c706e911dca29736a3266fc9e28ec4ca6d"},
+ {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c8cf55261e15590065039696607f6c9c1aeda700ceee40c70478552d323b3ff"},
+ {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e30506bcb03de8983f78884807e4fd95d8db6e65b69257eea05d13d519b83ac0"},
+ {file = "zope.interface-6.1-cp312-cp312-win_amd64.whl", hash = "sha256:e33e86fd65f369f10608b08729c8f1c92ec7e0e485964670b4d2633a4812d36b"},
+ {file = "zope.interface-6.1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:2f8d89721834524a813f37fa174bac074ec3d179858e4ad1b7efd4401f8ac45d"},
+ {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13b7d0f2a67eb83c385880489dbb80145e9d344427b4262c49fbf2581677c11c"},
+ {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef43ee91c193f827e49599e824385ec7c7f3cd152d74cb1dfe02cb135f264d83"},
+ {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e441e8b7d587af0414d25e8d05e27040d78581388eed4c54c30c0c91aad3a379"},
+ {file = "zope.interface-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f89b28772fc2562ed9ad871c865f5320ef761a7fcc188a935e21fe8b31a38ca9"},
+ {file = "zope.interface-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:70d2cef1bf529bff41559be2de9d44d47b002f65e17f43c73ddefc92f32bf00f"},
+ {file = "zope.interface-6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad54ed57bdfa3254d23ae04a4b1ce405954969c1b0550cc2d1d2990e8b439de1"},
+ {file = "zope.interface-6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef467d86d3cfde8b39ea1b35090208b0447caaabd38405420830f7fd85fbdd56"},
+ {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6af47f10cfc54c2ba2d825220f180cc1e2d4914d783d6fc0cd93d43d7bc1c78b"},
+ {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9559138690e1bd4ea6cd0954d22d1e9251e8025ce9ede5d0af0ceae4a401e43"},
+ {file = "zope.interface-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:964a7af27379ff4357dad1256d9f215047e70e93009e532d36dcb8909036033d"},
+ {file = "zope.interface-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:387545206c56b0315fbadb0431d5129c797f92dc59e276b3ce82db07ac1c6179"},
+ {file = "zope.interface-6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57d0a8ce40ce440f96a2c77824ee94bf0d0925e6089df7366c2272ccefcb7941"},
+ {file = "zope.interface-6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ebc4d34e7620c4f0da7bf162c81978fce0ea820e4fa1e8fc40ee763839805f3"},
+ {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a804abc126b33824a44a7aa94f06cd211a18bbf31898ba04bd0924fbe9d282d"},
+ {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f294a15f7723fc0d3b40701ca9b446133ec713eafc1cc6afa7b3d98666ee1ac"},
+ {file = "zope.interface-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a41f87bb93b8048fe866fa9e3d0c51e27fe55149035dcf5f43da4b56732c0a40"},
+ {file = "zope.interface-6.1.tar.gz", hash = "sha256:2fdc7ccbd6eb6b7df5353012fbed6c3c5d04ceaca0038f75e601060e95345309"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
+[package.extras]
+docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"]
+test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
+testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.8"
+content-hash = "27c74dd969974e2dd56771dd09a0eaeec15e91c1b8307e4f08356f2063f91d27"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 000000000..89919e2dc
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,101 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2018 Spotify AB
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+[tool.poetry]
+name = "cassandra-medusa"
+version = "0.20.0-dev"
+description = "Apache Cassandra backup and restore tool"
+authors = ["The Last Pickle <[email protected]>"]
+license = "Apache"
+readme = "README.md"
+homepage = "https://github.com/thelastpickle/cassandra-medusa"
+classifiers = [
+ "Development Status :: 4 - Beta",
+ "Environment :: Console",
+ "Intended Audience :: System Administrators",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python :: 3.8",
+ "Topic :: Database",
+ "Topic :: System :: Archiving :: Backup"
+]
+packages = [
+ { include = "medusa" }
+]
+include = [
+ { path = "medusa-example.ini" },
+ { path = "bin/medsua-wrapper" }
+]
+
+[project]
+requires-python = ">=3.8,<=3.11"
+
+[tool.poetry.scripts]
+medusa = { reference = "medusa.medusacli:cli", type = "console" }
+
+[tool.poetry.dependencies]
+python = "^3.8"
+python-dateutil = "2.8.1"
+click = "8.1.7"
+click-aliases = "1.0.1"
+PyYAML = "6.0.1"
+cassandra-driver = "3.28.0"
+psutil = "5.9.6"
+ffwd = "0.0.2"
+lockfile = "0.12.2"
+pyOpenSSL = "22.0.0"
+cryptography = "35.0"
+pycryptodome = "3.19.0"
+retrying = "1.3.4"
+ssh2-python = "1.0.0"
+ssh-python = "1.0.0"
+parallel-ssh = "2.2.0"
+requests = "2.31.0"
+wheel = "^0.32.0"
+gevent = "23.9.1"
+greenlet = "3.0.1"
+fasteners = "0.16"
+datadog = "0.47.0"
+botocore = "1.31.75"
+boto3 = "1.28.71"
+dnspython = "2.4.2"
+asyncio = "3.4.3"
+aiohttp = "3.8.5"
+gcloud-aio-storage = "8.3.0"
+azure-core = "1.29.4"
+azure-identity = "1.14.0"
+azure-storage-blob = "12.17.0"
+
+[tool.poetry.group.grpc.dependencies]
+protobuf = "4.24.3"
+grpcio = "1.58.0"
+grpcio-health-checking = "1.58.0"
+grpcio-tools = "1.58.0"
+
+[tool.poetry.group.grpc-runtime.dependencies]
+grpcio = "1.58.0"
+grpcio-health-checking = "1.58.0"
+
+[tool.poetry.group.test.dependencies]
+behave = "1.2.6"
+flake8 = "3.7.9"
+nose = "1.3.7"
+coverage = "7.3.2"
+pytest-cov = "4.1.0"
+tox = "4.11.3"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/requirements-grpc-runtime.txt b/requirements-grpc-runtime.txt
deleted file mode 100644
index c29fba7bf..000000000
--- a/requirements-grpc-runtime.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-grpcio==1.58.0
-grpcio-health-checking==1.58.0
\ No newline at end of file
diff --git a/requirements-grpc.txt b/requirements-grpc.txt
deleted file mode 100644
index 301b8a7b1..000000000
--- a/requirements-grpc.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-protobuf==4.24.3
-grpcio==1.58.0
-grpcio-health-checking==1.58.0
-grpcio-tools==1.58.0
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index ac55fd458..000000000
--- a/requirements.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-python-dateutil==2.8.2
-Click>=8.0.1
-click-aliases==1.0.1
-PyYAML>=5.1
-cassandra-driver>=3.27.0
-psutil>=5.4.7
-ffwd>=0.0.2
-lockfile>=0.12.2
-pyOpenSSL==23.2.0
-cryptography==39.0.1
-pycryptodome>=3.9.9
-retrying>=1.3.3
-ssh2-python==1.0.0
-ssh-python>=0.8.0
-parallel-ssh==2.2.0
-requests==2.31.0
-wheel>=0.32.0
-gevent
-greenlet
-fasteners==0.16
-datadog
-botocore==1.34.39
-boto3>=1.28.38
-dnspython>=2.2.1
-asyncio==3.4.3
-aiohttp==3.8.5
-gcloud-aio-storage==8.3.0
-azure-core==1.29.4
-azure-identity==1.14.0
-azure-storage-blob==12.17.0
\ No newline at end of file
diff --git a/setup.py b/setup.py
deleted file mode 100644
index a415f7de4..000000000
--- a/setup.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright 2018 Spotify AB
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import setuptools
-
-with open("README.md", "r") as fh:
- long_description = fh.read()
-
-setuptools.setup(
- name='cassandra-medusa',
- version='0.20.0-dev',
- author='The Last Pickle',
- author_email='[email protected]',
- url='https://github.com/thelastpickle/cassandra-medusa',
- description='Apache Cassandra backup and restore tool',
- long_description=long_description,
- long_description_content_type="text/markdown",
- license='Apache',
- classifiers=[
- 'Development Status :: 4 - Beta',
- 'Environment :: Console',
- 'Intended Audience :: System Administrators',
- 'License :: OSI Approved :: Apache Software License',
- 'Programming Language :: Python :: 3.8',
- 'Topic :: Database',
- 'Topic :: System :: Archiving :: Backup'
- ],
- python_requires='>=3.8',
- packages=setuptools.find_packages(),
- install_requires=[
- 'python-dateutil==2.8.1',
- 'Click>=8.0.1',
- 'click-aliases==1.0.1',
- 'PyYAML>=5.1',
- 'cassandra-driver>=3.27.0',
- 'psutil>=5.4.7',
- 'ffwd>=0.0.2',
- 'lockfile>=0.12.2',
- 'pyOpenSSL==23.2.0',
- 'cryptography==39.0.1',
- 'pycryptodome>=3.9.9',
- 'retrying>=1.3.3',
- 'parallel-ssh==2.2.0',
- 'ssh2-python==1.0.0',
- 'ssh-python>=0.8.0',
- 'requests==2.31.0',
- 'protobuf==4.24.3',
- 'grpcio==1.58.0',
- 'grpcio-health-checking==1.58.0',
- 'grpcio-tools==1.58.0',
- 'gevent',
- 'greenlet',
- 'fasteners==0.16',
- 'datadog',
- 'botocore==1.34.39',
- 'dnspython>=2.2.1',
- 'asyncio==3.4.3',
- 'aiohttp==3.8.5',
- 'boto3>=1.28.38',
- 'gcloud-aio-storage==8.3.0',
- 'azure-core==1.29.4',
- 'azure-identity==1.14.0',
- 'azure-storage-blob==12.17.0'
- ],
- entry_points={
- 'console_scripts': [
- 'medusa=medusa.medusacli:cli',
- ]},
- scripts=['bin/medusa-wrapper'],
- data_files=[('/etc/medusa', ['medusa-example.ini'])]
-)
diff --git a/tox.ini b/tox.ini
index bc6126e74..52e3aaf85 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,15 +1,13 @@
[tox]
-envlist = py{36,37,38,39,310,311}
+isolated_build = True
+envlist = py{38,39,310,311}
[testenv]
-deps =
- -r{toxinidir}/requirements.txt
- -r{toxinidir}/requirements-test.txt
-
+allowlist_externals = poetry
commands =
- python setup.py check -m -s
- flake8 . --ignore=W503,E402 --exclude=medusa/service/grpc/medusa_pb2.py,medusa/service/grpc/medusa_pb2_grpc.py,.tox,venv,build,dist,debian
- pytest --cov=medusa --cov-report=xml -v {posargs:tests/}
+ poetry install
+ poetry run flake8 . --ignore=W503,E402 --exclude=medusa/service/grpc/medusa_pb2.py,medusa/service/grpc/medusa_pb2_grpc.py,.tox,venv,build,dist,debian
+ poetry run pytest --cov=medusa --cov-report=xml -v {posargs:tests/}
setenv =
DYLD_LIBRARY_PATH = {env:DYLD_LIBRARY_PATH}
| Python 3.11+ PIP Dependencies
[Project board link](https://github.com/orgs/k8ssandra/projects/8/views/1?pane=issue&itemId=50107255)
After releasing the breaking changes noted in:
https://thelastpickle.com/blog/2023/09/21/medusa-v016-was-released.html
cassandra-medusa 0.17.1 still has some compatibility issues with Python modules such as
`python-dateutil`, as well as some inconsistencies between requirements.txt and setup.py
in the repo.
Thus, please review all Python requirements.
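One concrete example of that inconsistency, taken from the files deleted by the patch above: the two pin lists disagreed on `python-dateutil`.
```
# requirements.txt (removed by this patch)
python-dateutil==2.8.2

# setup.py install_requires (removed by this patch)
'python-dateutil==2.8.1',
```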
| 2023-11-01T14:21:41 | 0.0 | [] | [] |
|||
thelastpickle/cassandra-medusa | thelastpickle__cassandra-medusa-675 | 01a49ded8c9183ac4c5a0cd55c212e69752ed47d | diff --git a/medusa/storage/s3_base_storage.py b/medusa/storage/s3_base_storage.py
index 7d1be225e..1c22542b0 100644
--- a/medusa/storage/s3_base_storage.py
+++ b/medusa/storage/s3_base_storage.py
@@ -13,10 +13,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
+import asyncio
import base64
import boto3
import botocore.session
+import concurrent.futures
import logging
import io
import os
@@ -114,6 +115,8 @@ def __init__(self, config):
self.connection_extra_args = self._make_connection_arguments(config)
self.transfer_config = self._make_transfer_config(config)
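+        # Blocking boto3 transfers are offloaded to this pool; its size follows the
+        # configured concurrent_transfers setting.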
+ self.executor = concurrent.futures.ThreadPoolExecutor(int(config.concurrent_transfers))
+
super().__init__(config)
def connect(self):
@@ -122,10 +125,16 @@ def connect(self):
self.storage_provider, self.connection_extra_args
)
)
+
+        # Make the pool size double the number of concurrent transfers. This helps
+        # urllib3 (used by boto) reuse connections instead of warning about evicting them.
+ max_pool_size = int(self.config.concurrent_transfers) * 2
+
boto_config = Config(
region_name=self.credentials.region,
signature_version='v4',
- tcp_keepalive=True
+ tcp_keepalive=True,
+ max_pool_connections=max_pool_size
)
self.s3_client = boto3.client(
's3',
@@ -139,6 +148,7 @@ def disconnect(self):
logging.debug('Disconnecting from S3...')
try:
self.s3_client.close()
+ self.executor.shutdown()
except Exception as e:
logging.error('Error disconnecting from S3: {}'.format(e))
@@ -260,7 +270,15 @@ async def _upload_object(self, data: io.BytesIO, object_key: str, headers: t.Dic
@retry(stop_max_attempt_number=MAX_UP_DOWN_LOAD_RETRIES, wait_fixed=5000)
async def _download_blob(self, src: str, dest: str):
- blob = await self._stat_blob(src)
+ # boto has a connection pool, but it does not support the asyncio API
+ # so we make things ugly and submit the whole download as a task to an executor
+ # which allows us to download several files in parallel
+ loop = asyncio.get_event_loop()
+ future = loop.run_in_executor(self.executor, self.__download_blob, src, dest)
+ await future
+
+ def __download_blob(self, src: str, dest: str):
+ blob = self.__stat_blob(src)
object_key = blob.name
# we must make sure the blob gets stored under sub-folder (if there is any)
@@ -304,6 +322,11 @@ async def _stat_blob(self, object_key: str) -> AbstractBlob:
logging.error("An error occurred:", e)
logging.error('Error getting object from s3://{}/{}'.format(self.bucket_name, object_key))
+ def __stat_blob(self, key):
+ resp = self.s3_client.head_object(Bucket=self.bucket_name, Key=key)
+ item_hash = resp['ETag'].replace('"', '')
+ return AbstractBlob(key, int(resp['ContentLength']), item_hash, resp['LastModified'])
+
@retry(stop_max_attempt_number=MAX_UP_DOWN_LOAD_RETRIES, wait_fixed=5000)
async def _upload_blob(self, src: str, dest: str) -> ManifestObject:
src_chunks = src.split('/')
@@ -328,18 +351,29 @@ async def _upload_blob(self, src: str, dest: str) -> ManifestObject:
)
)
- self.s3_client.upload_file(
- Filename=src,
- Bucket=self.bucket_name,
- Key=object_key,
- Config=self.transfer_config,
- ExtraArgs=kms_args,
- )
-
- blob = await self._stat_blob(object_key)
- mo = ManifestObject(blob.name, blob.size, blob.hash)
+ upload_conf = {
+ 'Filename': src,
+ 'Bucket': self.bucket_name,
+ 'Key': object_key,
+ 'Config': self.transfer_config,
+ 'ExtraArgs': kms_args,
+ }
+ # we are going to combine asyncio with boto's threading
+ # we do this by submitting the upload into an executor
+ loop = asyncio.get_event_loop()
+ future = loop.run_in_executor(self.executor, self.__upload_file, upload_conf)
+ # and then ask asyncio to yield until it completes
+ mo = await future
return mo
+ def __upload_file(self, upload_conf):
+ self.s3_client.upload_file(**upload_conf)
+ resp = self.s3_client.head_object(Bucket=upload_conf['Bucket'], Key=upload_conf['Key'])
+ blob_name = upload_conf['Key']
+ blob_size = int(resp['ContentLength'])
+ blob_hash = resp['ETag'].replace('"', '')
+ return ManifestObject(blob_name, blob_size, blob_hash)
+
async def _get_object(self, object_key: t.Union[Path, str]) -> AbstractBlob:
blob = await self._stat_blob(str(object_key))
return blob
| Make s3 storage (boto) work with asyncio
[Project board link](https://github.com/orgs/k8ssandra/projects/8/views/1?pane=issue&itemId=41924411)
The s3 integration tests are taking too long: up to 1h, compared to ~15-20 min for other storage backends. This is because, despite the medusa storage classes using asyncio, the boto library underneath doesn't, so in the end everything gets serialised.
It is really frustrating to keep waiting for the s3 tests on each PR.
A candidate library to help us here is https://github.com/terrycain/aioboto3.
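For reference, the patch above takes a different route than adopting aioboto3: it keeps the blocking boto3 client and bridges it into asyncio with a thread-pool executor. A minimal, self-contained sketch of that pattern (bucket and key names below are placeholders, not taken from the codebase):
```python
import asyncio
import concurrent.futures

import boto3

s3 = boto3.client('s3')
executor = concurrent.futures.ThreadPoolExecutor(max_workers=4)

def download(bucket, key, dest):
    # Blocking boto3 call; runs inside a worker thread.
    s3.download_file(Bucket=bucket, Key=key, Filename=dest)

async def download_many(items):
    loop = asyncio.get_event_loop()
    # Each blocking download becomes an awaitable future, so several run in parallel.
    futures = [loop.run_in_executor(executor, download, b, k, d) for b, k, d in items]
    await asyncio.gather(*futures)

# Example (placeholder names):
# asyncio.run(download_many([('my-bucket', 'backup/file1', '/tmp/file1')]))
```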
| 2023-10-26T13:07:28 | 0.0 | [] | [] |
|||
thelastpickle/cassandra-medusa | thelastpickle__cassandra-medusa-662 | 02ffb910fafad52a5dd142e467d190f76e616c24 | diff --git a/medusa/service/grpc/server.py b/medusa/service/grpc/server.py
index 58e800f21..08024695e 100644
--- a/medusa/service/grpc/server.py
+++ b/medusa/service/grpc/server.py
@@ -174,7 +174,7 @@ def BackupStatus(self, request, context):
response.startTime = datetime.fromtimestamp(backup.started).strftime(TIMESTAMP_FORMAT)
response.finishedNodes.extend([node.fqdn for node in backup.complete_nodes()])
response.unfinishedNodes.extend([node.fqdn for node in backup.incomplete_nodes()])
- response.missingNodes.extend([node.fqdn for node in backup.missing_nodes()])
+ response.missingNodes.extend(backup.missing_nodes())
if backup.finished:
response.finishTime = datetime.fromtimestamp(backup.finished).strftime(TIMESTAMP_FORMAT)
| BackupStatus grpc op failing when nodes are missing a backup
[Project board link](https://github.com/orgs/k8ssandra/projects/8/views/1?pane=issue&itemId=38726495)
When starting an AsyncBackup() on a single node out of 2 in a cluster, the following error pops up:
```
AttributeError: 'str' object has no attribute 'fqdn'
[2023-09-15 13:33:55,428] ERROR: Exception calling application: 'str' object has no attribute 'fqdn'
Traceback (most recent call last):
File "/home/cassandra/.local/lib/python3.6/site-packages/grpc/_server.py", line 443, in _call_behavior
response_or_iterator = behavior(argument, context)
File "/home/cassandra/medusa/service/grpc/server.py", line 175, in BackupStatus
response.missingNodes.extend([node.fqdn for node in backup.missing_nodes()])
File "/home/cassandra/medusa/service/grpc/server.py", line 175, in <listcomp>
response.missingNodes.extend([node.fqdn for node in backup.missing_nodes()])
```
As soon as the second node backup triggers, the error disappears.
It turns out that, unlike `complete_nodes()` and `incomplete_nodes()`, which return lists of `NodeBackup` objects, `missing_nodes()` returns a list of strings: the FQDNs of the missing nodes.
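A minimal sketch of the type mismatch and the one-line fix (hostnames are placeholders):
```python
complete = backup.complete_nodes()   # list of NodeBackup objects, each with .fqdn
missing = backup.missing_nodes()     # list of plain strings, e.g. ['node2.example.com']

# Fails: a str has no .fqdn attribute
# response.missingNodes.extend([node.fqdn for node in missing])

# Works: the list already contains the fqdn strings
response.missingNodes.extend(missing)
```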
| 2023-10-04T12:23:31 | 0.0 | [] | [] |
|||
thelastpickle/cassandra-medusa | thelastpickle__cassandra-medusa-654 | 701333191f2592a33fa187ea31356002edb8d36d | diff --git a/medusa/backup_cluster.py b/medusa/backup_cluster.py
index 6f926522a..ed5580499 100644
--- a/medusa/backup_cluster.py
+++ b/medusa/backup_cluster.py
@@ -57,7 +57,7 @@ def orchestrate(config, backup_name_arg, seed_target, stagger, enable_md5_checks
try:
# Try to get a backup with backup_name. If it exists then we cannot take another backup with that name
cluster_backup = storage.get_cluster_backup(backup_name)
- if cluster_backup:
+ if cluster_backup and cluster_backup.name == backup_name:
err_msg = 'Backup named {} already exists.'.format(backup_name)
logging.error(err_msg)
raise Exception(err_msg)
| Make test_backup_orchestration unit test work again
[Project board link](https://github.com/orgs/k8ssandra/projects/8/views/1?pane=issue&itemId=38876115)
tests/backup_cluster_test.py::BackupClusterTest::test_backup_orchestration
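The one-line patch above tightens the duplicate-backup guard in `medusa/backup_cluster.py`. A minimal sketch of the changed condition, assuming `get_cluster_backup(backup_name)` can return a backup other than the one requested (which is what made the old `if cluster_backup:` check fail the test):
```python
cluster_backup = storage.get_cluster_backup(backup_name)
# Only treat this as a duplicate if the returned backup actually carries the requested name.
if cluster_backup and cluster_backup.name == backup_name:
    raise Exception('Backup named {} already exists.'.format(backup_name))
```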
| 2023-09-25T14:51:28 | 0.0 | [] | [] |
|||
nkaz001/hftbacktest | nkaz001__hftbacktest-70 | f6d0bc719825a74678dc7c93933c0584d34ab59a | diff --git a/.gitignore b/.gitignore
index 9c51c2f..34baf02 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,3 +3,5 @@ build
*.egg-info
__pycache__
docs/_build
+rust/Cargo.lock
+rust/target
diff --git a/rust/Cargo.toml b/rust/Cargo.toml
new file mode 100644
index 0000000..4d417ed
--- /dev/null
+++ b/rust/Cargo.toml
@@ -0,0 +1,84 @@
+[package]
+name = "hftbacktest"
+version = "0.1.0"
+edition = "2021"
+authors = ["nkaz001 <[email protected]>"]
+license = "MIT"
+#documentation = "https://docs.rs/hftbacktest/"
+repository = "https://github.com/nkaz001/hftbacktest/rust"
+#readme = "README.md"
+description = "A high-frequency trading and market-making backtesting tool accounts for limit orders, queue positions, and latencies, utilizing full tick data for trades and order books."
+keywords = [
+ "quantitative",
+ "finance",
+ "simulation",
+ "trading",
+ "backtesting",
+ "crypto",
+ "hft",
+ "high-frequency-trading",
+ "market-making"
+]
+categories = ["finance", "mathematics", "science", "algorithms", "simulation"]
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+chrono = "0.4.33"
+serde = { version = "1.0.196", features = ["derive"] }
+tokio = { version = "1.35.1", features = ["full"] }
+hyper = { version = "1.1.0", features = ["full"] }
+hyper-util = "0.1.3"
+serde_json = "1.0.113"
+futures-util = "0.3.30"
+form_urlencoded = "1.2.1"
+http = "1.0.0"
+bytes = "1.5.0"
+tokio-tungstenite = { version = "0.21.0", features = ["native-tls"] }
+url = { version = "2.5.0", features = [] }
+tracing = "0.1.40"
+tracing-subscriber = { version = "0.3.18", features = [] }
+anyhow = "1.0.79"
+reqwest = { version = "0.11.24", features = ["json"] }
+sha2 = "0.11.0-pre.3"
+hmac = "0.13.0-pre.3"
+zip = "0.6.6"
+thiserror = "1.0.57"
+rand = "0.8.5"
+
+[profile.dev]
+opt-level = 0
+debug = true
+debug-assertions = false
+overflow-checks = true
+lto = false
+panic = "unwind"
+incremental = true
+codegen-units = 256
+
+[profile.test]
+opt-level = 0
+debug = true
+debug-assertions = true
+overflow-checks = true
+lto = false
+panic = "unwind"
+incremental = true
+codegen-units = 256
+
+[profile.release]
+opt-level = 3
+debug = false
+debug-assertions = false
+overflow-checks = false
+lto = true
+strip = true
+panic = "abort"
+incremental = false
+codegen-units = 1
+
+[profile.release-with-debug]
+inherits = "release"
+incremental = true
+debug = true
+strip = false
diff --git a/rust/README.rst b/rust/README.rst
new file mode 100644
index 0000000..af66462
--- /dev/null
+++ b/rust/README.rst
@@ -0,0 +1,2 @@
+This project is currently in its initial development stages, meaning that breaking changes may occur without prior notice.
+The live bot feature has not undergone comprehensive testing yet; therefore, it must be used at your own risk.
\ No newline at end of file
diff --git a/rust/examples/algo.rs b/rust/examples/algo.rs
new file mode 100644
index 0000000..cbb7b8d
--- /dev/null
+++ b/rust/examples/algo.rs
@@ -0,0 +1,160 @@
+use std::{collections::HashMap, fmt::Debug};
+
+use hftbacktest::{
+    // INVALID_MIN/INVALID_MAX (used below to detect an incomplete book) are assumed
+    // to be exported from the depth module alongside MarketDepth.
+    depth::{MarketDepth, INVALID_MAX, INVALID_MIN},
+    ty::{OrdType, Side, TimeInForce},
+    Interface,
+};
+use tracing::info;
+
+pub fn gridtrading<Q, MD, I: Interface<Q, MD>>(
+ hbt: &mut I,
+ half_spread: f64,
+ grid_interval: f64,
+ skew: f64,
+ order_qty: f64,
+) -> Result<(), i64>
+where
+ Q: Sized + Clone,
+ MD: MarketDepth,
+ <I as Interface<Q, MD>>::Error: Debug,
+{
+ let grid_num = 20;
+ let max_position = grid_num as f64 * order_qty;
+
+ // Running interval in nanoseconds
+ while hbt.elapse(100_000_000).unwrap() {
+ let depth = hbt.depth(0);
+ let position = hbt.position(0);
+ let tick_size = depth.tick_size() as f64;
+
+ if depth.best_bid_tick() == INVALID_MIN || depth.best_ask_tick() == INVALID_MAX {
+ // Market depth is incomplete.
+ continue;
+ }
+
+ info!(
+ time = hbt.current_timestamp(),
+ bid = depth.best_bid(),
+ ask = depth.best_ask(),
+ position = position,
+ "Run"
+ );
+
+ let mid_price = (depth.best_bid() + depth.best_ask()) as f64 / 2.0;
+
+ let normalized_position = position / order_qty;
+
+ let bid_depth = half_spread + skew * normalized_position;
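+        // Inventory skew: a long position deepens the bid quote and tightens the ask,
+        // so the quotes lean toward unwinding the position.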
+ let ask_depth = half_spread - skew * normalized_position;
+
+ let bid_price = (mid_price - bid_depth).min(depth.best_bid() as f64);
+ let ask_price = (mid_price + ask_depth).max(depth.best_ask() as f64);
+
+        // Round the grid interval to a whole number of ticks, at least one tick wide.
+        let grid_interval = ((grid_interval / tick_size).round() * tick_size).max(tick_size);
+
+ let mut bid_price = (bid_price / grid_interval).floor() * grid_interval;
+ let mut ask_price = (ask_price / grid_interval).ceil() * grid_interval;
+
+ //--------------------------------------------------------
+ // Updates quotes
+
+ hbt.clear_inactive_orders(Some(0));
+
+ {
+ let orders = hbt.orders(0);
+ let mut new_bid_orders = HashMap::new();
+ if position < max_position && bid_price.is_finite() {
+ for i in 0..grid_num {
+ bid_price -= i as f64 * grid_interval;
+ let bid_price_tick = (bid_price / tick_size).round() as i64;
+
+ // order price in tick is used as order id.
+ new_bid_orders.insert(bid_price_tick, bid_price);
+ }
+ }
+ // Cancels if an order is not in the new grid.
+ let cancel_order_ids: Vec<i64> = orders
+ .values()
+ .filter(|order| {
+ order.side == Side::Buy
+ && order.cancellable()
+ && !new_bid_orders.contains_key(&order.order_id)
+ })
+ .map(|order| order.order_id)
+ .collect();
+ // Posts an order if it doesn't exist.
+ let new_orders: Vec<(i64, f64)> = new_bid_orders
+ .into_iter()
+                .filter(|(order_id, _)| !orders.contains_key(order_id))
+ .collect();
+ for order_id in cancel_order_ids {
+ hbt.cancel(0, order_id, false).unwrap();
+ }
+ for (order_id, order_price) in new_orders {
+ hbt.submit_buy_order(
+ 0,
+ order_id,
+ order_price as f32,
+ order_qty as f32,
+ TimeInForce::GTX,
+ OrdType::Limit,
+ false,
+ )
+ .unwrap();
+ }
+ }
+
+ {
+ let orders = hbt.orders(0);
+ let mut new_ask_orders = HashMap::new();
+ if position > -max_position && ask_price.is_finite() {
+                for _ in 0..grid_num {
+                    let ask_price_tick = (ask_price / tick_size).round() as i64;
+
+                    // order price in tick is used as order id.
+                    new_ask_orders.insert(ask_price_tick, ask_price);
+                    // Step up exactly one grid level per iteration.
+                    ask_price += grid_interval;
+                }
+ }
+ // Cancels if an order is not in the new grid.
+ let cancel_order_ids: Vec<i64> = orders
+ .values()
+ .filter(|order| {
+ order.side == Side::Sell
+ && order.cancellable()
+ && !new_ask_orders.contains_key(&order.order_id)
+ })
+ .map(|order| order.order_id)
+ .collect();
+ // Posts an order if it doesn't exist.
+ let new_orders: Vec<(i64, f64)> = new_ask_orders
+ .into_iter()
+                .filter(|(order_id, _)| !orders.contains_key(order_id))
+ .collect();
+ for order_id in cancel_order_ids {
+ hbt.cancel(0, order_id, false).unwrap();
+ }
+ for (order_id, order_price) in new_orders {
+ hbt.submit_sell_order(
+ 0,
+ order_id,
+ order_price as f32,
+ order_qty as f32,
+ TimeInForce::GTX,
+ OrdType::Limit,
+ false,
+ )
+ .unwrap();
+ }
+ }
+ }
+ Ok(())
+}
diff --git a/rust/examples/gridtrading_live.rs b/rust/examples/gridtrading_live.rs
new file mode 100644
index 0000000..41ccd9c
--- /dev/null
+++ b/rust/examples/gridtrading_live.rs
@@ -0,0 +1,59 @@
+use hftbacktest::{
+    connector::binancefutures::BinanceFutures,
+    live::{bot::Bot, LiveBuilder},
+    Interface,
+};
+
+mod algo;
+
+use algo::gridtrading;
+
+const STREAM_URL: &str = "wss://fstream.binancefuture.com/stream?streams=";
+const API_URL: &str = "https://testnet.binancefuture.com";
+const ORDER_PREFIX: &str = "prefix";
+const API_KEY: &str = "apikey";
+const SECRET: &str = "secret";
+
+fn prepare_live() -> Bot {
+ let binance_futures = BinanceFutures::new(
+ STREAM_URL,
+ API_URL,
+ ORDER_PREFIX,
+ API_KEY,
+ SECRET
+ );
+
+ let mut hbt = LiveBuilder::new()
+ .register("binancefutures", binance_futures)
+ .add("binancefutures", "SOLUSDT", 0.001, 1.0)
+ .build()
+ .unwrap();
+
+ hbt.run();
+ hbt
+}
+
+fn main() {
+ tracing_subscriber::fmt::init();
+
+ let mut hbt = prepare_live();
+
+ let half_spread = 0.05;
+ let grid_interval = 0.05;
+ let skew = 0.004;
+ let order_qty = 1.0;
+
+ gridtrading(&mut hbt, half_spread, grid_interval, skew, order_qty).unwrap();
+ hbt.close().unwrap();
+}
\ No newline at end of file
diff --git a/rust/rustfmt.toml b/rust/rustfmt.toml
new file mode 100644
index 0000000..0addf32
--- /dev/null
+++ b/rust/rustfmt.toml
@@ -0,0 +1,4 @@
+imports_layout = "HorizontalVertical"
+imports_granularity = "Crate"
+group_imports = "StdExternalCrate"
+newline_style = "Unix"
\ No newline at end of file
diff --git a/rust/src/connector/binancefutures/mod.rs b/rust/src/connector/binancefutures/mod.rs
new file mode 100644
index 0000000..3248f79
--- /dev/null
+++ b/rust/src/connector/binancefutures/mod.rs
@@ -0,0 +1,358 @@
+mod msg;
+mod ordermanager;
+mod rest;
+mod ws;
+
+use std::{
+    collections::HashMap,
+    sync::{mpsc::Sender, Arc, Mutex},
+    time::Duration,
+};
+
+use reqwest::StatusCode;
+use thiserror::Error;
+use tracing::{debug, error};
+
+use crate::{
+    connector::{
+        binancefutures::{
+            ordermanager::{OrderManager, OrderMgr},
+            rest::{BinanceFuturesClient, RequestError},
+            ws::connect,
+        },
+        Connector,
+    },
+    live::AssetInfo,
+    ty::{EvError, Event, Order, OrderResponse, Position, Status},
+};
+
+pub enum Endpoint {
+ Public,
+ Private,
+ Testnet,
+ LowLatency,
+ Custom(String),
+}
+
+#[derive(Error, Debug)]
+pub enum BinanceFuturesError {
+ #[error("asset not found")]
+ AssetNotFound,
+}
+
+pub struct BinanceFutures {
+ url: String,
+ prefix: String,
+ api_key: String,
+ secret: String,
+ assets: HashMap<String, AssetInfo>,
+ inv_assets: HashMap<usize, AssetInfo>,
+ orders: OrderMgr,
+ client: BinanceFuturesClient,
+}
+
+impl BinanceFutures {
+ pub fn new(
+ stream_url: &str,
+ api_url: &str,
+ prefix: &str,
+ api_key: &str,
+ secret: &str
+ ) -> Self {
+ let orders: OrderMgr = Arc::new(Mutex::new(OrderManager::new(prefix)));
+ Self {
+ url: stream_url.to_string(),
+ prefix: prefix.to_string(),
+ api_key: api_key.to_string(),
+ secret: secret.to_string(),
+ assets: Default::default(),
+ inv_assets: Default::default(),
+ orders: orders.clone(),
+ client: BinanceFuturesClient::new(api_url, api_key, secret, orders),
+ }
+ }
+}
+
+impl Connector for BinanceFutures {
+ fn add(
+ &mut self,
+ asset_no: usize,
+ symbol: String,
+ tick_size: f32,
+ lot_size: f32,
+ ) -> Result<(), anyhow::Error> {
+ let asset_info = AssetInfo {
+ asset_no,
+ symbol: symbol.clone(),
+ tick_size,
+ lot_size,
+ };
+ self.assets.insert(symbol, asset_info.clone());
+ self.inv_assets.insert(asset_no, asset_info);
+ Ok(())
+ }
+
+ fn run(&mut self, ev_tx: Sender<Event>) -> Result<(), anyhow::Error> {
+ let assets = self.assets.clone();
+ let base_url = self.url.clone();
+ let prefix = self.prefix.clone();
+ let client = self.client.clone();
+ let orders = self.orders.clone();
+ let mut error_count = 0;
+
+ let _ = tokio::spawn(async move {
+ 'connection: loop {
+ if error_count > 0 {
+ tokio::time::sleep(Duration::from_secs(5)).await;
+ }
+
+ // Cancel all orders before connecting to the stream in order to start with the
+ // clean state.
+ for symbol in assets.keys() {
+ if let Err(error) = client.cancel_all_orders(symbol).await {
+ error!(?error, %symbol, "Couldn't cancel all open orders.");
+ if error.status().unwrap_or(StatusCode::default())
+ == StatusCode::UNAUTHORIZED
+ {
+ ev_tx
+ .send(Event::Error(
+ EvError::CriticalConnectionError as i64,
+ Some({
+ let mut var = HashMap::new();
+ var.insert("reason", error.to_string());
+ var.insert("status", format!("{:?}", error.status()));
+ var
+ }),
+ ))
+ .unwrap();
+ }
+ error_count += 1;
+ continue 'connection;
+ }
+ }
+
+ // Fetches the initial states such as positions and open orders.
+ match client.get_position_information().await {
+ Ok(positions) => {
+ positions.into_iter().for_each(|position| {
+ assets.get(&position.symbol).map(|asset_info| {
+ ev_tx
+ .send(Event::Position(Position {
+ asset_no: asset_info.asset_no,
+ symbol: position.symbol,
+ qty: position.position_amount,
+ }))
+ .unwrap();
+ });
+ });
+ }
+ Err(error) => {
+ error!(?error, "Couldn't get position information.");
+ error_count += 1;
+ continue 'connection;
+ }
+ }
+
+ let listen_key = match client.start_user_data_stream().await {
+ Ok(listen_key) => listen_key,
+ Err(error) => {
+ error!(?error, "Couldn't start user data stream.");
+ // 1000 indicates user data stream starting error.
+ ev_tx
+ .send(Event::Error(
+ 1000,
+ Some({
+ let mut var = HashMap::new();
+ var.insert("reason", error.to_string());
+ var.insert("status", format!("{:?}", error.status()));
+ var
+ }),
+ ))
+ .unwrap();
+ continue 'connection;
+ }
+ };
+
+ // Prepares a URL that connects streams
+ let streams: Vec<String> = assets
+ .keys()
+ .map(|symbol| {
+ format!(
+ "{}@depth@0ms/{}@trade",
+ symbol.to_lowercase(),
+ symbol.to_lowercase()
+ )
+ })
+ .collect();
+ let url = format!("{}{}/{}", &base_url, listen_key, streams.join("/"));
+
+ if let Err(error) = connect(
+ &url,
+ ev_tx.clone(),
+ assets.clone(),
+ &prefix,
+ orders.clone(),
+ client.clone(),
+ )
+ .await
+ {
+ error!(?error, "A connection error occurred.");
+ }
+ error_count += 1;
+ ev_tx
+ .send(Event::Error(EvError::ConnectionInterrupted as i64, None))
+ .unwrap();
+ }
+ });
+ Ok(())
+ }
+
+ fn submit(
+ &self,
+ asset_no: usize,
+ mut order: Order<()>,
+ tx: Sender<Event>,
+ ) -> Result<(), anyhow::Error> {
+ let asset_info = self
+ .inv_assets
+ .get(&asset_no)
+ .ok_or(BinanceFuturesError::AssetNotFound)?;
+ let symbol = asset_info.symbol.clone();
+ let client = self.client.clone();
+ let orders = self.orders.clone();
+ tokio::spawn(async move {
+ let client_order_id = orders
+ .lock()
+ .unwrap()
+ .prepare_client_order_id(order.clone());
+
+ match client_order_id {
+ Some(client_order_id) => {
+ match client
+ .submit_order(
+ &client_order_id,
+ &symbol,
+ order.side,
+ order.price_tick as f32 * order.tick_size,
+ get_precision(order.tick_size),
+ order.qty,
+ order.order_type,
+ order.time_in_force,
+ )
+ .await
+ {
+ Ok(resp) => {
+ let order = orders
+ .lock()
+ .unwrap()
+ .update_submit_success(order, resp);
+ if let Some(order) = order {
+ tx.send(Event::Order(OrderResponse { asset_no, order }))
+ .unwrap();
+ }
+ }
+ Err(error) => {
+ let order = orders
+ .lock()
+ .unwrap()
+ .update_submit_fail(order, error, client_order_id);
+ if let Some(order) = order {
+ tx.send(Event::Order(OrderResponse { asset_no, order }))
+ .unwrap();
+ }
+
+ // fixme
+ tx.send(Event::Error(0, None)).unwrap();
+ }
+ }
+ }
+ None => {
+ error!("duplicate order id");
+ order.req = Status::None;
+ order.status = Status::Expired;
+ tx.send(Event::Order(OrderResponse { asset_no, order }))
+ .unwrap();
+ }
+ }
+ });
+ Ok(())
+ }
+
+ fn cancel(
+ &self,
+ asset_no: usize,
+ mut order: Order<()>,
+ tx: Sender<Event>,
+ ) -> Result<(), anyhow::Error> {
+ let asset_info = self
+ .inv_assets
+ .get(&asset_no)
+ .ok_or(BinanceFuturesError::AssetNotFound)?;
+ let symbol = asset_info.symbol.clone();
+ let client = self.client.clone();
+ let orders = self.orders.clone();
+ tokio::spawn(async move {
+ let client_order_id = orders
+ .lock()
+ .unwrap()
+ .get_client_order_id(order.order_id);
+
+ match client_order_id {
+ Some(client_order_id) => {
+ match client.cancel_order(&client_order_id, &symbol).await {
+ Ok(resp) => {
+ let order = orders
+ .lock()
+ .unwrap()
+ .update_cancel_success(order, resp);
+ if let Some(order) = order {
+ tx.send(Event::Order(OrderResponse { asset_no, order }))
+ .unwrap();
+ }
+ }
+ Err(error) => {
+ let order = orders
+ .lock()
+ .unwrap()
+ .update_cancel_fail(order, error, client_order_id);
+ if let Some(order) = order {
+ tx.send(Event::Order(OrderResponse { asset_no, order }))
+ .unwrap();
+ }
+
+ // fixme
+ tx.send(Event::Error(0, None)).unwrap();
+ }
+ }
+ }
+ None => {
+ debug!(
+ order_id = order.order_id,
+ "client_order_id corresponding to order_id is not found; \
+ this may be due to the order already being canceled or filled."
+ );
+ // order.req = Status::None;
+ // order.status = Status::Expired;
+ // tx.send(Event::Order(OrderResponse { asset_no, order }))
+ // .unwrap();
+ }
+ }
+ });
+ Ok(())
+ }
+}
+
+/// tick_size should not be a computed value.
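+/// The precision is derived from the tick size's string form, so a tick size that
+/// carries floating-point error (e.g. one produced by arithmetic) could print with
+/// extra digits and yield the wrong precision.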
+fn get_precision(tick_size: f32) -> usize {
+ let s = tick_size.to_string();
+ let mut prec = 0;
+ for (i, c) in s.chars().enumerate() {
+ if c == '.' {
+ prec = s.len() - i - 1;
+ break;
+ }
+ }
+ prec
+}
diff --git a/rust/src/connector/binancefutures/msg/mod.rs b/rust/src/connector/binancefutures/msg/mod.rs
new file mode 100644
index 0000000..94b9f35
--- /dev/null
+++ b/rust/src/connector/binancefutures/msg/mod.rs
@@ -0,0 +1,107 @@
+use serde::{
+ de::{Error, Unexpected},
+ Deserialize,
+ Deserializer,
+};
+
+use crate::ty::{OrdType, Side, Status, TimeInForce};
+
+mod rest;
+mod stream;
+
+pub use rest::*;
+pub use stream::*;
+
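+// Binance encodes most numeric and enum fields as JSON strings; the helpers
+// below deserialize those strings into the corresponding Rust types.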
+fn from_str_to_f32<'de, D>(deserializer: D) -> Result<f32, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ let s: &str = Deserialize::deserialize(deserializer)?;
+ s.parse::<f32>().map_err(Error::custom)
+}
+
+fn from_str_to_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ let s: &str = Deserialize::deserialize(deserializer)?;
+ s.parse::<f64>().map_err(Error::custom)
+}
+
+fn from_str_to_f32_opt<'de, D>(deserializer: D) -> Result<Option<f32>, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ let s: Option<&str> = Deserialize::deserialize(deserializer)?;
+ match s {
+ Some(s) => Ok(Some(s.parse::<f32>().map_err(Error::custom)?)),
+ None => Ok(None),
+ }
+}
+
+fn from_str_to_side<'de, D>(deserializer: D) -> Result<Side, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ let s: &str = Deserialize::deserialize(deserializer)?;
+ match s {
+ "BUY" => Ok(Side::Buy),
+ "SELL" => Ok(Side::Sell),
+ s => Err(Error::invalid_value(Unexpected::Other(s), &"BUY or SELL")),
+ }
+}
+
+fn from_str_to_status<'de, D>(deserializer: D) -> Result<Status, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ let s: &str = Deserialize::deserialize(deserializer)?;
+ match s {
+ "NEW" => Ok(Status::New),
+ "PARTIALLY_FILLED" => Ok(Status::PartiallyFilled),
+ "FILLED" => Ok(Status::Filled),
+ "CANCELED" => Ok(Status::Canceled),
+ // "REJECTED" => Ok(Status::Rejected),
+ "EXPIRED" => Ok(Status::Expired),
+ // "EXPIRED_IN_MATCH" => Ok(Status::ExpiredInMatch),
+ s => Err(Error::invalid_value(
+ Unexpected::Other(s),
+ &"NEW,PARTIALLY_FILLED,FILLED,CANCELED,EXPIRED",
+ )),
+ }
+}
+
+fn from_str_to_type<'de, D>(deserializer: D) -> Result<OrdType, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ let s: &str = Deserialize::deserialize(deserializer)?;
+ match s {
+ "LIMIT" => Ok(OrdType::Limit),
+ "MARKET" => Ok(OrdType::Market),
+ // "STOP" => Ok(OrdType::StopLimit),
+ // "TAKE_PROFIT" => Ok(OrdType::TakeProfitLimit),
+ // "STOP_MARKET" => Ok(OrdType::StopMarket),
+ // "TAKE_PROFIT_MARKET" => Ok(OrdType::TakeProfitMarket),
+ // "TRAILING_STOP_MARKET" => Ok(OrdType::TrailingStopMarket),
+ s => Err(Error::invalid_value(Unexpected::Other(s), &"LIMIT,MARKET")),
+ }
+}
+
+fn from_str_to_tif<'de, D>(deserializer: D) -> Result<TimeInForce, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ let s: &str = Deserialize::deserialize(deserializer)?;
+ match s {
+ "GTC" => Ok(TimeInForce::GTC),
+ "IOC" => Ok(TimeInForce::IOC),
+ "FOK" => Ok(TimeInForce::FOK),
+ "GTX" => Ok(TimeInForce::GTX),
+ // "GTD" => Ok(TimeInForce::GTD),
+ s => Err(Error::invalid_value(
+ Unexpected::Other(s),
+ &"GTC,IOC,FOK,GTX",
+ )),
+ }
+}
diff --git a/rust/src/connector/binancefutures/msg/rest.rs b/rust/src/connector/binancefutures/msg/rest.rs
new file mode 100644
index 0000000..3df845d
--- /dev/null
+++ b/rust/src/connector/binancefutures/msg/rest.rs
@@ -0,0 +1,261 @@
+use serde::Deserialize;
+
+use super::{
+ from_str_to_f32,
+ from_str_to_f32_opt,
+ from_str_to_f64,
+ from_str_to_side,
+ from_str_to_status,
+ from_str_to_tif,
+ from_str_to_type,
+};
+use crate::ty::{OrdType, Side, Status, TimeInForce};
+
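+/// Either a successful order response or an exchange error payload; `untagged`
+/// lets serde pick whichever variant matches the JSON shape.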
+#[derive(Deserialize, Debug)]
+#[serde(untagged)]
+pub enum OrderResponseResult {
+ Ok(OrderResponse),
+ Err(ErrorResponse),
+}
+
+#[derive(Deserialize, Debug)]
+pub struct OrderResponse {
+ #[serde(rename = "clientOrderId")]
+ pub client_order_id: String,
+ #[serde(rename = "cumQty")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub cum_qty: f32,
+ /// New Order and Cancel Order responses only field
+ #[serde(rename = "cumQuote")]
+ #[serde(default)]
+ #[serde(deserialize_with = "from_str_to_f32_opt")]
+ pub cum_quote: Option<f32>,
+ /// Modify Order response only field
+ #[serde(rename = "cumBase")]
+ #[serde(default)]
+ #[serde(deserialize_with = "from_str_to_f32_opt")]
+ pub cum_base: Option<f32>,
+ #[serde(rename = "executedQty")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub executed_qty: f32,
+ #[serde(rename = "orderId")]
+ pub order_id: i64,
+ /// New Order and Modify Order responses only field
+ #[serde(rename = "avgPrice")]
+ #[serde(default)]
+ #[serde(deserialize_with = "from_str_to_f32_opt")]
+ pub avg_price: Option<f32>,
+ #[serde(rename = "origQty")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub orig_qty: f32,
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub price: f32,
+ #[serde(rename = "reduceOnly")]
+ pub reduce_only: bool,
+ #[serde(deserialize_with = "from_str_to_side")]
+ pub side: Side,
+ #[serde(rename = "positionSide")]
+ pub position_side: String,
+ #[serde(deserialize_with = "from_str_to_status")]
+ pub status: Status,
+ #[serde(rename = "stopPrice")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub stop_price: f32,
+ #[serde(rename = "closePosition")]
+ pub close_position: bool,
+ pub symbol: String,
+ /// Modify Order response only field
+ #[serde(default)]
+ pub pair: Option<String>,
+ #[serde(rename = "timeInForce")]
+ #[serde(deserialize_with = "from_str_to_tif")]
+ pub time_in_force: TimeInForce,
+ #[serde(rename = "type")]
+ #[serde(deserialize_with = "from_str_to_type")]
+ pub type_: OrdType,
+ #[serde(rename = "origType")]
+ #[serde(deserialize_with = "from_str_to_type")]
+ pub orig_type: OrdType,
+ /// New Order and Cancel Order responses only field
+ #[serde(rename = "activatePrice")]
+ #[serde(default)]
+ #[serde(deserialize_with = "from_str_to_f32_opt")]
+ pub activate_price: Option<f32>,
+ /// New Order and Cancel Order responses only field
+ #[serde(rename = "priceRate")]
+ #[serde(default)]
+ #[serde(deserialize_with = "from_str_to_f32_opt")]
+ pub price_rate: Option<f32>,
+ #[serde(rename = "updateTime")]
+ pub update_time: i64,
+ #[serde(rename = "workingType")]
+ pub working_type: String,
+ #[serde(rename = "priceProtect")]
+ pub price_protect: bool,
+ #[serde(rename = "priceMatch")]
+ pub price_match: String,
+ #[serde(rename = "selfTradePreventionMode")]
+ pub self_trade_prevention_mode: String,
+ #[serde(rename = "goodTillDate")]
+ pub good_till_date: i64,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct ErrorResponse {
+ pub code: i64,
+ pub msg: String,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct PositionInformationV2 {
+ #[serde(rename = "entryPrice")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub entry_price: f32,
+ #[serde(rename = "breakEvenPrice")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub breakeven_price: f32,
+ #[serde(rename = "marginType")]
+ pub margin_type: String,
+ #[serde(rename = "isAutoAddMargin")]
+ pub is_auto_add_margin: String,
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub leverage: f32,
+ #[serde(rename = "liquidationPrice")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub liquidation_price: f32,
+ #[serde(rename = "markPrice")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub mark_price: f32,
+ #[serde(rename = "maxNotionalValue")]
+ #[serde(deserialize_with = "from_str_to_f64")]
+ pub max_notional_value: f64,
+ #[serde(rename = "positionAmt")]
+ #[serde(deserialize_with = "from_str_to_f64")]
+ pub position_amount: f64,
+ #[serde(deserialize_with = "from_str_to_f64")]
+ pub notional: f64,
+ #[serde(rename = "isolatedWallet")]
+ pub isolated_wallet: String,
+ pub symbol: String,
+ #[serde(rename = "unRealizedProfit")]
+ pub unrealized_pnl: String,
+ #[serde(rename = "positionSide")]
+ pub position_side: String,
+ #[serde(rename = "updateTime")]
+ pub update_time: i64,
+}
+
+/// https://binance-docs.github.io/apidocs/futures/en/#error-codes
+pub enum ErrorCode {
+ Unknown,
+ Disconnected,
+ Unauthorized,
+ TooManyRequests,
+ DuplicateIp,
+ NoSuchIp,
+ UnexpectedResp,
+ Timeout,
+ ServerBusy,
+ ErrorMsgReceived,
+ NonWhiteList,
+ InvalidMessage,
+ UnknownOrderComposition,
+ TooManyOrders,
+ ServiceShuttingDown,
+ UnsupportedOperation,
+ InvalidTimestamp,
+ InvalidSignature,
+ StartTimeGreaterThanEndTime,
+ NotFound,
+ // 11xx - Request issues
+ IllegalChars,
+ TooManyParameters,
+ MandatoryParamEmptyOrMalformed,
+ UnknownParam,
+ UnreadParameters,
+ ParamEmpty,
+ ParamNotRequired,
+ BadAsset,
+ BadAccount,
+ BadInstrumentType,
+ BadPrecision,
+ NoDepth,
+ WithdrawNotNegative,
+ TifNotRequired,
+ InvalidTif,
+ InvalidOrderType,
+ InvalidSide,
+ EmptyNewClOrdId,
+ EmptyOrgClOrdId,
+ BadInterval,
+ BadSymbol,
+ InvalidSymbolStatus,
+ InvalidListenKey,
+ AssetNotSupported,
+ MoreThanXxHours,
+ OptionalParamsBadCombo,
+ InvalidParameter,
+ InvalidNewOrderRespType,
+ // 20xx - Processing issues
+ NewOrderRejected,
+ CancelRejected,
+ CancelAllFail,
+ NoSuchOrder,
+ BadApiKeyFmt,
+ RejectedMbxKey,
+ NoTradingWindow,
+ ApiKeysLocked,
+ BalanceNotSufficient,
+ MarginNotSufficient,
+ UnableToFill,
+ OrderWouldImmediatelyTrigger,
+ ReduceOnlyReject,
+ UserInLiquidation,
+ PositionNotSufficient,
+ MaxOpenOrderExceeded,
+ ReduceOnlyOrderTypeNotSupported,
+ MaxLeverageRatio,
+ MinLeverageRatio,
+ // 40xx Filters and other issues
+ InvalidOrderStatus,
+ PriceLessThanZero,
+ PriceGreaterThanMaxPrice,
+ QtyLessThanZero,
+ QtyLessThanMinQty,
+ QtyGreaterThanMaxQty,
+ StopPriceLessThanZero,
+ StopPriceGreaterThanMaxPrice,
+ TickSizeLessThanZero,
+ MaxPriceLEssThanMinPrice,
+ MaxQtyLessThanMinQty,
+ StepSizeLessThanZero,
+ MaxNumOrdersLessThanZero,
+ PriceLessThanMinPrice,
+ PriceNotIncreasedByTickSize,
+ InvalidClOrdIdLen,
+ PriceHigherThanMultiplierUp,
+ MultiplierUpLessThanZero,
+ CompositeScaleOverflow,
+ TargetStrategyInvalid,
+ InvalidDepthLimit,
+ WrongMarketStatus,
+ QtyNotIncreasedByStepSize,
+ PriceLowerThanMultiplierDown,
+ MultiplierDecimalLessThanZero,
+ CommissionInvalid,
+ InvalidAccountType,
+ InvalidLeverage,
+ InvalidTickSizePrecision,
+ InvalidStepSizePrecision,
+ InvalidWorkingType,
+ ExceedMaxCancelOrderSize,
+ InsuranceAccountNotFound,
+ InvalidBalanceType,
+ MaxStopOrderExceeded,
+ NoNeedToChangeMarginType,
+ ThereExistsOpenOrders,
+ ThereExistsQuantity,
+ AddIsolatedMarginReject,
+ CrossBalanceInsufficient,
+ NoNeedToChangeAutoAddMargin,
+}
diff --git a/rust/src/connector/binancefutures/msg/stream.rs b/rust/src/connector/binancefutures/msg/stream.rs
new file mode 100644
index 0000000..b1d21b2
--- /dev/null
+++ b/rust/src/connector/binancefutures/msg/stream.rs
@@ -0,0 +1,244 @@
+use serde::Deserialize;
+
+use super::{
+ from_str_to_f32,
+ from_str_to_f64,
+ from_str_to_side,
+ from_str_to_status,
+ from_str_to_tif,
+ from_str_to_type,
+};
+use crate::ty::{OrdType, Side, Status, TimeInForce};
+
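+/// Combined stream envelope: `stream` names the source stream and `data`
+/// carries the payload, dispatched on the event type field `e`.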
+#[derive(Deserialize, Debug)]
+pub struct Stream {
+ pub stream: String,
+ pub data: Data,
+}
+
+#[derive(Deserialize, Debug)]
+#[serde(tag = "e")]
+pub enum Data {
+ #[serde(rename = "depthUpdate")]
+ DepthUpdate(Depth),
+ #[serde(rename = "trade")]
+ Trade(Trade),
+ #[serde(rename = "ORDER_TRADE_UPDATE")]
+ OrderTradeUpdate(OrderTradeUpdate),
+ #[serde(rename = "ACCOUNT_UPDATE")]
+ AccountUpdate(AccountUpdate),
+ #[serde(rename = "listenKeyExpired")]
+ ListenKeyExpired(ListenKeyStream),
+}
+
+#[derive(Deserialize, Debug)]
+pub struct Depth {
+ #[serde(rename = "T")]
+ pub ev_timestamp: i64,
+ #[serde(rename = "E")]
+ pub send_timestamp: i64,
+ #[serde(rename = "s")]
+ pub symbol: String,
+ #[serde(rename = "U")]
+ pub first_update_id: i64,
+ #[serde(rename = "u")]
+ pub last_update_id: i64,
+ // #[serde(rename = "pu")]
+ // pub prev_update_id: i64,
+ #[serde(rename = "b")]
+ pub bids: Vec<(String, String)>,
+ #[serde(rename = "a")]
+ pub asks: Vec<(String, String)>,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct Trade {
+ #[serde(rename = "T")]
+ pub ev_timestamp: i64,
+ #[serde(rename = "E")]
+ pub send_timestamp: i64,
+ #[serde(rename = "s")]
+ pub symbol: String,
+ #[serde(rename = "t")]
+ pub id: i64,
+ #[serde(rename = "p")]
+ pub price: String,
+ #[serde(rename = "q")]
+ pub qty: String,
+ #[serde(rename = "X")]
+ pub type_: String,
+ #[serde(rename = "m")]
+ pub is_the_buyer_the_market_maker: bool,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct AccountUpdate {
+ #[serde(rename = "E")]
+ pub event_time: i64,
+ #[serde(rename = "T")]
+    pub transaction_time: i64,
+ #[serde(rename = "a")]
+ pub account: Account,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct Account {
+ #[serde(rename = "m")]
+ pub ev_reason: String,
+ #[serde(rename = "B")]
+ pub balance: Vec<Balance>,
+ #[serde(rename = "P")]
+ pub position: Vec<Position>,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct Balance {
+ #[serde(rename = "a")]
+ pub asset: String,
+ #[serde(rename = "wb")]
+ #[serde(deserialize_with = "from_str_to_f64")]
+ pub wallet_balance: f64,
+ #[serde(rename = "cw")]
+ #[serde(deserialize_with = "from_str_to_f64")]
+ pub cross_wallet_balance: f64,
+ #[serde(rename = "bc")]
+ #[serde(deserialize_with = "from_str_to_f64")]
+ pub balance_change: f64,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct Position {
+ #[serde(rename = "s")]
+ pub symbol: String,
+ #[serde(rename = "pa")]
+ #[serde(deserialize_with = "from_str_to_f64")]
+ pub position_amount: f64,
+ #[serde(rename = "ep")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub entry_price: f32,
+ #[serde(rename = "bep")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub breakeven_price: f32,
+ #[serde(rename = "cr")]
+ #[serde(deserialize_with = "from_str_to_f64")]
+ pub prefee_accumulated_realized: f64,
+ #[serde(rename = "up")]
+ pub unrealized_pnl: String,
+ #[serde(rename = "mt")]
+ pub margin_type: String,
+ #[serde(rename = "iw")]
+ pub isolated_wallet: Option<String>,
+ #[serde(rename = "ps")]
+ pub position_side: String,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct OrderTradeUpdate {
+ #[serde(rename = "E")]
+ pub event_time: i64,
+ #[serde(rename = "T")]
+ pub transaction_time: i64,
+ #[serde(rename = "o")]
+ pub order: Order,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct Order {
+ #[serde(rename = "s")]
+ pub symbol: String,
+ #[serde(rename = "c")]
+ pub client_order_id: String,
+ #[serde(rename = "S")]
+ #[serde(deserialize_with = "from_str_to_side")]
+ pub side: Side,
+ #[serde(rename = "o")]
+ #[serde(deserialize_with = "from_str_to_type")]
+ pub order_type: OrdType,
+ #[serde(rename = "f")]
+ #[serde(deserialize_with = "from_str_to_tif")]
+ pub time_in_force: TimeInForce,
+ #[serde(rename = "q")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub original_qty: f32,
+ #[serde(rename = "p")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub original_price: f32,
+ #[serde(rename = "ap")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub average_price: f32,
+ #[serde(rename = "sp")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub stop_price: f32,
+ #[serde(rename = "x")]
+ pub execution_type: String,
+ #[serde(rename = "X")]
+ #[serde(deserialize_with = "from_str_to_status")]
+ pub order_status: Status,
+ #[serde(rename = "i")]
+ pub order_id: i64,
+ #[serde(rename = "l")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub order_last_filled_qty: f32,
+ #[serde(rename = "z")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub order_filled_accumulated_qty: f32,
+ #[serde(rename = "L")]
+ #[serde(deserialize_with = "from_str_to_f32")]
+ pub last_filled_price: f32,
+ // #[serde(rename = "N")]
+ // pub commission_asset: Option<String>,
+ // #[serde(rename = "n")]
+ // pub commission: Option<String>,
+ #[serde(rename = "T")]
+ pub order_trade_time: i64,
+ #[serde(rename = "t")]
+ pub trade_id: i64,
+ // #[serde(rename = "b")]
+ // pub bid_notional: String,
+ // #[serde(rename = "a")]
+ // pub ask_notional: String,
+ // #[serde(rename = "m")]
+ // pub is_maker_side: bool,
+ // #[serde(rename = "R")]
+ // pub is_reduce_only: bool,
+ // #[serde(rename = "wt")]
+ // pub stop_price_working_type: String,
+ // #[serde(rename = "ot")]
+ // pub original_order_type: String,
+ // #[serde(rename = "ps")]
+ // pub position_side: String,
+ // #[serde(rename = "cp")]
+ // pub close_all: Option<String>,
+ // #[serde(rename = "AP")]
+ // pub activation_price: Option<String>,
+ // #[serde(rename = "cr")]
+ // pub callback_rate: Option<String>,
+ // #[serde(rename = "pP")]
+ // pub price_protection: bool,
+ // #[serde(rename = "si")]
+ // pub ignore: i64,
+ // #[serde(rename = "ss")]
+ // pub ignore: i64,
+ // #[serde(rename = "rp")]
+ // pub realized_profit: String,
+ // #[serde(rename = "V")]
+ // pub stp_mode: String,
+ // #[serde(rename = "pm")]
+ // pub price_match_mode: String,
+ // #[serde(rename = "gtd")]
+ // pub gtd_auto_cancel_time: i64,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct ListenKey {
+ #[serde(rename = "listenKey")]
+ pub listen_key: String,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct ListenKeyStream {
+ #[serde(rename = "E")]
+ pub event_time: i64,
+ #[serde(rename = "listenKey")]
+ pub listen_key: String,
+}
diff --git a/rust/src/connector/binancefutures/ordermanager.rs b/rust/src/connector/binancefutures/ordermanager.rs
new file mode 100644
index 0000000..d15ecba
--- /dev/null
+++ b/rust/src/connector/binancefutures/ordermanager.rs
@@ -0,0 +1,315 @@
+use std::{
+ collections::{hash_map::Entry, HashMap},
+ sync::{Arc, Mutex},
+};
+use chrono::Utc;
+use rand::{distributions::Alphanumeric, Rng};
+
+use tracing::{debug, error, info, warn};
+
+use crate::{
+ connector::binancefutures::{msg::OrderResponse, rest::RequestError},
+ ty::{Order, Status},
+};
+
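+/// Wraps an order together with flags marking which feed (WebSocket or REST)
+/// has seen its final status; the entry is dropped only once both have.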
+#[derive(Debug)]
+struct OrderWrapper {
+ order: Order<()>,
+ client_order_id: String,
+ removed_by_ws: bool,
+ removed_by_rest: bool,
+}
+
+pub type OrderMgr = Arc<Mutex<OrderManager>>;
+
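+/// Manages the mapping between the bot's numeric order IDs and Binance client
+/// order IDs, which are generated as `{prefix}{16 random chars}{order_id}` so
+/// that the numeric ID can be recovered from exchange messages.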
+#[derive(Default, Debug)]
+pub struct OrderManager {
+ prefix: String,
+ orders: HashMap<String, OrderWrapper>,
+ order_id_map: HashMap<i64, String>,
+}
+
+impl OrderManager {
+ pub fn new(prefix: &str) -> Self {
+ Self {
+ prefix: prefix.to_string(),
+ orders: Default::default(),
+ order_id_map: Default::default(),
+ }
+ }
+
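+    /// Applies an order update received over the user data stream and returns
+    /// the order to forward to the bot, or `None` if the update is not
+    /// relevant (e.g. the order was already removed).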
+ pub fn update_from_ws(
+ &mut self,
+ client_order_id: String,
+ order: Order<()>,
+ ) -> Option<Order<()>> {
+ match self.orders.entry(client_order_id.clone()) {
+ Entry::Occupied(mut entry) => {
+ let wrapper = entry.get_mut();
+ let already_removed = wrapper.removed_by_ws || wrapper.removed_by_rest;
+ if order.exch_timestamp >= wrapper.order.exch_timestamp {
+ wrapper.order.update(&order);
+ }
+
+ if order.status != Status::New && order.status != Status::PartiallyFilled {
+ wrapper.removed_by_ws = true;
+ if !already_removed {
+ self.order_id_map.remove(&order.order_id);
+ }
+
+ if wrapper.removed_by_ws && wrapper.removed_by_rest {
+ entry.remove_entry();
+ }
+ }
+
+ if already_removed {
+ None
+ } else {
+ Some(order)
+ }
+ }
+ Entry::Vacant(entry) => {
+ if !order.active() {
+ return None;
+ }
+
+ debug!(%client_order_id, ?order, "Received an unmanaged order from WS.");
+ let wrapper = entry.insert(OrderWrapper {
+ order: order.clone(),
+ removed_by_ws: order.status != Status::New
+ && order.status != Status::PartiallyFilled,
+ removed_by_rest: false,
+ client_order_id,
+ });
+ if wrapper.removed_by_ws || wrapper.removed_by_rest {
+ self.order_id_map.remove(&order.order_id);
+ }
+ Some(order)
+ }
+ }
+ }
+
+ pub fn update_submit_success(
+ &mut self,
+ order: Order<()>,
+ resp: OrderResponse,
+ ) -> Option<Order<()>> {
+ let order = Order {
+ qty: resp.orig_qty,
+ leaves_qty: resp.orig_qty - resp.cum_qty,
+ price_tick: (resp.price / order.tick_size).round() as i32,
+ tick_size: order.tick_size,
+ side: order.side,
+ time_in_force: resp.time_in_force,
+ exch_timestamp: resp.update_time * 1_000_000,
+ status: Status::New,
+ local_timestamp: 0,
+ req: Status::None,
+ exec_price_tick: 0,
+ exec_qty: resp.executed_qty,
+ order_id: order.order_id,
+ order_type: resp.type_,
+ // Invalid information
+ q: (),
+ // Invalid information
+ maker: false,
+ };
+ self.update_from_rest(resp.client_order_id, order)
+ }
+
+ pub fn update_submit_fail(
+ &mut self,
+ mut order: Order<()>,
+ error: RequestError,
+ client_order_id: String,
+ ) -> Option<Order<()>> {
+ match error {
+ RequestError::OrderError(-5022, _) => {
+ // GTX rejection.
+ }
+ RequestError::OrderError(-1008, _) => {
+ // Server is currently overloaded with other requests. Please try again in a few minutes.
+ error!("Server is currently overloaded with other requests. Please try again in a few minutes.");
+ }
+ RequestError::OrderError(-2019, _) => {
+ // Margin is insufficient.
+ error!("Margin is insufficient.");
+ }
+ RequestError::OrderError(-1015, _) => {
+                // Too many new orders; current limit is 300 orders per TEN_SECONDS.
+ error!("Too many new orders; current limit is 300 orders per TEN_SECONDS.");
+ }
+ error => {
+ error!(?error, "submit error");
+ }
+ }
+
+ order.req = Status::None;
+ order.status = Status::Expired;
+ self.update_from_rest(client_order_id, order)
+ }
+
+ pub fn update_cancel_success(
+ &mut self,
+ mut order: Order<()>,
+ resp: OrderResponse,
+ ) -> Option<Order<()>> {
+ let order = Order {
+ qty: resp.orig_qty,
+ leaves_qty: resp.orig_qty - resp.cum_qty,
+ price_tick: (resp.price / order.tick_size).round() as i32,
+ tick_size: order.tick_size,
+ side: resp.side,
+ time_in_force: resp.time_in_force,
+ exch_timestamp: resp.update_time * 1_000_000,
+ status: Status::Canceled,
+ local_timestamp: 0,
+ req: Status::None,
+ exec_price_tick: 0,
+ exec_qty: resp.executed_qty,
+ order_id: order.order_id,
+ order_type: resp.type_,
+ // Invalid information
+ q: (),
+ // Invalid information
+ maker: false,
+ };
+ self.update_from_rest(resp.client_order_id, order)
+ }
+
+ pub fn update_cancel_fail(
+ &mut self,
+ mut order: Order<()>,
+ error: RequestError,
+ client_order_id: String,
+ ) -> Option<Order<()>> {
+ match error {
+ RequestError::OrderError(-2011, _) => {
+                // The given order may no longer exist; it could have already been filled or
+                // canceled. The actual status cannot be determined here because the response
+                // lacks the necessary information.
+ order.leaves_qty = 0.0;
+ order.status = Status::None;
+ }
+ error => {
+ error!(?error, "cancel error");
+ }
+ }
+ order.req = Status::None;
+ self.update_from_rest(client_order_id, order)
+ }
+
+ fn update_from_rest(&mut self, client_order_id: String, order: Order<()>) -> Option<Order<()>> {
+ match self.orders.entry(client_order_id.clone()) {
+ Entry::Occupied(mut entry) => {
+ let wrapper = entry.get_mut();
+ let already_removed = wrapper.removed_by_ws || wrapper.removed_by_rest;
+ if order.exch_timestamp >= wrapper.order.exch_timestamp {
+ wrapper.order.update(&order);
+ }
+
+ if order.status != Status::New && order.status != Status::PartiallyFilled {
+ wrapper.removed_by_rest = true;
+ if !already_removed {
+ self.order_id_map.remove(&order.order_id);
+ }
+
+ if wrapper.removed_by_ws && wrapper.removed_by_rest {
+ entry.remove_entry();
+ }
+ }
+
+ if already_removed {
+ None
+ } else {
+ Some(order)
+ }
+ }
+ Entry::Vacant(entry) => {
+ if !order.active() {
+ return None;
+ }
+
+ debug!(%client_order_id, ?order, "Received an unmanaged order from REST.");
+ let wrapper = entry.insert(OrderWrapper {
+ order: order.clone(),
+ removed_by_ws: false,
+ removed_by_rest: order.status != Status::New
+ && order.status != Status::PartiallyFilled,
+ client_order_id,
+ });
+ if wrapper.removed_by_ws || wrapper.removed_by_rest {
+ self.order_id_map.remove(&order.order_id);
+ }
+ Some(order)
+ }
+ }
+ }
+
+ pub fn prepare_client_order_id(&mut self, order: Order<()>) -> Option<String> {
+ if self.order_id_map.contains_key(&order.order_id) {
+ return None;
+ }
+
+ let rand_id: String = rand::thread_rng()
+ .sample_iter(&Alphanumeric)
+ .take(16)
+ .map(char::from)
+ .collect();
+
+ let client_order_id = format!("{}{}{}", self.prefix, &rand_id, order.order_id);
+ if self.orders.contains_key(&client_order_id) {
+ return None;
+ }
+
+ self.order_id_map.insert(order.order_id, client_order_id.clone());
+ self.orders.insert(client_order_id.clone(), OrderWrapper {
+ order,
+ client_order_id: client_order_id.clone(),
+ removed_by_ws: false,
+ removed_by_rest: false,
+ });
+ Some(client_order_id)
+ }
+
+ pub fn get_client_order_id(&self, order_id: i64) -> Option<String> {
+ self.order_id_map.get(&order_id).cloned()
+ }
+
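+    /// Removes orders whose status became final more than five minutes ago, in
+    /// case the normal removal path was missed by either feed.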
+ pub fn gc(&mut self) {
+ let now = Utc::now().timestamp_nanos_opt().unwrap();
+ let stale_ts = now - 300_000_000_000;
+ let stale_ids: Vec<(_, _)> = self.orders
+ .iter()
+ .filter(|&(_, wrapper)| {
+ wrapper.order.status != Status::New
+ && wrapper.order.status != Status::PartiallyFilled
+ && wrapper.order.status != Status::Unsupported
+ && wrapper.order.exch_timestamp < stale_ts
+ })
+ .map(|(client_order_id, wrapper)|
+ (client_order_id.clone(), wrapper.order.order_id)
+ )
+ .collect();
+ for (client_order_id, order_id) in stale_ids.iter() {
+            if self.order_id_map.remove(order_id).is_some() {
+                // The mapping should already have been removed when the order
+                // reached a final status; remove it here so the order ID can
+                // be reused by a new order.
+                warn!(%client_order_id, order_id = *order_id, "Stale order still had an order ID mapping.");
+            }
+ self.orders.remove(client_order_id);
+ }
+ }
+
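+    /// Extracts the numeric order ID from a client order ID of the form
+    /// `{prefix}{16 random chars}{order_id}`, returning `None` if the prefix
+    /// does not match or the suffix is not a valid integer.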
+ pub fn parse_client_order_id(client_order_id: &str, prefix: &str) -> Option<i64> {
+ if !client_order_id.starts_with(prefix) {
+ None
+ } else {
+ let s = &client_order_id[(prefix.len() + 16)..];
+ if let Ok(order_id) = s.parse() {
+ Some(order_id)
+ } else {
+ None
+ }
+ }
+ }
+}
diff --git a/rust/src/connector/binancefutures/rest.rs b/rust/src/connector/binancefutures/rest.rs
new file mode 100644
index 0000000..8e9f43c
--- /dev/null
+++ b/rust/src/connector/binancefutures/rest.rs
@@ -0,0 +1,453 @@
+use std::{
+ collections::HashMap,
+ fmt::{Debug, Write},
+};
+
+use chrono::Utc;
+use hmac::{Hmac, KeyInit, Mac};
+use serde::Deserialize;
+use sha2::Sha256;
+use thiserror::Error;
+
+// https://binance-docs.github.io/apidocs/futures/en/
+use super::msg::PositionInformationV2;
+use crate::{
+ connector::binancefutures::{
+ msg::{ListenKey, OrderResponse, OrderResponseResult},
+ ordermanager::OrderMgr,
+ },
+ live::AssetInfo,
+ ty::{OrdType, Order, Side, Status, TimeInForce, ToStr},
+};
+use crate::connector::binancefutures::ordermanager::OrderManager;
+
+#[derive(Error, Debug)]
+pub enum RequestError {
+ #[error("invalid request")]
+ InvalidRequest,
+ #[error("http error")]
+ ReqError(#[from] reqwest::Error),
+ #[error("order error")]
+ OrderError(i64, String),
+}
+
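+/// Minimal REST client for the Binance Futures API, covering request signing
+/// and the endpoints used by the connector.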
+#[derive(Clone)]
+pub struct BinanceFuturesClient {
+ client: reqwest::Client,
+ url: String,
+ api_key: String,
+ secret: String,
+ orders: OrderMgr,
+}
+
+impl BinanceFuturesClient {
+ pub fn new(url: &str, api_key: &str, secret: &str, orders: OrderMgr) -> Self {
+ Self {
+ client: reqwest::Client::new(),
+ url: url.to_string(),
+ api_key: api_key.to_string(),
+ secret: secret.to_string(),
+ orders,
+ }
+ }
+
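+    /// Signs the payload with HMAC-SHA256 using the API secret and returns the
+    /// signature as a lowercase hex string, as required for signed endpoints.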
+ fn sign(secret: &str, s: &str) -> String {
+ let mut mac = Hmac::<Sha256>::new_from_slice(secret.as_bytes()).unwrap();
+ mac.update(s.as_bytes());
+ let hash = mac.finalize().into_bytes();
+ let mut tmp = String::with_capacity(hash.len() * 2);
+ for c in hash {
+ write!(&mut tmp, "{:02x}", c).unwrap();
+ }
+ tmp
+ }
+
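+    /// Sends a signed GET request, appending `recvWindow` and a timestamp to
+    /// the query string before signing.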
+ async fn get<T: for<'a> Deserialize<'a>>(
+ &self,
+ path: &str,
+ mut query: String,
+ api_key: &str,
+ secret: &str,
+ ) -> Result<T, reqwest::Error> {
+ let time = Utc::now().timestamp_millis() - 1000;
+ if !query.is_empty() {
+ query.push_str("&");
+ }
+        query.push_str("recvWindow=5000&timestamp=");
+ query.push_str(&time.to_string());
+ let signature = Self::sign(secret, &query);
+ let resp = self
+ .client
+ .get(&format!(
+ "{}{}?{}&signature={}",
+ self.url, path, query, signature
+ ))
+ .header("Accept", "application/json")
+ .header("X-MBX-APIKEY", api_key)
+ .send()
+ .await?
+ .json()
+ .await?;
+ Ok(resp)
+ }
+
+ async fn put<T: for<'a> Deserialize<'a>>(
+ &self,
+ path: &str,
+ body: String,
+ api_key: &str,
+ secret: &str,
+ ) -> Result<T, reqwest::Error> {
+ let time = Utc::now().timestamp_millis() - 1000;
+        let sign_body = format!("recvWindow=5000&timestamp={}{}", time, body);
+ let signature = Self::sign(secret, &sign_body);
+ let resp = self
+ .client
+ .put(&format!(
+ "{}{}?recvWindow=5000×tamp={}&signature={}",
+ self.url, path, time, signature
+ ))
+ .header("Accept", "application/json")
+ .header("X-MBX-APIKEY", api_key)
+ .body(body)
+ .send()
+ .await?
+ .json()
+ .await?;
+ Ok(resp)
+ }
+
+ async fn post<T: for<'a> Deserialize<'a>>(
+ &self,
+ path: &str,
+ body: String,
+ api_key: &str,
+ secret: &str,
+ ) -> Result<T, reqwest::Error> {
+ let time = Utc::now().timestamp_millis() - 1000;
+        let sign_body = format!("recvWindow=5000&timestamp={}{}", time, body);
+ let signature = Self::sign(secret, &sign_body);
+ let resp = self
+ .client
+ .post(&format!(
+ "{}{}?recvWindow=5000×tamp={}&signature={}",
+ self.url, path, time, signature
+ ))
+ .header("Accept", "application/json")
+ .header("X-MBX-APIKEY", api_key)
+ .body(body)
+ .send()
+ .await?
+ .json()
+ .await?;
+ Ok(resp)
+ }
+
+ async fn delete<T: for<'a> Deserialize<'a>>(
+ &self,
+ path: &str,
+ body: String,
+ api_key: &str,
+ secret: &str,
+ ) -> Result<T, reqwest::Error> {
+ let time = Utc::now().timestamp_millis() - 1000;
+        let sign_body = format!("recvWindow=5000&timestamp={}{}", time, body);
+ let signature = Self::sign(secret, &sign_body);
+ let resp = self
+ .client
+ .delete(&format!(
+ "{}{}?recvWindow=5000×tamp={}&signature={}",
+ self.url, path, time, signature
+ ))
+ .header("Accept", "application/json")
+ .header("X-MBX-APIKEY", api_key)
+ .body(body)
+ .send()
+ .await?
+ .json()
+ .await?;
+ Ok(resp)
+ }
+
+ pub async fn start_user_data_stream(&self) -> Result<String, reqwest::Error> {
+ let resp: Result<ListenKey, _> = self
+ .post(
+ "/fapi/v1/listenKey",
+ String::new(),
+ &self.api_key,
+ &self.secret,
+ )
+ .await;
+ resp.map(|v| v.listen_key)
+ }
+
+ pub async fn keepalive_user_data_stream(&self) -> Result<(), reqwest::Error> {
+ let _: serde_json::Value = self
+ .put(
+ "/fapi/v1/listenKey",
+ String::new(),
+ &self.api_key,
+ &self.secret,
+ )
+ .await?;
+ Ok(())
+ }
+
+ pub async fn submit_order(
+ &self,
+ client_order_id: &str,
+ symbol: &str,
+ side: Side,
+ price: f32,
+ price_prec: usize,
+ qty: f32,
+ order_type: OrdType,
+ time_in_force: TimeInForce,
+ ) -> Result<OrderResponse, RequestError> {
+ let mut body = String::with_capacity(200);
+ body.push_str("newClientOrderId=");
+ body.push_str(&client_order_id);
+ body.push_str("&symbol=");
+ body.push_str(&symbol);
+ body.push_str("&side=");
+ body.push_str(side.to_str());
+ body.push_str("&price=");
+ body.push_str(&format!("{:.prec$}", price, prec = price_prec));
+ body.push_str("&quantity=");
+ body.push_str(&format!("{:.5}", qty));
+ body.push_str("&type=");
+ body.push_str(order_type.to_str());
+ body.push_str("&timeInForce=");
+ body.push_str(time_in_force.to_str());
+
+ let resp: OrderResponseResult = self
+ .post("/fapi/v1/order", body, &self.api_key, &self.secret)
+ .await?;
+ match resp {
+ OrderResponseResult::Ok(resp) => {
+ Ok(resp)
+ }
+ OrderResponseResult::Err(resp) => {
+ Err(RequestError::OrderError(resp.code, resp.msg))
+ }
+ }
+ }
+
+ pub async fn submit_orders(
+ &self,
+ orders: Vec<(String, String, Side, f32, usize, f32, OrdType, TimeInForce)>,
+ ) -> Result<Vec<Result<OrderResponse, RequestError>>, RequestError> {
+ if orders.len() > 5 {
+ return Err(RequestError::InvalidRequest);
+ }
+ let mut body = String::with_capacity(2000 * orders.len());
+ body.push_str("{\"batchOrders\":[");
+ for (i, order) in orders.iter().enumerate() {
+ if i > 0 {
+ body.push_str(",");
+ }
+ body.push_str("{\"newClientOrderId\":\"");
+ body.push_str(&order.0);
+ body.push_str("\",\"symbol\":\"");
+ body.push_str(&order.1);
+ body.push_str("\",\"side\":\"");
+ body.push_str(order.2.to_str());
+ body.push_str("\",\"price\":\"");
+ body.push_str(&format!("{:.prec$}", order.3, prec = order.4));
+ body.push_str("\",\"quantity\":\"");
+ body.push_str(&format!("{:.5}", order.5));
+ body.push_str("\",\"type\":\"");
+ body.push_str(order.6.to_str());
+ body.push_str("\",\"timeInForce\":\"");
+ body.push_str(order.7.to_str());
+ body.push_str("\"}");
+ }
+ body.push_str("]}");
+
+ let resp: Vec<OrderResponseResult> = self
+ .post("/fapi/v1/batchOrders", body, &self.api_key, &self.secret)
+ .await?;
+ Ok(resp
+ .into_iter()
+ .map(|resp| match resp {
+ OrderResponseResult::Ok(resp) => {
+ Ok(resp)
+ }
+ OrderResponseResult::Err(resp) => {
+ Err(RequestError::OrderError(resp.code, resp.msg))
+ }
+ })
+ .collect()
+ )
+ }
+
+ pub async fn modify_order(
+ &self,
+ client_order_id: &str,
+ symbol: &str,
+ side: Side,
+ price: f32,
+ price_prec: usize,
+ qty: f32,
+ ) -> Result<OrderResponse, RequestError> {
+ let mut body = String::with_capacity(100);
+ body.push_str("symbol=");
+ body.push_str(&symbol);
+ body.push_str("&origClientOrderId=");
+ body.push_str(&client_order_id);
+ body.push_str("&side=");
+ body.push_str(side.to_str());
+ body.push_str("&price=");
+ body.push_str(&format!("{:.prec$}", price, prec = price_prec));
+ body.push_str("&quantity=");
+ body.push_str(&format!("{:.5}", qty));
+
+ let resp: OrderResponseResult = self
+ .put("/fapi/v1/order", body, &self.api_key, &self.secret)
+ .await?;
+ match resp {
+ OrderResponseResult::Ok(resp) => {
+ Ok(resp)
+ }
+ OrderResponseResult::Err(resp) => {
+ Err(RequestError::OrderError(resp.code, resp.msg))
+ }
+ }
+ }
+
+ pub async fn cancel_order(
+ &self,
+ client_order_id: &str,
+ symbol: &str,
+ ) -> Result<OrderResponse, RequestError> {
+ let mut body = String::with_capacity(100);
+ body.push_str("symbol=");
+ body.push_str(&symbol);
+ body.push_str("&origClientOrderId=");
+ body.push_str(client_order_id);
+
+ let resp: OrderResponseResult = self
+ .delete("/fapi/v1/order", body, &self.api_key, &self.secret)
+ .await?;
+ match resp {
+ OrderResponseResult::Ok(resp) => {
+ Ok(resp)
+ }
+ OrderResponseResult::Err(resp) => {
+ Err(RequestError::OrderError(resp.code, resp.msg))
+ }
+ }
+ }
+
+ pub async fn cancel_orders(
+ &self,
+ symbol: &str,
+ client_order_ids: Vec<String>,
+ ) -> Result<Vec<Result<OrderResponse, RequestError>>, RequestError> {
+ if client_order_ids.len() > 10 {
+ return Err(RequestError::InvalidRequest);
+ }
+ let mut body = String::with_capacity(100);
+ body.push_str("{\"symbol\":\"");
+ body.push_str(symbol);
+ body.push_str("\",\"origClientOrderIdList\":[");
+ for (i, client_order_id) in client_order_ids.iter().enumerate() {
+ if i > 0 {
+ body.push_str(",");
+ }
+ body.push_str("\"");
+ body.push_str(client_order_id);
+ body.push_str("\"");
+ }
+ body.push_str("]}");
+ let resp: Vec<OrderResponseResult> = self
+ .post("/fapi/v1/batchOrders", body, &self.api_key, &self.secret)
+ .await?;
+ Ok(resp
+ .into_iter()
+ .map(|resp| match resp {
+ OrderResponseResult::Ok(resp) => {
+ Ok(resp)
+ }
+ OrderResponseResult::Err(resp) => {
+ Err(RequestError::OrderError(resp.code, resp.msg))
+ }
+ })
+ .collect()
+ )
+ }
+
+ pub async fn cancel_all_orders(&self, symbol: &str) -> Result<(), reqwest::Error> {
+ let _: serde_json::Value = self
+ .delete(
+ "/fapi/v1/allOpenOrders",
+ format!("symbol={}", symbol),
+ &self.api_key,
+ &self.secret,
+ )
+ .await?;
+ Ok(())
+ }
+
+ pub async fn get_position_information(
+ &self,
+ ) -> Result<Vec<PositionInformationV2>, reqwest::Error> {
+ let resp: Vec<PositionInformationV2> = self
+ .get(
+ "/fapi/v2/positionRisk",
+ String::new(),
+ &self.api_key,
+ &self.secret,
+ )
+ .await?;
+ Ok(resp)
+ }
+
+ pub async fn get_current_all_open_orders(
+ &self,
+ assets: &HashMap<String, AssetInfo>,
+ ) -> Result<Vec<Order<()>>, reqwest::Error> {
+ let resp: Vec<OrderResponse> = self
+ .get(
+ "/fapi/v1/openOrders",
+ String::new(),
+ &self.api_key,
+ &self.secret,
+ )
+ .await?;
+ Ok(resp
+ .iter()
+ .map(|data| {
+ assets.get(&data.symbol).and_then(|asset_info| {
+ // fixme
+ OrderManager::parse_client_order_id(&data.client_order_id, "prefix")
+ .map(|order_id|
+ Order {
+ qty: data.orig_qty,
+ leaves_qty: data.orig_qty - data.cum_qty,
+ price_tick: (data.price / asset_info.tick_size).round() as i32,
+ tick_size: asset_info.tick_size,
+ side: data.side,
+ time_in_force: data.time_in_force,
+ exch_timestamp: data.update_time * 1_000_000,
+ status: data.status,
+ local_timestamp: 0,
+ req: Status::None,
+ exec_price_tick: 0,
+ exec_qty: data.executed_qty,
+ order_id,
+ order_type: data.type_,
+ // Invalid information
+ q: (),
+ // Invalid information
+ maker: false,
+ }
+ )
+ })
+ })
+            .flatten()
+ .collect()
+ )
+ }
+}
diff --git a/rust/src/connector/binancefutures/ws.rs b/rust/src/connector/binancefutures/ws.rs
new file mode 100644
index 0000000..649dbd5
--- /dev/null
+++ b/rust/src/connector/binancefutures/ws.rs
@@ -0,0 +1,214 @@
+use std::{collections::HashMap, sync::mpsc::Sender, time::Duration};
+
+use anyhow::Error;
+use chrono::Utc;
+use futures_util::{SinkExt, StreamExt};
+use tokio::{select, time};
+use tokio_tungstenite::{
+ connect_async,
+ tungstenite::{client::IntoClientRequest, Message},
+};
+use tracing::{error, info};
+
+use super::{
+ msg::{Data, Stream},
+ rest::BinanceFuturesClient,
+ BinanceFuturesError,
+ OrderMgr,
+};
+use crate::{
+ live::AssetInfo,
+ ty::{self, Depth, Event, Order, OrderResponse, Position, Status, BUY, SELL},
+};
+use crate::connector::binancefutures::ordermanager::OrderManager;
+
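+/// Parses the string price/quantity pairs of a depth update into `f32` tuples.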
+fn parse_depth(
+ bids: Vec<(String, String)>,
+ asks: Vec<(String, String)>,
+) -> Result<(Vec<(f32, f32)>, Vec<(f32, f32)>), anyhow::Error> {
+ let mut bids_ = Vec::with_capacity(bids.len());
+ for (px, qty) in bids {
+ bids_.push(parse_px_qty_tup(px, qty)?);
+ }
+ let mut asks_ = Vec::with_capacity(asks.len());
+ for (px, qty) in asks {
+ asks_.push(parse_px_qty_tup(px, qty)?);
+ }
+ Ok((bids_, asks_))
+}
+
+fn parse_px_qty_tup(px: String, qty: String) -> Result<(f32, f32), anyhow::Error> {
+ Ok((px.parse()?, qty.parse()?))
+}
+
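+/// Connects to the WebSocket endpoint and forwards market data and user data
+/// events to the bot, sending a user data stream keepalive every 30 minutes.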
+pub async fn connect(
+ url: &str,
+ ev_tx: Sender<Event>,
+ assets: HashMap<String, AssetInfo>,
+ prefix: &str,
+ orders: OrderMgr,
+ client: BinanceFuturesClient,
+) -> Result<(), anyhow::Error> {
+ let mut request = url.into_client_request()?;
+ let _ = request.headers_mut();
+
+ let (ws_stream, _) = connect_async(request).await?;
+ let (mut write, mut read) = ws_stream.split();
+ let mut interval = time::interval(Duration::from_secs(60 * 30));
+ loop {
+ select! {
+ _ = interval.tick() => {
+ if let Err(error) = client.keepalive_user_data_stream().await {
+ error!(?error, "Failed keepalive user data stream.");
+ }
+ }
+ message = read.next() => {
+ match message {
+ Some(Ok(Message::Text(text))) => {
+ let stream = match serde_json::from_str::<Stream>(&text) {
+ Ok(stream) => stream,
+ Err(error) => {
+ error!(?error, %text, "Couldn't parse Stream.");
+ continue;
+ }
+ };
+ match stream.data {
+ Data::DepthUpdate(data) => {
+ match parse_depth(data.bids, data.asks) {
+ Ok((bids, asks)) => {
+ let ai = assets
+ .get(&data.symbol)
+ .ok_or(BinanceFuturesError::AssetNotFound)?;
+ ev_tx.send(
+ Event::Depth(
+ Depth {
+ asset_no: ai.asset_no,
+ exch_ts: data.ev_timestamp * 1_000_000,
+ local_ts: Utc::now().timestamp_nanos_opt().unwrap(),
+ bids,
+ asks,
+ }
+ )
+ ).unwrap();
+ }
+ Err(error) => {
+ error!(?error, "Couldn't parse DepthUpdate stream.");
+ }
+ }
+ }
+ Data::Trade(data) => {
+ match parse_px_qty_tup(data.price, data.qty) {
+ Ok((price, qty)) => {
+ let asset_info = assets
+ .get(&data.symbol)
+ .ok_or(BinanceFuturesError::AssetNotFound)?;
+ ev_tx.send(
+ Event::Trade(
+ ty::Trade {
+ asset_no: asset_info.asset_no,
+ exch_ts: data.ev_timestamp * 1_000_000,
+ local_ts: Utc::now().timestamp_nanos_opt().unwrap(),
+ side: {
+ if data.is_the_buyer_the_market_maker {
+ SELL as i8
+ } else {
+ BUY as i8
+ }
+ },
+ price,
+ qty,
+ }
+ )
+ ).unwrap();
+ }
+ Err(e) => {
+ error!(error = ?e, "Couldn't parse trade stream.");
+ }
+ }
+ }
+ Data::ListenKeyExpired(_) => {
+ error!("Listen key is expired.");
+ // fixme: it should return an error.
+ break;
+ }
+ Data::AccountUpdate(data) => {
+ for position in data.account.position {
+ if let Some(asset_info) = assets.get(&position.symbol) {
+ ev_tx.send(
+ Event::Position(
+ Position {
+ asset_no: asset_info.asset_no,
+ symbol: position.symbol,
+ qty: position.position_amount
+ }
+ )
+ ).unwrap();
+ }
+ }
+ }
+ Data::OrderTradeUpdate(data) => {
+ if let Some(asset_info) = assets.get(&data.order.symbol) {
+ if let Some(order_id) = OrderManager::parse_client_order_id(&data.order.client_order_id, &prefix) {
+ let order = Order {
+ qty: data.order.original_qty,
+ leaves_qty: data.order.original_qty - data.order.order_filled_accumulated_qty,
+ price_tick: (data.order.original_price / asset_info.tick_size).round() as i32,
+ tick_size: asset_info.tick_size,
+ side: data.order.side,
+ time_in_force: data.order.time_in_force,
+ exch_timestamp: data.transaction_time * 1_000_000,
+ status: data.order.order_status,
+ local_timestamp: 0,
+ req: Status::None,
+ exec_price_tick: (data.order.last_filled_price / asset_info.tick_size).round() as i32,
+ exec_qty: data.order.order_last_filled_qty,
+ order_id,
+ q: (),
+ maker: false,
+ order_type: data.order.order_type
+ };
+
+ let order = orders
+ .lock()
+ .unwrap()
+ .update_from_ws(data.order.client_order_id, order);
+ if let Some(order) = order {
+ ev_tx.send(
+ Event::Order(
+ OrderResponse {
+ asset_no: asset_info.asset_no,
+ order
+ }
+ )
+ ).unwrap();
+ }
+ }
+ }
+ }
+ }
+ }
+ Some(Ok(Message::Binary(_))) => {}
+ Some(Ok(Message::Ping(_))) => {
+ orders.lock()
+ .unwrap()
+ .gc();
+ write.send(Message::Pong(Vec::new())).await?;
+ }
+ Some(Ok(Message::Pong(_))) => {}
+ Some(Ok(Message::Close(close_frame))) => {
+ info!(?close_frame, "close");
+ break;
+ }
+ Some(Ok(Message::Frame(_))) => {}
+ Some(Err(e)) => {
+ return Err(Error::from(e));
+ }
+ None => {
+ break;
+ }
+ }
+ }
+ }
+ }
+ Ok(())
+}
diff --git a/rust/src/connector/mod.rs b/rust/src/connector/mod.rs
new file mode 100644
index 0000000..19f1775
--- /dev/null
+++ b/rust/src/connector/mod.rs
@@ -0,0 +1,31 @@
+use std::sync::mpsc::Sender;
+
+use crate::ty::{Event, Order};
+
+pub mod binancefutures;
+
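+/// Interface for an exchange connector that streams market data and handles
+/// order requests for the assets registered with it.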
+pub trait Connector {
+ fn add(
+ &mut self,
+ an: usize,
+ symbol: String,
+ tick_size: f32,
+ lot_size: f32,
+ ) -> Result<(), anyhow::Error>;
+
+ fn run(&mut self, tx: Sender<Event>) -> Result<(), anyhow::Error>;
+
+ fn submit(
+ &self,
+ an: usize,
+ order: Order<()>,
+ ev_tx: Sender<Event>,
+ ) -> Result<(), anyhow::Error>;
+
+ fn cancel(
+ &self,
+ an: usize,
+ order: Order<()>,
+ ev_tx: Sender<Event>,
+ ) -> Result<(), anyhow::Error>;
+}
diff --git a/rust/src/depth/btreebook.rs b/rust/src/depth/btreebook.rs
new file mode 100644
index 0000000..9cdda02
--- /dev/null
+++ b/rust/src/depth/btreebook.rs
@@ -0,0 +1,150 @@
+use std::collections::BTreeMap;
+
+use super::{ApplySnapshot, MarketDepth, INVALID_MAX, INVALID_MIN};
+use crate::{
+ backtest::reader::Data,
+ ty::{Row, BUY, SELL},
+};
+
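+/// L2 market depth keyed by price tick in `BTreeMap`s, so the best bid is the
+/// last bid key and the best ask is the first ask key.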
+#[derive(Debug)]
+pub struct BTreeMapMarketDepth {
+ pub tick_size: f32,
+ pub lot_size: f32,
+ pub timestamp: i64,
+ pub bid_depth: BTreeMap<i32, f32>,
+ pub ask_depth: BTreeMap<i32, f32>,
+}
+
+impl BTreeMapMarketDepth {
+ pub fn new(tick_size: f32, lot_size: f32) -> Self {
+ Self {
+ tick_size,
+ lot_size,
+ timestamp: 0,
+ bid_depth: Default::default(),
+ ask_depth: Default::default(),
+ }
+ }
+}
+
+impl MarketDepth for BTreeMapMarketDepth {
+ fn update_bid_depth(
+ &mut self,
+ price: f32,
+ qty: f32,
+ timestamp: i64,
+ ) -> (i32, i32, i32, f32, f32, i64) {
+ let price_tick = (price / self.tick_size).round() as i32;
+ let prev_best_bid_tick = *self.bid_depth.keys().last().unwrap_or(&INVALID_MIN);
+        let prev_qty = *self.bid_depth.get(&price_tick).unwrap_or(&0.0);
+
+ if (qty / self.lot_size).round() as i32 == 0 {
+ self.bid_depth.remove(&price_tick);
+ } else {
+            self.bid_depth.insert(price_tick, qty);
+ }
+ let best_bid_tick = *self.bid_depth.keys().last().unwrap_or(&INVALID_MIN);
+ (
+ price_tick,
+ prev_best_bid_tick,
+ best_bid_tick,
+ prev_qty,
+ qty,
+ timestamp,
+ )
+ }
+
+ fn update_ask_depth(
+ &mut self,
+ price: f32,
+ qty: f32,
+ timestamp: i64,
+ ) -> (i32, i32, i32, f32, f32, i64) {
+ let price_tick = (price / self.tick_size).round() as i32;
+        let prev_best_ask_tick = *self.ask_depth.keys().next().unwrap_or(&INVALID_MAX);
+        let prev_qty = *self.ask_depth.get(&price_tick).unwrap_or(&0.0);
+
+ if (qty / self.lot_size).round() as i32 == 0 {
+ self.ask_depth.remove(&price_tick);
+ } else {
+            self.ask_depth.insert(price_tick, qty);
+ }
+ let best_ask_tick = *self.ask_depth.keys().next().unwrap_or(&INVALID_MAX);
+ (
+ price_tick,
+ prev_best_ask_tick,
+ best_ask_tick,
+ prev_qty,
+ qty,
+ timestamp,
+ )
+ }
+
+ fn clear_depth(&mut self, side: i64, clear_upto_price: f32) {
+ let clear_upto = (clear_upto_price / self.tick_size).round() as i32;
+ if side == BUY {
+ let best_bid_tick = self.best_bid_tick();
+ if best_bid_tick != INVALID_MIN {
+ for t in clear_upto..(best_bid_tick + 1) {
+ if self.bid_depth.contains_key(&t) {
+ self.bid_depth.remove(&t);
+ }
+ }
+ }
+ } else if side == SELL {
+ let best_ask_tick = self.best_ask_tick();
+ if best_ask_tick != INVALID_MAX {
+ for t in best_ask_tick..(clear_upto + 1) {
+ if self.ask_depth.contains_key(&t) {
+ self.ask_depth.remove(&t);
+ }
+ }
+ }
+ } else {
+ self.bid_depth.clear();
+ self.ask_depth.clear();
+ }
+ }
+
+ fn best_bid(&self) -> f32 {
+ self.best_bid_tick() as f32 * self.tick_size
+ }
+
+ fn best_ask(&self) -> f32 {
+ self.best_ask_tick() as f32 * self.tick_size
+ }
+
+ fn best_bid_tick(&self) -> i32 {
+ *self.bid_depth.keys().last().unwrap_or(&INVALID_MIN)
+ }
+
+ fn best_ask_tick(&self) -> i32 {
+ *self.ask_depth.keys().next().unwrap_or(&INVALID_MAX)
+ }
+
+ fn tick_size(&self) -> f32 {
+ self.tick_size
+ }
+
+ fn lot_size(&self) -> f32 {
+ self.lot_size
+ }
+}
+
+impl ApplySnapshot for BTreeMapMarketDepth {
+ fn apply_snapshot(&mut self, data: &Data<Row>) {
+ self.bid_depth.clear();
+ self.ask_depth.clear();
+ for row_num in 0..data.len() {
+ let price = data[row_num].px;
+ let qty = data[row_num].qty;
+
+ let price_tick = (price / self.tick_size).round() as i32;
+ if data[row_num].ev & BUY == BUY {
+ *self.bid_depth.entry(price_tick).or_insert(0f32) = qty;
+ } else if data[row_num].ev & SELL == SELL {
+ *self.ask_depth.entry(price_tick).or_insert(0f32) = qty;
+ }
+ }
+ }
+}
diff --git a/rust/src/depth/hashmapbook.rs b/rust/src/depth/hashmapbook.rs
new file mode 100644
index 0000000..5204687
--- /dev/null
+++ b/rust/src/depth/hashmapbook.rs
@@ -0,0 +1,251 @@
+use std::collections::{hash_map::Entry, HashMap};
+
+use super::{ApplySnapshot, MarketDepth, INVALID_MAX, INVALID_MIN};
+use crate::{
+ backtest::reader::Data,
+ ty::{Row, BUY, SELL},
+};
+
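+/// L2 market depth keyed by price tick in `HashMap`s, tracking the best bid
+/// and ask ticks incrementally instead of scanning the maps.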
+pub struct HashMapMarketDepth {
+ pub tick_size: f32,
+ pub lot_size: f32,
+ pub timestamp: i64,
+ pub ask_depth: HashMap<i32, f32>,
+ pub bid_depth: HashMap<i32, f32>,
+ pub best_bid_tick: i32,
+ pub best_ask_tick: i32,
+ pub low_bid_tick: i32,
+ pub high_ask_tick: i32,
+}
+
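+/// Returns the highest price tick with a positive quantity strictly below
+/// `start`, searching down to `end`; `INVALID_MIN` if none exists.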
+fn depth_below(depth: &HashMap<i32, f32>, start: i32, end: i32) -> i32 {
+ for t in (end..start).rev() {
+ if *depth.get(&t).unwrap_or(&0f32) > 0f32 {
+ return t;
+ }
+ }
+    INVALID_MIN
+}
+
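+/// Returns the lowest price tick with a positive quantity strictly above
+/// `start`, searching up to `end`; `INVALID_MAX` if none exists.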
+fn depth_above(depth: &HashMap<i32, f32>, start: i32, end: i32) -> i32 {
+ for t in (start + 1)..(end + 1) {
+ if *depth.get(&t).unwrap_or(&0f32) > 0f32 {
+ return t;
+ }
+ }
+    INVALID_MAX
+}
+
+impl HashMapMarketDepth {
+ pub fn new(tick_size: f32, lot_size: f32) -> Self {
+ Self {
+ tick_size,
+ lot_size,
+ timestamp: 0,
+ ask_depth: HashMap::new(),
+ bid_depth: HashMap::new(),
+ best_bid_tick: INVALID_MIN,
+ best_ask_tick: INVALID_MAX,
+ low_bid_tick: INVALID_MAX,
+ high_ask_tick: INVALID_MIN,
+ }
+ }
+}
+
+impl MarketDepth for HashMapMarketDepth {
+ fn update_bid_depth(
+ &mut self,
+ price: f32,
+ qty: f32,
+ timestamp: i64,
+ ) -> (i32, i32, i32, f32, f32, i64) {
+ let price_tick = (price / self.tick_size).round() as i32;
+ let qty_lot = (qty / self.lot_size).round() as i32;
+ let prev_best_bid_tick = self.best_bid_tick;
+ let prev_qty;
+ match self.bid_depth.entry(price_tick) {
+ Entry::Occupied(mut entry) => {
+ prev_qty = *entry.get();
+ if qty_lot > 0 {
+ *entry.get_mut() = qty;
+ } else {
+ entry.remove();
+ }
+ }
+ Entry::Vacant(entry) => {
+ prev_qty = 0f32;
+ if qty_lot > 0 {
+ entry.insert(qty);
+ }
+ }
+ }
+
+ if qty_lot == 0 {
+ if price_tick == self.best_bid_tick {
+ self.best_bid_tick =
+ depth_below(&self.bid_depth, self.best_bid_tick, self.low_bid_tick);
+ if self.best_bid_tick == INVALID_MIN {
+ self.low_bid_tick = INVALID_MAX
+ }
+ }
+ } else {
+ if price_tick > self.best_bid_tick {
+ self.best_bid_tick = price_tick;
+ if self.best_bid_tick >= self.best_ask_tick {
+ self.best_ask_tick =
+ depth_above(&self.ask_depth, self.best_bid_tick, self.high_ask_tick);
+ }
+ }
+ self.low_bid_tick = self.low_bid_tick.min(price_tick);
+ }
+ (
+ price_tick,
+ prev_best_bid_tick,
+ self.best_bid_tick,
+ prev_qty,
+ qty,
+ timestamp,
+ )
+ }
+
+ fn update_ask_depth(
+ &mut self,
+ price: f32,
+ qty: f32,
+ timestamp: i64,
+ ) -> (i32, i32, i32, f32, f32, i64) {
+ let price_tick = (price / self.tick_size).round() as i32;
+ let qty_lot = (qty / self.lot_size).round() as i32;
+ let prev_best_ask_tick = self.best_ask_tick;
+ let prev_qty;
+ match self.ask_depth.entry(price_tick) {
+ Entry::Occupied(mut entry) => {
+ prev_qty = *entry.get();
+ if qty_lot > 0 {
+ *entry.get_mut() = qty;
+ } else {
+ entry.remove();
+ }
+ }
+ Entry::Vacant(entry) => {
+ prev_qty = 0f32;
+ if qty_lot > 0 {
+ entry.insert(qty);
+ }
+ }
+ }
+
+ if qty_lot == 0 {
+ if price_tick == self.best_ask_tick {
+ self.best_ask_tick =
+ depth_above(&self.ask_depth, self.best_ask_tick, self.high_ask_tick);
+ if self.best_ask_tick == INVALID_MAX {
+ self.high_ask_tick = INVALID_MIN
+ }
+ }
+ } else {
+ if price_tick < self.best_ask_tick {
+ self.best_ask_tick = price_tick;
+ if self.best_bid_tick >= self.best_ask_tick {
+ self.best_bid_tick =
+ depth_below(&self.bid_depth, self.best_ask_tick, self.low_bid_tick);
+ }
+ }
+ self.high_ask_tick = self.high_ask_tick.max(price_tick);
+ }
+ (
+ price_tick,
+ prev_best_ask_tick,
+ self.best_ask_tick,
+ prev_qty,
+ qty,
+ timestamp,
+ )
+ }
+
+ fn clear_depth(&mut self, side: i64, clear_upto_price: f32) {
+ let clear_upto = (clear_upto_price / self.tick_size).round() as i32;
+ if side == BUY {
+ if self.best_bid_tick != INVALID_MIN {
+ for t in clear_upto..(self.best_bid_tick + 1) {
+ if self.bid_depth.contains_key(&t) {
+ self.bid_depth.remove(&t);
+ }
+ }
+ }
+ self.best_bid_tick = depth_below(&self.bid_depth, clear_upto - 1, self.low_bid_tick);
+ if self.best_bid_tick == INVALID_MIN {
+ self.low_bid_tick = INVALID_MAX;
+ }
+ } else if side == SELL {
+ if self.best_ask_tick != INVALID_MAX {
+ for t in self.best_ask_tick..(clear_upto + 1) {
+ if self.ask_depth.contains_key(&t) {
+ self.ask_depth.remove(&t);
+ }
+ }
+ }
+ self.best_ask_tick = depth_above(&self.ask_depth, clear_upto + 1, self.high_ask_tick);
+ if self.best_ask_tick == INVALID_MAX {
+ self.high_ask_tick = INVALID_MIN;
+ }
+ } else {
+ self.bid_depth.clear();
+ self.ask_depth.clear();
+ self.best_bid_tick = INVALID_MIN;
+ self.best_ask_tick = INVALID_MAX;
+ self.low_bid_tick = INVALID_MAX;
+ self.high_ask_tick = INVALID_MIN;
+ }
+ }
+
+ fn best_bid(&self) -> f32 {
+ self.best_bid_tick as f32 * self.tick_size
+ }
+
+ fn best_ask(&self) -> f32 {
+ self.best_ask_tick as f32 * self.tick_size
+ }
+
+ fn best_bid_tick(&self) -> i32 {
+ self.best_bid_tick
+ }
+
+ fn best_ask_tick(&self) -> i32 {
+ self.best_ask_tick
+ }
+
+ fn tick_size(&self) -> f32 {
+ self.tick_size
+ }
+
+ fn lot_size(&self) -> f32 {
+ self.lot_size
+ }
+}
+
+impl ApplySnapshot for HashMapMarketDepth {
+ fn apply_snapshot(&mut self, data: &Data<Row>) {
+ self.best_bid_tick = INVALID_MIN;
+ self.best_ask_tick = INVALID_MAX;
+ self.low_bid_tick = INVALID_MAX;
+ self.high_ask_tick = INVALID_MIN;
+ self.bid_depth.clear();
+ self.ask_depth.clear();
+ for row_num in 0..data.len() {
+ let price = data[row_num].px;
+ let qty = data[row_num].qty;
+
+ let price_tick = (price / self.tick_size).round() as i32;
+ if data[row_num].ev & BUY == BUY {
+ self.best_bid_tick = self.best_bid_tick.max(price_tick);
+ self.low_bid_tick = self.low_bid_tick.min(price_tick);
+ *self.bid_depth.entry(price_tick).or_insert(0f32) = qty;
+ } else if data[row_num].ev & SELL == SELL {
+ self.best_ask_tick = self.best_ask_tick.min(price_tick);
+ self.high_ask_tick = self.high_ask_tick.max(price_tick);
+ *self.ask_depth.entry(price_tick).or_insert(0f32) = qty;
+ }
+ }
+ }
+}
diff --git a/rust/src/depth/mod.rs b/rust/src/depth/mod.rs
new file mode 100644
index 0000000..45080d4
--- /dev/null
+++ b/rust/src/depth/mod.rs
@@ -0,0 +1,41 @@
+use crate::{backtest::reader::Data, ty::Row};
+
+pub mod btreebook;
+pub mod hashmapbook;
+
+pub const INVALID_MIN: i32 = i32::MIN;
+pub const INVALID_MAX: i32 = i32::MAX;
+
+pub trait MarketDepth {
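+    /// Updates the bid depth at the given price and returns
+    /// `(price_tick, prev_best_bid_tick, best_bid_tick, prev_qty, new_qty, timestamp)`.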
+ fn update_bid_depth(
+ &mut self,
+ price: f32,
+ qty: f32,
+ timestamp: i64,
+ ) -> (i32, i32, i32, f32, f32, i64);
+
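+    /// Updates the ask depth at the given price and returns
+    /// `(price_tick, prev_best_ask_tick, best_ask_tick, prev_qty, new_qty, timestamp)`.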
+ fn update_ask_depth(
+ &mut self,
+ price: f32,
+ qty: f32,
+ timestamp: i64,
+ ) -> (i32, i32, i32, f32, f32, i64);
+
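+    /// Clears depth on the given side up to `clear_upto_price`; any other side
+    /// value clears both sides entirely.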
+ fn clear_depth(&mut self, side: i64, clear_upto_price: f32);
+
+ fn best_bid(&self) -> f32;
+
+ fn best_ask(&self) -> f32;
+
+ fn best_bid_tick(&self) -> i32;
+
+ fn best_ask_tick(&self) -> i32;
+
+ fn tick_size(&self) -> f32;
+
+ fn lot_size(&self) -> f32;
+}
+
+pub trait ApplySnapshot {
+ fn apply_snapshot(&mut self, data: &Data<Row>);
+}
diff --git a/rust/src/error.rs b/rust/src/error.rs
new file mode 100644
index 0000000..dcc9387
--- /dev/null
+++ b/rust/src/error.rs
@@ -0,0 +1,23 @@
+use std::fmt::{Display, Formatter};
+
+#[derive(Debug)]
+pub enum BuildError {
+ BuilderIncomplete(&'static str),
+ Duplicate(String, String),
+ ConnectorNotFound(String),
+ Error(anyhow::Error),
+}
+
+impl Display for BuildError {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{:?}", self)
+ }
+}
+
+impl std::error::Error for BuildError {}
+
+impl From<anyhow::Error> for BuildError {
+ fn from(value: anyhow::Error) -> Self {
+ BuildError::Error(value)
+ }
+}
diff --git a/rust/src/lib.rs b/rust/src/lib.rs
new file mode 100644
index 0000000..6333aef
--- /dev/null
+++ b/rust/src/lib.rs
@@ -0,0 +1,66 @@
+use std::collections::HashMap;
+
+use crate::{
+ backtest::state::StateValues,
+ ty::{OrdType, Order, Row, TimeInForce},
+};
+
+pub mod backtest;
+pub mod connector;
+pub mod depth;
+mod error;
+pub mod live;
+pub mod ty;
+
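+/// Common trading interface exposing market data access, order management, and
+/// time control, so the same strategy code can drive a backtest or a live bot.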
+pub trait Interface<Q, MD>
+where
+ Q: Sized + Clone,
+{
+ type Error;
+
+ fn current_timestamp(&self) -> i64;
+
+ fn position(&self, asset_no: usize) -> f64;
+
+ fn state_values(&self, asset_no: usize) -> StateValues;
+
+ fn depth(&self, asset_no: usize) -> &MD;
+
+ fn trade(&self, asset_no: usize) -> &Vec<Row>;
+
+ fn clear_last_trades(&mut self, asset_no: Option<usize>);
+
+ fn orders(&self, asset_no: usize) -> &HashMap<i64, Order<Q>>;
+
+ fn submit_buy_order(
+ &mut self,
+ asset_no: usize,
+ order_id: i64,
+ price: f32,
+ qty: f32,
+ time_in_force: TimeInForce,
+ order_type: OrdType,
+ wait: bool,
+ ) -> Result<bool, Self::Error>;
+
+ fn submit_sell_order(
+ &mut self,
+ asset_no: usize,
+ order_id: i64,
+ price: f32,
+ qty: f32,
+ time_in_force: TimeInForce,
+ order_type: OrdType,
+ wait: bool,
+ ) -> Result<bool, Self::Error>;
+
+ fn cancel(&mut self, asset_no: usize, order_id: i64, wait: bool) -> Result<bool, Self::Error>;
+
+ fn clear_inactive_orders(&mut self, asset_no: Option<usize>);
+
+ fn elapse(&mut self, duration: i64) -> Result<bool, Self::Error>;
+
+ fn elapse_bt(&mut self, duration: i64) -> Result<bool, Self::Error>;
+
+ fn close(&mut self) -> Result<(), Self::Error>;
+}
diff --git a/rust/src/live/bot.rs b/rust/src/live/bot.rs
new file mode 100644
index 0000000..0c436ae
--- /dev/null
+++ b/rust/src/live/bot.rs
@@ -0,0 +1,392 @@
+use std::{
+ collections::{hash_map::Entry, HashMap},
+ sync::mpsc::{channel, Receiver, RecvTimeoutError, Sender},
+ thread,
+ time::{Duration, Instant},
+};
+
+use chrono::Utc;
+use tokio::{
+ select,
+ sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
+};
+use tracing::{debug, error, info, warn};
+
+use crate::{
+ backtest::{
+ state::{State, StateValues},
+ Error,
+ },
+ connector::Connector,
+ depth::{btreebook::BTreeMapMarketDepth, MarketDepth},
+ live::{AssetInfo, LiveBuilder},
+ ty::{Event, OrdType, Order, Request, Row, Side, Status, TimeInForce, BUY, SELL},
+ Interface,
+};
+
+#[derive(Eq, PartialEq, Clone, Copy, Debug)]
+pub enum BotError {
+ AssetNotFound,
+ OrderNotFound,
+ DuplicateOrderId,
+ InvalidOrderStatus,
+}
+
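+/// Runs the connectors and routes each order request from the bot to the
+/// connector that owns the corresponding asset.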
+#[tokio::main(worker_threads = 2)]
+async fn thread_main(
+ ev_tx: Sender<Event>,
+ mut req_rx: UnboundedReceiver<Request>,
+ mut conns: HashMap<String, Box<dyn Connector + Send + 'static>>,
+ mapping: Vec<(String, AssetInfo)>,
+) {
+    for (_, conn) in conns.iter_mut() {
+        if let Err(error) = conn.run(ev_tx.clone()) {
+            error!(?error, "Failed to start a connector.");
+        }
+    }
+ loop {
+ select! {
+ req = req_rx.recv() => {
+ match req {
+ Some(Request::Order((an, order))) => {
+ if let Some((connector_name, _)) = mapping.get(an) {
+ let conn_ = conns.get_mut(connector_name).unwrap();
+ let ev_tx_ = ev_tx.clone();
+ match order.req{
+ Status::New => {
+ if let Err(error) = conn_.submit(an, order, ev_tx_) {
+ error!(?error, "submit error");
+ }
+ }
+ Status::Canceled => {
+ if let Err(error) = conn_.cancel(an, order, ev_tx_) {
+ error!(?error, "cancel error");
+ }
+ }
+ req => {
+ error!(?req, "invalid request.");
+ }
+ }
+ }
+ }
+ None => {
+
+ }
+ }
+ }
+ }
+ }
+}
+
+pub struct Bot {
+ req_tx: UnboundedSender<Request>,
+ req_rx: Option<UnboundedReceiver<Request>>,
+ ev_tx: Option<Sender<Event>>,
+ ev_rx: Receiver<Event>,
+ pub depth: Vec<BTreeMapMarketDepth>,
+ pub orders: Vec<HashMap<i64, Order<()>>>,
+ pub position: Vec<f64>,
+ trade: Vec<Vec<Row>>,
+ conns: Option<HashMap<String, Box<dyn Connector + Send + 'static>>>,
+ assets: Vec<(String, AssetInfo)>,
+}
+
+impl Bot {
+ pub fn new(
+ conns: HashMap<String, Box<dyn Connector + Send + 'static>>,
+ assets: Vec<(String, AssetInfo)>,
+ ) -> Self {
+ let (ev_tx, ev_rx) = channel();
+ let (req_tx, req_rx) = unbounded_channel();
+
+ let depth = assets
+ .iter()
+ .map(|(_, asset_info)| {
+ BTreeMapMarketDepth::new(asset_info.tick_size, asset_info.lot_size)
+ })
+ .collect();
+
+ let orders = assets.iter().map(|_| HashMap::new()).collect();
+ let position = assets.iter().map(|_| 0.0).collect();
+ let trade = assets.iter().map(|_| Vec::new()).collect();
+
+ Self {
+ ev_tx: Some(ev_tx),
+ ev_rx,
+ req_rx: Some(req_rx),
+ req_tx,
+ depth,
+ orders,
+ position,
+ conns: Some(conns),
+ assets,
+ trade,
+ }
+ }
+
+ pub fn run(&mut self) {
+ let ev_tx = self.ev_tx.take().unwrap();
+ let req_rx = self.req_rx.take().unwrap();
+ let conns = self.conns.take().unwrap();
+ let assets = self.assets.clone();
+ let _ = thread::spawn(move || {
+ thread_main(ev_tx, req_rx, conns, assets);
+ });
+ }
+
+ fn elapse_(&mut self, duration: i64) -> Result<bool, BotError> {
+ let now = Instant::now();
+ let mut remaining_duration = duration;
+ loop {
+ let timeout = Duration::from_nanos(remaining_duration as u64);
+ match self.ev_rx.recv_timeout(timeout) {
+ Ok(Event::Depth(data)) => {
+ let depth = unsafe { self.depth.get_unchecked_mut(data.asset_no) };
+ depth.timestamp = data.exch_ts;
+ for (px, qty) in data.bids {
+ depth.update_bid_depth(px, qty, 0);
+ }
+ for (px, qty) in data.asks {
+ depth.update_ask_depth(px, qty, 0);
+ }
+ }
+ Ok(Event::Trade(data)) => {
+ let trade = unsafe { self.trade.get_unchecked_mut(data.asset_no) };
+ trade.push(Row {
+ exch_ts: data.exch_ts,
+ local_ts: data.local_ts,
+ ev: {
+ if data.side == 1 {
+ BUY
+ } else if data.side == -1 {
+ SELL
+ } else {
+ 0
+ }
+ },
+ px: data.price,
+ qty: data.qty,
+ });
+ }
+ Ok(Event::Order(data)) => {
+ debug!(?data, "Event::Order");
+ match self
+ .orders
+ .get_mut(data.asset_no)
+ .ok_or(BotError::AssetNotFound)?
+ .entry(data.order.order_id)
+ {
+ Entry::Occupied(mut entry) => {
+ let ex_order = entry.get_mut();
+ if data.order.exch_timestamp >= ex_order.exch_timestamp {
+ if ex_order.status == Status::Canceled
+ || ex_order.status == Status::Expired
+ || ex_order.status == Status::Filled
+ {
+ // Ignores the update since the current status is the final status.
+ } else {
+ ex_order.update(&data.order);
+ }
+ }
+ }
+ Entry::Vacant(entry) => {
+ warn!(?data, "Received an unmanaged order.");
+ entry.insert(data.order);
+ }
+ }
+ }
+ Ok(Event::Position(data)) => {
+ *(unsafe { self.position.get_unchecked_mut(data.asset_no) }) = data.qty;
+ }
+ Ok(Event::Error(code, _)) => {}
+ Err(RecvTimeoutError::Timeout) => {
+ return Ok(true);
+ }
+ Err(RecvTimeoutError::Disconnected) => {
+ return Ok(false);
+ }
+ }
+ let elapsed = now.elapsed().as_nanos() as i64;
+ if elapsed > duration {
+ return Ok(true);
+ }
+ remaining_duration = duration - elapsed;
+ }
+ }
+
+ fn submit_order(
+ &mut self,
+ asset_no: usize,
+ order_id: i64,
+ price: f32,
+ qty: f32,
+ time_in_force: TimeInForce,
+ order_type: OrdType,
+ wait: bool,
+ side: Side,
+ ) -> Result<bool, BotError> {
+ let orders = self
+ .orders
+ .get_mut(asset_no)
+ .ok_or(BotError::AssetNotFound)?;
+ if orders.contains_key(&order_id) {
+ return Err(BotError::DuplicateOrderId);
+ }
+ let tick_size = self.assets.get(asset_no).unwrap().1.tick_size;
+ let order = Order {
+ order_id,
+ q: (),
+ price_tick: (price / tick_size).round() as i32,
+ qty,
+ leaves_qty: 0.0,
+ tick_size,
+ side,
+ time_in_force,
+ order_type,
+ status: Status::New,
+ local_timestamp: Utc::now().timestamp_nanos_opt().unwrap(),
+ req: Status::New,
+ exec_price_tick: 0,
+ exch_timestamp: 0,
+ exec_qty: 0.0,
+ maker: false,
+ };
+ orders.insert(order.order_id, order.clone());
+ self.req_tx.send(Request::Order((asset_no, order))).unwrap();
+ Ok(true)
+ }
+}
+
+impl Interface<(), BTreeMapMarketDepth> for Bot {
+ type Error = BotError;
+
+ fn current_timestamp(&self) -> i64 {
+ Utc::now().timestamp_nanos_opt().unwrap()
+ }
+
+ fn position(&self, asset_no: usize) -> f64 {
+ *self.position.get(asset_no).unwrap_or(&0.0)
+ }
+
+ fn state_values(&self, asset_no: usize) -> StateValues {
+ StateValues {
+ position: *self.position.get(asset_no).unwrap_or(&0.0),
+ balance: 0.0,
+ fee: 0.0,
+ trade_num: 0,
+ trade_qty: 0.0,
+ trade_amount: 0.0,
+ }
+ }
+
+ fn depth(&self, asset_no: usize) -> &BTreeMapMarketDepth {
+ self.depth.get(asset_no).unwrap()
+ }
+
+ fn trade(&self, asset_no: usize) -> &Vec<Row> {
+ self.trade.get(asset_no).unwrap()
+ }
+
+ fn clear_last_trades(&mut self, asset_no: Option<usize>) {
+ match asset_no {
+ Some(asset_no) => {
+ self.trade.get_mut(asset_no).unwrap().clear();
+ }
+ None => {
+ for asset_no in 0..self.trade.len() {
+ self.trade.get_mut(asset_no).unwrap().clear();
+ }
+ }
+ }
+ }
+
+ fn orders(&self, asset_no: usize) -> &HashMap<i64, Order<()>> {
+ self.orders.get(asset_no).unwrap()
+ }
+
+ fn submit_buy_order(
+ &mut self,
+ asset_no: usize,
+ order_id: i64,
+ price: f32,
+ qty: f32,
+ time_in_force: TimeInForce,
+ order_type: OrdType,
+ wait: bool,
+ ) -> Result<bool, Self::Error> {
+ self.submit_order(
+ asset_no,
+ order_id,
+ price,
+ qty,
+ time_in_force,
+ order_type,
+ wait,
+ Side::Buy,
+ )
+ }
+
+ fn submit_sell_order(
+ &mut self,
+ asset_no: usize,
+ order_id: i64,
+ price: f32,
+ qty: f32,
+ time_in_force: TimeInForce,
+ order_type: OrdType,
+ wait: bool,
+ ) -> Result<bool, Self::Error> {
+ self.submit_order(
+ asset_no,
+ order_id,
+ price,
+ qty,
+ time_in_force,
+ order_type,
+ wait,
+ Side::Sell,
+ )
+ }
+
+ fn cancel(&mut self, asset_no: usize, order_id: i64, wait: bool) -> Result<bool, Self::Error> {
+ let orders = self
+ .orders
+ .get_mut(asset_no)
+ .ok_or(BotError::AssetNotFound)?;
+ let order = orders.get_mut(&order_id).ok_or(BotError::OrderNotFound)?;
+ if !order.cancellable() {
+ return Err(BotError::InvalidOrderStatus);
+ }
+ order.req = Status::Canceled;
+ order.local_timestamp = Utc::now().timestamp_nanos_opt().unwrap();
+ self.req_tx
+ .send(Request::Order((asset_no, order.clone())))
+ .unwrap();
+ Ok(true)
+ }
+
+ fn clear_inactive_orders(&mut self, an: Option<usize>) {
+ match an {
+ Some(an) => {
+ if let Some(orders) = self.orders.get_mut(an) {
+ orders.retain(|order_id, order| order.active());
+ }
+ }
+ None => {
+ for orders in self.orders.iter_mut() {
+ orders.retain(|order_id, order| order.active());
+ }
+ }
+ }
+ }
+
+ fn elapse(&mut self, duration: i64) -> Result<bool, Self::Error> {
+ self.elapse_(duration)
+ }
+
+ fn elapse_bt(&mut self, _duration: i64) -> Result<bool, Self::Error> {
+ Ok(true)
+ }
+
+ fn close(&mut self) -> Result<(), Self::Error> {
+ Ok(())
+ }
+}
diff --git a/rust/src/live/mod.rs b/rust/src/live/mod.rs
new file mode 100644
index 0000000..6ee4244
--- /dev/null
+++ b/rust/src/live/mod.rs
@@ -0,0 +1,74 @@
+use std::collections::{HashMap, HashSet};
+
+use crate::{connector::Connector, error::BuildError, live::bot::Bot};
+
+pub mod bot;
+
+#[derive(Clone)]
+pub struct AssetInfo {
+ pub asset_no: usize,
+ pub symbol: String,
+ pub tick_size: f32,
+ pub lot_size: f32,
+}
+
+pub struct LiveBuilder {
+ conns: HashMap<String, Box<dyn Connector + Send + 'static>>,
+ assets: Vec<(String, AssetInfo)>,
+}
+
+impl LiveBuilder {
+ pub fn new() -> Self {
+ Self {
+ conns: HashMap::new(),
+ assets: Vec::new(),
+ }
+ }
+
+ pub fn register<C>(mut self, name: &str, conn: C) -> Self
+ where
+ C: Connector + Send + 'static,
+ {
+ self.conns.insert(name.to_string(), Box::new(conn));
+ self
+ }
+
+ pub fn add(mut self, name: &str, symbol: &str, tick_size: f32, lot_size: f32) -> Self {
+ let asset_no = self.assets.len();
+ self.assets.push((
+ name.to_string(),
+ AssetInfo {
+ asset_no,
+ symbol: symbol.to_string(),
+ tick_size,
+ lot_size,
+ },
+ ));
+ self
+ }
+
+ pub fn build(self) -> Result<Bot, BuildError> {
+ let mut dup = HashSet::new();
+ let mut conns = self.conns;
+ for (an, (name, asset_info)) in self.assets.iter().enumerate() {
+ if !dup.insert(format!("{}/{}", name, asset_info.symbol)) {
+ Err(BuildError::Duplicate(
+ name.clone(),
+ asset_info.symbol.clone(),
+ ))?;
+ }
+ let conn = conns
+ .get_mut(name)
+ .ok_or(BuildError::ConnectorNotFound(name.to_string()))?;
+ conn.add(
+ an,
+ asset_info.symbol.clone(),
+ asset_info.tick_size,
+ asset_info.lot_size,
+ )?;
+ }
+
+ let con = Bot::new(conns, self.assets);
+ Ok(con)
+ }
+}
diff --git a/rust/src/ty.rs b/rust/src/ty.rs
new file mode 100644
index 0000000..39b9b27
--- /dev/null
+++ b/rust/src/ty.rs
@@ -0,0 +1,303 @@
+use std::{
+ collections::HashMap,
+ fmt::{Debug, Formatter},
+};
+
+#[derive(Clone, Debug)]
+pub enum Event {
+ Depth(Depth),
+ Trade(Trade),
+ Order(OrderResponse),
+ Position(Position),
+ Error(i64, Option<HashMap<&'static str, String>>),
+}
+
+pub const BUY: i64 = 1 << 29;
+pub const SELL: i64 = 0;
+
+pub const DEPTH_EVENT: i64 = 1;
+pub const TRADE_EVENT: i64 = 2;
+pub const DEPTH_CLEAR_EVENT: i64 = 3;
+pub const DEPTH_SNAPSHOT_EVENT: i64 = 4;
+pub const USER_DEFINED_EVENT: i64 = 100;
+
+pub trait ToStr {
+ fn to_str(&self) -> &'static str;
+}
+
+#[derive(Clone, PartialEq, Debug)]
+#[repr(C)]
+pub struct Row {
+ pub exch_ts: i64,
+ pub local_ts: i64,
+ pub ev: i64,
+ pub px: f32,
+ pub qty: f32,
+}
+
+#[derive(Clone, Copy, Eq, PartialEq, Debug)]
+#[repr(i64)]
+pub enum EvError {
+ ConnectionInterrupted = 0,
+ CriticalConnectionError = 1,
+}
+
+#[derive(Clone, PartialEq, Debug)]
+pub struct Depth {
+ pub asset_no: usize,
+ pub exch_ts: i64,
+ pub local_ts: i64,
+ pub bids: Vec<(f32, f32)>,
+ pub asks: Vec<(f32, f32)>,
+}
+
+#[derive(Clone, PartialEq, Debug)]
+pub struct Trade {
+ pub asset_no: usize,
+ pub exch_ts: i64,
+ pub local_ts: i64,
+ pub side: i8,
+ pub price: f32,
+ pub qty: f32,
+}
+
+#[derive(Clone, PartialEq, Debug)]
+pub struct Position {
+ pub asset_no: usize,
+ pub symbol: String,
+ pub qty: f64,
+}
+
+#[derive(Clone, Copy, Eq, PartialEq, Debug)]
+#[repr(i8)]
+pub enum Side {
+ Buy = 1,
+ Sell = -1,
+ Unsupported = 127,
+}
+
+impl Side {
+ pub fn as_f64(&self) -> f64 {
+ match self {
+ Side::Buy => 1f64,
+ Side::Sell => -1f64,
+ Side::Unsupported => panic!("Side::Unsupported"),
+ }
+ }
+
+ pub fn as_f32(&self) -> f32 {
+ match self {
+ Side::Buy => 1f32,
+ Side::Sell => -1f32,
+ Side::Unsupported => panic!("Side::Unsupported"),
+ }
+ }
+}
+
+impl ToStr for Side {
+ fn to_str(&self) -> &'static str {
+ match self {
+ Side::Buy => "BUY",
+ Side::Sell => "SELL",
+ Side::Unsupported => panic!("Side::Unsupported"),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Eq, PartialEq, Debug)]
+#[repr(u8)]
+pub enum Status {
+ None = 0,
+ New = 1,
+ Expired = 2,
+ Filled = 3,
+ Canceled = 4,
+ PartiallyFilled = 5,
+ Unsupported = 255,
+}
+
+#[derive(Clone, Copy, Eq, PartialEq, Debug)]
+#[repr(u8)]
+pub enum TimeInForce {
+ GTC = 0,
+ GTX = 1,
+ FOK = 2,
+ IOC = 3,
+ Unsupported = 255,
+}
+
+impl ToStr for TimeInForce {
+ fn to_str(&self) -> &'static str {
+ match self {
+ TimeInForce::GTC => "GTC",
+ TimeInForce::GTX => "GTX",
+ TimeInForce::FOK => "FOK",
+ TimeInForce::IOC => "IOC",
+ TimeInForce::Unsupported => panic!("TimeInForce::Unsupported"),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Eq, PartialEq, Debug)]
+#[repr(u8)]
+pub enum OrdType {
+ Limit = 0,
+ Market = 1,
+ Unsupported = 255,
+}
+
+impl ToStr for OrdType {
+ fn to_str(&self) -> &'static str {
+ match self {
+ OrdType::Limit => "LIMIT",
+ OrdType::Market => "MARKET",
+ OrdType::Unsupported => panic!("OrdType::Unsupported"),
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct Order<Q>
+where
+ Q: Sized + Clone,
+{
+ pub qty: f32,
+ pub leaves_qty: f32,
+ pub price_tick: i32,
+ pub tick_size: f32,
+ pub side: Side,
+ pub time_in_force: TimeInForce,
+ pub exch_timestamp: i64,
+ pub status: Status,
+ pub local_timestamp: i64,
+ pub req: Status,
+ pub exec_price_tick: i32,
+ pub exec_qty: f32,
+ pub order_id: i64,
+ pub q: Q,
+ pub maker: bool,
+ pub order_type: OrdType,
+}
+
+impl<Q> Order<Q>
+where
+ Q: Sized + Clone,
+{
+ pub fn new(
+ order_id: i64,
+ price_tick: i32,
+ tick_size: f32,
+ qty: f32,
+ side: Side,
+ order_type: OrdType,
+ time_in_force: TimeInForce,
+ ) -> Self
+ where
+ Q: Default,
+ {
+ Self {
+ qty,
+ leaves_qty: qty,
+ price_tick,
+ tick_size,
+ side,
+ time_in_force,
+ exch_timestamp: 0,
+ status: Status::None,
+ local_timestamp: 0,
+ req: Status::None,
+ exec_price_tick: 0,
+ exec_qty: 0.0,
+ order_id,
+ q: Q::default(),
+ maker: false,
+ order_type,
+ }
+ }
+
+ pub fn price(&self) -> f32 {
+ self.price_tick as f32 * self.tick_size
+ }
+
+ pub fn exec_price(&self) -> f32 {
+ self.exec_price_tick as f32 * self.tick_size
+ }
+
+ pub fn cancellable(&self) -> bool {
+ (self.status == Status::New || self.status == Status::PartiallyFilled)
+ && self.req == Status::None
+ }
+
+ pub fn active(&self) -> bool {
+ self.status == Status::New || self.status == Status::PartiallyFilled
+ }
+
+ pub fn pending(&self) -> bool {
+ self.req != Status::None
+ }
+
+ pub fn update(&mut self, order: &Order<Q>) {
+ self.qty = order.qty;
+ self.leaves_qty = order.leaves_qty;
+ self.price_tick = order.price_tick;
+ self.tick_size = order.tick_size;
+ self.side = order.side;
+ self.time_in_force = order.time_in_force;
+
+ if order.exch_timestamp < self.exch_timestamp {
+ tracing::info!(?order, ?self, "order::update");
+ }
+
+ assert!(order.exch_timestamp >= self.exch_timestamp);
+ if order.exch_timestamp > 0 {
+ self.exch_timestamp = order.exch_timestamp;
+ }
+ self.status = order.status;
+ if order.local_timestamp > 0 {
+ self.local_timestamp = order.local_timestamp;
+ }
+ self.req = order.req;
+ self.exec_price_tick = order.exec_price_tick;
+ self.exec_qty = order.exec_qty;
+ self.order_id = order.order_id;
+ self.q = order.q.clone();
+ self.maker = order.maker;
+ self.order_type = order.order_type;
+ }
+}
+
+impl<Q> Debug for Order<Q>
+where
+ Q: Sized + Clone,
+{
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Order")
+ .field("qty", &self.qty)
+ .field("leaves_qty", &self.leaves_qty)
+ .field("price_tick", &self.price_tick)
+ .field("tick_size", &self.tick_size)
+ .field("side", &self.side)
+ .field("time_in_force", &self.time_in_force)
+ .field("exch_timestamp", &self.exch_timestamp)
+ .field("status", &self.status)
+ .field("local_timestamp", &self.local_timestamp)
+ .field("req", &self.req)
+ .field("exec_price_tick", &self.exec_price_tick)
+ .field("exec_qty", &self.exec_qty)
+ .field("order_id", &self.order_id)
+ .field("maker", &self.maker)
+ .field("order_type", &self.order_type)
+ .finish()
+ }
+}
+
+#[derive(Clone, Debug)]
+pub enum Request {
+ Order((usize, Order<()>)),
+}
+
+#[derive(Clone, Debug)]
+pub struct OrderResponse {
+ pub asset_no: usize,
+ pub order: Order<()>,
+}
| Normalize Binance futures orderbook data
I wanted to take advantage of the [freely available historical futures orderbook level 2 data from binance](https://github.com/binance/binance-public-data/tree/master/Futures_Order_Book_Download).
It should be possible by combining this with historical trade data (also available from Binance, I believe) to obtain normalized data for `hftbacktest`.
But I couldn't find this in the repo examples. I wanted to check if it has already been done, so I don't waste time redoing it?
| There isn't. If you provide an example file for me to look into its format, I would add an example converter.
By the way, without a local timestamp indicating when you received the feed, accurate backtesting is not possible, as there is no feed latency information. While you can artificially generate a local timestamp by assuming feed latency, it is preferable to collect the data yourself for more reliable results.
> There isn't. If you provide an example file for me to look into its format, I would add an example converter.
Here is example LOB data for a single day: https://drive.google.com/file/d/1rVaDblmYJL0aPpgvdJ-fU9QFhMDga6f_/view?usp=sharing
Btw, I was also happy to write it, but wanted to make sure I wasn't "reinventing the wheel".
Yes, good point about the local timestamp. Thanks for the tip.
The artificial local timestamps are fine for my purposes at the moment.
Trade data is also required. It's still possible to backtest based only on depth data, but that is meaningless, especially in high-frequency backtesting.
Right. I was not suggesting trying to use OB data alone. Actually, I found your repo while looking for an implementation for inventory models, which of course need trade data to fit them.
The trade data is available from the [Binance Public Data](https://github.com/binance/binance-public-data):
```console
wget https://data.binance.vision/data/futures/um/daily/trades/BTCUSDT/BTCUSDT-trades-2020-07-01.zip
```
Here is the [trade data](https://drive.google.com/file/d/1eZ7Ze0lsU2hiyNC9K_KZIaBjegafeKrv/view?usp=sharing) corresponding to the above depth data.
I added the converter. [hftbacktest/data/utils/binancehistmktdata.py](https://github.com/nkaz001/hftbacktest/blob/master/hftbacktest/data/utils/binancehistmktdata.py) (a5d3f91)
Could you check if it works as expected? Again, in my experience, backtest results can exhibit significant discrepancies unless precise feed latency and order latency are used.
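For readers skimming the thread, here is a minimal sketch of what such a converter has to do; it is not the actual `binancehistmktdata.py` code. It maps each historical trade row onto the normalized layout (`exch_ts`, `local_ts`, `ev`, `px`, `qty`), with the side OR'd into the event flags as defined in `rust/src/ty.rs` above. The CSV column names, the millisecond input unit, and the 10 ms artificial feed latency are assumptions.

```python
import csv

import numpy as np

# event flags as in rust/src/ty.rs above
TRADE_EVENT = 2
BUY = 1 << 29
SELL = 0

FEED_LATENCY_US = 10_000  # assumed artificial 10 ms feed latency, in microseconds


def convert_trades(path: str) -> np.ndarray:
    """Map Binance historical trade rows onto (exch_ts, local_ts, ev, px, qty) rows."""
    rows = []
    with open(path, newline="") as f:
        # assumed columns: id, price, qty, quote_qty, time, is_buyer_maker
        for r in csv.DictReader(f):
            exch_ts = int(r["time"]) * 1_000  # ms -> us (assumed units)
            side = SELL if r["is_buyer_maker"].lower() == "true" else BUY
            rows.append((
                exch_ts,
                exch_ts + FEED_LATENCY_US,  # artificial local timestamp
                TRADE_EVENT | side,
                float(r["price"]),
                float(r["qty"]),
            ))
    return np.asarray(rows, dtype=np.float64)
```

The real converter also has to handle header-less files and the depth stream; this only shows the shape of the output.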
Excellent!
My plan was to look into the inventory MM model (as you gave an example of). I will report back if anything unexpected shows up.
I think you mean significant discrepancies between backtest and live trading results, but I am not doing any live trading at the moment. If you want me to try out one of your other examples with the Binance historical data, please let me know.
I am getting an error using the following [trade data](https://drive.google.com/file/d/1tp2vxJzrdRI-txq6xwtgYfGombpOfVG5/view?usp=sharing), for ETHUSDT on 2022-10-03, as in your example notebook.
I think it is because the first row contains the column names, unlike the previous example. My guess is that the format has changed with newer data.
Thanks for the report. Please see the latest commit. 740feee413795ea2a196077926e5def9e123229b
Thanks for updating the code.
Now I can successfully run the data preparation notebook.
However, when I use the prepared data from binance in the Guéant–Lehalle–Fernandez-Tapia Market Making Model and Grid Trading notebook, it is off by a factor of about 2 in trading intensity from your calculated results. For example:

It's as if there are only half as many trades in the data files obtained from binance. To be safe, I added a 10ms feed latency, but as expected that does not affect the fitted model parameters.
Note that I had to adjust for the fact that the binance data is timestamped to milliseconds rather than microseconds.
Would it be possible to share your collected data for ETHUSDT futures on 2022-10-03 (e.g. on Google drive)? That way people could reproduce your results, and also I could directly compare the trade data to binance.
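On the millisecond adjustment mentioned above: it is mechanical. A sketch, assuming the first two columns of the converted array hold the exchange and local timestamps in microseconds:

```python
import numpy as np

data = np.array([[1_664_755_200_000, 0, 2, 1315.25, 0.5]])  # one trade row, ms timestamp
data[:, 0] *= 1_000               # exchange timestamp: ms -> us
data[:, 1] = data[:, 0] + 10_000  # artificial local timestamp: +10 ms feed latency
```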
For your information, I used the `trade` stream instead of the `aggTrade` stream, which is currently officially documented but aggregated.
I'm not sure I understand, since I also used `trade` data from binance, rather than `aggTrade`. In fact, your converter does not even work on the binance historical `aggTrade` data, though I don't see a need for it.
Unless you are suggesting that the `trade` data from binance is in fact still aggregated?
Anyhow, my plan is to collect my own data from the stream and then I can compare with the historical data from binance.
No. But the `trade` stream functions as expected, just like its description in the official spot API document, even though it is not outlined in the official futures API documents. So I guess Binance's historical data also came from `aggTrade`. Comparison is the most effective way of figuring things out.
Another issue showed up: I was working with more recent data, and it has an additional undocumented field `trans_id`. This changes the offset of the other fields, and breaks the converter.
Here is an example of the recent snapshot data: https://drive.google.com/file/d/1y-9nt9V-eB_OV3uSq4-dzBe-eOsQDt4S/view?usp=sharing
See 2b3137c3c643e9a96621e8fb0c3cd46ab0922dde and let me know if it works as expected.
Code looks much better now without hard-coded indices, and it processes the snapshot fine.
But now it fails on the `convert` function call in the validation step with an exception.
Here is the [lob data](https://drive.google.com/file/d/1X46yMgNarKp6NR2WDIsNZeZ7wrfIKAtb/view?usp=sharing) and [trade data](https://drive.google.com/file/d/1tM39EYFsuXWpTGfv8HL8ODTbqG_z40cr/view?usp=sharing) to reproduce this.
See 7299d9a3968c7acc079dfffc5aad50c947e86cf2. I fixed the mingled timestamp issue, but since the data has no local timestamp, there is no way around sorting. That can cause another discrepancy; beware of it.
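For reference, the sorting step looks roughly like this; a sketch assuming the merged event array has the exchange timestamp in column 0. A stable sort preserves the original file order of rows that share a timestamp, which is the best that can be done without a local timestamp:

```python
import numpy as np

data = np.array([[2.0, 0.0], [1.0, 0.0], [2.0, 1.0]])  # toy rows, exch_ts in column 0
order = np.argsort(data[:, 0], kind="stable")           # stable: ties keep file order
data = data[order]
```

Rows that actually reached the local machine in a different order are exactly the discrepancy being warned about.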
Thanks! I tested it out and there were no more errors.
I'm not sure exactly what discrepancy you mean, but perhaps it will become more clear as I continue working on it.
What I meant by that is that any difference from the live trading environment can cause a discrepancy.
djk2/django-admin-shell | djk2__django-admin-shell-7 | 90d094efabf3b13f739cfc521c9d52eebc14214b | diff --git a/README.rst b/README.rst
index 7f048d5..4e933f6 100644
--- a/README.rst
+++ b/README.rst
@@ -218,6 +218,18 @@ If you want disable auto import for models, set this flag to `False`.
**Nont**: *If during import occurred error `ImportError` then this module will be omitted.*
+ADMIN_SHELL_CLEAR_SCOPE_ON_CLEAR_HISTORY
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+*type* : **bool**
+
+*default* : **False**
+
+This flag enables garbage collection of the variables declared during shell execution when "clear history" is executed.
+If this flag is set to `True`, then all declared variables will be erased and freed from memory at runtime when "clear history" is used.
+If you want to persist all declared variables in the shell indefinitely, set this flag to `False`.
+
+**BEWARE**: *leaving this disabled is not recommended in production!*
+
Code examples
-------------
diff --git a/django_admin_shell/settings.py b/django_admin_shell/settings.py
index 477dc16..a41c7b7 100644
--- a/django_admin_shell/settings.py
+++ b/django_admin_shell/settings.py
@@ -68,3 +68,7 @@ def from_settings_or_default(name, default):
'ADMIN_SHELL_IMPORT_MODELS',
True
)
+ADMIN_SHELL_CLEAR_SCOPE_ON_CLEAR_HISTORY = from_settings_or_default(
+ 'ADMIN_SHELL_CLEAR_SCOPE_ON_CLEAR_HISTORY',
+ False
+)
diff --git a/django_admin_shell/urls.py b/django_admin_shell/urls.py
index 1cbe38b..f243e91 100644
--- a/django_admin_shell/urls.py
+++ b/django_admin_shell/urls.py
@@ -10,10 +10,10 @@
from django.contrib.admin.views.decorators import staff_member_required
-from .views import Shell
+from .views import ShellView
app_name = 'django_admin_shell'
urlpatterns = [
- re_path(r'^$', staff_member_required(Shell.as_view()), name="shell"),
+ re_path(r'^$', staff_member_required(ShellView.as_view()), name="shell"),
]
diff --git a/django_admin_shell/views.py b/django_admin_shell/views.py
index e8fa535..3893afc 100644
--- a/django_admin_shell/views.py
+++ b/django_admin_shell/views.py
@@ -24,6 +24,7 @@
ADMIN_SHELL_IMPORT_DJANGO,
ADMIN_SHELL_IMPORT_DJANGO_MODULES,
ADMIN_SHELL_IMPORT_MODELS,
+ ADMIN_SHELL_CLEAR_SCOPE_ON_CLEAR_HISTORY
)
import django
@@ -108,6 +109,14 @@ def get_scope(self):
return self._scope
+ def clear_scope(self):
+ """
+        Clear the scope.
+
+        Frees declared variables so they can be garbage collected.
+ """
+ self._scope = None
+
def __str__(self):
buf = ""
for module, symbols in self.get_modules().items():
@@ -162,7 +171,7 @@ def get_dj_version():
return django.__version__
-class Shell(FormView):
+class ShellView(FormView):
template_name = "django_admin_shell/shell.html"
form_class = ShellForm
@@ -194,7 +203,7 @@ def dispatch(self, request, *args, **kwargs):
return HttpResponseForbidden(
"Forbidden: To access Django admin shell you must be superuser"
)
- return super(Shell, self).dispatch(request, *args, **kwargs)
+ return super(ShellView, self).dispatch(request, *args, **kwargs)
def get_output(self):
if self.output is None:
@@ -215,12 +224,14 @@ def save_output(self):
def clear_output(self):
self.output = []
self.save_output()
+ if ADMIN_SHELL_CLEAR_SCOPE_ON_CLEAR_HISTORY:
+ self.runner.importer.clear_scope()
def get(self, request, *args, **kwargs):
# Clear output history - set empty list and save
if request.GET.get("clear_history", "no") == "yes":
self.clear_output()
- return super(Shell, self).get(request, *args, **kwargs)
+ return super(ShellView, self).get(request, *args, **kwargs)
def form_valid(self, form):
code = form.cleaned_data.get("code", "")
@@ -228,11 +239,11 @@ def form_valid(self, form):
result = self.runner.run_code(code)
self.add_to_outout(result)
self.save_output()
- return super(Shell, self).form_valid(form)
+ return super(ShellView, self).form_valid(form)
def get_context_data(self, **kwargs):
"""Add output to context"""
- ctx = super(Shell, self).get_context_data(**kwargs)
+ ctx = super(ShellView, self).get_context_data(**kwargs)
ctx['site_header'] = "Django admin shell"
ctx['has_permission'] = True
ctx['output'] = self.get_output()
| variables defined for shell remain even after "clear history"
# Summary
I'm going to use this lib in production! Mostly for report data.
So I'm going to create large variables with some data, mostly JSONs of my model data.
# Problem
The variables defined inside the shell remain instantiated even **AFTER** the "clear history" button is pressed.
So running this snippet leaves roughly 400 MB of memory in use.
```python
data = [i for i in range(10_000_000)]
```
> if 100_000_000 is used instead of 10_000_000, the memory in use goes to 4.1 GB
# Solution
Run garbage collection when "clear history" is used.
Analysing the code, I found where the variables are persisted: in `Runner.Importer._scope`, in the form of a dict. So it's just necessary to set `Runner.Importer._scope = None` to free all declared variables!
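A standalone sketch (not django-admin-shell code) of the effect being described: once the scope dict stops referencing the list, CPython can reclaim the memory.

```python
import gc

scope = {"data": [i for i in range(10_000_000)]}  # roughly 400 MB resident

scope = None  # drop the only reference, as setting Runner.Importer._scope = None would
gc.collect()  # not strictly needed here (no reference cycles), but forces collection anyway
```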
| 2022-07-07T21:32:13 | 0.0 | [] | [] |
TRI-AMDD/beep | TRI-AMDD__beep-250 | 8a0c251b33d37b63cbcb39779f8a8ee5756b935d | diff --git a/beep/structure/base.py b/beep/structure/base.py
index 1486288f..5faab54c 100644
--- a/beep/structure/base.py
+++ b/beep/structure/base.py
@@ -12,6 +12,8 @@
import numpy as np
from scipy import integrate
from monty.json import MSONable
+from monty.io import zopen
+from monty.serialization import dumpfn
from beep import tqdm
from beep import MODULE_DIR
@@ -258,6 +260,8 @@ def from_file(cls, path, *args, **kwargs):
def from_json_file(cls, filename):
"""Load a structured run previously saved to file.
+ .json.gz files are supported.
+
Loads a BEEPDatapath or (legacy) ProcessedCyclerRun structured object from json.
Can be used in combination with files serialized with BEEPDatapath.to_json_file.
@@ -268,7 +272,7 @@ def from_json_file(cls, filename):
Returns:
None
"""
- with open(filename, "r") as f:
+ with zopen(filename, "r") as f:
d = json.load(f)
# Add this structured file path to the paths dict
@@ -278,19 +282,26 @@ def from_json_file(cls, filename):
return cls.from_dict(d)
- def to_json_file(self, filename):
+ def to_json_file(self, filename, omit_raw=False):
"""Save a BEEPDatapath to disk as a json.
+ .json.gz files are supported.
+
Not named from_json to avoid conflict with MSONable.from_json(*)
Args:
filename (str, Pathlike): The filename to save the file to.
+ omit_raw (bool): If True, saves only structured (NOT RAW) data.
+ More efficient for saving/writing to disk.
Returns:
None
"""
- with open(filename, "w") as f:
- json.dump(self.as_dict(), f)
+ d = self.as_dict()
+ if omit_raw:
+ d.pop("raw_data")
+
+ dumpfn(d, filename)
@StructuringDecorators.must_not_be_legacy
def as_dict(self):
@@ -356,6 +367,7 @@ def from_dict(cls, d):
paths = d.get("paths", None)
# support legacy operations
+ # support loads when raw_data not available
if any([k not in d for k in ("raw_data", "metadata")]):
raw_data = None
metadata = {k: d.get(k) for k in ("barcode", "protocol", "channel_id")}
diff --git a/beep/structure/cli.py b/beep/structure/cli.py
index 7a017d5b..43dbe4a7 100644
--- a/beep/structure/cli.py
+++ b/beep/structure/cli.py
@@ -50,7 +50,7 @@
import json
from docopt import docopt
-from monty.serialization import loadfn, dumpfn
+from monty.serialization import loadfn
from beep import logger, __version__
from beep.conversion_schemas import (
@@ -75,7 +75,7 @@
SERVICE_CONFIG = {"service": "DataStructurer"}
-def process_file_list_from_json(file_list_json, processed_dir="data-share/structure/"):
+def process_file_list_from_json(file_list_json, processed_dir="data-share/structure/", omit_raw=True):
"""Function to take a json filename corresponding to a data structure
with a 'file_list' and a 'validity' attribute, process each file
with a corresponding True validity, dump the processed file into
@@ -89,6 +89,8 @@ def process_file_list_from_json(file_list_json, processed_dir="data-share/struct
and loaded, otherwise interpreted as a json string.
processed_dir (str): location for processed cycler run output
files to be placed.
+ omit_raw (bool): Omit the raw_data from being saved to file. Creates
+ legacy file structure for all structured datapaths.
Returns:
(str): json string of processed files (with key "processed_file_list").
@@ -137,7 +139,7 @@ def process_file_list_from_json(file_list_json, processed_dir="data-share/struct
new_filename = add_suffix_to_filename(new_filename, "_structure")
structured_run_loc = os.path.join(processed_dir, new_filename)
structured_run_loc = os.path.abspath(structured_run_loc)
- dumpfn(dp, structured_run_loc)
+ dp.to_json_file(structured_run_loc, omit_raw)
# Append file loc to list to be returned
processed_file_list.append(structured_run_loc)
| Automatically serialize BEEPDatapaths to .json.gz, not json
New BEEPDatapaths are a bit bigger than older ProcessedCyclerRuns since they include complete data, including the raw data.
Running the `TestMaccorDatapath.test_get_diagnostic` and checking that file:
```
processed_diagnostic.json 249.3MB
```
Compressing it
```
processed_diagnostic.json.gz 18.6MB
```
Compressing and decompressing even large files is computationally trivial in comparison with interpolation. It seems like the default surely should be compressing these saved files.
| 2021-04-27T04:00:51 | 0.0 | [] | [] |
manmartgarc/stochatreat | manmartgarc__stochatreat-11 | fd45bf75924c4650fcbb00701281b67d0cb97382 | diff --git a/stochatreat/stochatreat.py b/stochatreat/stochatreat.py
index 087b005..6971ede 100644
--- a/stochatreat/stochatreat.py
+++ b/stochatreat/stochatreat.py
@@ -63,21 +63,22 @@ def stochatreat(data: pd.DataFrame,
Returns
-------
pandas.DataFrame with idx_col, treat (treatment assignments) and
- stratum_ids
+ stratum_id (the id of the stratum within which the assignment procedure
+ was carried out) columns
Usage
-----
- Single block:
- >>> treats = stochatreat(data=data, # your dataframe
- stratum_cols='block1', # the strata variable
- treats=2, # including control
- idx_col='myid', # the unique id column
- random_state=42) # seed for rng
+ Single stratum:
+ >>> treats = stochatreat(data=data, # your dataframe
+ stratum_cols='stratum1', # stratum variable
+ treats=2, # including control
+ idx_col='myid', # unique id column
+ random_state=42) # seed for rng
>>> data = data.merge(treats, how='left', on='myid')
- Multiple blocks:
+ Multiple strata:
>>> treats = stochatreat(data=data,
- stratum_cols=['block1', 'block2'],
+ stratum_cols=['stratum1', 'stratum2'],
treats=2,
probs=[1/3, 2/3],
idx_col='myid',
@@ -119,6 +120,9 @@ def stochatreat(data: pd.DataFrame,
idx_col = 'index'
elif type(idx_col) is not str:
raise TypeError('idx_col has to be a string.')
+
+ # retrieve type to check and re-assign in the end
+ idx_col_type = data[idx_col].dtype
# check for unique identifiers
if data[idx_col].duplicated(keep=False).sum() > 0:
@@ -139,7 +143,7 @@ def stochatreat(data: pd.DataFrame,
# assignments
data = data.sort_values(by=idx_col)
- # combine block cells - by assigning stratum ids
+ # combine strata cells - by assigning stratum ids
data['stratum_id'] = data.groupby(stratum_cols).ngroup()
# keep only ids and concatenated strata
@@ -205,13 +209,13 @@ def stochatreat(data: pd.DataFrame,
# assign treatments
# =========================================================================
- # sort by strata first, and assign a long list of permuted treat_mask to
- # deal with misfits, in this case we can add fake rows to make it so
- # everything is divisible and toss them later -> no costly apply inside
- # strata
+ # sort by strata first, and assign a long list of permuted `treat_mask` to
+ # deal with misfits, we add fake rows to each stratum so that its length is
+ # divisible by `lcm_prob_denominators` and toss them later
+ # -> no costly apply inside the strata
# add fake rows for each stratum so the total number can be divided by
- # num_treatments
+ # `lcm_prob_denominators`
fake = pd.DataFrame(
{'fake': data.groupby('stratum_id').size()}
).reset_index()
@@ -227,23 +231,25 @@ def stochatreat(data: pd.DataFrame,
data.loc[:, 'fake'] = False
fake_rep.loc[:, 'fake'] = True
- ordered = (pd.concat([data, fake_rep], sort=False)
+ data = (pd.concat([data, fake_rep], sort=False)
.sort_values(['stratum_id'])
)
# generate random permutations without loop by generating large number of
# random values and sorting row (meaning one permutation) wise
permutations = np.argsort(
- R.rand(len(ordered) // lcm_prob_denominators, lcm_prob_denominators),
+ R.rand(len(data) // lcm_prob_denominators, lcm_prob_denominators),
axis=1
)
# lookup treatment name for permutations. This works because we flatten
# row-major style, i.e. one row after another.
- ordered['treat'] = treat_mask[permutations].flatten(order='C')
- ordered = ordered[~ordered['fake']].drop(columns=['fake'])
+ data.loc[:, 'treat'] = treat_mask[permutations].flatten(order='C')
+ data = data[~data['fake']].drop(columns=['fake'])
- data.loc[:, 'treat'] = ordered['treat']
- data['treat'] = data['treat'].astype(np.int64)
+ # re-assign type - as it might have changed with the addition of fake data
+ data.loc[:, idx_col] = data[idx_col].astype(idx_col_type)
+
+ data.loc[:, 'treat'] = data['treat'].astype(np.int64)
assert data['treat'].isnull().sum() == 0
| Allow for no strategy for misfits
Currently, the user is forced to select a strategy to deal with misfits, which is either "stratum" or "global". It would be nice to have the option to not do anything, and just leave misfits without any assignment (e.g. as `NA`s).
https://github.com/manmartgarc/stochatreat/blob/38d0e8d087c11178a6dd5ecd7d518527153bf945/src/stochatreat/stochatreat.py#L137-L138
I can think of two reasons why this would be useful:
1. It makes it easier to debug/diagnose an allocation setup, as it allows you to see which observations are internally assigned to misfits.
2. It allows the researcher to deal with misfits in potentially different ways from the two pre-specified ones.
I suppose this is easy to implement, but I'd be happy to help if help is needed.
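To make the request concrete, a hypothetical sketch: the `misfit_strategy=None` keyword shown here does not exist yet, it is only the shape the request implies. With the left-merge pattern from the docs, unassigned misfits would then surface as `NaN`:

```python
from stochatreat import stochatreat

# data: your pandas DataFrame with a unique "myid" column
# hypothetical: None would leave misfits unassigned instead of "stratum"/"global"
treats = stochatreat(
    data=data,
    stratum_cols=["stratum1"],
    treats=2,
    idx_col="myid",
    misfit_strategy=None,  # not a real option today
)
data = data.merge(treats, how="left", on="myid")

misfits = data[data["treat"].isna()]  # inspect them, or assign them your own way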
| 2019-07-24T12:24:52 | 0.0 | [] | [] |
petereon/beaupy | petereon__beaupy-88 | 93a12915b896d7d9aedb2d5bdefc6ea98cd9d29b | diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml
index 52d2457..257cbab 100644
--- a/.github/workflows/python-publish.yml
+++ b/.github/workflows/python-publish.yml
@@ -42,16 +42,16 @@ jobs:
export PYTHONPATH=$(pwd)
poe test
- - name: Install documentation dependencies
- run: pip install -r docs/requirements.txt
- - name: Build documentation
- run: cd docs && novella
- - name: Publish documentation
- uses: JamesIves/[email protected]
- with:
- branch: gh-pages
- folder: docs/_site
- ssh-key: ${{ secrets.DEPLOY_KEY }}
+ # - name: Install documentation dependencies
+ # run: pip install -r docs/requirements.txt
+ # - name: Build documentation
+ # run: cd docs && novella
+ # - name: Publish documentation
+ # uses: JamesIves/[email protected]
+ # with:
+ # branch: gh-pages
+ # folder: docs/_site
+ # ssh-key: ${{ secrets.DEPLOY_KEY }}
- name: Publish package
uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
diff --git a/beaupy/_beaupy.py b/beaupy/_beaupy.py
index 50e259f..e0879fd 100755
--- a/beaupy/_beaupy.py
+++ b/beaupy/_beaupy.py
@@ -66,6 +66,7 @@ class DefaultKeys:
home: List[Union[Tuple[int, ...], str]] = [Keys.HOME]
end: List[Union[Tuple[int, ...], str]] = [Keys.END]
interrupt: List[Union[Tuple[int, ...], str]] = [Keys.CTRL_C]
+ select_all: List[Union[Tuple[int, ...], str]] = ['a']
class Config:
@@ -140,16 +141,23 @@ def _navigate_select_multiple(
elif any([keypress in navigation_keys for navigation_keys in _navigation_keys]):
state = _navigate_select(state, keypress=keypress)
+ elif keypress in DefaultKeys.select_all:
+            if len(state.selected_indexes) == (maximal_count if maximal_count is not None else len(state.options)):
+ state.selected_indexes = []
+ else:
+ if maximal_count is not None:
+ state.selected_indexes = list(range(maximal_count))
+ state.error = f'Must select at most {maximal_count} options'
+ else:
+ state.selected_indexes = list(range(len(state.options)))
elif keypress in DefaultKeys.select:
if state.index in state.selected_indexes:
state.selected_indexes.remove(state.index)
- elif maximal_count is not None:
- if len(state.selected_indexes) + 1 <= maximal_count:
- state.selected_indexes.append(state.index)
- else:
- state.error = f'Must select at most {maximal_count} options'
else:
- state.selected_indexes.append(state.index)
+ if maximal_count is not None and len(state.selected_indexes) + 1 > maximal_count:
+ state.error = f'Must select at most {maximal_count} options'
+ else:
+ state.selected_indexes.append(state.index)
elif keypress in DefaultKeys.confirm:
if minimal_count > len(state.selected_indexes):
state.error = f'Must select at least {minimal_count} options'
@@ -453,7 +461,7 @@ def confirm(
question_line = f'{question}{yn_prompt}{current_message}'
yes_prefix = selected_prefix if yes else deselected_prefix
no_prefix = selected_prefix if no else deselected_prefix
- rendered = f'{question_line}\n{yes_prefix}{yes_text}\n{no_prefix}{no_text}\n\n(Confirm with [bold]enter[/bold])'
+ rendered = f'{question_line}\n{yes_prefix}{yes_text}\n{no_prefix}{no_text}\n\n([bold]enter[/bold] to confirm)'
_update_rendered(live, rendered)
keypress = get_key()
diff --git a/beaupy/_internals.py b/beaupy/_internals.py
index fff6d57..a89f62f 100644
--- a/beaupy/_internals.py
+++ b/beaupy/_internals.py
@@ -87,7 +87,7 @@ def _render_prompt(secure: bool, state: qprompt.PromptState) -> str:
)
render_value = f'{render_value}\n{rendered_completion_options}'
- render_value = f'{state.title}\n> {render_value}\n\n(Confirm with [bold]enter[/bold])'
+ render_value = f'{state.title}\n> {render_value}\n\n([bold]enter[/bold] to confirm)'
if state.error:
render_value = f'{render_value}\n[red]Error:[/red] {state.error}'
@@ -115,7 +115,7 @@ def _render_select(preprocessor: Callable[[Any], str], cursor_style: str, cursor
]
)
+ (f'[grey58]\n\nPage {page}/{total_pages}[/grey58]' if state.pagination and total_pages > 1 else '') # noqa: W503
- + '\n\n(Confirm with [bold]enter[/bold])' # noqa: W503
+ + '\n\n([bold]enter[/bold] to confirm)' # noqa: W503
)
@@ -143,7 +143,7 @@ def _render_select_multiple(
]
)
+ (f'[grey58]\n\nPage {page}/{total_pages}[/grey58]' if state.pagination and total_pages > 1 else '') # noqa: W503
- + '\n\n(Mark with [bold]space[/bold], confirm with [bold]enter[/bold])' # noqa: W503
+ + '\n\n([bold]space[/bold] to tick one, [bold]a[/bold] to tick/untick all, [bold]enter[/bold] to confirm)' # noqa: W503
)
if state.error:
rendered = f'{rendered}\n[red]Error:[/red] {state.error}'
| Select/deselect all options in `select_multiple`
https://github.com/petereon/beaupy/discussions/66#discussion-5226284
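A usage sketch of the behaviour this patch adds: `a` toggles between everything ticked (capped at `maximal_count` when one is set) and nothing ticked.

```python
from beaupy import select_multiple

# SPACE ticks one option, 'a' ticks/unticks all, ENTER confirms
chosen = select_multiple(["pink", "red", "green"])
print(chosen)
```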
| 2024-03-21T20:38:31 | 0.0 | [] | [] |
petereon/beaupy | petereon__beaupy-63 | dd0877b6bd9a8226e6a372341f12142a2e1d7ec8 | diff --git a/beaupy/_beaupy.py b/beaupy/_beaupy.py
index 88cf016..9f7a603 100755
--- a/beaupy/_beaupy.py
+++ b/beaupy/_beaupy.py
@@ -365,7 +365,7 @@ def select_multiple(
[
_render_option_select_multiple(
option=preprocessor(option),
- ticked=i + show_from in ticked_indices,
+ ticked=(i + show_from in ticked_indices) if pagination else (i in ticked_indices),
tick_character=tick_character,
tick_style=tick_style,
selected=i == (index % page_size if pagination else index),
| pagination=False for select_multiple is not working
Currently, if `len(options) > page_size`, the select list doesn't work properly: all items are shown, but checks are misattributed to different options and some options are not selectable.
`pagination=False` is the default, my code was like:
```python
indices = select_multiple(
options=options,
ticked_indices=ticked_indices,
cursor_index=ticked_indices[0],
minimal_count=1,
return_indices=True,
)
```
(default page size is 5, I had 8 options)
I was able to work around it by setting `page_size=len(options),`
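Spelled out, the workaround just forces a single page to cover every option; the same call as above, assuming `options` and `ticked_indices` from the surrounding code:

```python
from beaupy import select_multiple

indices = select_multiple(
    options=options,
    ticked_indices=ticked_indices,
    cursor_index=ticked_indices[0],
    minimal_count=1,
    return_indices=True,
    page_size=len(options),  # workaround: one page holds everything
)
```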
| Hi @anentropic, thanks for reporting the issue. I will investigate!
I have tested this with the following example under version `3.5.3`:
```
from beaupy import select_multiple
options = ["a", "b", "c", "d", "e", "f", "g", "h"]
ticked_indices = [2, 3, 4, 5]
indices = select_multiple(
options=options,
ticked_indices=ticked_indices,
cursor_index=ticked_indices[0],
minimal_count=1,
return_indices=True,
)
print(indices)
```
I've found a bug which manifests after pressing the `END` key. I am going to track it down and hope it fixes your issue.
Before pressing `END`:

Immediatelly after pressing `END`:

| 2023-05-04T20:42:16 | 0.0 | [] | [] |
petereon/beaupy | petereon__beaupy-61 | 4a62069d9123e839771ca3183b3c96576c8a2366 | diff --git a/beaupy/_beaupy.py b/beaupy/_beaupy.py
index 9f7a603..6ed9b82 100755
--- a/beaupy/_beaupy.py
+++ b/beaupy/_beaupy.py
@@ -7,6 +7,7 @@
import math
import warnings
+from itertools import cycle
from typing import Any, Callable, List, Optional, Tuple, Type, Union
from rich.console import Console
@@ -128,6 +129,7 @@ def prompt(
raise_validation_fail: bool = True,
raise_type_conversion_fail: bool = True,
initial_value: Optional[str] = None,
+ completion: Optional[Callable[[str], List[str]]] = None,
) -> TargetType:
"""Function that prompts the user for written input
@@ -155,11 +157,29 @@ def prompt(
value: List[str] = [*initial_value] if initial_value else []
cursor_index = len(initial_value) if initial_value else 0
error: str = ''
+ completion_context = False
+ completion_options: List[str] = []
while True:
- rendered = _render_prompt(secure, value, prompt, cursor_index, error)
+ rendered = _render_prompt(secure, value, prompt, cursor_index, error, completion_options)
error = ''
_update_rendered(live, rendered)
keypress = get_key()
+ if keypress in DefaultKeys.tab:
+ if completion:
+ if not completion_context:
+ completion_options = completion(''.join(value))
+ completion_options_iter = cycle(completion_options)
+ if completion_options:
+ completion_context = True
+
+ if completion_context:
+ value = [*next(completion_options_iter)]
+ cursor_index = len(value)
+ else:
+ completion_context = False
+ else:
+ completion_context = False
+
if keypress in DefaultKeys.interrupt:
if Config.raise_on_interrupt:
raise KeyboardInterrupt()
@@ -204,8 +224,9 @@ def prompt(
raise Abort(keypress)
return None
else:
- value.insert(cursor_index, str(keypress))
- cursor_index += 1
+ if not (keypress in DefaultKeys.tab and completion_context):
+ value.insert(cursor_index, str(keypress))
+ cursor_index += 1
def select(
diff --git a/beaupy/_internals.py b/beaupy/_internals.py
index f76a5f1..c675857 100644
--- a/beaupy/_internals.py
+++ b/beaupy/_internals.py
@@ -66,18 +66,26 @@ def _update_rendered(live: Live, renderable: Union[ConsoleRenderable, str]) -> N
live.refresh()
-def _render_prompt(secure: bool, typed_values: List[str], prompt: str, cursor_position: int, error: str) -> str:
- render_value = (len(typed_values) * '*' if secure else ''.join(typed_values)) + ' '
+def _render_prompt(
+ secure: bool, typed_values: List[str], prompt: str, cursor_position: int, error: str, completion_options: List[str] = []
+) -> str:
+ input_value = len(typed_values) * '*' if secure else ''.join(typed_values)
render_value = (
- render_value[:cursor_position]
+ (input_value + ' ')[:cursor_position]
+ '[black on white]' # noqa: W503
- + render_value[cursor_position] # noqa: W503
+ + (input_value + ' ')[cursor_position] # noqa: W503
+ '[/black on white]' # noqa: W503
- + render_value[(cursor_position + 1) :] # noqa: W503,E203
+ + (input_value + ' ')[(cursor_position + 1) :] # noqa: W503,E203
)
+
+ if completion_options and not secure:
+ rendered_completion_options = ' '.join(completion_options).replace(input_value, f'[black on white]{input_value}[/black on white]')
+ render_value = f'{render_value}\n{rendered_completion_options}'
+
render_value = f'{prompt}\n> {render_value}\n\n(Confirm with [bold]enter[/bold])'
if error:
render_value = f'{render_value}\n[red]Error:[/red] {error}'
+
return render_value
diff --git a/docs/content/examples.md b/docs/content/examples.md
index f048421..6d77f0c 100644
--- a/docs/content/examples.md
+++ b/docs/content/examples.md
@@ -133,6 +133,45 @@ very_secret_info = prompt("Type you API key, hehe",
secure=True)
```
+#### Completion
+
+You can provide a python callable such as `Callable[[str], List[str]]` to provide completion options. String passed to the callable is the current user input.
+
+```python
+favorite_color = prompt("What is your favorite color?",
+ completion=lambda _: ["pink", "PINK", "P1NK"])
+```
+
+A more complex example with path completion:
+
+```python
+from os import listdir
+from pathlib import Path
+
+# ugly hacky path completion callable:
+def path_completion(str_path: str = ""):
+ if not str_path:
+ return []
+ try:
+ path = Path(str_path)
+ rest = ''
+ if not path.exists():
+ str_path, rest = str_path.rsplit('/', 1)
+ path = Path(str_path or '/')
+
+ filtered_list_dir = [i for i in listdir(path) if i.startswith(rest)]
+
+ if not path.is_absolute():
+ return ['./'+str(Path(path)/i) for i in filtered_list_dir]
+ else:
+ return [str(Path(path)/i) for i in filtered_list_dir]
+ except Exception as e:
+ return []
+
+prompt(">", completion=path_completion)
+```
+
+
## Spinners
### Styling
diff --git a/pyproject.toml b/pyproject.toml
index 82e5b2e..a278ba9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = 'beaupy'
-version = '3.5.4'
+version = '3.6.0'
description = 'A library of elements for interactive TUIs in Python'
authors = ['Peter Vyboch <[email protected]>']
license = 'MIT'
| Prompt does not bash complete
Hi,
it would be helpful if there were support for bash-style completion (an option for [prompt](https://petereon.github.io/beaupy/api/#prompt) would be enough).
For example, if I want the user to input a file path, it would be helpful if they could hit Tab and get path completion.
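The shape this eventually landed in (see the `prompt` signature in the diff above): a `completion` callable receives the current input and returns candidate strings, which Tab cycles through. A minimal sketch:

```python
from beaupy import prompt

colors = ["pink", "purple", "plum"]

value = prompt(
    "Favorite color?",
    completion=lambda typed: [c for c in colors if c.startswith(typed)],
)
print(value)
```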
| Hi @juxeii ,
thanks for a feature request. It's a great idea. Sounds like it would be reasonably easy to implement too. I am very open to Pull Requests if you are technical and would like to take a stab at it.
Otherwise I'll take it upon myself, but it will take some time as I am currently jugging a bit more stuff than I'd like to.
I do not have time to do a pull request.
Take your time :) | 2023-04-19T20:33:29 | 0.0 | [] | [] |
petereon/beaupy | petereon__beaupy-59 | 70af950aa0ca90e0feed573543a7185589a9f4ec | diff --git a/beaupy/_beaupy.py b/beaupy/_beaupy.py
index e7f556e..88cf016 100755
--- a/beaupy/_beaupy.py
+++ b/beaupy/_beaupy.py
@@ -264,7 +264,11 @@ def select(
'\n'.join(
[
_format_option_select(
- i=i, cursor_index=index % page_size, option=preprocessor(option), cursor_style=cursor_style, cursor=cursor
+ i=i,
+ cursor_index=index % page_size if pagination else index,
+ option=preprocessor(option),
+ cursor_style=cursor_style,
+ cursor=cursor,
)
for i, option in enumerate(options[show_from:show_to] if pagination else options)
]
@@ -364,7 +368,7 @@ def select_multiple(
ticked=i + show_from in ticked_indices,
tick_character=tick_character,
tick_style=tick_style,
- selected=i == index % page_size,
+ selected=i == (index % page_size if pagination else index),
cursor_style=cursor_style,
)
for i, option in enumerate(options[show_from:show_to] if pagination else options)
diff --git a/pyproject.toml b/pyproject.toml
index 79fb271..4add74d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = 'beaupy'
-version = '3.5.1'
+version = '3.5.2'
description = 'A library of elements for interactive TUIs in Python'
authors = ['Peter Vyboch <[email protected]>']
license = 'MIT'
| Some items not shown in multiselect
I have a really weird issue with `select_multiple`.
Consider this snippet:
```python
from beaupy import select_multiple

colored_options = [
'1',
'2',
'3',
'4',
'5',
'6',
'7',
'8']
print(colored_options)
print(len(colored_options))
select_multiple(colored_options)
```
In container A, in a **python vscode terminal**, everything works as expected:

In another container, in a **bash terminal**, I only ever see 5 items:

Why is the bash terminal only showing 5 items?
How can I debug this?
| Could you please share the version of `beaupy` this happens for?
What platform are you running on?
Version:
`3.3.0`
Working container OS:
```
NAME="Red Hat Enterprise Linux Server"
VERSION="7.9 (Maipo)"
```
Not working container OS:
```
PRETTY_NAME="Debian GNU/Linux 11 (bullseye)"
NAME="Debian GNU/Linux"
```
Really strange, why 5 and not 8?
Could you please verify it's not 3.5.0 on the Debian? In 3.5.0 I have been making changes that could have introduced this bug.
Sorry, I am not at the computer atm so I can't verify.
EDIT: I have taken a quick look on the phone. I have most probably introduced a bug that could be causing this in 3.5.0. I will investigate and fix today.
I am sorry, I gave you misinformation.
On the working RedHat it is `3.4.0`.
On the misbehaving Debian it is indeed `3.5.0`.
**Edit: confirmed! Version `3.5.0` also does not work on RedHat**
Thanks for the correction. The fix should be reasonably easy; I'll release **3.5.1** fixing this later today.
Hi, I have released the fix; it's now available among the [GitHub releases](https://github.com/petereon/beaupy/releases/tag/v3.5.1) and on [PyPI](https://pypi.org/project/beaupy/3.5.1/).
It is working again, thank you for your fast support!
That's great to hear!
I think there is a follow-up error on this:
I have a list with 11 entries, but with the up and down keys I can only navigate to the fifth element. Pressing down from there then jumps back up to the first entry.
Since this happens at the fifth element, I guess this error is related to the other issue; see the sketch below.
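To illustrate the suspicion, here is a minimal sketch; it assumes the default page size is 5 (not confirmed from the source) and that the cursor index is wrapped with a modulo even when pagination is disabled:
```
page_size = 5  # assumed default; not confirmed
options = [str(i) for i in range(1, 12)]  # 11 entries, as in my list

for index in range(len(options)):
    wrapped = index % page_size  # suspected old behaviour: wraps back to 0 at index 5
    print(index, '->', wrapped)
```
Indices 5 through 10 all collapse into 0 through 4, which would explain why the cursor never appears past the fifth entry.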
You are correct. I will be taking a look momentarily. | 2023-03-29T11:16:09 | 0.0 | [] | []
||
petereon/beaupy | petereon__beaupy-58 | 1e00af8df205822ad1f8b5a3edf965a0ab564f7d | diff --git a/beaupy/_beaupy.py b/beaupy/_beaupy.py
index 93bd1b0..e7f556e 100755
--- a/beaupy/_beaupy.py
+++ b/beaupy/_beaupy.py
@@ -260,13 +260,13 @@ def select(
while True:
show_from = (page - 1) * page_size
show_to = min(show_from + page_size, len(options))
- rendered = (
+ rendered = ( # noqa: ECE001
'\n'.join(
[
_format_option_select(
i=i, cursor_index=index % page_size, option=preprocessor(option), cursor_style=cursor_style, cursor=cursor
)
- for i, option in enumerate(options[show_from:show_to])
+ for i, option in enumerate(options[show_from:show_to] if pagination else options)
]
)
+ (f'[grey58]\n\nPage {page}/{total_pages}[/grey58]' if pagination and total_pages > 1 else '') # noqa: W503
@@ -356,7 +356,7 @@ def select_multiple(
while True:
show_from = (page - 1) * page_size
show_to = min(show_from + page_size, len(options))
- rendered = (
+ rendered = ( # noqa: ECE001
'\n'.join(
[
_render_option_select_multiple(
@@ -367,7 +367,7 @@ def select_multiple(
selected=i == index % page_size,
cursor_style=cursor_style,
)
- for i, option in enumerate(options[show_from:show_to])
+ for i, option in enumerate(options[show_from:show_to] if pagination else options)
]
)
+ (f'[grey58]\n\nPage {page}/{total_pages}[/grey58]' if pagination and total_pages > 1 else '') # noqa: W503
diff --git a/poetry.lock b/poetry.lock
index e814fd0..d250f8e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand.
[[package]]
name = "astor"
@@ -29,20 +29,20 @@ typed = ["typed-ast"]
[[package]]
name = "astroid"
-version = "2.12.13"
+version = "2.15.1"
description = "An abstract syntax tree for Python with inference support."
category = "dev"
optional = false
python-versions = ">=3.7.2"
files = [
- {file = "astroid-2.12.13-py3-none-any.whl", hash = "sha256:10e0ad5f7b79c435179d0d0f0df69998c4eef4597534aae44910db060baeb907"},
- {file = "astroid-2.12.13.tar.gz", hash = "sha256:1493fe8bd3dfd73dc35bd53c9d5b6e49ead98497c47b2307662556a5692d29d7"},
+ {file = "astroid-2.15.1-py3-none-any.whl", hash = "sha256:89860bda98fe2bbd1f5d262229be7629d778ce280de68d95d4a73d1f592ad268"},
+ {file = "astroid-2.15.1.tar.gz", hash = "sha256:af4e0aff46e2868218502789898269ed95b663fba49e65d91c1e09c966266c34"},
]
[package.dependencies]
lazy-object-proxy = ">=1.4.0"
typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""}
-typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
+typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""}
wrapt = [
{version = ">=1.11,<2", markers = "python_version < \"3.11\""},
{version = ">=1.14,<2", markers = "python_version >= \"3.11\""},
@@ -50,43 +50,45 @@ wrapt = [
[[package]]
name = "attrs"
-version = "22.1.0"
+version = "22.2.0"
description = "Classes Without Boilerplate"
category = "dev"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.6"
files = [
- {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
- {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
+ {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"},
+ {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"},
]
[package.extras]
-dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
-docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
-tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
-tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
+cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"]
+dev = ["attrs[docs,tests]"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"]
+tests = ["attrs[tests-no-zope]", "zope.interface"]
+tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"]
[[package]]
name = "bandit"
-version = "1.7.4"
+version = "1.7.5"
description = "Security oriented static analyser for python code."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"},
- {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"},
+ {file = "bandit-1.7.5-py3-none-any.whl", hash = "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549"},
+ {file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"},
]
[package.dependencies]
colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""}
GitPython = ">=1.0.1"
PyYAML = ">=5.3.1"
+rich = "*"
stevedore = ">=1.20.0"
[package.extras]
-test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"]
-toml = ["toml"]
+test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "tomli (>=1.1.0)"]
+toml = ["tomli (>=1.1.0)"]
yaml = ["PyYAML"]
[[package]]
@@ -140,19 +142,89 @@ files = [
[[package]]
name = "charset-normalizer"
-version = "2.1.1"
+version = "3.1.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "dev"
optional = false
-python-versions = ">=3.6.0"
+python-versions = ">=3.7.0"
files = [
- {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
- {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
+ {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
+ {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
]
-[package.extras]
-unicode-backport = ["unicodedata2"]
-
[[package]]
name = "click"
version = "8.1.3"
@@ -243,62 +315,63 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]
[[package]]
name = "coverage"
-version = "6.5.0"
+version = "7.2.2"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"},
- {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"},
- {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"},
- {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"},
- {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"},
- {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"},
- {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"},
- {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"},
- {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"},
- {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"},
- {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"},
- {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"},
- {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"},
- {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"},
- {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"},
- {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"},
- {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"},
- {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"},
- {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"},
- {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"},
- {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"},
- {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"},
- {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"},
- {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"},
- {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"},
- {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"},
- {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"},
- {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"},
- {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"},
- {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"},
- {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"},
- {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"},
- {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"},
- {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"},
- {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"},
- {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"},
- {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"},
- {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"},
- {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"},
- {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"},
- {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"},
- {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"},
+ {file = "coverage-7.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7"},
+ {file = "coverage-7.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d"},
+ {file = "coverage-7.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5"},
+ {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169"},
+ {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6"},
+ {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137"},
+ {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90"},
+ {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2"},
+ {file = "coverage-7.2.2-cp310-cp310-win32.whl", hash = "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292"},
+ {file = "coverage-7.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab"},
+ {file = "coverage-7.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b"},
+ {file = "coverage-7.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5"},
+ {file = "coverage-7.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731"},
+ {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd"},
+ {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d"},
+ {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212"},
+ {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54"},
+ {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57"},
+ {file = "coverage-7.2.2-cp311-cp311-win32.whl", hash = "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d"},
+ {file = "coverage-7.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512"},
+ {file = "coverage-7.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9"},
+ {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e"},
+ {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69"},
+ {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0"},
+ {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f"},
+ {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67"},
+ {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9"},
+ {file = "coverage-7.2.2-cp37-cp37m-win32.whl", hash = "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8"},
+ {file = "coverage-7.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25"},
+ {file = "coverage-7.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6"},
+ {file = "coverage-7.2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5"},
+ {file = "coverage-7.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4"},
+ {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd"},
+ {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84"},
+ {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540"},
+ {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88"},
+ {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2"},
+ {file = "coverage-7.2.2-cp38-cp38-win32.whl", hash = "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3"},
+ {file = "coverage-7.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8"},
+ {file = "coverage-7.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d"},
+ {file = "coverage-7.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005"},
+ {file = "coverage-7.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988"},
+ {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149"},
+ {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8"},
+ {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140"},
+ {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016"},
+ {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be"},
+ {file = "coverage-7.2.2-cp39-cp39-win32.whl", hash = "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc"},
+ {file = "coverage-7.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef"},
+ {file = "coverage-7.2.2-pp37.pp38.pp39-none-any.whl", hash = "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968"},
+ {file = "coverage-7.2.2.tar.gz", hash = "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2"},
]
[package.extras]
@@ -335,7 +408,7 @@ files = [
"databind.json" = ">=1.5.3,<2.0.0"
[[package]]
-name = "databind.core"
+name = "databind-core"
version = "1.5.3"
description = "Databind is a library inspired by jackson-databind to de-/serialize Python dataclasses. Compatible with Python 3.7 and newer."
category = "dev"
@@ -352,7 +425,7 @@ Deprecated = ">=1.2.12,<2.0.0"
typing-extensions = ">=3.10.0"
[[package]]
-name = "databind.json"
+name = "databind-json"
version = "1.5.3"
description = "De-/serialize Python dataclasses to or from JSON payloads. Compatible with Python 3.7 and newer."
category = "dev"
@@ -369,7 +442,7 @@ files = [
typing-extensions = ">=3.10.0"
[[package]]
-name = "Deprecated"
+name = "deprecated"
version = "1.2.13"
description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
category = "dev"
@@ -403,14 +476,14 @@ graph = ["objgraph (>=1.7.2)"]
[[package]]
name = "docspec"
-version = "2.0.2"
+version = "2.1.2"
description = "Docspec is a JSON object specification for representing API documentation of programming languages."
category = "dev"
optional = false
python-versions = ">=3.7,<4.0"
files = [
- {file = "docspec-2.0.2-py3-none-any.whl", hash = "sha256:a10f39c9f968079b683deae24c48a99ecfa8eff92dd9c81f32420b62d226119f"},
- {file = "docspec-2.0.2.tar.gz", hash = "sha256:73fceec33f395688e19e0ed424a306504ff294e98a1c2a767be81bbf488bb450"},
+ {file = "docspec-2.1.2-py3-none-any.whl", hash = "sha256:19168242b1e6ca39553feba91b6284479e2bfdf6500949ce63e10c49354d00d7"},
+ {file = "docspec-2.1.2.tar.gz", hash = "sha256:0edb90f4c54edbd43170875dece7a50750a21e95a4c5faf3f7e655d01d483fa7"},
]
[package.dependencies]
@@ -463,14 +536,14 @@ dev = ["coverage", "coveralls", "pytest"]
[[package]]
name = "eradicate"
-version = "2.1.0"
+version = "2.2.0"
description = "Removes commented-out code."
category = "dev"
optional = false
python-versions = "*"
files = [
- {file = "eradicate-2.1.0-py3-none-any.whl", hash = "sha256:8bfaca181db9227dc88bdbce4d051a9627604c2243e7d85324f6d6ce0fd08bb2"},
- {file = "eradicate-2.1.0.tar.gz", hash = "sha256:aac7384ab25b1bf21c4c012de9b4bf8398945a14c98c911545b2ea50ab558014"},
+ {file = "eradicate-2.2.0-py3-none-any.whl", hash = "sha256:751813c315a48ce7e3d0483410991015342d380a956e86e0265c61bfb875bcbc"},
+ {file = "eradicate-2.2.0.tar.gz", hash = "sha256:c329a05def6a4b558dab58bb1b694f5209706b7c99ba174d226dfdb69a5ba0da"},
]
[[package]]
@@ -636,13 +709,13 @@ pycodestyle = "*"
[[package]]
name = "flake8-quotes"
-version = "3.3.1"
+version = "3.3.2"
description = "Flake8 lint for quotes."
category = "dev"
optional = false
python-versions = "*"
files = [
- {file = "flake8-quotes-3.3.1.tar.gz", hash = "sha256:633adca6fb8a08131536af0d750b44d6985b9aba46f498871e21588c3e6f525a"},
+ {file = "flake8-quotes-3.3.2.tar.gz", hash = "sha256:6e26892b632dacba517bf27219c459a8396dcfac0f5e8204904c5a4ba9b480e1"},
]
[package.dependencies]
@@ -693,14 +766,14 @@ smmap = ">=3.0.1,<6"
[[package]]
name = "gitpython"
-version = "3.1.30"
-description = "GitPython is a python library used to interact with Git repositories"
+version = "3.1.31"
+description = "GitPython is a Python library used to interact with Git repositories"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"},
- {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"},
+ {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"},
+ {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"},
]
[package.dependencies]
@@ -741,24 +814,24 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517",
[[package]]
name = "isort"
-version = "5.11.2"
+version = "5.11.5"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "isort-5.11.2-py3-none-any.whl", hash = "sha256:e486966fba83f25b8045f8dd7455b0a0d1e4de481e1d7ce4669902d9fb85e622"},
- {file = "isort-5.11.2.tar.gz", hash = "sha256:dd8bbc5c0990f2a095d754e50360915f73b4c26fc82733eb5bfc6b48396af4d2"},
+ {file = "isort-5.11.5-py3-none-any.whl", hash = "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746"},
+ {file = "isort-5.11.5.tar.gz", hash = "sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db"},
]
[package.extras]
colors = ["colorama (>=0.4.3,<0.5.0)"]
-pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
+pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"]
plugins = ["setuptools"]
requirements-deprecated-finder = ["pip-api", "pipreqs"]
[[package]]
-name = "Jinja2"
+name = "jinja2"
version = "3.1.2"
description = "A very fast and expressive template engine."
category = "dev"
@@ -777,31 +850,48 @@ i18n = ["Babel (>=2.7)"]
[[package]]
name = "lazy-object-proxy"
-version = "1.8.0"
+version = "1.9.0"
description = "A fast and thorough lazy object proxy."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "lazy-object-proxy-1.8.0.tar.gz", hash = "sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156"},
- {file = "lazy_object_proxy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe"},
- {file = "lazy_object_proxy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25"},
- {file = "lazy_object_proxy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b"},
- {file = "lazy_object_proxy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7"},
- {file = "lazy_object_proxy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e"},
- {file = "lazy_object_proxy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d"},
- {file = "lazy_object_proxy-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c"},
- {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd"},
- {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858"},
- {file = "lazy_object_proxy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada"},
- {file = "lazy_object_proxy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f"},
- {file = "lazy_object_proxy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c"},
- {file = "lazy_object_proxy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288"},
- {file = "lazy_object_proxy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f"},
- {file = "lazy_object_proxy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0"},
- {file = "lazy_object_proxy-1.8.0-pp37-pypy37_pp73-any.whl", hash = "sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891"},
- {file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"},
- {file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"},
+ {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"},
]
[[package]]
@@ -823,53 +913,63 @@ six = "*"
restructuredtext = ["rst2ansi"]
[[package]]
-name = "MarkupSafe"
-version = "2.1.1"
+name = "markupsafe"
+version = "2.1.2"
description = "Safely add untrusted strings to HTML/XML markup."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"},
- {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"},
+ {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
]
[[package]]
@@ -886,63 +986,59 @@ files = [
[[package]]
name = "mock"
-version = "4.0.3"
+version = "5.0.1"
description = "Rolling backport of unittest.mock for all Pythons"
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
- {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"},
- {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"},
+ {file = "mock-5.0.1-py3-none-any.whl", hash = "sha256:c41cfb1e99ba5d341fbcc5308836e7d7c9786d302f995b2c271ce2144dece9eb"},
+ {file = "mock-5.0.1.tar.gz", hash = "sha256:e3ea505c03babf7977fd21674a69ad328053d414f05e6433c30d8fa14a534a6b"},
]
[package.extras]
build = ["blurb", "twine", "wheel"]
docs = ["sphinx"]
-test = ["pytest (<5.4)", "pytest-cov"]
+test = ["pytest", "pytest-cov"]
[[package]]
name = "mypy"
-version = "0.991"
+version = "1.1.1"
description = "Optional static typing for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"},
- {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"},
- {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"},
- {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"},
- {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"},
- {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"},
- {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"},
- {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"},
- {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"},
- {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"},
- {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"},
- {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"},
- {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"},
- {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"},
- {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"},
- {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"},
- {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"},
- {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"},
- {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"},
- {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"},
- {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"},
- {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"},
- {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"},
- {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"},
- {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"},
- {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"},
- {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"},
- {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"},
- {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"},
- {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"},
+ {file = "mypy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39c7119335be05630611ee798cc982623b9e8f0cff04a0b48dfc26100e0b97af"},
+ {file = "mypy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61bf08362e93b6b12fad3eab68c4ea903a077b87c90ac06c11e3d7a09b56b9c1"},
+ {file = "mypy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbb19c9f662e41e474e0cff502b7064a7edc6764f5262b6cd91d698163196799"},
+ {file = "mypy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:315ac73cc1cce4771c27d426b7ea558fb4e2836f89cb0296cbe056894e3a1f78"},
+ {file = "mypy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb14ff9919b7df3538590fc4d4c49a0f84392237cbf5f7a816b4161c061829e"},
+ {file = "mypy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26cdd6a22b9b40b2fd71881a8a4f34b4d7914c679f154f43385ca878a8297389"},
+ {file = "mypy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b5f81b40d94c785f288948c16e1f2da37203c6006546c5d947aab6f90aefef2"},
+ {file = "mypy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b437be1c02712a605591e1ed1d858aba681757a1e55fe678a15c2244cd68a5"},
+ {file = "mypy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d809f88734f44a0d44959d795b1e6f64b2bbe0ea4d9cc4776aa588bb4229fc1c"},
+ {file = "mypy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:a380c041db500e1410bb5b16b3c1c35e61e773a5c3517926b81dfdab7582be54"},
+ {file = "mypy-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7c7b708fe9a871a96626d61912e3f4ddd365bf7f39128362bc50cbd74a634d5"},
+ {file = "mypy-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c10fa12df1232c936830839e2e935d090fc9ee315744ac33b8a32216b93707"},
+ {file = "mypy-1.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0a28a76785bf57655a8ea5eb0540a15b0e781c807b5aa798bd463779988fa1d5"},
+ {file = "mypy-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef6a01e563ec6a4940784c574d33f6ac1943864634517984471642908b30b6f7"},
+ {file = "mypy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d64c28e03ce40d5303450f547e07418c64c241669ab20610f273c9e6290b4b0b"},
+ {file = "mypy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64cc3afb3e9e71a79d06e3ed24bb508a6d66f782aff7e56f628bf35ba2e0ba51"},
+ {file = "mypy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce61663faf7a8e5ec6f456857bfbcec2901fbdb3ad958b778403f63b9e606a1b"},
+ {file = "mypy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b0c373d071593deefbcdd87ec8db91ea13bd8f1328d44947e88beae21e8d5e9"},
+ {file = "mypy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:2888ce4fe5aae5a673386fa232473014056967f3904f5abfcf6367b5af1f612a"},
+ {file = "mypy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:19ba15f9627a5723e522d007fe708007bae52b93faab00f95d72f03e1afa9598"},
+ {file = "mypy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:59bbd71e5c58eed2e992ce6523180e03c221dcd92b52f0e792f291d67b15a71c"},
+ {file = "mypy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9401e33814cec6aec8c03a9548e9385e0e228fc1b8b0a37b9ea21038e64cdd8a"},
+ {file = "mypy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b398d8b1f4fba0e3c6463e02f8ad3346f71956b92287af22c9b12c3ec965a9f"},
+ {file = "mypy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:69b35d1dcb5707382810765ed34da9db47e7f95b3528334a3c999b0c90fe523f"},
+ {file = "mypy-1.1.1-py3-none-any.whl", hash = "sha256:4e4e8b362cdf99ba00c2b218036002bdcdf1e0de085cdb296a49df03fb31dfc4"},
+ {file = "mypy-1.1.1.tar.gz", hash = "sha256:ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f"},
]
[package.dependencies]
-mypy-extensions = ">=0.4.3"
+mypy-extensions = ">=1.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""}
typing-extensions = ">=3.10"
@@ -955,18 +1051,18 @@ reports = ["lxml"]
[[package]]
name = "mypy-extensions"
-version = "0.4.3"
-description = "Experimental type system extensions for programs checked with the mypy typechecker."
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.5"
files = [
- {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
- {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
-name = "nr.util"
+name = "nr-util"
version = "0.8.12"
description = "General purpose Python utility library."
category = "dev"
@@ -995,26 +1091,26 @@ files = [
[[package]]
name = "pathspec"
-version = "0.10.3"
+version = "0.11.1"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"},
- {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"},
+ {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"},
+ {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"},
]
[[package]]
name = "pbr"
-version = "5.11.0"
+version = "5.11.1"
description = "Python Build Reasonableness"
category = "dev"
optional = false
python-versions = ">=2.6"
files = [
- {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"},
- {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"},
+ {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"},
+ {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"},
]
[[package]]
@@ -1034,19 +1130,22 @@ pylint = ">=2.12.0,<3.0"
[[package]]
name = "platformdirs"
-version = "2.6.0"
+version = "3.2.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "platformdirs-2.6.0-py3-none-any.whl", hash = "sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca"},
- {file = "platformdirs-2.6.0.tar.gz", hash = "sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e"},
+ {file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"},
+ {file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"},
]
+[package.dependencies]
+typing-extensions = {version = ">=4.5", markers = "python_version < \"3.8\""}
+
[package.extras]
-docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"]
-test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
[[package]]
name = "pluggy"
@@ -1069,14 +1168,14 @@ testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "poethepoet"
-version = "0.16.5"
+version = "0.19.0"
description = "A task runner that works well with poetry."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "poethepoet-0.16.5-py3-none-any.whl", hash = "sha256:493d5d47b4cb0894dde6a69d14129ba39ef3f124fabda1f83ebb39bbf737a40e"},
- {file = "poethepoet-0.16.5.tar.gz", hash = "sha256:3c958792ce488661ba09df67ba832a1b3141aa640236505ee60c23f4b1db4dbc"},
+ {file = "poethepoet-0.19.0-py3-none-any.whl", hash = "sha256:87038be589077e4b407050a9da644d9cd9e4076ccfc8abc7f855cf6870d5c6c2"},
+ {file = "poethepoet-0.19.0.tar.gz", hash = "sha256:897eb85ec15876d79befc7d19d4c80ce7c8b214d1bb0dcfec640abd81616bfed"},
]
[package.dependencies]
@@ -1150,15 +1249,15 @@ files = [
]
[[package]]
-name = "Pygments"
-version = "2.13.0"
+name = "pygments"
+version = "2.14.0"
description = "Pygments is a syntax highlighting package written in Python."
category = "main"
optional = false
python-versions = ">=3.6"
files = [
- {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"},
- {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"},
+ {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"},
+ {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"},
]
[package.extras]
@@ -1166,20 +1265,23 @@ plugins = ["importlib-metadata"]
[[package]]
name = "pylint"
-version = "2.15.8"
+version = "2.17.1"
description = "python code static checker"
category = "dev"
optional = false
python-versions = ">=3.7.2"
files = [
- {file = "pylint-2.15.8-py3-none-any.whl", hash = "sha256:ea82cd6a1e11062dc86d555d07c021b0fb65afe39becbe6fe692efd6c4a67443"},
- {file = "pylint-2.15.8.tar.gz", hash = "sha256:ec4a87c33da054ab86a6c79afa6771dc8765cb5631620053e727fcf3ef8cbed7"},
+ {file = "pylint-2.17.1-py3-none-any.whl", hash = "sha256:8660a54e3f696243d644fca98f79013a959c03f979992c1ab59c24d3f4ec2700"},
+ {file = "pylint-2.17.1.tar.gz", hash = "sha256:d4d009b0116e16845533bc2163493d6681846ac725eab8ca8014afb520178ddd"},
]
[package.dependencies]
-astroid = ">=2.12.13,<=2.14.0-dev0"
+astroid = ">=2.15.0,<=2.17.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
-dill = ">=0.2"
+dill = [
+ {version = ">=0.2", markers = "python_version < \"3.11\""},
+ {version = ">=0.3.6", markers = "python_version >= \"3.11\""},
+]
isort = ">=4.2.5,<6"
mccabe = ">=0.6,<0.8"
platformdirs = ">=2.2.0"
@@ -1209,18 +1311,18 @@ tomli = {version = "*", markers = "python_version < \"3.11\""}
[[package]]
name = "python-yakh"
-version = "0.3.0"
+version = "0.3.1"
description = "Yet Another Keypress Handler"
category = "main"
optional = false
python-versions = ">=3.7.8,<4.0.0"
files = [
- {file = "python_yakh-0.3.0-py3-none-any.whl", hash = "sha256:36e3ba1ebc9dd1d25d10eb42211317b96af77112ba9d714912691b82429aebab"},
- {file = "python_yakh-0.3.0.tar.gz", hash = "sha256:bb43d6a77f271d32c242b6472f97b4b00c25c86e43b77595839fa3cbbf92e59c"},
+ {file = "python_yakh-0.3.1-py3-none-any.whl", hash = "sha256:4eaaa5c0a369fa414c5f83d63d15b199ca8ed4b7df519cb38b8b9adec700fba5"},
+ {file = "python_yakh-0.3.1.tar.gz", hash = "sha256:cadf04077cc3a42285c54b0a65bce0e50b7850a64c24f9c76d9ee14d9b1cbd0f"},
]
[[package]]
-name = "PyYAML"
+name = "pyyaml"
version = "5.4.1"
description = "YAML parser and emitter for Python"
category = "dev"
@@ -1277,19 +1379,19 @@ mando = ">=0.6,<0.7"
[[package]]
name = "requests"
-version = "2.28.1"
+version = "2.28.2"
description = "Python HTTP for Humans."
category = "dev"
optional = false
python-versions = ">=3.7, <4"
files = [
- {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
- {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
+ {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"},
+ {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"},
]
[package.dependencies]
certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<3"
+charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
@@ -1319,31 +1421,31 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
[[package]]
name = "setuptools"
-version = "65.6.3"
+version = "67.6.1"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"},
- {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"},
+ {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"},
+ {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "shellingham"
-version = "1.5.0"
+version = "1.5.0.post1"
description = "Tool to Detect Surrounding Shell"
category = "dev"
optional = false
-python-versions = ">=3.4"
+python-versions = ">=3.7"
files = [
- {file = "shellingham-1.5.0-py2.py3-none-any.whl", hash = "sha256:a8f02ba61b69baaa13facdba62908ca8690a94b8119b69f5ec5873ea85f7391b"},
- {file = "shellingham-1.5.0.tar.gz", hash = "sha256:72fb7f5c63103ca2cb91b23dee0c71fe8ad6fbfd46418ef17dbe40db51592dad"},
+ {file = "shellingham-1.5.0.post1-py2.py3-none-any.whl", hash = "sha256:368bf8c00754fd4f55afb7bbb86e272df77e4dc76ac29dbcbb81a59e9fc15744"},
+ {file = "shellingham-1.5.0.post1.tar.gz", hash = "sha256:823bc5fb5c34d60f285b624e7264f4dda254bc803a3774a147bf99c0e3004a28"},
]
[[package]]
@@ -1388,18 +1490,18 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0"
[[package]]
name = "toml-sort"
-version = "0.20.1"
+version = "0.23.0"
description = "Toml sorting library"
category = "dev"
optional = false
python-versions = ">=3.7,<4.0"
files = [
- {file = "toml-sort-0.20.1.tar.gz", hash = "sha256:bce9023787c6f15ebbdf22d0b05eab928c047b164f65d393a25c27ac060bd7e1"},
- {file = "toml_sort-0.20.1-py3-none-any.whl", hash = "sha256:c5f5923969cbbf3b391dea47687733143d342ea438619790ee49d055052fd0f5"},
+ {file = "toml_sort-0.23.0-py3-none-any.whl", hash = "sha256:7804a341c2c692dd47f3cc35f06b90ee03cbaa1b2e319a83b67d063c07bce8a1"},
+ {file = "toml_sort-0.23.0.tar.gz", hash = "sha256:eba7b27d8c786612877cf2feccdd44c1beb9d8e37d810b4a4fa34426e914e35d"},
]
[package.dependencies]
-tomlkit = ">=0.8.0"
+tomlkit = ">=0.11.2"
[[package]]
name = "tomli"
@@ -1427,14 +1529,14 @@ files = [
[[package]]
name = "tomlkit"
-version = "0.11.6"
+version = "0.11.7"
description = "Style preserving TOML library"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"},
- {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"},
+ {file = "tomlkit-0.11.7-py3-none-any.whl", hash = "sha256:5325463a7da2ef0c6bbfefb62a3dc883aebe679984709aee32a317907d0a8d3c"},
+ {file = "tomlkit-0.11.7.tar.gz", hash = "sha256:f392ef70ad87a672f02519f99967d28a4d3047133e2d1df936511465fbb3791d"},
]
[[package]]
@@ -1473,26 +1575,26 @@ files = [
[[package]]
name = "types-emoji"
-version = "2.1.0.1"
+version = "2.1.0.3"
description = "Typing stubs for emoji"
category = "dev"
optional = false
python-versions = "*"
files = [
- {file = "types-emoji-2.1.0.1.tar.gz", hash = "sha256:8abbd27b8ebd51ae0bafdf04ba07263b984bd300cf73eedceb1cfc156f589c8b"},
- {file = "types_emoji-2.1.0.1-py3-none-any.whl", hash = "sha256:fbbc07b57d82cdb7be3e34f3aad3a4157063f929668016bd27e62e528f3e656d"},
+ {file = "types-emoji-2.1.0.3.tar.gz", hash = "sha256:98ddb0ff5f48622550c431206e4dbfcbde8ca8bc03fcfbb9962a778d2049aa13"},
+ {file = "types_emoji-2.1.0.3-py3-none-any.whl", hash = "sha256:32fe5cf02c4834bb59579380f600a89d1471571fb56e36465cbd0c7d95f669ca"},
]
[[package]]
name = "typing-extensions"
-version = "4.4.0"
+version = "4.5.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
- {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
+ {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
+ {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
]
[[package]]
@@ -1522,14 +1624,14 @@ files = [
[[package]]
name = "urllib3"
-version = "1.26.13"
+version = "1.26.15"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
- {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"},
- {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"},
+ {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"},
+ {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"},
]
[package.extras]
@@ -1539,14 +1641,14 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "ward"
-version = "0.67.0b0"
+version = "0.67.2b0"
description = "A modern Python testing framework"
category = "dev"
optional = false
python-versions = ">=3.7.8,<4.0.0"
files = [
- {file = "ward-0.67.0b0-py3-none-any.whl", hash = "sha256:865c1559c99a89fa74a783fc04585c65811fb97fd912996e1cc34f0391216c23"},
- {file = "ward-0.67.0b0.tar.gz", hash = "sha256:24024a1c41f67ad1cd0310c7f29dafbe387cd8f49777fe12262c5ad66c4ed779"},
+ {file = "ward-0.67.2b0-py3-none-any.whl", hash = "sha256:fe351f5ae2fbbf8132af6260bdea8245dcef026943cb2084faaa48e6a1017c4e"},
+ {file = "ward-0.67.2b0.tar.gz", hash = "sha256:11bf0128e4696bfd1ca9a2b457c817378c4479a33fa45d0e3aabf7b301744f47"},
]
[package.dependencies]
@@ -1556,7 +1658,7 @@ click-default-group = ">=1.2.2,<2.0.0"
cucumber-tag-expressions = ">=2.0.0,<5.0.0"
pluggy = ">=0.13.1,<2.0.0"
pprintpp = ">=0.4.0,<0.5.0"
-rich = ">=12.2.0,<13.0.0"
+rich = ">=12.2.0"
tomli = ">=1.0.0,<3.0.0"
[[package]]
@@ -1578,40 +1680,39 @@ ward = "*"
[[package]]
name = "watchdog"
-version = "2.2.0"
+version = "3.0.0"
description = "Filesystem events monitoring"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "watchdog-2.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ed91c3ccfc23398e7aa9715abf679d5c163394b8cad994f34f156d57a7c163dc"},
- {file = "watchdog-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:76a2743402b794629a955d96ea2e240bd0e903aa26e02e93cd2d57b33900962b"},
- {file = "watchdog-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:920a4bda7daa47545c3201a3292e99300ba81ca26b7569575bd086c865889090"},
- {file = "watchdog-2.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ceaa9268d81205876bedb1069f9feab3eccddd4b90d9a45d06a0df592a04cae9"},
- {file = "watchdog-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1893d425ef4fb4f129ee8ef72226836619c2950dd0559bba022b0818c63a7b60"},
- {file = "watchdog-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e99c1713e4436d2563f5828c8910e5ff25abd6ce999e75f15c15d81d41980b6"},
- {file = "watchdog-2.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a5bd9e8656d07cae89ac464ee4bcb6f1b9cecbedc3bf1334683bed3d5afd39ba"},
- {file = "watchdog-2.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a048865c828389cb06c0bebf8a883cec3ae58ad3e366bcc38c61d8455a3138f"},
- {file = "watchdog-2.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e722755d995035dd32177a9c633d158f2ec604f2a358b545bba5bed53ab25bca"},
- {file = "watchdog-2.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:af4b5c7ba60206759a1d99811b5938ca666ea9562a1052b410637bb96ff97512"},
- {file = "watchdog-2.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:619d63fa5be69f89ff3a93e165e602c08ed8da402ca42b99cd59a8ec115673e1"},
- {file = "watchdog-2.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f2b0665c57358ce9786f06f5475bc083fea9d81ecc0efa4733fd0c320940a37"},
- {file = "watchdog-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:441024df19253bb108d3a8a5de7a186003d68564084576fecf7333a441271ef7"},
- {file = "watchdog-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a410dd4d0adcc86b4c71d1317ba2ea2c92babaf5b83321e4bde2514525544d5"},
- {file = "watchdog-2.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:28704c71afdb79c3f215c90231e41c52b056ea880b6be6cee035c6149d658ed1"},
- {file = "watchdog-2.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ac0bd7c206bb6df78ef9e8ad27cc1346f2b41b1fef610395607319cdab89bc1"},
- {file = "watchdog-2.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:27e49268735b3c27310883012ab3bd86ea0a96dcab90fe3feb682472e30c90f3"},
- {file = "watchdog-2.2.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:2af1a29fd14fc0a87fb6ed762d3e1ae5694dcde22372eebba50e9e5be47af03c"},
- {file = "watchdog-2.2.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:c7bd98813d34bfa9b464cf8122e7d4bec0a5a427399094d2c17dd5f70d59bc61"},
- {file = "watchdog-2.2.0-py3-none-manylinux2014_i686.whl", hash = "sha256:56fb3f40fc3deecf6e518303c7533f5e2a722e377b12507f6de891583f1b48aa"},
- {file = "watchdog-2.2.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:74535e955359d79d126885e642d3683616e6d9ab3aae0e7dcccd043bd5a3ff4f"},
- {file = "watchdog-2.2.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:cf05e6ff677b9655c6e9511d02e9cc55e730c4e430b7a54af9c28912294605a4"},
- {file = "watchdog-2.2.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:d6ae890798a3560688b441ef086bb66e87af6b400a92749a18b856a134fc0318"},
- {file = "watchdog-2.2.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e5aed2a700a18c194c39c266900d41f3db0c1ebe6b8a0834b9995c835d2ca66e"},
- {file = "watchdog-2.2.0-py3-none-win32.whl", hash = "sha256:d0fb5f2b513556c2abb578c1066f5f467d729f2eb689bc2db0739daf81c6bb7e"},
- {file = "watchdog-2.2.0-py3-none-win_amd64.whl", hash = "sha256:1f8eca9d294a4f194ce9df0d97d19b5598f310950d3ac3dd6e8d25ae456d4c8a"},
- {file = "watchdog-2.2.0-py3-none-win_ia64.whl", hash = "sha256:ad0150536469fa4b693531e497ffe220d5b6cd76ad2eda474a5e641ee204bbb6"},
- {file = "watchdog-2.2.0.tar.gz", hash = "sha256:83cf8bc60d9c613b66a4c018051873d6273d9e45d040eed06d6a96241bd8ec01"},
+ {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"},
+ {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"},
+ {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"},
+ {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"},
+ {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"},
+ {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"},
+ {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"},
+ {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"},
+ {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"},
+ {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"},
+ {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"},
+ {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"},
+ {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"},
+ {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"},
+ {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"},
+ {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"},
+ {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"},
+ {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"},
+ {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"},
+ {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"},
+ {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"},
]
[package.extras]
@@ -1619,76 +1720,87 @@ watchmedo = ["PyYAML (>=3.10)"]
[[package]]
name = "wrapt"
-version = "1.14.1"
+version = "1.15.0"
description = "Module for decorators, wrappers and monkey patching."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
files = [
- {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"},
- {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"},
- {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"},
- {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"},
- {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"},
- {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"},
- {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"},
- {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"},
- {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"},
- {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"},
- {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"},
- {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"},
- {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"},
- {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"},
- {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"},
- {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"},
- {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"},
- {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"},
- {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"},
- {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"},
- {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"},
- {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"},
- {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"},
- {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"},
- {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"},
- {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"},
- {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"},
- {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"},
- {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"},
- {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"},
- {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"},
- {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"},
- {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"},
- {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"},
- {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"},
- {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"},
- {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"},
- {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"},
- {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"},
- {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"},
- {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"},
- {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"},
- {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"},
- {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"},
- {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"},
- {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"},
- {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"},
- {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"},
- {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"},
- {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"},
- {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"},
- {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"},
- {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"},
- {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"},
- {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"},
- {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"},
- {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"},
- {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"},
- {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"},
- {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"},
- {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"},
- {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"},
- {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"},
- {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
+ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"},
+ {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"},
+ {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"},
+ {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"},
+ {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"},
+ {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"},
+ {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"},
+ {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"},
+ {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"},
+ {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"},
+ {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"},
+ {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"},
+ {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"},
+ {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"},
+ {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"},
+ {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"},
+ {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"},
+ {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"},
+ {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"},
+ {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"},
+ {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"},
+ {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"},
+ {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"},
+ {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"},
+ {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"},
+ {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"},
+ {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"},
+ {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"},
+ {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"},
+ {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"},
+ {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"},
+ {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"},
+ {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"},
+ {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"},
+ {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"},
+ {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"},
+ {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"},
+ {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"},
+ {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"},
+ {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"},
+ {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"},
+ {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"},
+ {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"},
+ {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"},
+ {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"},
+ {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"},
+ {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"},
+ {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"},
+ {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"},
+ {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"},
+ {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"},
+ {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"},
+ {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"},
+ {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"},
+ {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"},
+ {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"},
+ {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"},
+ {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"},
+ {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"},
+ {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"},
+ {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"},
+ {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"},
+ {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"},
]
[[package]]
@@ -1722,21 +1834,21 @@ files = [
[[package]]
name = "zipp"
-version = "3.11.0"
+version = "3.15.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"},
- {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"},
+ {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
+ {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
-testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
[metadata]
lock-version = "2.0"
-python-versions = "^3.7.8"
-content-hash = "33b28db7f600deff5cde3b4dfcdf6d56261e2d2a949c74e1d77d726642ab671b"
+python-versions = '^3.7.8'
+content-hash = "f1305094bd5d02b3de2633d8c863e6c32556b0474562328fca09b4e5d48adc02"
diff --git a/pyproject.toml b/pyproject.toml
index daa41d0..79fb271 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = 'beaupy'
-version = '3.5.0'
+version = '3.5.1'
description = 'A library of elements for interactive TUIs in Python'
authors = ['Peter Vyboch <[email protected]>']
license = 'MIT'
@@ -93,7 +93,14 @@ branch = true
relative_files = true
report = {skip_empty = true}
-[tool.poetry.dev-dependencies]
+[tool.poetry.group.test.dependencies]
+expycted = '*'
+mock = '*'
+poethepoet = '*'
+ward = '*'
+ward-coverage = "^0.3.0"
+
+[tool.poetry.group.dev.dependencies]
# Code Scanning and Formatting
mypy = '*'
pyproject-flake8 = "*"
| Some items not shown in multiselect
I have a really weird issue with `select_multiple`.
Consider this snippet:
```
from beaupy import select_multiple

colored_options = [
'1',
'2',
'3',
'4',
'5',
'6',
'7',
'8']
print(colored_options)
print(len(colored_options))
select_multiple(colored_options)
```
In container A, in a **python vscode terminal**, everything works as expected (screenshot: all 8 options are rendered).
In another container, in a **bash terminal**, I only ever see 5 items (screenshot: only 5 of the 8 options are rendered).
Why is the bash terminal only showing 5 items?
How can I debug this?
| Could you please share the version of `beaupy` this happens for?
What platform are you running on?
Version:
`3.3.0`
Working container OS:
```
NAME="Red Hat Enterprise Linux Server"
VERSION="7.9 (Maipo)"
```
Not working container OS:
```
PRETTY_NAME="Debian GNU/Linux 11 (bullseye)"
NAME="Debian GNU/Linux"
```
Really strange, why 5 and not 8?
Could you please verify it's not 3.5.0 on the Debian? In 3.5.0 I have been making changes that could have introduced this bug.
Sorry, I am not at the computer atm so I can't verify.
EDIT: I have taken a quick look on the phone. I have most probably introduced a bug that could be causing this in 3.5.0. I will investigate and fix today.
I am sorry, I gave you misinformation.
On the working RedHat it is `3.4.0`.
On the misbehaving Debian it is indeed `3.5.0`.
**Edit: confirmed! Version `3.5.0` also does not work on RedHat**
Thanks for the correction. The fix should be reasonably easy; I'll release **3.5.1** fixing this later today. | 2023-03-28T16:22:18 | 0.0 | [] | [] |
||
petereon/beaupy | petereon__beaupy-56 | 59da61dedfcfcc76723168bd31b6b4d68b395bc2 | diff --git a/beaupy/_beaupy.py b/beaupy/_beaupy.py
index e0c60f5..93bd1b0 100755
--- a/beaupy/_beaupy.py
+++ b/beaupy/_beaupy.py
@@ -5,13 +5,14 @@
__license__ = 'MIT'
+import math
import warnings
from typing import Any, Callable, List, Optional, Tuple, Type, Union
from rich.console import Console
from rich.live import Live
from yakh import get_key
-from yakh.key import Keys
+from yakh.key import Key, Keys
from beaupy._internals import (
Abort,
@@ -20,6 +21,8 @@
ValidationError,
_cursor_hidden,
_format_option_select,
+ _paginate_back,
+ _paginate_forward,
_render_option_select_multiple,
_render_prompt,
_update_rendered,
@@ -77,6 +80,46 @@ class Config:
raise_on_escape: bool = False
+_navigation_keys = [DefaultKeys.up, DefaultKeys.down, DefaultKeys.right, DefaultKeys.left, DefaultKeys.home, DefaultKeys.end]
+
+
+def _navigate_select(
+ index: int,
+ page: int,
+ keypress: Key,
+ total_options: int,
+ pagination: int,
+ total_pages: int,
+ page_size: int,
+ show_from: int,
+ show_to: int,
+) -> Tuple[int, int]:
+ if keypress in DefaultKeys.up:
+ if index <= show_from and pagination:
+ page = _paginate_back(page, total_pages)
+ index -= 1
+ index = index % total_options
+ elif keypress in DefaultKeys.down:
+ if index > show_to - 2 and pagination:
+ page = _paginate_forward(page, total_pages)
+ index += 1
+ index = index % total_options
+ elif keypress in DefaultKeys.right and pagination:
+ page = _paginate_forward(page, total_pages)
+ index = (page - 1) * page_size
+ elif keypress in DefaultKeys.left and pagination:
+ page = _paginate_back(page, total_pages)
+ index = (page - 1) * page_size
+ elif keypress in DefaultKeys.home:
+ page = 1
+ index = 0
+ elif keypress in DefaultKeys.end:
+ page = total_pages
+ index = total_options - 1
+
+ return index, page
+
+
def prompt(
prompt: str,
target_type: Type[TargetType] = str,
@@ -165,9 +208,6 @@ def prompt(
cursor_index += 1
-Selection = Union[int, Any]
-
-
def select(
options: List[Union[Tuple[int, ...], str]],
preprocessor: Callable[[Any], Any] = lambda val: val,
@@ -176,7 +216,9 @@ def select(
cursor_index: int = 0,
return_index: bool = False,
strict: bool = False,
-) -> Union[Selection, None]:
+ pagination: bool = False,
+ page_size: int = 5,
+) -> Union[int, Any, None]:
"""A prompt that allows selecting one option from a list of options
Args:
@@ -191,6 +233,8 @@ def select(
return_index (bool, optional): If `True`, `select` will return the index of selected element in options. Defaults to `False`.
strict (bool, optional): If empty `options` is provided and strict is `False`, None will be returned,
if it's `True`, `ValueError` will be thrown. Defaults to False.
+ pagination (bool, optional): If `True`, pagination will be used. Defaults to False.
+ page_size (int, optional): Number of options to show on a single page if pagination is enabled. Defaults to 5.
Raises:
ValueError: Thrown if no `options` are provided and strict is `True`
@@ -210,17 +254,22 @@ def select(
cursor_style = 'white'
index: int = cursor_index
+ page: int = index // page_size + 1
+ total_pages = math.ceil(len(options) / page_size)
while True:
+ show_from = (page - 1) * page_size
+ show_to = min(show_from + page_size, len(options))
rendered = (
'\n'.join(
[
_format_option_select(
- i=i, cursor_index=index, option=preprocessor(option), cursor_style=cursor_style, cursor=cursor
+ i=i, cursor_index=index % page_size, option=preprocessor(option), cursor_style=cursor_style, cursor=cursor
)
- for i, option in enumerate(options)
+ for i, option in enumerate(options[show_from:show_to])
]
)
+ + (f'[grey58]\n\nPage {page}/{total_pages}[/grey58]' if pagination and total_pages > 1 else '') # noqa: W503
+ '\n\n(Confirm with [bold]enter[/bold])' # noqa: W503
)
_update_rendered(live, rendered)
@@ -229,16 +278,8 @@ def select(
if Config.raise_on_interrupt:
raise KeyboardInterrupt()
return None
- elif keypress in DefaultKeys.up:
- index -= 1
- index = index % len(options)
- elif keypress in DefaultKeys.down:
- index += 1
- index = index % len(options)
- elif keypress in DefaultKeys.home:
- index = 0
- elif keypress in DefaultKeys.end:
- index = len(options) - 1
+ elif any([keypress in navigation_keys for navigation_keys in _navigation_keys]):
+ index, page = _navigate_select(index, page, keypress, len(options), pagination, total_pages, page_size, show_from, show_to)
elif keypress in DefaultKeys.confirm:
if return_index:
return index
@@ -249,9 +290,6 @@ def select(
return None
-Selections = List[Selection]
-
-
def select_multiple(
options: List[Union[Tuple[int, ...], str]],
preprocessor: Callable[[Any], Any] = lambda val: val,
@@ -264,7 +302,9 @@ def select_multiple(
maximal_count: Optional[int] = None,
return_indices: bool = False,
strict: bool = False,
-) -> Selections:
+ pagination: bool = False,
+ page_size: int = 5,
+) -> List[Union[int, Any]]:
"""A prompt that allows selecting multiple options from a list of options
Args:
@@ -284,6 +324,8 @@ def select_multiple(
of ticked elements in options. Defaults to `False`.
strict (bool, optional): If empty `options` is provided and strict is `False`, None will be returned,
if it's `True`, `ValueError` will be thrown. Defaults to False.
+ pagination (bool, optional): If `True`, pagination will be used. Defaults to False.
+ page_size (int, optional): Number of options to show on a single page if pagination is enabled. Defaults to 5.
Raises:
KeyboardInterrupt: Raised when keyboard interrupt is encountered and Config.raise_on_interrupt is True
@@ -307,23 +349,28 @@ def select_multiple(
ticked_indices = []
index = cursor_index
+ page: int = index // page_size + 1
+ total_pages = math.ceil(len(options) / page_size)
error_message = ''
while True:
+ show_from = (page - 1) * page_size
+ show_to = min(show_from + page_size, len(options))
rendered = (
'\n'.join(
[
_render_option_select_multiple(
option=preprocessor(option),
- ticked=i in ticked_indices,
+ ticked=i + show_from in ticked_indices,
tick_character=tick_character,
tick_style=tick_style,
- selected=i == index,
+ selected=i == index % page_size,
cursor_style=cursor_style,
)
- for i, option in enumerate(options)
+ for i, option in enumerate(options[show_from:show_to])
]
)
+ + (f'[grey58]\n\nPage {page}/{total_pages}[/grey58]' if pagination and total_pages > 1 else '') # noqa: W503
+ '\n\n(Mark with [bold]space[/bold], confirm with [bold]enter[/bold])' # noqa: W503
)
if error_message:
@@ -335,16 +382,8 @@ def select_multiple(
if Config.raise_on_interrupt:
raise KeyboardInterrupt()
return []
- elif keypress in DefaultKeys.up:
- index -= 1
- index = index % len(options)
- elif keypress in DefaultKeys.down:
- index += 1
- index = index % len(options)
- elif keypress in DefaultKeys.home:
- index = 0
- elif keypress in DefaultKeys.end:
- index = len(options) - 1
+ elif any([keypress in navigation_keys for navigation_keys in _navigation_keys]):
+ index, page = _navigate_select(index, page, keypress, len(options), pagination, total_pages, page_size, show_from, show_to)
elif keypress in DefaultKeys.select:
if index in ticked_indices:
ticked_indices.remove(index)
diff --git a/beaupy/_internals.py b/beaupy/_internals.py
index 45700e1..dc528f7 100644
--- a/beaupy/_internals.py
+++ b/beaupy/_internals.py
@@ -96,3 +96,19 @@ def _validate_prompt_value(
except ValueError:
error = f"Input {'<secure_input>' if secure else '`'+str_value+'`'} cannot be converted to type `{target_type}`"
raise ConversionError(error)
+
+
+def _paginate_forward(page_num: int, total_pages: int) -> int:
+ if page_num < total_pages:
+ page_num += 1
+ else:
+ page_num = 1
+ return page_num
+
+
+def _paginate_back(page: int, total_pages: int) -> int:
+ if page > 1:
+ page -= 1
+ else:
+ page = total_pages
+ return page
| select_multiple scrolling feature if the number of options exceeds the console size
I made a list with over 40 options for users to select from, but the console size isn't enough to show all of the options. Is there any way to scroll the options? (Or some feature like the chosen option staying in the middle while the whole list shifts up every time I hit the down arrow, until it reaches the last option.)
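For reference, a minimal usage sketch of the pagination API that the patch above introduces (the `pagination` and `page_size` parameters on `select_multiple`):
```
from beaupy import select_multiple

options = [str(i) for i in range(1, 41)]

# Show 5 options per page; per the patch, left/right arrows switch pages
# and up/down wrap across page boundaries.
chosen = select_multiple(options, pagination=True, page_size=5)
```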
| Hi @SamLo322, thank you for the issue.
This is currently a limitation; there is no way to have scrolling or pagination for the options.
I am accepting pull requests, so if you'd like to have a go at this yourself, feel free. Otherwise, I am going to have a look at this over the weekend.
Just an FYI to avoid duplicating efforts: I have implemented it today. It seems to be working, but I don't have it covered by tests yet and it is pending a little refactor. I expect to release it during the weekend. | 2023-03-22T22:22:31 | 0.0 | [] | [] |
||
petereon/beaupy | petereon__beaupy-51 | ec95f65dc6f73ba6014f2f4d9a316e04c7c3910b | diff --git a/beaupy/__init__.py b/beaupy/__init__.py
index d4aa784..ede18a3 100644
--- a/beaupy/__init__.py
+++ b/beaupy/__init__.py
@@ -8,4 +8,4 @@
select,
select_multiple,
)
-from beaupy._internals import ConversionError, ValidationError # noqa
+from beaupy._internals import Abort, ConversionError, ValidationError # noqa
diff --git a/beaupy/_beaupy.py b/beaupy/_beaupy.py
index c9d51e7..a196f23 100755
--- a/beaupy/_beaupy.py
+++ b/beaupy/_beaupy.py
@@ -14,6 +14,7 @@
from yakh.key import Keys
from beaupy._internals import (
+ Abort,
ConversionError,
TargetType,
ValidationError,
@@ -69,6 +70,7 @@ class Config:
"""
raise_on_interrupt: bool = False
+ raise_on_escape: bool = False
def prompt(
@@ -141,8 +143,6 @@ def prompt(
elif keypress in DefaultKeys.right:
if cursor_index < len(value):
cursor_index += 1
- elif keypress in DefaultKeys.escape:
- return None
elif keypress in DefaultKeys.up + DefaultKeys.down:
pass
elif keypress in DefaultKeys.home:
@@ -152,6 +152,10 @@ def prompt(
elif keypress in DefaultKeys.delete:
if cursor_index < len(value):
del value[cursor_index]
+ elif keypress in DefaultKeys.escape:
+ if Config.raise_on_escape:
+ raise Abort(keypress)
+ return None
else:
value.insert(cursor_index, str(keypress))
cursor_index += 1
@@ -236,6 +240,8 @@ def select(
return index
return options[index]
elif keypress in DefaultKeys.escape:
+ if Config.raise_on_escape:
+ raise Abort(keypress)
return None
@@ -351,6 +357,8 @@ def select_multiple(
else:
break
elif keypress in DefaultKeys.escape:
+ if Config.raise_on_escape:
+ raise Abort(keypress)
return []
if return_indices:
return ticked_indices
@@ -419,14 +427,16 @@ def confirm(
elif keypress in DefaultKeys.backspace:
if current_message:
current_message = current_message[:-1]
- elif keypress in DefaultKeys.escape:
- return None
elif keypress in DefaultKeys.confirm:
if is_selected:
break
elif keypress in DefaultKeys.tab:
if is_selected:
current_message = yes_text if is_yes else no_text
+ elif keypress in DefaultKeys.escape:
+ if Config.raise_on_escape:
+ raise Abort(keypress)
+ return None
else:
current_message += str(keypress)
match_yes = yes_text
diff --git a/beaupy/_internals.py b/beaupy/_internals.py
index c588b54..45700e1 100644
--- a/beaupy/_internals.py
+++ b/beaupy/_internals.py
@@ -5,6 +5,7 @@
import emoji
from rich.console import Console, ConsoleRenderable
from rich.live import Live
+from yakh.key import Key
TargetType = Any
@@ -17,6 +18,14 @@ class ConversionError(Exception):
pass
+class Abort(Exception):
+ key: Key
+
+ def __init__(self, key: Key) -> None:
+ super().__init__(f'Aborted by user with key {key.key if key.is_printable else key.key_codes}')
+ self.key = key
+
+
def _replace_emojis(text: str) -> str:
return str(emoji.replace_emoji(text, ' '))
diff --git a/pyproject.toml b/pyproject.toml
index 56dd536..ed62447 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = 'beaupy'
-version = '3.3.1'
+version = '3.4.0'
description = 'A library of elements for interactive TUIs in Python'
authors = ['Peter Vyboch <[email protected]>']
license = 'MIT'
| Use "ESC" key as abort
In my use case it would be great if I could detect that the user has pressed `ESC` in `select_multiple`.
Is this somehow possible?
| I have just checked and `ESC` is accepted as an _abort_ functionality in `select_multiple` on my Mac.
For keypress handling, beaupy uses `python_yakh` under the hood, which tries to abstract away the platform layer as much as possible, but I am sure there are still some hiccups.
Could you please elaborate on your platform and terminal emulator?
Further, if you wouldn't mind testing this for me, you could run this script and press `ESC` while it's running and post the resulting prints here.
```
from yakh import get_key
key = get_key()
print(key.key_codes)
print(bytes(key.key, 'utf-8'))
print(key.is_printable)
```
`yakh` is a dependency of `beaupy` so as long as you run this in the same virtual env you run `beaupy` in, it should work.
I am on
08:20 $ cat /etc/os-release
```
NAME="Red Hat Enterprise Linux Server"
VERSION="7.9 (Maipo)"
ID="rhel"
ID_LIKE="fedora"
VARIANT="Server"
VARIANT_ID="server"
VERSION_ID="7.9"
PRETTY_NAME="Red Hat Enterprise Linux Server 7.9 (Maipo)"
ANSI_COLOR="0;31"
CPE_NAME="cpe:/o:redhat:enterprise_linux:7.9:GA:server"
HOME_URL="https://www.redhat.com/"
BUG_REPORT_URL="https://bugzilla.redhat.com/"
REDHAT_BUGZILLA_PRODUCT="Red Hat Enterprise Linux 7"
REDHAT_BUGZILLA_PRODUCT_VERSION=7.9
REDHAT_SUPPORT_PRODUCT="Red Hat Enterprise Linux"
REDHAT_SUPPORT_PRODUCT_VERSION="7.9"
```
Running your snippets yields
```
(snaputils-i_jQa2nS-py3.9) ✔ .../coredev [main ↑·1|✚ 7⚑ 22]
08:22 $ /home/user/.cache/pypoetry/virtualenvs/snaputils-i_jQa2nS-py3.9/bin/python /test/playground.py
(27,)
b'\x1b'
False
```
I need to distinguish between `ENTER` and `ESC`, which both yield no selection on my side. But `ENTER` should mean no selection and `ESC` should mean abort altogether.
I see, would raising a `KeyboardInterrupt` on escape and handling it yourself be a good alternative?
If so, you could edit the global `Config` object, setting `.raise_on_interrupt` to `True`, and additionally edit the `DefaultKeys` object, setting `.escape` to `[]` and appending `Keys.ESC` to the `.interrupt` attribute.
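A minimal sketch of that workaround, assuming `Config` and `DefaultKeys` are importable from `beaupy` and `Keys` from `yakh.key` (as this thread suggests):
```
from beaupy import Config, DefaultKeys, select_multiple
from yakh.key import Keys

Config.raise_on_interrupt = True        # interrupts raise instead of returning
DefaultKeys.escape = []                 # stop treating ESC as a silent abort
DefaultKeys.interrupt.append(Keys.ESC)  # ESC now behaves like an interrupt

try:
    picks = select_multiple(['a', 'b', 'c'])
except KeyboardInterrupt:
    print('aborted via ESC (or Ctrl-C)')
```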
Just for my reference, what exact behavior would you envision for `ESC` key? Should it raise some class of `Exception`?
I am not a big fan of exceptions. Returning the last key pressed (as a tuple element alongside the already existing return values) would be the most flexible, I guess.
So I did
```
elif keypress in DefaultKeys.escape:
    if Config.raise_on_interrupt:
        raise KeyboardInterrupt()
```
which is OK for me.
Is it also OK for you?
Possible to release a patch version? I really think that `ENTER` and `ESC` should have different semantics when the user selects nothing.
So I agree that `ENTER` and `ESC` might benefit from different semantics, but I don't want to be releasing breaking changes, so having the functions return tuples is not viable for me.
I would say my best option is actually going for a second config option which controls whether to raise an exception on the `ESC` key, defaulting to the current behavior of not raising on `ESC`.
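For illustration, a sketch of the caller's side of that option, matching the `raise_on_escape` flag and `Abort` exception in this PR's patch above (assuming `Config` is exported from `beaupy` alongside the select functions):
```
from beaupy import Abort, Config, select_multiple

Config.raise_on_escape = True

try:
    picks = select_multiple(['red', 'green', 'blue'])
except Abort as exc:
    print(exc)  # e.g. 'Aborted by user with key ...'
```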
Coming from languages that have a proper way to deal with nothingness, I appreciate that exceptions are suboptimal, but they are canonical in Python.
Good point on breaking changes.
Your proposal sounds reasonable and it would be great if you could release this patch soon.
Hope you have simple deploy scripts ;)
I should be able to get it out today, just much later, I am at work atm :sweat_smile:
Me2 :) Thx for your prompt effort. I will close this issue. | 2023-02-03T11:11:42 | 0.0 | [] | [] |
||
petereon/beaupy | petereon__beaupy-27 | 18595fdc83ffadcc21583a8ad0fa39fb7f2c0626 | diff --git a/beaupy/__init__.py b/beaupy/__init__.py
index 40b6b81..38ac0a5 100644
--- a/beaupy/__init__.py
+++ b/beaupy/__init__.py
@@ -1,1 +1,2 @@
+import beaupy.spinners as spinners # noqa
from beaupy.beaupy import * # noqa
diff --git a/beaupy/spinners.py b/beaupy/spinners.py
new file mode 100644
index 0000000..6021cf4
--- /dev/null
+++ b/beaupy/spinners.py
@@ -0,0 +1,37 @@
+from itertools import cycle
+from typing import List
+
+from rich.live import Live
+
+ARC = ['◜', '◠', '◝', '◞', '◡', '◟']
+ARROWS = ['←', '↖', '↑', '↗', '→', '↘', '↓', '↙']
+BARS = ['▁', '▃', '▄', '▅', '▆', '▇', '█', '▇', '▆', '▅', '▄', '▃']
+CLOCK = ['🕛 ', '🕐 ', '🕑 ', '🕒 ', '🕓 ', '🕔 ', '🕕 ', '🕖 ', '🕗 ', '🕘 ', '🕙 ', '🕚 ']
+DIAMOND = ['◇', '◈', '◆']
+DOT = ['⠁', '⠂', '⠄', '⡀', '⢀', '⠠', '⠐', '⠈']
+DOTS = ['⣾', '⣽', '⣻', '⢿', '⡿', '⣟', '⣯', '⣷']
+LINE = ['|', '/', '-', '\\']
+LOADING = ['l ', 'lo ', 'loa ', 'load ', 'loadi ', 'loadin ', 'loading']
+MOON = ['🌑', '🌒', '🌓', '🌔', '🌕', '🌖', '🌗', '🌘']
+
+
+class CustomSpinner:
+ _spinner_characters: cycle
+ _live_display: Live
+
+ def __init__(self, spinner_characters: List[str], text: str, refresh_per_second: float = 4, transient: bool = True):
+ if len(spinner_characters) == 0:
+ raise ValueError('`spinner_characters` can\'t be empty')
+ self._spinner_characters = cycle(spinner_characters)
+ self._live_display = Live(
+ '',
+ transient=transient,
+ refresh_per_second=refresh_per_second,
+ get_renderable=lambda: f'{next(self._spinner_characters)} {text}',
+ )
+
+ def start(self) -> None:
+ self._live_display.start()
+
+ def stop(self) -> None:
+ self._live_display.stop()
| Customizable spinners
Loading spinners with customizable animation characters and a custom interval between frame changes
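A minimal usage sketch of the `CustomSpinner` this patch adds, importable via the new `beaupy.spinners` module:
```
import time

from beaupy.spinners import DOTS, CustomSpinner

spinner = CustomSpinner(DOTS, 'Loading...', refresh_per_second=10)
spinner.start()
time.sleep(2)  # stand-in for actual work
spinner.stop()
```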
| 2022-09-04T16:00:34 | 0.0 | [] | [] |
|||
petereon/beaupy | petereon__beaupy-26 | 307d37b1a2d6879d2a20d7bdb274a0060ed7ca42 | diff --git a/beaupy/beaupy.py b/beaupy/beaupy.py
index a3452f7..3f9241a 100755
--- a/beaupy/beaupy.py
+++ b/beaupy/beaupy.py
@@ -11,15 +11,16 @@
import readchar
from rich.console import Console
+from rich.live import Live
from beaupy.internals import (
ConversionError,
ValidationError,
cursor_hidden,
format_option_select,
- format_option_select_multiple,
- render,
- reset_lines,
+ render_option_select_multiple,
+ render_prompt,
+ update_rendered,
)
console = Console()
@@ -68,6 +69,8 @@ def prompt(
target_type: Type[TargetType] = str,
validator: Callable[[TargetType], bool] = lambda input: True,
secure: bool = False,
+ raise_validation_fail: bool = True,
+ raise_type_conversion_fail: bool = True,
) -> TargetType:
"""Function that prompts the user for written input
@@ -85,11 +88,15 @@ def prompt(
Returns:
Union[T, str]: Returns a value formatted as provided type or string if no type is provided
"""
- with cursor_hidden():
+ rendered = ''
+ with cursor_hidden(console), Live(rendered, console=console, auto_refresh=False, transient=True) as live:
value: List[str] = []
cursor_index = 0
- render(secure, [], prompt, len(value), console)
+ error: str = ''
while True:
+ rendered = render_prompt(secure, value, prompt, cursor_index, error)
+ error = ''
+ update_rendered(live, rendered)
keypress = readchar.readkey()
if keypress in DefaultKeys.confirm:
str_value = ''.join(value)
@@ -103,24 +110,23 @@ def prompt(
if validator(result):
return result
else:
- raise ValidationError(f"`{'secure input' if secure else str_value}` cannot be validated")
+ error = f"Input {'<secure_input>' if secure else '`'+str_value+'`'} is invalid"
+ if raise_validation_fail:
+ raise ValidationError(error)
except ValueError:
- raise ConversionError(
- f"`{'secure input' if secure else str_value}` cannot be converted to type `{target_type}`"
- ) from None
+ error = f"Input {'<secure_input>' if secure else '`'+str_value+'`'} cannot be converted to type `{target_type}`"
+ if raise_type_conversion_fail:
+ raise ConversionError(error) from None
elif keypress in DefaultKeys.delete:
if cursor_index > 0:
cursor_index -= 1
del value[cursor_index]
- render(secure, value, prompt, cursor_index, console)
elif keypress in DefaultKeys.left:
if cursor_index > 0:
cursor_index -= 1
- render(secure, value, prompt, cursor_index, console)
elif keypress in DefaultKeys.right:
if cursor_index < len(value):
cursor_index += 1
- render(secure, value, prompt, cursor_index, console)
elif keypress in DefaultKeys.interrupt:
if Config.raise_on_interrupt:
raise KeyboardInterrupt
@@ -128,7 +134,6 @@ def prompt(
else:
value.insert(cursor_index, keypress)
cursor_index += 1
- render(secure, value, prompt, cursor_index, console)
Selection = Union[int, Any]
@@ -165,7 +170,8 @@ def select(
Returns:
Union[int, str, None]: Selected value or the index of a selected option or `None`
"""
- with cursor_hidden():
+ rendered = ''
+ with cursor_hidden(console), Live(rendered, console=console, auto_refresh=False, transient=True) as live:
if not options:
if strict:
raise ValueError('`options` cannot be empty')
@@ -177,16 +183,15 @@ def select(
index: int = cursor_index
while True:
- console.print(
- '\n'.join(
- [
- format_option_select(i=i, cursor_index=index, option=preprocessor(option), cursor_style=cursor_style, cursor=cursor)
- for i, option in enumerate(options)
- ]
- )
+ rendered = '\n'.join(
+ [
+ format_option_select(i=i, cursor_index=index, option=preprocessor(option), cursor_style=cursor_style, cursor=cursor)
+ for i, option in enumerate(options)
+ ]
)
- reset_lines(len(options))
+ update_rendered(live, rendered)
keypress = readchar.readkey()
+
if keypress in DefaultKeys.up:
if index > 0:
index -= 1
@@ -245,7 +250,8 @@ def select_multiple(
Returns:
Union[List[str], List[int]]: A list of selected values or indices of selected options
"""
- with cursor_hidden():
+ rendered = ''
+ with cursor_hidden(console), Live(rendered, console=console, auto_refresh=False, transient=True) as live:
if not options:
if strict:
raise ValueError('`options` cannot be empty')
@@ -264,23 +270,25 @@ def select_multiple(
max_index = len(options) - (1 if True else 0)
error_message = ''
while True:
- console.print(
- '\n'.join(
- [
- format_option_select_multiple(
- option=preprocessor(option),
- ticked=i in ticked_indices,
- tick_character=tick_character,
- tick_style=tick_style,
- selected=i == index,
- cursor_style=cursor_style,
- )
- for i, option in enumerate(options)
- ]
- )
+ rendered = '\n'.join(
+ [
+ render_option_select_multiple(
+ option=preprocessor(option),
+ ticked=i in ticked_indices,
+ tick_character=tick_character,
+ tick_style=tick_style,
+ selected=i == index,
+ cursor_style=cursor_style,
+ )
+ for i, option in enumerate(options)
+ ]
)
- reset_lines(len(options))
+ if error_message:
+ rendered = f'{rendered}\n[red]Error:[/red] {error_message}'
+ error_message = ''
+ update_rendered(live, rendered)
keypress = readchar.readkey()
+
if keypress in DefaultKeys.up:
if index > 0:
index -= 1
@@ -289,8 +297,7 @@ def select_multiple(
index += 1
elif keypress in DefaultKeys.select:
if index in ticked_indices:
- if len(ticked_indices) - 1 >= minimal_count:
- ticked_indices.remove(index)
+ ticked_indices.remove(index)
elif maximal_count is not None:
if len(ticked_indices) + 1 <= maximal_count:
ticked_indices.append(index)
@@ -307,9 +314,6 @@ def select_multiple(
if Config.raise_on_interrupt:
raise KeyboardInterrupt
return []
- if error_message:
- console.print(error_message)
- error_message = ''
if return_indices:
return ticked_indices
return [options[i] for i in ticked_indices]
@@ -345,7 +349,8 @@ def confirm(
Returns:
Optional[bool]
"""
- with cursor_hidden():
+ rendered = ''
+ with cursor_hidden(console), Live(rendered, console=console, auto_refresh=False, transient=True) as live:
if cursor_style in ['', None]:
logging.warning('`cursor_style` should be a valid style, defaulting to `white`')
cursor_style = 'white'
@@ -361,9 +366,10 @@ def confirm(
question_line = f'{question}{yn_prompt}{current_message}'
yes_prefix = selected_prefix if yes else deselected_prefix
no_prefix = selected_prefix if no else deselected_prefix
- console.print(f'{question_line}\n{yes_prefix}{yes_text}\n{no_prefix}{no_text}')
- reset_lines(3)
+ rendered = f'{question_line}\n{yes_prefix}{yes_text}\n{no_prefix}{no_text}'
+ update_rendered(live, rendered)
keypress = readchar.readkey()
+
if keypress in DefaultKeys.down or keypress in DefaultKeys.up:
is_yes = not is_yes
is_selected = True
diff --git a/beaupy/internals.py b/beaupy/internals.py
index ecb44ef..c183de6 100644
--- a/beaupy/internals.py
+++ b/beaupy/internals.py
@@ -1,9 +1,9 @@
from contextlib import contextmanager
-from sys import stdout
-from typing import Iterator, List
+from typing import Iterator, List, Union
import emoji
-from rich.console import Console
+from rich.console import Console, ConsoleRenderable
+from rich.live import Live
class ValidationError(Exception):
@@ -24,7 +24,7 @@ def format_option_select(i: int, cursor_index: int, option: str, cursor_style: s
)
-def format_option_select_multiple(
+def render_option_select_multiple(
option: str, ticked: bool, tick_character: str, tick_style: str, selected: bool, cursor_style: str
) -> str:
prefix = '\[{}]'.format(' ' * len(__replace_emojis(tick_character))) # noqa: W605
@@ -35,12 +35,13 @@ def format_option_select_multiple(
return f'{prefix} {option}'
-def reset_lines(num_lines: int) -> None:
- stdout.write(f'\x1b[{num_lines}F\x1b[0J')
+def update_rendered(live: Live, renderable: Union[ConsoleRenderable, str]) -> None:
+ live.update(renderable=renderable)
+ live.refresh()
-def render(secure: bool, return_value: List[str], prompt: str, cursor_position: int, console: Console) -> None:
- render_value = (len(return_value) * '*' if secure else ''.join(return_value)) + ' '
+def render_prompt(secure: bool, typed_values: List[str], prompt: str, cursor_position: int, error: str) -> str:
+ render_value = (len(typed_values) * '*' if secure else ''.join(typed_values)) + ' '
render_value = (
render_value[:cursor_position]
+ '[black on white]' # noqa: W503
@@ -48,20 +49,14 @@ def render(secure: bool, return_value: List[str], prompt: str, cursor_position:
+ '[/black on white]' # noqa: W503
+ render_value[(cursor_position + 1) :] # noqa: W503,E203
)
- console.print(f'{prompt}\n> {render_value}')
- reset_lines(2)
-
-
-def hide_cursor() -> None:
- stdout.write('\x1b[?25l')
-
-
-def show_cursor() -> None:
- stdout.write('\x1b[?25h')
+ render_value = f'{prompt}\n> {render_value}'
+ if error:
+ render_value = f'{render_value}\n[red]Error:[/red] {error}'
+ return render_value
@contextmanager
-def cursor_hidden() -> Iterator:
- hide_cursor()
+def cursor_hidden(console: Console) -> Iterator:
+ console.show_cursor(False)
yield
- show_cursor()
+ console.show_cursor(True)
diff --git a/pyproject.toml b/pyproject.toml
index 6b71b3d..37a0f46 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = 'beaupy'
-version = '1.2.0'
+version = '1.3.0'
description = 'A library of elements for interactive TUIs in Python'
authors = ['Peter Vyboch <[email protected]>']
license = 'MIT'
| Give an option not to raise when a validation error or inconvertible types are encountered in `prompt`
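A sketch of the resulting call, per the `raise_validation_fail` and `raise_type_conversion_fail` flags this patch adds to `prompt`; with both set to `False`, the error is rendered inline and the user is re-prompted instead of an exception being raised:
```
from beaupy import prompt

age = prompt(
    'How old are you?',
    target_type=int,
    validator=lambda v: v >= 0,
    raise_validation_fail=False,       # show inline error, keep prompting
    raise_type_conversion_fail=False,  # same for inputs that are not ints
)
```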
| 2022-09-03T16:34:19 | 0.0 | [] | [] |
|||
petereon/beaupy | petereon__beaupy-24 | 5077b1f29c8c0d85e822b2ad6dbe05dafea7bd7a | diff --git a/beaupy/beaupy.py b/beaupy/beaupy.py
index edbd06d..a3452f7 100755
--- a/beaupy/beaupy.py
+++ b/beaupy/beaupy.py
@@ -15,6 +15,7 @@
from beaupy.internals import (
ConversionError,
ValidationError,
+ cursor_hidden,
format_option_select,
format_option_select_multiple,
render,
@@ -42,6 +43,10 @@ class DefaultKeys:
delete: List[str] = [readchar.key.BACKSPACE]
down: List[str] = [readchar.key.DOWN]
up: List[str] = [readchar.key.UP]
+ left: List[str] = [readchar.key.LEFT]
+ right: List[str] = [readchar.key.RIGHT]
+ home: List[str] = [readchar.key.HOME]
+ end: List[str] = [readchar.key.END]
class Config:
@@ -80,34 +85,50 @@ def prompt(
Returns:
Union[T, str]: Returns a value formatted as provided type or string if no type is provided
"""
- value: str = ''
- render(secure, '', prompt, console)
- while True:
- keypress = readchar.readkey()
- if keypress in DefaultKeys.confirm:
- try:
- if target_type is bool:
- result: bool = literal_eval(value)
- if not isinstance(result, bool):
- raise ValueError()
- else:
- result: target_type = target_type(value) # type: ignore
- if validator(result):
- return result
- else:
- raise ValidationError(f"`{'secure input' if secure else value}` cannot be validated")
- except ValueError:
- raise ConversionError(f"`{'secure input' if secure else value}` cannot be converted to type `{target_type}`") from None
- elif keypress in DefaultKeys.delete:
- value = value[:-1]
- render(secure, value, prompt, console)
- elif keypress in DefaultKeys.interrupt:
- if Config.raise_on_interrupt:
- raise KeyboardInterrupt
- return None
- else:
- value += keypress
- render(secure, value, prompt, console)
+ with cursor_hidden():
+ value: List[str] = []
+ cursor_index = 0
+ render(secure, [], prompt, len(value), console)
+ while True:
+ keypress = readchar.readkey()
+ if keypress in DefaultKeys.confirm:
+ str_value = ''.join(value)
+ try:
+ if target_type is bool:
+ result: bool = literal_eval(str_value)
+ if not isinstance(result, bool):
+ raise ValueError()
+ else:
+ result: target_type = target_type(str_value) # type: ignore
+ if validator(result):
+ return result
+ else:
+ raise ValidationError(f"`{'secure input' if secure else str_value}` cannot be validated")
+ except ValueError:
+ raise ConversionError(
+ f"`{'secure input' if secure else str_value}` cannot be converted to type `{target_type}`"
+ ) from None
+ elif keypress in DefaultKeys.delete:
+ if cursor_index > 0:
+ cursor_index -= 1
+ del value[cursor_index]
+ render(secure, value, prompt, cursor_index, console)
+ elif keypress in DefaultKeys.left:
+ if cursor_index > 0:
+ cursor_index -= 1
+ render(secure, value, prompt, cursor_index, console)
+ elif keypress in DefaultKeys.right:
+ if cursor_index < len(value):
+ cursor_index += 1
+ render(secure, value, prompt, cursor_index, console)
+ elif keypress in DefaultKeys.interrupt:
+ if Config.raise_on_interrupt:
+ raise KeyboardInterrupt
+ return None
+ else:
+ value.insert(cursor_index, keypress)
+ cursor_index += 1
+ render(secure, value, prompt, cursor_index, console)
Selection = Union[int, Any]
@@ -144,41 +165,42 @@ def select(
Returns:
Union[int, str, None]: Selected value or the index of a selected option or `None`
"""
- if not options:
- if strict:
- raise ValueError('`options` cannot be empty')
- return None
- if cursor_style in ['', None]:
- logging.warning('`cursor_style` should be a valid style, defaulting to `white`')
- cursor_style = 'white'
-
- index: int = cursor_index
-
- while True:
- console.print(
- '\n'.join(
- [
- format_option_select(i=i, cursor_index=index, option=preprocessor(option), cursor_style=cursor_style, cursor=cursor)
- for i, option in enumerate(options)
- ]
- )
- )
- reset_lines(len(options))
- keypress = readchar.readkey()
- if keypress in DefaultKeys.up:
- if index > 0:
- index -= 1
- elif keypress in DefaultKeys.down:
- if index < len(options) - 1:
- index += 1
- elif keypress in DefaultKeys.confirm:
- if return_index:
- return index
- return options[index]
- elif keypress in DefaultKeys.interrupt:
- if Config.raise_on_interrupt:
- raise KeyboardInterrupt
+ with cursor_hidden():
+ if not options:
+ if strict:
+ raise ValueError('`options` cannot be empty')
return None
+ if cursor_style in ['', None]:
+ logging.warning('`cursor_style` should be a valid style, defaulting to `white`')
+ cursor_style = 'white'
+
+ index: int = cursor_index
+
+ while True:
+ console.print(
+ '\n'.join(
+ [
+ format_option_select(i=i, cursor_index=index, option=preprocessor(option), cursor_style=cursor_style, cursor=cursor)
+ for i, option in enumerate(options)
+ ]
+ )
+ )
+ reset_lines(len(options))
+ keypress = readchar.readkey()
+ if keypress in DefaultKeys.up:
+ if index > 0:
+ index -= 1
+ elif keypress in DefaultKeys.down:
+ if index < len(options) - 1:
+ index += 1
+ elif keypress in DefaultKeys.confirm:
+ if return_index:
+ return index
+ return options[index]
+ elif keypress in DefaultKeys.interrupt:
+ if Config.raise_on_interrupt:
+ raise KeyboardInterrupt
+ return None
Selections = List[Selection]
@@ -223,73 +245,74 @@ def select_multiple(
Returns:
Union[List[str], List[int]]: A list of selected values or indices of selected options
"""
- if not options:
- if strict:
- raise ValueError('`options` cannot be empty')
- return []
- if cursor_style in ['', None]:
- logging.warning('`cursor_style` should be a valid style, defaulting to `white`')
- cursor_style = 'white'
- if tick_style in ['', None]:
- logging.warning('`tick_style` should be a valid style, defaulting to `white`')
- tick_style = 'white'
- if ticked_indices is None:
- ticked_indices = []
-
- index = cursor_index
-
- max_index = len(options) - (1 if True else 0)
- error_message = ''
- while True:
- console.print(
- '\n'.join(
- [
- format_option_select_multiple(
- option=preprocessor(option),
- ticked=i in ticked_indices,
- tick_character=tick_character,
- tick_style=tick_style,
- selected=i == index,
- cursor_style=cursor_style,
- )
- for i, option in enumerate(options)
- ]
+ with cursor_hidden():
+ if not options:
+ if strict:
+ raise ValueError('`options` cannot be empty')
+ return []
+ if cursor_style in ['', None]:
+ logging.warning('`cursor_style` should be a valid style, defaulting to `white`')
+ cursor_style = 'white'
+ if tick_style in ['', None]:
+ logging.warning('`tick_style` should be a valid style, defaulting to `white`')
+ tick_style = 'white'
+ if ticked_indices is None:
+ ticked_indices = []
+
+ index = cursor_index
+
+ max_index = len(options) - (1 if True else 0)
+ error_message = ''
+ while True:
+ console.print(
+ '\n'.join(
+ [
+ format_option_select_multiple(
+ option=preprocessor(option),
+ ticked=i in ticked_indices,
+ tick_character=tick_character,
+ tick_style=tick_style,
+ selected=i == index,
+ cursor_style=cursor_style,
+ )
+ for i, option in enumerate(options)
+ ]
+ )
)
- )
- reset_lines(len(options))
- keypress = readchar.readkey()
- if keypress in DefaultKeys.up:
- if index > 0:
- index -= 1
- elif keypress in DefaultKeys.down:
- if index + 1 <= max_index:
- index += 1
- elif keypress in DefaultKeys.select:
- if index in ticked_indices:
- if len(ticked_indices) - 1 >= minimal_count:
- ticked_indices.remove(index)
- elif maximal_count is not None:
- if len(ticked_indices) + 1 <= maximal_count:
+ reset_lines(len(options))
+ keypress = readchar.readkey()
+ if keypress in DefaultKeys.up:
+ if index > 0:
+ index -= 1
+ elif keypress in DefaultKeys.down:
+ if index + 1 <= max_index:
+ index += 1
+ elif keypress in DefaultKeys.select:
+ if index in ticked_indices:
+ if len(ticked_indices) - 1 >= minimal_count:
+ ticked_indices.remove(index)
+ elif maximal_count is not None:
+ if len(ticked_indices) + 1 <= maximal_count:
+ ticked_indices.append(index)
+ else:
+ error_message = f'Must select at most {maximal_count} options'
+ else:
ticked_indices.append(index)
+ elif keypress in DefaultKeys.confirm:
+ if minimal_count > len(ticked_indices):
+ error_message = f'Must select at least {minimal_count} options'
else:
- error_message = f'Must select at most {maximal_count} options'
- else:
- ticked_indices.append(index)
- elif keypress in DefaultKeys.confirm:
- if minimal_count > len(ticked_indices):
- error_message = f'Must select at least {minimal_count} options'
- else:
- break
- elif keypress in DefaultKeys.interrupt:
- if Config.raise_on_interrupt:
- raise KeyboardInterrupt
- return []
- if error_message:
- console.print(error_message)
- error_message = ''
- if return_indices:
- return ticked_indices
- return [options[i] for i in ticked_indices]
+ break
+ elif keypress in DefaultKeys.interrupt:
+ if Config.raise_on_interrupt:
+ raise KeyboardInterrupt
+ return []
+ if error_message:
+ console.print(error_message)
+ error_message = ''
+ if return_indices:
+ return ticked_indices
+ return [options[i] for i in ticked_indices]
def confirm(
@@ -322,56 +345,57 @@ def confirm(
Returns:
Optional[bool]
"""
- if cursor_style in ['', None]:
- logging.warning('`cursor_style` should be a valid style, defaulting to `white`')
- cursor_style = 'white'
- is_yes = default_is_yes
- is_selected = enter_empty_confirms
- current_message = ''
- yn_prompt = f' ({yes_text[0]}/{no_text[0]}) ' if char_prompt else ': '
- selected_prefix = f'[{cursor_style}]{cursor}[/{cursor_style}] '
- deselected_prefix = (' ' * len(cursor)) + ' '
- while True:
- yes = is_yes and is_selected
- no = not is_yes and is_selected
- question_line = f'{question}{yn_prompt}{current_message}'
- yes_prefix = selected_prefix if yes else deselected_prefix
- no_prefix = selected_prefix if no else deselected_prefix
- console.print(f'{question_line}\n{yes_prefix}{yes_text}\n{no_prefix}{no_text}')
- reset_lines(3)
- keypress = readchar.readkey()
- if keypress in DefaultKeys.down or keypress in DefaultKeys.up:
- is_yes = not is_yes
- is_selected = True
- current_message = yes_text if is_yes else no_text
- elif keypress in DefaultKeys.delete:
- if current_message:
- current_message = current_message[:-1]
- elif keypress in DefaultKeys.interrupt:
- if Config.raise_on_interrupt:
- raise KeyboardInterrupt
- return None
- elif keypress in DefaultKeys.confirm:
- if is_selected:
- break
- elif keypress in '\t':
- if is_selected:
- current_message = yes_text if is_yes else no_text
- else:
- current_message += keypress
- match_yes = yes_text
- match_no = no_text
- match_text = current_message
- if not has_to_match_case:
- match_yes = match_yes.upper()
- match_no = match_no.upper()
- match_text = match_text.upper()
- if match_no.startswith(match_text):
- is_selected = True
- is_yes = False
- elif match_yes.startswith(match_text):
+ with cursor_hidden():
+ if cursor_style in ['', None]:
+ logging.warning('`cursor_style` should be a valid style, defaulting to `white`')
+ cursor_style = 'white'
+ is_yes = default_is_yes
+ is_selected = enter_empty_confirms
+ current_message = ''
+ yn_prompt = f' ({yes_text[0]}/{no_text[0]}) ' if char_prompt else ': '
+ selected_prefix = f'[{cursor_style}]{cursor}[/{cursor_style}] '
+ deselected_prefix = (' ' * len(cursor)) + ' '
+ while True:
+ yes = is_yes and is_selected
+ no = not is_yes and is_selected
+ question_line = f'{question}{yn_prompt}{current_message}'
+ yes_prefix = selected_prefix if yes else deselected_prefix
+ no_prefix = selected_prefix if no else deselected_prefix
+ console.print(f'{question_line}\n{yes_prefix}{yes_text}\n{no_prefix}{no_text}')
+ reset_lines(3)
+ keypress = readchar.readkey()
+ if keypress in DefaultKeys.down or keypress in DefaultKeys.up:
+ is_yes = not is_yes
is_selected = True
- is_yes = True
+ current_message = yes_text if is_yes else no_text
+ elif keypress in DefaultKeys.delete:
+ if current_message:
+ current_message = current_message[:-1]
+ elif keypress in DefaultKeys.interrupt:
+ if Config.raise_on_interrupt:
+ raise KeyboardInterrupt
+ return None
+ elif keypress in DefaultKeys.confirm:
+ if is_selected:
+ break
+ elif keypress in '\t':
+ if is_selected:
+ current_message = yes_text if is_yes else no_text
else:
- is_selected = False
- return is_selected and is_yes
+ current_message += keypress
+ match_yes = yes_text
+ match_no = no_text
+ match_text = current_message
+ if not has_to_match_case:
+ match_yes = match_yes.upper()
+ match_no = match_no.upper()
+ match_text = match_text.upper()
+ if match_no.startswith(match_text):
+ is_selected = True
+ is_yes = False
+ elif match_yes.startswith(match_text):
+ is_selected = True
+ is_yes = True
+ else:
+ is_selected = False
+ return is_selected and is_yes
diff --git a/beaupy/internals.py b/beaupy/internals.py
index 540c65f..ecb44ef 100644
--- a/beaupy/internals.py
+++ b/beaupy/internals.py
@@ -1,4 +1,6 @@
+from contextlib import contextmanager
from sys import stdout
+from typing import Iterator, List
import emoji
from rich.console import Console
@@ -34,11 +36,32 @@ def format_option_select_multiple(
def reset_lines(num_lines: int) -> None:
- for _ in range(num_lines):
- stdout.write('\x1b[2K\033[F\x1b[2K')
+ stdout.write(f'\x1b[{num_lines}F\x1b[0J')
-def render(secure: bool, return_value: str, prompt: str, console: Console) -> None:
- render_value = len(return_value) * '*' if secure else return_value
+def render(secure: bool, return_value: List[str], prompt: str, cursor_position: int, console: Console) -> None:
+ render_value = (len(return_value) * '*' if secure else ''.join(return_value)) + ' '
+ render_value = (
+ render_value[:cursor_position]
+ + '[black on white]' # noqa: W503
+ + render_value[cursor_position] # noqa: W503
+ + '[/black on white]' # noqa: W503
+ + render_value[(cursor_position + 1) :] # noqa: W503,E203
+ )
console.print(f'{prompt}\n> {render_value}')
reset_lines(2)
+
+
+def hide_cursor() -> None:
+ stdout.write('\x1b[?25l')
+
+
+def show_cursor() -> None:
+ stdout.write('\x1b[?25h')
+
+
+@contextmanager
+def cursor_hidden() -> Iterator:
+ hide_cursor()
+ yield
+ show_cursor()
diff --git a/pyproject.toml b/pyproject.toml
index 0a0a84c..6b71b3d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = 'beaupy'
-version = '1.1.0'
+version = '1.2.0'
description = 'A library of elements for interactive TUIs in Python'
authors = ['Peter Vyboch <[email protected]>']
license = 'MIT'
@@ -27,7 +27,7 @@ black = { shell = 'poetry run black ./beaupy --skip-string-normalization' }
mypy = { shell = 'poetry run mypy ./beaupy/' }
flake8 = { shell = 'poetry run pflake8 ./beaupy/' }
perflint = { shell = "poetry run perflint ./beaupy/" }
-lint = { shell = 'echo "Running isort..." ; poetry run poe isort; echo "Running black..." ; poetry run poe black ; echo "Running unify..." ; poetry run unify ./beaupy -r -i ; echo "Running flake8..." ; poetry run poe flake8; echo Done!' }
+lint = { shell = 'echo "Running isort..." ; poetry run poe isort; echo "Running black..." ; poetry run poe black ; echo "Running unify..." ; poetry run unify ./beaupy -r -i ; echo "Running flake8..." ; poetry run poe flake8 ; echo "Running mypy..." ; poetry run poe mypy ; echo Done!' }
"lint:watch" = { shell = "poetry run poe lint ; poetry run watchmedo shell-command --patterns='*.py;*.feature;*.toml' --recursive --drop --command='poetry run poe lint'" }
"test" = { shell = "poetry run ward" }
| Prompt places the cursor in the wrong place; arrow-key navigation would also be nice
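For reference, a small illustration of the inverted-block cursor rendering this patch implements; it mirrors the rich markup built in `render()` in `beaupy/internals.py`:
```
value, cursor_position = list('hello'), 2
render_value = ''.join(value) + ' '
render_value = (
    render_value[:cursor_position]
    + '[black on white]' + render_value[cursor_position] + '[/black on white]'
    + render_value[cursor_position + 1:]
)
print(render_value)  # -> he[black on white]l[/black on white]lo
```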
| 2022-09-01T08:26:12 | 0.0 | [] | [] |
|||
petereon/beaupy | petereon__beaupy-22 | d79c0751bc165e2545ec6f014db9549749d2f8e2 | diff --git a/beaupy/beaupy.py b/beaupy/beaupy.py
index 9494604..edbd06d 100755
--- a/beaupy/beaupy.py
+++ b/beaupy/beaupy.py
@@ -55,12 +55,15 @@ class Config:
raise_on_interrupt: bool = False
+TargetType = Any
+
+
def prompt(
prompt: str,
- target_type: Type = str,
- validator: Callable[[Any], bool] = lambda input: True,
+ target_type: Type[TargetType] = str,
+ validator: Callable[[TargetType], bool] = lambda input: True,
secure: bool = False,
-) -> Any:
+) -> TargetType:
"""Function that prompts the user for written input
Args:
@@ -107,18 +110,26 @@ def prompt(
render(secure, value, prompt, console)
+Selection = Union[int, Any]
+
+
def select(
- options: List[str],
+ options: List[Any],
+ preprocessor: Callable[[Any], Any] = lambda val: val,
cursor: str = '>',
cursor_style: str = 'pink1',
cursor_index: int = 0,
return_index: bool = False,
strict: bool = False,
-) -> Union[int, str, None]:
+) -> Union[Selection, None]:
"""A prompt that allows selecting one option from a list of options
Args:
- options (List[str]): A list of options to select from
+ options (List[Any]): A list of options to select from
+ preprocessor (Callable[[Any], Any]): A callable that can be used to preprocess the list of options prior to printing.
+ For example, if you passed a `Person` object with `name` attribute, preprocessor
+ could be `lambda person: person.name` to just show the content of `name` attribute
+ in the select dialog. Defaults to `lambda val: val`
cursor (str, optional): Cursor that is going to appear in front of currently selected option. Defaults to '> '.
cursor_style (str, optional): Rich friendly style for the cursor. Defaults to 'pink1'.
cursor_index (int, optional): Option can be preselected based on its list index. Defaults to 0.
@@ -147,7 +158,7 @@ def select(
console.print(
'\n'.join(
[
- format_option_select(i=i, cursor_index=index, option=option, cursor_style=cursor_style, cursor=cursor)
+ format_option_select(i=i, cursor_index=index, option=preprocessor(option), cursor_style=cursor_style, cursor=cursor)
for i, option in enumerate(options)
]
)
@@ -170,8 +181,12 @@ def select(
return None
+Selections = List[Selection]
+
+
def select_multiple(
- options: List[str],
+ options: List[Any],
+ preprocessor: Callable[[Any], Any] = lambda val: val,
tick_character: str = '✓',
tick_style: str = 'pink1',
cursor_style: str = 'pink1',
@@ -181,11 +196,15 @@ def select_multiple(
maximal_count: Optional[int] = None,
return_indices: bool = False,
strict: bool = False,
-) -> Union[List[str], List[int]]:
+) -> Selections:
"""A prompt that allows selecting multiple options from a list of options
Args:
- options (List[str]): A list of options to select from
+ options (List[Any]): A list of options to select from
+ preprocessor (Callable[[Any], Any]): A callable that can be used to preprocess the list of options prior to printing.
+ For example, if you passed a `Person` object with `name` attribute, preprocessor
+ could be `lambda person: person.name` to just show the content of `name` attribute
+ in the select dialog. Defaults to `lambda val: val`
tick_character (str, optional): Character that will be used as a tick in a checkbox. Defaults to 'x'.
tick_style (str, optional): Rich friendly style for the tick character. Defaults to 'pink1'.
cursor_style (str, optional): Rich friendly style for the option when the cursor is currently on it. Defaults to 'pink1'.
@@ -207,7 +226,7 @@ def select_multiple(
if not options:
if strict:
raise ValueError('`options` cannot be empty')
- return [] # type: ignore
+ return []
if cursor_style in ['', None]:
logging.warning('`cursor_style` should be a valid style, defaulting to `white`')
cursor_style = 'white'
@@ -226,7 +245,7 @@ def select_multiple(
'\n'.join(
[
format_option_select_multiple(
- option=option,
+ option=preprocessor(option),
ticked=i in ticked_indices,
tick_character=tick_character,
tick_style=tick_style,
@@ -264,7 +283,7 @@ def select_multiple(
elif keypress in DefaultKeys.interrupt:
if Config.raise_on_interrupt:
raise KeyboardInterrupt
- return [] # type: ignore
+ return []
if error_message:
console.print(error_message)
error_message = ''
diff --git a/poetry.lock b/poetry.lock
index 7729358..e032641 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -19,17 +19,20 @@ typed = ["typed-ast"]
[[package]]
name = "astroid"
-version = "2.11.7"
+version = "2.12.5"
description = "An abstract syntax tree for Python with inference support."
category = "dev"
optional = false
-python-versions = ">=3.6.2"
+python-versions = ">=3.7.2"
[package.dependencies]
lazy-object-proxy = ">=1.4.0"
typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""}
typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
-wrapt = ">=1.11,<2"
+wrapt = [
+ {version = ">=1.11,<2", markers = "python_version < \"3.11\""},
+ {version = ">=1.14,<2", markers = "python_version >= \"3.11\""},
+]
[[package]]
name = "attrs"
@@ -97,7 +100,7 @@ python-versions = ">=3.6"
[[package]]
name = "charset-normalizer"
-version = "2.1.0"
+version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "dev"
optional = false
@@ -172,7 +175,7 @@ test = ["hypothesis (==3.55.3)", "flake8 (==3.7.8)"]
[[package]]
name = "coverage"
-version = "6.4.3"
+version = "6.4.4"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
@@ -636,11 +639,11 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pathspec"
-version = "0.9.0"
+version = "0.10.0"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.7"
[[package]]
name = "pbr"
@@ -690,7 +693,7 @@ dev = ["tox", "pre-commit"]
[[package]]
name = "poethepoet"
-version = "0.16.0"
+version = "0.16.1"
description = "A task runner that works well with poetry."
category = "dev"
optional = false
@@ -752,22 +755,25 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pygments"
-version = "2.12.0"
+version = "2.13.0"
description = "Pygments is a syntax highlighting package written in Python."
category = "main"
optional = false
python-versions = ">=3.6"
+[package.extras]
+plugins = ["importlib-metadata"]
+
[[package]]
name = "pylint"
-version = "2.14.5"
+version = "2.15.0"
description = "python code static checker"
category = "dev"
optional = false
python-versions = ">=3.7.2"
[package.dependencies]
-astroid = ">=2.11.6,<=2.12.0-dev0"
+astroid = ">=2.12.4,<=2.14.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = ">=0.2"
isort = ">=4.2.5,<6"
@@ -778,8 +784,8 @@ tomlkit = ">=0.10.1"
typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
[package.extras]
-testutils = ["gitpython (>3)"]
spelling = ["pyenchant (>=3.2,<4.0)"]
+testutils = ["gitpython (>3)"]
[[package]]
name = "pyproject-flake8"
@@ -980,16 +986,16 @@ python-versions = "*"
[[package]]
name = "urllib3"
-version = "1.26.11"
+version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
[package.extras]
+brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
+secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
-secure = ["ipaddress", "certifi", "idna (>=2.0.0)", "cryptography (>=1.3.4)", "pyOpenSSL (>=0.14)"]
-brotli = ["brotlipy (>=0.6.0)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
[[package]]
name = "ward"
@@ -1015,19 +1021,12 @@ version = "0.1.5"
description = "A coverage plugin for Ward testing framework"
category = "dev"
optional = false
-python-versions = "^3.7.8"
-develop = false
+python-versions = ">=3.7.8,<4.0.0"
[package.dependencies]
-coverage = "^6.0"
-toml = "^0.10.0"
-ward = "^0.66.0-beta.0"
-
-[package.source]
-type = "git"
-url = "https://github.com/petereon/ward-coverage.git"
-reference = "master"
-resolved_reference = "8569dec34dc482778af2c64691283cb22bdd2330"
+coverage = ">=6.0,<7.0"
+toml = ">=0.10.0,<0.11.0"
+ward = ">=0.66.0-beta.0,<0.67.0"
[[package]]
name = "watchdog"
@@ -1084,7 +1083,7 @@ docs = ["jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "jaraco.packaging (>=9)
[metadata]
lock-version = "1.1"
python-versions = '^3.7.8'
-content-hash = "9cb95651430d1e53c672af6f4bacca8e276df6fbfd31da87a4f4a1539c0619f8"
+content-hash = "8e6b15d5d7dd70c9d2d7fe31f2ce9630ae8ec794551841e869a23d5dfcae5150"
[metadata.files]
astor = [
@@ -1096,8 +1095,8 @@ astpretty = [
{file = "astpretty-2.1.0.tar.gz", hash = "sha256:8a801fcda604ec741f010bb36d7cbadc3ec8a182ea6fb83e20ab663463e75ff6"},
]
astroid = [
- {file = "astroid-2.11.7-py3-none-any.whl", hash = "sha256:86b0a340a512c65abf4368b80252754cda17c02cdbbd3f587dddf98112233e7b"},
- {file = "astroid-2.11.7.tar.gz", hash = "sha256:bb24615c77f4837c707669d16907331374ae8a964650a66999da3f5ca68dc946"},
+ {file = "astroid-2.12.5-py3-none-any.whl", hash = "sha256:d612609242996c4365aeb0345e61edba34363eaaba55f1c0addf6a98f073bef6"},
+ {file = "astroid-2.12.5.tar.gz", hash = "sha256:396c88d0a58d7f8daadf730b2ce90838bf338c6752558db719ec6f99c18ec20e"},
]
attrs = [
{file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
@@ -1137,8 +1136,8 @@ certifi = [
{file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
]
charset-normalizer = [
- {file = "charset-normalizer-2.1.0.tar.gz", hash = "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"},
- {file = "charset_normalizer-2.1.0-py3-none-any.whl", hash = "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5"},
+ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
+ {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
]
click = [
{file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
@@ -1162,47 +1161,56 @@ commonmark = [
{file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"},
]
coverage = [
- {file = "coverage-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f50d3a822947572496ea922ee7825becd8e3ae6fbd2400cd8236b7d64b17f285"},
- {file = "coverage-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d5191d53afbe5b6059895fa7f58223d3751c42b8101fb3ce767e1a0b1a1d8f87"},
- {file = "coverage-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04010af3c06ce2bfeb3b1e4e05d136f88d88c25f76cd4faff5d1fd84d11581ea"},
- {file = "coverage-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6630d8d943644ea62132789940ca97d05fac83f73186eaf0930ffa715fbdab6b"},
- {file = "coverage-6.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05de0762c1caed4a162b3e305f36cf20a548ff4da0be6766ad5c870704be3660"},
- {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e3a41aad5919613483aad9ebd53336905cab1bd6788afd3995c2a972d89d795"},
- {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2738ba1ee544d6f294278cfb6de2dc1f9a737a780469b5366e662a218f806c3"},
- {file = "coverage-6.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a0d2df4227f645a879010461df2cea6b7e3fb5a97d7eafa210f7fb60345af9e8"},
- {file = "coverage-6.4.3-cp310-cp310-win32.whl", hash = "sha256:73a10939dc345460ca0655356a470dd3de9759919186a82383c87b6eb315faf2"},
- {file = "coverage-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:53c8edd3b83a4ddba3d8c506f1359401e7770b30f2188f15c17a338adf5a14db"},
- {file = "coverage-6.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1eda5cae434282712e40b42aaf590b773382afc3642786ac3ed39053973f61f"},
- {file = "coverage-6.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59fc88bc13e30f25167e807b8cad3c41b7218ef4473a20c86fd98a7968733083"},
- {file = "coverage-6.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75314b00825d70e1e34b07396e23f47ed1d4feedc0122748f9f6bd31a544840"},
- {file = "coverage-6.4.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52f8b9fcf3c5e427d51bbab1fb92b575a9a9235d516f175b24712bcd4b5be917"},
- {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5a559aab40c716de80c7212295d0dc96bc1b6c719371c20dd18c5187c3155518"},
- {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:306788fd019bb90e9cbb83d3f3c6becad1c048dd432af24f8320cf38ac085684"},
- {file = "coverage-6.4.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:920a734fe3d311ca01883b4a19aa386c97b82b69fbc023458899cff0a0d621b9"},
- {file = "coverage-6.4.3-cp37-cp37m-win32.whl", hash = "sha256:ab9ef0187d6c62b09dec83a84a3b94f71f9690784c84fd762fb3cf2d2b44c914"},
- {file = "coverage-6.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:39ebd8e120cb77a06ee3d5fc26f9732670d1c397d7cd3acf02f6f62693b89b80"},
- {file = "coverage-6.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc698580216050b5f4a34d2cdd2838b429c53314f1c4835fab7338200a8396f2"},
- {file = "coverage-6.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:877ee5478fd78e100362aed56db47ccc5f23f6e7bb035a8896855f4c3e49bc9b"},
- {file = "coverage-6.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:555a498999c44f5287cc95500486cd0d4f021af9162982cbe504d4cb388f73b5"},
- {file = "coverage-6.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eff095a5aac7011fdb51a2c82a8fae9ec5211577f4b764e1e59cfa27ceeb1b59"},
- {file = "coverage-6.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5de1e9335e2569974e20df0ce31493d315a830d7987e71a24a2a335a8d8459d3"},
- {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7856ea39059d75f822ff0df3a51ea6d76307c897048bdec3aad1377e4e9dca20"},
- {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:411fdd9f4203afd93b056c0868c8f9e5e16813e765de962f27e4e5798356a052"},
- {file = "coverage-6.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cdf7b83f04a313a21afb1f8730fe4dd09577fefc53bbdfececf78b2006f4268e"},
- {file = "coverage-6.4.3-cp38-cp38-win32.whl", hash = "sha256:ab2b1a89d2bc7647622e9eaf06128a5b5451dccf7c242deaa31420b055716481"},
- {file = "coverage-6.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:0e34247274bde982bbc613894d33f9e36358179db2ed231dd101c48dd298e7b0"},
- {file = "coverage-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b104b6b1827d6a22483c469e3983a204bcf9c6bf7544bf90362c4654ebc2edf3"},
- {file = "coverage-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:adf1a0d272633b21d645dd6e02e3293429c1141c7d65a58e4cbcd592d53b8e01"},
- {file = "coverage-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff9832434a9193fbd716fbe05f9276484e18d26cc4cf850853594bb322807ac3"},
- {file = "coverage-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:923f9084d7e1d31b5f74c92396b05b18921ed01ee5350402b561a79dce3ea48d"},
- {file = "coverage-6.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d64304acf79766e650f7acb81d263a3ea6e2d0d04c5172b7189180ff2c023c"},
- {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fc294de50941d3da66a09dca06e206297709332050973eca17040278cb0918ff"},
- {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a42eaaae772f14a5194f181740a67bfd48e8806394b8c67aa4399e09d0d6b5db"},
- {file = "coverage-6.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4822327b35cb032ff16af3bec27f73985448f08e874146b5b101e0e558b613dd"},
- {file = "coverage-6.4.3-cp39-cp39-win32.whl", hash = "sha256:f217850ac0e046ede611312703423767ca032a7b952b5257efac963942c055de"},
- {file = "coverage-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0a84376e4fd13cebce2c0ef8c2f037929c8307fb94af1e5dbe50272a1c651b5d"},
- {file = "coverage-6.4.3-pp36.pp37.pp38-none-any.whl", hash = "sha256:068d6f2a893af838291b8809c876973d885543411ea460f3e6886ac0ee941732"},
- {file = "coverage-6.4.3.tar.gz", hash = "sha256:ec2ae1f398e5aca655b7084392d23e80efb31f7a660d2eecf569fb9f79b3fb94"},
+ {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"},
+ {file = "coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"},
+ {file = "coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"},
+ {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"},
+ {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"},
+ {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"},
+ {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"},
+ {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"},
+ {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"},
+ {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"},
+ {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"},
+ {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"},
+ {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"},
+ {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"},
+ {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"},
+ {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"},
+ {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"},
+ {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"},
+ {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"},
+ {file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"},
+ {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"},
+ {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"},
+ {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"},
+ {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"},
+ {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"},
+ {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"},
+ {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"},
+ {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"},
+ {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"},
+ {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"},
+ {file = "coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"},
+ {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"},
+ {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"},
+ {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"},
+ {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"},
+ {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"},
+ {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"},
+ {file = "coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"},
+ {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"},
+ {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"},
+ {file = "coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"},
+ {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"},
+ {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"},
+ {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"},
+ {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"},
+ {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"},
+ {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash = "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"},
+ {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"},
+ {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"},
+ {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"},
]
cucumber-tag-expressions = [
{file = "cucumber-tag-expressions-4.1.0.tar.gz", hash = "sha256:e314d5fed6eebb2f90380271f562248fb15e18636764faf40f4dde4b28b1f960"},
@@ -1449,8 +1457,8 @@ pastel = [
{file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"},
]
pathspec = [
- {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
- {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
+ {file = "pathspec-0.10.0-py3-none-any.whl", hash = "sha256:aefa80ac32d5bf1f96139dca67cefb69a431beff4e6bf1168468f37d7ab87015"},
+ {file = "pathspec-0.10.0.tar.gz", hash = "sha256:01eecd304ba0e6eeed188ae5fa568e99ef10265af7fd9ab737d6412b4ee0ab85"},
]
pbr = [
{file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"},
@@ -1469,8 +1477,8 @@ pluggy = [
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
poethepoet = [
- {file = "poethepoet-0.16.0-py3-none-any.whl", hash = "sha256:87482ea8bba4e5db4abbd8e6360baee73b2ce0f3d5f5e99e81cdfa39d72d118f"},
- {file = "poethepoet-0.16.0.tar.gz", hash = "sha256:6455aec39f198be92dbf210a4416e1635119e967204c092b431c8b10024db1d1"},
+ {file = "poethepoet-0.16.1-py3-none-any.whl", hash = "sha256:ec1cd83e23f9141d642c0edf64f3fe1ddccfac60f48aee99521839f15b04965f"},
+ {file = "poethepoet-0.16.1.tar.gz", hash = "sha256:d3b23bc7c4dcc92b61c20c1949cd4182dfcab805de001383cef28e6307702c6f"},
]
pprintpp = [
{file = "pprintpp-0.4.0-py2.py3-none-any.whl", hash = "sha256:b6b4dcdd0c0c0d75e4d7b2f21a9e933e5b2ce62b26e1a54537f9651ae5a5c01d"},
@@ -1489,12 +1497,12 @@ pyflakes = [
{file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"},
]
pygments = [
- {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"},
- {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"},
+ {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"},
+ {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"},
]
pylint = [
- {file = "pylint-2.14.5-py3-none-any.whl", hash = "sha256:fabe30000de7d07636d2e82c9a518ad5ad7908590fe135ace169b44839c15f90"},
- {file = "pylint-2.14.5.tar.gz", hash = "sha256:487ce2192eee48211269a0e976421f334cf94de1806ca9d0a99449adcdf0285e"},
+ {file = "pylint-2.15.0-py3-none-any.whl", hash = "sha256:4b124affc198b7f7c9b5f9ab690d85db48282a025ef9333f51d2d7281b92a6c3"},
+ {file = "pylint-2.15.0.tar.gz", hash = "sha256:4f3f7e869646b0bd63b3dfb79f3c0f28fc3d2d923ea220d52620fd625aed92b0"},
]
pyproject-flake8 = [
{file = "pyproject-flake8-0.0.1a5.tar.gz", hash = "sha256:22542080ba90d4bd80ee060852db15a24aeea61c9a29ed7c16f5b59b0e47a03a"},
@@ -1624,14 +1632,17 @@ untokenize = [
{file = "untokenize-0.1.1.tar.gz", hash = "sha256:3865dbbbb8efb4bb5eaa72f1be7f3e0be00ea8b7f125c69cbd1f5fda926f37a2"},
]
urllib3 = [
- {file = "urllib3-1.26.11-py2.py3-none-any.whl", hash = "sha256:c33ccba33c819596124764c23a97d25f32b28433ba0dedeb77d873a38722c9bc"},
- {file = "urllib3-1.26.11.tar.gz", hash = "sha256:ea6e8fb210b19d950fab93b60c9009226c63a28808bc8386e05301e25883ac0a"},
+ {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
+ {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
]
ward = [
{file = "ward-0.66.1b0-py3-none-any.whl", hash = "sha256:ca02367c6cbd0fbbadd9fc1fd35f25c19913c6de4c7742dfafbe7239e7434754"},
{file = "ward-0.66.1b0.tar.gz", hash = "sha256:cd79c54ce6c5cf47e58a672a7724e22de6ea8e63ea6941c4e886321e637e50d8"},
]
-ward-coverage = []
+ward-coverage = [
+ {file = "ward_coverage-0.1.5-py3-none-any.whl", hash = "sha256:0b3b7d927590c6e7320cc1effa0461531f0dec1de87f294f0b9987b256b9699a"},
+ {file = "ward_coverage-0.1.5.tar.gz", hash = "sha256:ab74ed1195d4e51357770c87eb18fd53958904eb0e1bdd667255b67dc888d639"},
+]
watchdog = [
{file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a735a990a1095f75ca4f36ea2ef2752c99e6ee997c46b0de507ba40a09bf7330"},
{file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b17d302850c8d412784d9246cfe8d7e3af6bcd45f958abb2d08a6f8bedf695d"},
diff --git a/pyproject.toml b/pyproject.toml
index ac9e466..0a0a84c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = 'beaupy'
-version = '1.0.3'
+version = '1.1.0'
description = 'A library of elements for interactive TUIs in Python'
authors = ['Peter Vyboch <[email protected]>']
license = 'MIT'
@@ -120,7 +120,7 @@ expycted = '*'
# Unit Testing
ward = '*'
-ward-coverage = { git = "https://github.com/petereon/ward-coverage.git" }
+ward-coverage = "*"
mock = '*'
watchdog = '*'
| Provide a parameter that enables optional manipulation of the list of options passed to `select` and `select_multiple` before rendering
For example, if a user wanted to select people by name, they could provide a list of `Person` objects with `name` and `age` attributes and a `selector` like `lambda person: person.name`, so that only the name is rendered in the select dialog; a sketch of the requested behavior follows.
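A minimal sketch of the requested behavior (the `Person` class here is illustrative, and `preprocessor` is the keyword the patch ultimately introduces for this parameter):

```python
# Illustrative sketch of the requested feature; `Person` is a made-up class.
from dataclasses import dataclass

from beaupy import select


@dataclass
class Person:
    name: str
    age: int


people = [Person("Ada", 36), Person("Grace", 45)]

# Only each person's name is rendered in the dialog; the full Person
# object is still what gets returned on confirmation.
chosen = select(options=people, preprocessor=lambda person: person.name)
if chosen is not None:
    print(chosen.name, chosen.age)
```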
| 2022-08-31T14:22:03 | 0.0 | [] | [] |
|||
petereon/beaupy | petereon__beaupy-11 | 2be3828442d2103ba2f83057acc3d9028aaff0a3 | diff --git a/README.md b/README.md
index 22e0c30..1f4c19e 100644
--- a/README.md
+++ b/README.md
@@ -107,14 +107,14 @@ For styling you can leverage [numerous styling options](https://rich.readthedocs
```python
-res_index = select(options = ["red", "on", "white"],
- cursor = "x ",
- cursor_style= "red on white")
+result = select(options = ["red", "on", "white"],
+ cursor = "x",
+ cursor_style= "red on white")
```
to use a red `x` character on a white background, or
```python
-res_indices = select_multiple(options = ["ok", "ko"],
+result_list = select_multiple(options = ["ok", "ko"],
tick_character = "k",
tick_style="#af00ff)
```
@@ -122,20 +122,20 @@ to use a purple `k` as in a tick in checked option
You can also use whatever as a cursor:
```python
-res_index = select(options = ["here", "comes", "the", "sun"],
- cursor = "🌞 ")
+result = select(options = ["here", "comes", "the", "sun"],
+ cursor = "🌞")
```
to use a sun emoji, or
```python
-res_index = select(options = ["hardcore", "unicode"],
- cursor = "⇉ ")
+result = select(options = ["hardcore", "unicode"],
+ cursor = "⇉")
```
to use `⇉`.
You don't even have to use one character, this also works perfectly fine:
```python
-res_index = select(options = ["this", "other thing"],
- cursor = "selected ")
+result = select(options = ["this", "other thing"],
+ cursor = "selected ")
```
As you can see, world is your oyster.
diff --git a/beaupy/beaupy.py b/beaupy/beaupy.py
index 0301e38..1845a3f 100755
--- a/beaupy/beaupy.py
+++ b/beaupy/beaupy.py
@@ -6,21 +6,21 @@
__license__ = "MIT"
import ast
-import sys
from typing import Any, Callable, List, Optional, Type, Union
import readchar
from rich.console import Console
-console = Console()
-
-
-class ValidationError(Exception):
- pass
-
+from beaupy.internals import (
+ ConversionError,
+ ValidationError,
+ format_option_select,
+ format_option_select_multiple,
+ render,
+ reset_lines,
+)
-class ConversionError(Exception):
- pass
+console = Console()
class DefaultKeys:
@@ -55,17 +55,6 @@ class Config:
default_keys = DefaultKeys
-def __reset_lines(num_lines: int) -> None:
- for _ in range(num_lines):
- sys.stdout.write("\x1b[2K\033[F\x1b[2K")
-
-
-def __render(secure: bool, return_value: str, prompt: str) -> None:
- render_value = len(return_value) * "*" if secure else return_value
- console.print(f"{prompt}\n> {render_value}")
- __reset_lines(2)
-
-
def prompt(
prompt: str,
target_type: Type = str,
@@ -89,7 +78,7 @@ def prompt(
Union[T, str]: Returns a value formatted as provided type or string if no type is provided
"""
value: str = ""
- __render(secure, "", prompt)
+ render(secure, "", prompt, console)
while True:
char = readchar.readkey()
if char in Config.default_keys.confirm:
@@ -109,7 +98,7 @@ def prompt(
raise ConversionError(f"`{'secure input' if secure else value}` cannot be converted to type `{target_type}`") from None
elif char in Config.default_keys.delete:
value = value[:-1]
- __render(secure, value, prompt)
+ render(secure, value, prompt, console)
elif char in Config.default_keys.interrupt:
if Config.raise_on_interrupt:
raise KeyboardInterrupt()
@@ -117,16 +106,12 @@ def prompt(
return None
else:
value += char
- __render(secure, value, prompt)
-
-
-def __format_option_select(i: int, cursor_index: int, option: str, cursor_style: str, cursor: str) -> str:
- return "{}{}".format(f"[{cursor_style}]{cursor}[/{cursor_style}]" if i == cursor_index else " " * len(cursor), option)
+ render(secure, value, prompt, console)
def select(
options: List[str],
- cursor: str = "> ",
+ cursor: str = ">",
cursor_style: str = "pink1",
cursor_index: int = 0,
return_index: bool = False,
@@ -155,11 +140,9 @@ def select(
raise ValueError("`options` cannot be empty")
return None
while True:
- console.print(
- "\n".join([__format_option_select(i, cursor_index, option, cursor_style, cursor) for i, option in enumerate(options)])
- )
+ console.print("\n".join([format_option_select(i, cursor_index, option, cursor_style, cursor) for i, option in enumerate(options)]))
- __reset_lines(len(options))
+ reset_lines(len(options))
keypress = readchar.readkey()
if keypress in Config.default_keys.up:
new_index = cursor_index
@@ -181,17 +164,6 @@ def select(
return None
-def __format_option_select_multiple(
- option: str, ticked: bool, tick_character: str, tick_style: str, selected: bool, cursor_style: str
-) -> str:
- prefix = "\[ ]" # noqa: W605
- if ticked:
- prefix = f"\[[{tick_style}]{tick_character}[/{tick_style}]]" # noqa: W605
- if selected:
- option = f"[{cursor_style}]{option}[/{cursor_style}]"
- return f"{prefix} {option}"
-
-
def select_multiple(
options: List[str],
tick_character: str = "✓",
@@ -236,7 +208,7 @@ def select_multiple(
console.print(
"\n".join(
[
- __format_option_select_multiple(
+ format_option_select_multiple(
option=option,
ticked=i in ticked_indices,
tick_character=tick_character,
@@ -248,7 +220,7 @@ def select_multiple(
]
)
)
- __reset_lines(len(options))
+ reset_lines(len(options))
keypress = readchar.readkey()
if keypress in Config.default_keys.up:
new_index = cursor_index
@@ -331,7 +303,7 @@ def confirm(
console.print(
f"{question_line}\n{selected_prefix if yes else deselected_prefix}{yes_text}\n{selected_prefix if no else deselected_prefix}{no_text}"
)
- __reset_lines(3)
+ reset_lines(3)
keypress = readchar.readkey()
if keypress in Config.default_keys.down or keypress in Config.default_keys.up:
is_yes = not is_yes
diff --git a/beaupy/internals.py b/beaupy/internals.py
new file mode 100644
index 0000000..e79b360
--- /dev/null
+++ b/beaupy/internals.py
@@ -0,0 +1,44 @@
+import sys
+
+import emoji
+from rich.console import Console
+
+
+class ValidationError(Exception):
+ pass
+
+
+class ConversionError(Exception):
+ pass
+
+
+def __replace_emojis(text: str) -> str:
+ return str(emoji.replace_emoji(text, " "))
+
+
+def format_option_select(i: int, cursor_index: int, option: str, cursor_style: str, cursor: str) -> str:
+ return "{}{}".format(
+ f"[{cursor_style}]{cursor}[/{cursor_style}] " if i == cursor_index else " " * (len(__replace_emojis(cursor)) + 1), option
+ )
+
+
+def format_option_select_multiple(
+ option: str, ticked: bool, tick_character: str, tick_style: str, selected: bool, cursor_style: str
+) -> str:
+ prefix = "\[{}]".format(" " * len(__replace_emojis(tick_character))) # noqa: W605
+ if ticked:
+ prefix = f"\[[{tick_style}]{tick_character}[/{tick_style}]]" # noqa: W605
+ if selected:
+ option = f"[{cursor_style}]{option}[/{cursor_style}]"
+ return f"{prefix} {option}"
+
+
+def reset_lines(num_lines: int) -> None:
+ for _ in range(num_lines):
+ sys.stdout.write("\x1b[2K\033[F\x1b[2K")
+
+
+def render(secure: bool, return_value: str, prompt: str, console: Console) -> None:
+ render_value = len(return_value) * "*" if secure else return_value
+ console.print(f"{prompt}\n> {render_value}")
+ reset_lines(2)
diff --git a/coverage.xml b/coverage.xml
index a95b409..8e34fbb 100644
--- a/coverage.xml
+++ b/coverage.xml
@@ -1,12 +1,12 @@
<?xml version="1.0" ?>
-<coverage version="6.4.3" timestamp="1660884907996" lines-valid="186" lines-covered="170" line-rate="0.914" branches-covered="0" branches-valid="0" branch-rate="0" complexity="0">
+<coverage version="6.4.3" timestamp="1660975606550" lines-valid="191" lines-covered="175" line-rate="0.9162" branches-covered="0" branches-valid="0" branch-rate="0" complexity="0">
<!-- Generated by coverage.py: https://coverage.readthedocs.io -->
<!-- Based on https://raw.githubusercontent.com/cobertura/web/master/htdocs/xml/coverage-04.dtd -->
<sources>
<source>/home/petervyboch/Projects/pytui</source>
</sources>
<packages>
- <package name="beaupy" line-rate="0.914" branch-rate="0" complexity="0">
+ <package name="beaupy" line-rate="0.9162" branch-rate="0" complexity="0">
<classes>
<class name="__init__.py" filename="beaupy/__init__.py" complexity="0" line-rate="1" branch-rate="0">
<methods/>
@@ -14,20 +14,16 @@
<line number="1" hits="1"/>
</lines>
</class>
- <class name="beaupy.py" filename="beaupy/beaupy.py" complexity="0" line-rate="0.9135" branch-rate="0">
+ <class name="beaupy.py" filename="beaupy/beaupy.py" complexity="0" line-rate="0.903" branch-rate="0">
<methods/>
<lines>
<line number="2" hits="1"/>
<line number="6" hits="1"/>
<line number="8" hits="1"/>
<line number="9" hits="1"/>
- <line number="10" hits="1"/>
+ <line number="11" hits="1"/>
<line number="12" hits="1"/>
- <line number="13" hits="1"/>
- <line number="15" hits="1"/>
- <line number="18" hits="1"/>
- <line number="19" hits="1"/>
- <line number="22" hits="1"/>
+ <line number="14" hits="1"/>
<line number="23" hits="1"/>
<line number="26" hits="1"/>
<line number="38" hits="1"/>
@@ -40,75 +36,73 @@
<line number="54" hits="1"/>
<line number="55" hits="1"/>
<line number="58" hits="1"/>
- <line number="59" hits="1"/>
- <line number="60" hits="1"/>
- <line number="63" hits="1"/>
- <line number="64" hits="1"/>
- <line number="65" hits="1"/>
- <line number="66" hits="1"/>
- <line number="69" hits="1"/>
+ <line number="80" hits="1"/>
+ <line number="81" hits="1"/>
+ <line number="82" hits="1"/>
+ <line number="83" hits="1"/>
+ <line number="84" hits="1"/>
+ <line number="85" hits="1"/>
+ <line number="86" hits="1"/>
+ <line number="87" hits="1"/>
+ <line number="88" hits="1"/>
+ <line number="89" hits="1"/>
<line number="91" hits="1"/>
- <line number="92" hits="1"/>
<line number="93" hits="1"/>
<line number="94" hits="1"/>
- <line number="95" hits="1"/>
<line number="96" hits="1"/>
<line number="97" hits="1"/>
<line number="98" hits="1"/>
<line number="99" hits="1"/>
- <line number="100" hits="1"/>
+ <line number="100" hits="0"/>
+ <line number="101" hits="0"/>
<line number="102" hits="1"/>
- <line number="104" hits="1"/>
- <line number="105" hits="1"/>
- <line number="107" hits="1"/>
+ <line number="103" hits="0"/>
+ <line number="104" hits="0"/>
+ <line number="106" hits="0"/>
<line number="108" hits="1"/>
<line number="109" hits="1"/>
- <line number="110" hits="1"/>
- <line number="111" hits="0"/>
- <line number="112" hits="0"/>
- <line number="113" hits="1"/>
- <line number="114" hits="0"/>
- <line number="115" hits="0"/>
- <line number="117" hits="0"/>
- <line number="119" hits="1"/>
- <line number="120" hits="1"/>
- <line number="123" hits="1"/>
- <line number="124" hits="1"/>
- <line number="127" hits="1"/>
+ <line number="112" hits="1"/>
+ <line number="138" hits="1"/>
+ <line number="139" hits="1"/>
+ <line number="140" hits="1"/>
+ <line number="141" hits="1"/>
+ <line number="142" hits="1"/>
+ <line number="143" hits="1"/>
+ <line number="145" hits="1"/>
+ <line number="146" hits="1"/>
+ <line number="147" hits="1"/>
+ <line number="148" hits="1"/>
+ <line number="149" hits="1"/>
+ <line number="150" hits="1"/>
+ <line number="151" hits="1"/>
+ <line number="152" hits="1"/>
<line number="153" hits="1"/>
<line number="154" hits="1"/>
<line number="155" hits="1"/>
<line number="156" hits="1"/>
<line number="157" hits="1"/>
<line number="158" hits="1"/>
+ <line number="159" hits="1"/>
+ <line number="160" hits="1"/>
+ <line number="161" hits="1"/>
<line number="162" hits="1"/>
<line number="163" hits="1"/>
<line number="164" hits="1"/>
- <line number="165" hits="1"/>
- <line number="166" hits="1"/>
<line number="167" hits="1"/>
- <line number="168" hits="1"/>
- <line number="169" hits="1"/>
- <line number="170" hits="1"/>
- <line number="171" hits="1"/>
- <line number="172" hits="1"/>
- <line number="173" hits="1"/>
- <line number="174" hits="1"/>
- <line number="175" hits="1"/>
- <line number="176" hits="1"/>
- <line number="177" hits="1"/>
- <line number="178" hits="1"/>
- <line number="179" hits="1"/>
- <line number="180" hits="1"/>
- <line number="181" hits="1"/>
- <line number="184" hits="1"/>
- <line number="187" hits="1"/>
- <line number="188" hits="1"/>
- <line number="189" hits="1"/>
- <line number="190" hits="1"/>
- <line number="191" hits="1"/>
- <line number="192" hits="1"/>
- <line number="195" hits="1"/>
+ <line number="199" hits="1"/>
+ <line number="200" hits="1"/>
+ <line number="201" hits="1"/>
+ <line number="202" hits="1"/>
+ <line number="203" hits="1"/>
+ <line number="204" hits="1"/>
+ <line number="205" hits="1"/>
+ <line number="206" hits="1"/>
+ <line number="207" hits="1"/>
+ <line number="208" hits="1"/>
+ <line number="223" hits="1"/>
+ <line number="224" hits="1"/>
+ <line number="225" hits="1"/>
+ <line number="226" hits="1"/>
<line number="227" hits="1"/>
<line number="228" hits="1"/>
<line number="229" hits="1"/>
@@ -119,6 +113,18 @@
<line number="234" hits="1"/>
<line number="235" hits="1"/>
<line number="236" hits="1"/>
+ <line number="237" hits="0"/>
+ <line number="238" hits="0"/>
+ <line number="239" hits="1"/>
+ <line number="240" hits="1"/>
+ <line number="241" hits="1"/>
+ <line number="243" hits="1"/>
+ <line number="244" hits="1"/>
+ <line number="245" hits="1"/>
+ <line number="246" hits="1"/>
+ <line number="247" hits="1"/>
+ <line number="248" hits="0"/>
+ <line number="250" hits="1"/>
<line number="251" hits="1"/>
<line number="252" hits="1"/>
<line number="253" hits="1"/>
@@ -127,81 +133,85 @@
<line number="256" hits="1"/>
<line number="257" hits="1"/>
<line number="258" hits="1"/>
- <line number="259" hits="1"/>
+ <line number="259" hits="0"/>
<line number="260" hits="1"/>
- <line number="261" hits="1"/>
- <line number="262" hits="1"/>
<line number="263" hits="1"/>
- <line number="264" hits="1"/>
- <line number="265" hits="0"/>
- <line number="266" hits="0"/>
- <line number="267" hits="1"/>
- <line number="268" hits="1"/>
- <line number="269" hits="1"/>
- <line number="271" hits="1"/>
- <line number="272" hits="1"/>
- <line number="273" hits="1"/>
- <line number="274" hits="1"/>
- <line number="275" hits="1"/>
- <line number="276" hits="0"/>
- <line number="278" hits="1"/>
- <line number="279" hits="1"/>
- <line number="280" hits="1"/>
- <line number="281" hits="1"/>
- <line number="282" hits="1"/>
- <line number="283" hits="1"/>
- <line number="284" hits="1"/>
- <line number="285" hits="1"/>
- <line number="286" hits="1"/>
- <line number="287" hits="0"/>
- <line number="288" hits="1"/>
- <line number="291" hits="1"/>
+ <line number="293" hits="1"/>
+ <line number="294" hits="1"/>
+ <line number="295" hits="1"/>
+ <line number="296" hits="1"/>
+ <line number="297" hits="1"/>
+ <line number="298" hits="1"/>
+ <line number="299" hits="1"/>
+ <line number="300" hits="1"/>
+ <line number="301" hits="1"/>
+ <line number="302" hits="1"/>
+ <line number="303" hits="1"/>
+ <line number="306" hits="1"/>
+ <line number="307" hits="1"/>
+ <line number="308" hits="1"/>
+ <line number="309" hits="1"/>
+ <line number="310" hits="1"/>
+ <line number="311" hits="1"/>
+ <line number="312" hits="1"/>
+ <line number="313" hits="1"/>
+ <line number="314" hits="1"/>
+ <line number="315" hits="1"/>
+ <line number="316" hits="0"/>
+ <line number="317" hits="0"/>
+ <line number="318" hits="0"/>
+ <line number="319" hits="1"/>
+ <line number="320" hits="1"/>
<line number="321" hits="1"/>
<line number="322" hits="1"/>
- <line number="323" hits="1"/>
- <line number="324" hits="1"/>
- <line number="325" hits="1"/>
+ <line number="323" hits="0"/>
+ <line number="324" hits="0"/>
<line number="326" hits="1"/>
<line number="327" hits="1"/>
<line number="328" hits="1"/>
<line number="329" hits="1"/>
<line number="330" hits="1"/>
<line number="331" hits="1"/>
+ <line number="332" hits="1"/>
+ <line number="333" hits="1"/>
<line number="334" hits="1"/>
<line number="335" hits="1"/>
<line number="336" hits="1"/>
<line number="337" hits="1"/>
- <line number="338" hits="1"/>
- <line number="339" hits="1"/>
- <line number="340" hits="1"/>
+ <line number="338" hits="0"/>
+ <line number="339" hits="0"/>
<line number="341" hits="1"/>
<line number="342" hits="1"/>
- <line number="343" hits="1"/>
- <line number="344" hits="0"/>
- <line number="345" hits="0"/>
- <line number="346" hits="0"/>
- <line number="347" hits="1"/>
- <line number="348" hits="1"/>
- <line number="349" hits="1"/>
- <line number="350" hits="1"/>
- <line number="351" hits="0"/>
- <line number="352" hits="0"/>
- <line number="354" hits="1"/>
- <line number="355" hits="1"/>
- <line number="356" hits="1"/>
- <line number="357" hits="1"/>
- <line number="358" hits="1"/>
- <line number="359" hits="1"/>
- <line number="360" hits="1"/>
- <line number="361" hits="1"/>
- <line number="362" hits="1"/>
- <line number="363" hits="1"/>
- <line number="364" hits="1"/>
- <line number="365" hits="1"/>
- <line number="366" hits="0"/>
- <line number="367" hits="0"/>
- <line number="369" hits="1"/>
- <line number="370" hits="1"/>
+ </lines>
+ </class>
+ <class name="internals.py" filename="beaupy/internals.py" complexity="0" line-rate="1" branch-rate="0">
+ <methods/>
+ <lines>
+ <line number="1" hits="1"/>
+ <line number="3" hits="1"/>
+ <line number="4" hits="1"/>
+ <line number="7" hits="1"/>
+ <line number="8" hits="1"/>
+ <line number="11" hits="1"/>
+ <line number="12" hits="1"/>
+ <line number="15" hits="1"/>
+ <line number="16" hits="1"/>
+ <line number="19" hits="1"/>
+ <line number="20" hits="1"/>
+ <line number="25" hits="1"/>
+ <line number="28" hits="1"/>
+ <line number="29" hits="1"/>
+ <line number="30" hits="1"/>
+ <line number="31" hits="1"/>
+ <line number="32" hits="1"/>
+ <line number="33" hits="1"/>
+ <line number="36" hits="1"/>
+ <line number="37" hits="1"/>
+ <line number="38" hits="1"/>
+ <line number="41" hits="1"/>
+ <line number="42" hits="1"/>
+ <line number="43" hits="1"/>
+ <line number="44" hits="1"/>
</lines>
</class>
</classes>
diff --git a/example.py b/example.py
index 1adf9d4..0870403 100755
--- a/example.py
+++ b/example.py
@@ -16,7 +16,7 @@ def main():
"Sir Galahad the Pure",
]
- name = beaupy.select(names, cursor_index=3)
+ name = beaupy.select(names, cursor_index=3, cursor="🏰")
print(f"Welcome, {name}")
# Get an integer greater or equal to 0
age = beaupy.prompt("What is your age?", target_type=int, validator=lambda val: val > 0)
diff --git a/poetry.lock b/poetry.lock
index 5182825..bb04433 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -290,6 +290,17 @@ python-versions = ">=3.6"
[package.extras]
test = ["pytest", "black"]
+[[package]]
+name = "emoji"
+version = "2.0.0"
+description = "Emoji for Python"
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.extras]
+dev = ["pytest", "coverage", "coveralls"]
+
[[package]]
name = "eradicate"
version = "2.1.0"
@@ -932,6 +943,14 @@ category = "dev"
optional = false
python-versions = ">=3.6"
+[[package]]
+name = "types-emoji"
+version = "2.0.1"
+description = "Typing stubs for emoji"
+category = "dev"
+optional = false
+python-versions = "*"
+
[[package]]
name = "typing-extensions"
version = "4.3.0"
@@ -1046,7 +1065,7 @@ docs = ["jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "jaraco.packaging (>=9)
[metadata]
lock-version = "1.1"
python-versions = '^3.7.8'
-content-hash = "e1e1133e203cba96c6f9464e0e96294d945a9efcc458bae4a709fce6510c2e3d"
+content-hash = "97451243a4be6d98f7897a93db3c5a1ba3931737c7b1e516fd3875bef58624a8"
[metadata.files]
astor = [
@@ -1200,6 +1219,9 @@ docspec-python = [
docstring-parser = [
{file = "docstring_parser-0.11.tar.gz", hash = "sha256:93b3f8f481c7d24e37c5d9f30293c89e2933fa209421c8abd731dd3ef0715ecb"},
]
+emoji = [
+ {file = "emoji-2.0.0.tar.gz", hash = "sha256:297fac7ec9e86f7b602792c28eb6f04819ba67ab88a34c56afcde52243a9a105"},
+]
eradicate = [
{file = "eradicate-2.1.0-py3-none-any.whl", hash = "sha256:8bfaca181db9227dc88bdbce4d051a9627604c2243e7d85324f6d6ce0fd08bb2"},
{file = "eradicate-2.1.0.tar.gz", hash = "sha256:aac7384ab25b1bf21c4c012de9b4bf8398945a14c98c911545b2ea50ab558014"},
@@ -1568,6 +1590,10 @@ typed-ast = [
{file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"},
{file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"},
]
+types-emoji = [
+ {file = "types-emoji-2.0.1.tar.gz", hash = "sha256:aa8fc23a9d4a5d1f367b27d269242ecf638f5bf05144f48ec7ceb219419f349b"},
+ {file = "types_emoji-2.0.1-py3-none-any.whl", hash = "sha256:7af47823924415b64909b7de0248339c6715667e155bf3616d294e7556464757"},
+]
typing-extensions = [
{file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
{file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
diff --git a/pyproject.toml b/pyproject.toml
index 43f290a..2db3b61 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,6 +32,7 @@ lint = { shell = 'echo "Running isort..." ; poetry run isort ./beaupy ; echo "Ru
python = '^3.7.8'
readchar = '^3.1.0'
rich = '^12.5.1'
+emoji = "^2.0.0"
[tool.mypy]
@@ -122,6 +123,7 @@ poethepoet = '*'
# Docs
pydoc-markdown = "^4.6.3"
+types-emoji = "^2.0.1"
[build-system]
requires = ['poetry-core>=1.0.0']
| Include handling for emojis, which render two characters wide, when they are used as a cursor or tick character; a sketch of the width issue follows.
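A sketch of the underlying width problem and the kind of normalization involved (the two-space replacement string is an assumption made for illustration; the actual patch uses `emoji.replace_emoji` with its own placeholder):

```python
# Sketch: an emoji occupies two terminal columns but counts as one Python
# character, so padding computed with len() drifts by one column per emoji.
# Replacing each emoji with a two-character placeholder before measuring
# is one way to recover the display width (placeholder is an assumption).
import emoji


def display_width(text: str) -> int:
    return len(emoji.replace_emoji(text, "  "))


cursor = "🌞"
print(len(cursor))            # 1 -> naive padding is off by one column
print(display_width(cursor))  # 2 -> matches what the terminal renders
```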
| 2022-08-20T06:08:32 | 0.0 | [] | [] |
|||
atopile/faebryk | atopile__faebryk-27 | c517f463186adea67ebfac9e1e5ed695160c84a1 | diff --git a/src/faebryk/library/Constant.py b/src/faebryk/library/Constant.py
index ea6cbc27..ec9c853a 100644
--- a/src/faebryk/library/Constant.py
+++ b/src/faebryk/library/Constant.py
@@ -3,6 +3,8 @@
from typing import Self, SupportsAbs
+import numpy as np
+
from faebryk.core.parameter import Parameter, _resolved
from faebryk.libs.units import Quantity
@@ -32,6 +34,11 @@ def __eq__(self, other) -> bool:
if not isinstance(other, Constant):
return False
+ try:
+ return np.allclose(self.value, other.value)
+ except (TypeError, np.exceptions.DTypePromotionError):
+ ...
+
return self.value == other.value
def __hash__(self) -> int:
| Parameters/Picker: Different scalers for the same unit result in a picker no-match
### Current Behaviour
I think the problem is in the picker:
```0.1 * P.uF != 100 * P.nF```
e.g.
in the project you define ```0.1 * P.uF```,
while in `picker.py` the parameter is given as ```100 * P.nF```
### Expected Behaviour
```0.1 * P.uF = 100 * P.nF```
### Possible Solution
_No response_
### Version
feature/new_holders
### Relevant log output
```shell
Could not find part <some capacitor> with params:
capacitance: [<*|Constant>(0.1 uF)]
rated_voltage: [<*|Range>(<*|Constant>(10.00 V), <*|Constant>(inf V))]
temperature_coefficient: [<*|ANY>]
in options:
{ 'capacitance': <*|Constant>(1.00 µF),
'rated_voltage': <*|Constant>(25.00 V),
'temperature_coefficient': <*|Constant>(<TemperatureCoefficient.X5R: 4>)}
{ 'capacitance': <*|Constant>(100.00 nF),
'rated_voltage': <*|Constant>(16.00 V),
'temperature_coefficient': <*|Constant>(<TemperatureCoefficient.X7R: 6>)}
{ 'capacitance': <*|Constant>(100.00 nF),
'rated_voltage': <*|Constant>(1.00 kV),
'temperature_coefficient': <*|Constant>(<TemperatureCoefficient.X7R: 6>)}
{ 'capacitance': <*|Constant>(4.70 µF),
'rated_voltage': <*|Constant>(10.00 V),
'temperature_coefficient': <*|Constant>(<TemperatureCoefficient.X5R: 4>)}
{ 'capacitance': <*|Constant>(10.00 µF),
'rated_voltage': <*|Constant>(10.00 V),
'temperature_coefficient': <*|Constant>(<TemperatureCoefficient.X5R: 4>)})]
```
### Code of Conduct
- [X] I agree to follow this project's Code of Conduct
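A sketch of the failure mode and of the `np.allclose`-based comparison suggested in the hints that follow (`ureg` here is a plain pint `UnitRegistry` standing in for faebryk's `P` units namespace; whether plain `==` fails depends on the exact conversion factors involved):

```python
# Sketch: pint converts units before comparing, but 0.1 is not exactly
# representable in binary floating point, so strict equality between
# equivalent quantities expressed with different scalers can fail.
# np.allclose compares within a tolerance instead (pint supports it
# through its NumPy function integration).
import numpy as np
from pint import UnitRegistry

ureg = UnitRegistry()

a = 0.1 * ureg.microfarad
b = 100 * ureg.nanofarad

print(a == b)            # may be False due to floating-point rounding
print(np.allclose(a, b)) # True: equal within floating-point tolerance
```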
| https://github.com/hgrecco/pint/issues/1837
use ```np.allclose(self.value, other.value)``` | 2024-09-02T09:47:32 | 0.0 | [] | [] |
||
microbiomedata/nmdc-runtime | microbiomedata__nmdc-runtime-813 | 55c7f94dbf4bc5eeed27e0b29d4f37038ddbea9a | diff --git a/db/README.md b/db/README.md
new file mode 100644
index 00000000..83195e1c
--- /dev/null
+++ b/db/README.md
@@ -0,0 +1,7 @@
+# Database
+
+This directory contains files related to the MongoDB database managed by the Runtime.
+
+It has the following subdirectories:
+
+- `./migrations`: files related to migrating the MongoDB database
diff --git a/db/migrations/README.md b/db/migrations/README.md
new file mode 100644
index 00000000..5cf247a3
--- /dev/null
+++ b/db/migrations/README.md
@@ -0,0 +1,3 @@
+# Migrations
+
+This directory contains files related to migrating the MongoDB database between schemas.
diff --git a/demo/metadata_migration/notebooks/.gitignore b/db/migrations/notebooks/.gitignore
similarity index 100%
rename from demo/metadata_migration/notebooks/.gitignore
rename to db/migrations/notebooks/.gitignore
diff --git a/demo/metadata_migration/notebooks/.notebook.env.example b/db/migrations/notebooks/.notebook.env.example
similarity index 100%
rename from demo/metadata_migration/notebooks/.notebook.env.example
rename to db/migrations/notebooks/.notebook.env.example
diff --git a/demo/metadata_migration/notebooks/bookkeeper.py b/db/migrations/notebooks/bookkeeper.py
similarity index 100%
rename from demo/metadata_migration/notebooks/bookkeeper.py
rename to db/migrations/notebooks/bookkeeper.py
diff --git a/demo/metadata_migration/notebooks/helpers.py b/db/migrations/notebooks/helpers.py
similarity index 100%
rename from demo/metadata_migration/notebooks/helpers.py
rename to db/migrations/notebooks/helpers.py
diff --git a/demo/metadata_migration/notebooks/migrate_10_0_0_to_10_1_4.ipynb b/db/migrations/notebooks/migrate_10_0_0_to_10_1_4.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_10_0_0_to_10_1_4.ipynb
rename to db/migrations/notebooks/migrate_10_0_0_to_10_1_4.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_10_3_0_to_10_4_0.ipynb b/db/migrations/notebooks/migrate_10_3_0_to_10_4_0.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_10_3_0_to_10_4_0.ipynb
rename to db/migrations/notebooks/migrate_10_3_0_to_10_4_0.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_10_4_0_to_10_9_1.ipynb b/db/migrations/notebooks/migrate_10_4_0_to_10_9_1.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_10_4_0_to_10_9_1.ipynb
rename to db/migrations/notebooks/migrate_10_4_0_to_10_9_1.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_10_9_1_to_11_0_0.ipynb b/db/migrations/notebooks/migrate_10_9_1_to_11_0_0.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_10_9_1_to_11_0_0.ipynb
rename to db/migrations/notebooks/migrate_10_9_1_to_11_0_0.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_11_0_0_to_11_0_1.ipynb b/db/migrations/notebooks/migrate_11_0_0_to_11_0_1.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_11_0_0_to_11_0_1.ipynb
rename to db/migrations/notebooks/migrate_11_0_0_to_11_0_1.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_11_0_3_to_11_1_0.ipynb b/db/migrations/notebooks/migrate_11_0_3_to_11_1_0.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_11_0_3_to_11_1_0.ipynb
rename to db/migrations/notebooks/migrate_11_0_3_to_11_1_0.ipynb
diff --git a/db/migrations/notebooks/migrate_11_1_0_to_11_2_0.ipynb b/db/migrations/notebooks/migrate_11_1_0_to_11_2_0.ipynb
new file mode 100644
index 00000000..bb747b0e
--- /dev/null
+++ b/db/migrations/notebooks/migrate_11_1_0_to_11_2_0.ipynb
@@ -0,0 +1,824 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "initial_id",
+ "metadata": {
+ "collapsed": true,
+ "jupyter": {
+ "outputs_hidden": true
+ }
+ },
+ "source": "# Migrate MongoDB database from `nmdc-schema` `v11.1.0` to `v11.2.0`"
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3c31d85d",
+ "metadata": {},
+ "source": [
+ "## Introduction\n",
+ "\n",
+ "This notebook will be used to migrate the database from `nmdc-schema` `v11.1.0` ([released](https://github.com/microbiomedata/nmdc-schema/releases/tag/v11.1.0) November 12, 2024) to `v11.2.0` (whose target release date is December 6, 2024).\n",
+ "\n",
+ "### Heads up\n",
+ "\n",
+ "Unlike some previous migrators, this one does not \"pick and choose\" which collections it will dump. There are two reasons for this: (1) migrators no longer have a dedicated `self.agenda` dictionary that indicates all the collections involved in the migration; and (2) migrators can now create, rename, and drop collections; none of which are things that the old `self.agenda`-based system was designed to handle. So, instead of picking and choosing collections, this migrator **dumps them all.**"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f65ad4ab",
+ "metadata": {},
+ "source": [
+ "## Prerequisites"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "17f351e8",
+ "metadata": {},
+ "source": [
+ "### 1. Coordinate with stakeholders.\n",
+ "\n",
+ "We will be enacting full Runtime and Database downtime for this migration. Ensure stakeholders are aware of that."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "233a35c3",
+ "metadata": {},
+ "source": [
+ "### 2. Set up notebook environment.\n",
+ "\n",
+ "Here, you'll prepare an environment for running this notebook.\n",
+ "\n",
+ "1. Start a **MongoDB server** on your local machine (and ensure it does **not** already contain a database having the name specified in the notebook configuration file).\n",
+ " 1. You can start a [Docker](https://hub.docker.com/_/mongo)-based MongoDB server at `localhost:27055` by running the following command. A MongoDB server started this way will be accessible without a username or password.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "8aee55e3",
+ "metadata": {},
+ "source": [
+ "!docker run --rm --detach --name mongo-migration-transformer -p 27055:27017 mongo:6.0.4"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6cd05ccb",
+ "metadata": {},
+ "source": [
+ "2. Create and populate a **notebook configuration file** named `.notebook.env`.\n",
+ " > You can use `.notebook.env.example` as a template."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "69937b18",
+ "metadata": {},
+ "source": [
+ "## Procedure"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "fe81196a",
+ "metadata": {},
+ "source": [
+ "### Install Python packages\n",
+ "\n",
+ "In this step, you'll [install](https://saturncloud.io/blog/what-is-the-difference-between-and-in-jupyter-notebooks/) the Python packages upon which this notebook depends.\n",
+ "\n",
+ "> Note: If the output of this cell says \"Note: you may need to restart the kernel to use updated packages\", restart the kernel (not the notebook cells), then proceed to the next cell.\n",
+ "\n",
+ "##### References\n",
+ "\n",
+ "| Description | Link |\n",
+ "|---------------------------------------------------------------------------------|--------------------------------------------------------|\n",
+ "| NMDC Schema PyPI package | https://pypi.org/project/nmdc-schema |\n",
+ "| How to `pip install` from a Git branch<br>instead of PyPI | https://stackoverflow.com/a/20101940 |"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "e25a0af308c3185b",
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "scrolled": true
+ },
+ "source": [
+ "%pip install --upgrade pip\n",
+ "%pip install -r requirements.txt\n",
+ "%pip install nmdc-schema==11.2.0"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "a407c354",
+ "metadata": {},
+ "source": [
+ "### Import Python dependencies\n",
+ "\n",
+ "Import the Python objects upon which this notebook depends.\n",
+ "\n",
+ "#### References\n",
+ "\n",
+ "| Description | Link |\n",
+ "|----------------------------------------|-------------------------------------------------------------------------------------------------------|\n",
+ "| Dynamically importing a Python module | [`importlib.import_module`](https://docs.python.org/3/library/importlib.html#importlib.import_module) |\n",
+ "| Confirming something is a Python class | [`inspect.isclass`](https://docs.python.org/3/library/inspect.html#inspect.isclass) |"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "9e8a3ceb",
+ "metadata": {},
+ "source": "MIGRATOR_MODULE_NAME = \"migrator_from_11_1_0_to_11_2_0\"",
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "code",
+ "id": "dbecd561",
+ "metadata": {},
+ "source": [
+ "# Standard library packages:\n",
+ "import subprocess\n",
+ "from typing import List\n",
+ "import importlib\n",
+ "from inspect import isclass\n",
+ "\n",
+ "# Third-party packages:\n",
+ "import pymongo\n",
+ "from linkml.validator import Validator, ValidationReport\n",
+ "from linkml.validator.plugins import JsonschemaValidationPlugin\n",
+ "from nmdc_schema.nmdc_data import get_nmdc_schema_definition\n",
+ "from nmdc_schema.migrators.adapters.mongo_adapter import MongoAdapter\n",
+ "from linkml_runtime import SchemaView\n",
+ "\n",
+ "# First-party packages:\n",
+ "from helpers import Config, setup_logger, get_collection_names_from_schema, derive_schema_class_name_from_document\n",
+ "from bookkeeper import Bookkeeper, MigrationEvent\n",
+ "\n",
+ "# Dynamic imports:\n",
+ "migrator_module = importlib.import_module(f\".{MIGRATOR_MODULE_NAME}\", package=\"nmdc_schema.migrators\")\n",
+ "Migrator = getattr(migrator_module, \"Migrator\") # gets the class\n",
+ "assert isclass(Migrator), \"Failed to import Migrator class.\""
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "99b20ff4",
+ "metadata": {},
+ "source": [
+ "### Parse configuration files\n",
+ "\n",
+ "Parse the notebook and Mongo configuration files."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "1eac645a",
+ "metadata": {},
+ "source": [
+ "cfg = Config()\n",
+ "\n",
+ "# Define some aliases we can use to make the shell commands in this notebook easier to read.\n",
+ "mongodump = cfg.mongodump_path\n",
+ "mongorestore = cfg.mongorestore_path\n",
+ "mongosh = cfg.mongosh_path\n",
+ "\n",
+ "# Make the base CLI options for Mongo shell commands.\n",
+ "origin_mongo_cli_base_options = Config.make_mongo_cli_base_options(\n",
+ " mongo_host=cfg.origin_mongo_host,\n",
+ " mongo_port=cfg.origin_mongo_port,\n",
+ " mongo_username=cfg.origin_mongo_username,\n",
+ " mongo_password=cfg.origin_mongo_password,\n",
+ ")\n",
+ "transformer_mongo_cli_base_options = Config.make_mongo_cli_base_options(\n",
+ " mongo_host=cfg.transformer_mongo_host,\n",
+ " mongo_port=cfg.transformer_mongo_port,\n",
+ " mongo_username=cfg.transformer_mongo_username,\n",
+ " mongo_password=cfg.transformer_mongo_password,\n",
+ ")\n",
+ "\n",
+ "# Perform a sanity test of the application paths.\n",
+ "!{mongodump} --version\n",
+ "!{mongorestore} --version\n",
+ "!{mongosh} --version"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "68245d2b",
+ "metadata": {},
+ "source": [
+ "### Create MongoDB clients\n",
+ "\n",
+ "Create MongoDB clients you can use to access the \"origin\" and \"transformer\" MongoDB servers."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "8e95f559",
+ "metadata": {},
+ "source": [
+ "# Mongo client for \"origin\" MongoDB server.\n",
+ "origin_mongo_client = pymongo.MongoClient(host=cfg.origin_mongo_host, \n",
+ " port=int(cfg.origin_mongo_port),\n",
+ " username=cfg.origin_mongo_username,\n",
+ " password=cfg.origin_mongo_password,\n",
+ " directConnection=True)\n",
+ "\n",
+ "# Mongo client for \"transformer\" MongoDB server.\n",
+ "transformer_mongo_client = pymongo.MongoClient(host=cfg.transformer_mongo_host, \n",
+ " port=int(cfg.transformer_mongo_port),\n",
+ " username=cfg.transformer_mongo_username,\n",
+ " password=cfg.transformer_mongo_password,\n",
+ " directConnection=True)\n",
+ "\n",
+ "# Perform sanity tests of those MongoDB clients' abilities to access their respective MongoDB servers.\n",
+ "with pymongo.timeout(3):\n",
+ " # Display the MongoDB server version (running on the \"origin\" Mongo server).\n",
+ " print(\"Origin Mongo server version: \" + origin_mongo_client.server_info()[\"version\"])\n",
+ "\n",
+ " # Sanity test: Ensure the origin database exists.\n",
+ " assert cfg.origin_mongo_database_name in origin_mongo_client.list_database_names(), \"Origin database does not exist.\"\n",
+ "\n",
+ " # Display the MongoDB server version (running on the \"transformer\" Mongo server).\n",
+ " print(\"Transformer Mongo server version: \" + transformer_mongo_client.server_info()[\"version\"])\n",
+ "\n",
+ " # Sanity test: Ensure the transformation database does not exist.\n",
+ " assert cfg.transformer_mongo_database_name not in transformer_mongo_client.list_database_names(), \"Transformation database already exists.\""
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1e195db1",
+ "metadata": {},
+ "source": [
+ "Delete the transformer database from the transformer MongoDB server if that database already exists there (e.g. if it was left over from an experiment).\n",
+ "\n",
+ "#### References\n",
+ "\n",
+ "| Description | Link |\n",
+ "|------------------------------|---------------------------------------------------------------|\n",
+ "| Python's `subprocess` module | https://docs.python.org/3/library/subprocess.html |\n",
+ "| `mongosh` CLI options | https://www.mongodb.com/docs/mongodb-shell/reference/options/ |"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "8939a2ed",
+ "metadata": {},
+ "source": [
+ "# Note: I run this command via Python's `subprocess` module instead of via an IPython magic `!` command\n",
+ "# because I expect to eventually use regular Python scripts—not Python notebooks—for migrations.\n",
+ "shell_command = f\"\"\"\n",
+ " {cfg.mongosh_path} {transformer_mongo_cli_base_options} \\\n",
+ " --eval 'use {cfg.transformer_mongo_database_name}' \\\n",
+ " --eval 'db.dropDatabase()' \\\n",
+ " --quiet\n",
+ "\"\"\"\n",
+ "completed_process = subprocess.run(shell_command, shell=True)\n",
+ "print(f\"\\nReturn code: {completed_process.returncode}\")"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "bc387abc62686091",
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ }
+ },
+ "source": [
+ "### Create validator\n",
+ "\n",
+ "In this step, you'll create a validator that can be used to check whether data conforms to the NMDC Schema. You'll use it later, to do that.\n",
+ "\n",
+ "#### References\n",
+ "\n",
+ "| Description | Link |\n",
+ "|------------------------------|------------------------------------------------------------------------------|\n",
+ "| LinkML's `Validator` class | https://linkml.io/linkml/code/validator.html#linkml.validator.Validator |\n",
+ "| Validating data using LinkML | https://linkml.io/linkml/data/validating-data.html#validation-in-python-code |"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "5c982eb0c04e606d",
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ }
+ },
+ "source": [
+ "schema_definition = get_nmdc_schema_definition()\n",
+ "validator = Validator(\n",
+ " schema=schema_definition,\n",
+ " validation_plugins=[JsonschemaValidationPlugin(closed=True)],\n",
+ ")\n",
+ "\n",
+ "# Perform a sanity test of the validator.\n",
+ "assert callable(validator.validate), \"Failed to instantiate a validator\""
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "e7e8befb362a1670",
+ "metadata": {},
+ "source": [
+ "### Create SchemaView\n",
+ "\n",
+ "In this step, you'll instantiate a `SchemaView` that is bound to the destination schema. \n",
+ "\n",
+ "#### References\n",
+ "\n",
+ "| Description | Link |\n",
+ "|-----------------------------|-----------------------------------------------------|\n",
+ "| LinkML's `SchemaView` class | https://linkml.io/linkml/developers/schemaview.html |"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "625a6e7df5016677",
+ "metadata": {},
+ "source": [
+ "schema_view = SchemaView(get_nmdc_schema_definition())\n",
+ "\n",
+ "# As a sanity test, confirm we can use the `SchemaView` instance to access a schema class.\n",
+ "schema_view.get_class(class_name=\"Database\")[\"name\"]"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3975ac24",
+ "metadata": {},
+ "source": [
+ "### Revoke access from the \"origin\" MongoDB server\n",
+ "\n",
+ "We revoke both \"write\" and \"read\" access to the server.\n",
+ "\n",
+ "#### Rationale\n",
+ "\n",
+ "We revoke \"write\" access so people don't make changes to the original data while the migration is happening, given that the migration ends with an overwriting of the original data (which would wipe out any changes made in the meantime).\n",
+ "\n",
+ "We also revoke \"read\" access. The revocation of \"read\" access is technically optional, but (a) the JavaScript mongosh script will be easier for me to maintain if it revokes everything and (b) this prevents people from reading data during the restore step, during which the database may not be self-consistent.\n",
+ "\n",
+ "#### References\n",
+ "\n",
+ "| Description | Link |\n",
+ "|--------------------------------|-----------------------------------------------------------|\n",
+ "| Running a script via `mongosh` | https://www.mongodb.com/docs/mongodb-shell/write-scripts/ |"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "f761caad",
+ "metadata": {},
+ "source": [
+ "shell_command = f\"\"\"\n",
+ " {cfg.mongosh_path} {origin_mongo_cli_base_options} \\\n",
+ " --file='mongosh-scripts/revoke-privileges.mongo.js' \\\n",
+ " --quiet\n",
+ "\"\"\"\n",
+ "completed_process = subprocess.run(shell_command, shell=True)\n",
+ "print(f\"\\nReturn code: {completed_process.returncode}\")"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7f9c87de6fb8530c",
+ "metadata": {},
+ "source": [
+ "### Delete obsolete dumps from previous migrations\n",
+ "\n",
+ "Delete any existing dumps before we create new ones in this notebook. This is so the dumps you generate with this notebook do not get merged with any unrelated ones."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "6a949d0fcb4b6fa0",
+ "metadata": {},
+ "source": [
+ "!rm -rf {cfg.origin_dump_folder_path}\n",
+ "!rm -rf {cfg.transformer_dump_folder_path}"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b7799910b6b0715d",
+ "metadata": {},
+ "source": [
+ "### Dump collections from the \"origin\" MongoDB server\n",
+ "\n",
+ "Use `mongodump` to dump all the collections **from** the \"origin\" MongoDB server **into** a local directory.\n",
+ "\n",
+ "- TODO: Consider only dumping collections represented by the initial schema."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "da530d6754c4f6fe",
+ "metadata": {
+ "scrolled": true
+ },
+ "source": [
+ "# Dump all collections from the \"origin\" database.\n",
+ "shell_command = f\"\"\"\n",
+ " {mongodump} {origin_mongo_cli_base_options} \\\n",
+ " --db='{cfg.origin_mongo_database_name}' \\\n",
+ " --out='{cfg.origin_dump_folder_path}' \\\n",
+ " --gzip\n",
+ "\"\"\"\n",
+ "completed_process = subprocess.run(shell_command, shell=True)\n",
+ "print(f\"\\nReturn code: {completed_process.returncode}\")"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "932ebde8abdd70ec",
+ "metadata": {},
+ "source": [
+ "### Load the dumped collections into the \"transformer\" MongoDB server\n",
+ "\n",
+ "Use `mongorestore` to load the dumped collections **from** the local directory **into** the \"transformer\" MongoDB server.\n",
+ "\n",
+ "References:\n",
+ "- https://www.mongodb.com/docs/database-tools/mongorestore/#std-option-mongorestore\n",
+ "- https://www.mongodb.com/docs/database-tools/mongorestore/mongorestore-examples/#copy-clone-a-database"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "79bd888e82d52a93",
+ "metadata": {
+ "scrolled": true
+ },
+ "source": [
+ "# Restore the dumped collections to the \"transformer\" MongoDB server.\n",
+ "shell_command = f\"\"\"\n",
+ " {mongorestore} {transformer_mongo_cli_base_options} \\\n",
+ " --nsFrom='{cfg.origin_mongo_database_name}.*' \\\n",
+ " --nsTo='{cfg.transformer_mongo_database_name}.*' \\\n",
+ " --dir='{cfg.origin_dump_folder_path}' \\\n",
+ " --stopOnError \\\n",
+ " --drop \\\n",
+ " --gzip\n",
+ "\"\"\"\n",
+ "completed_process = subprocess.run(shell_command, shell=True)\n",
+ "print(f\"\\nReturn code: {completed_process.returncode}\")"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "c3e3c9c4",
+ "metadata": {},
+ "source": [
+ "### Transform the collections within the \"transformer\" MongoDB server\n",
+ "\n",
+ "Use the migrator to transform the collections in the \"transformer\" database.\n",
+ "\n",
+ "> Reminder: The database transformation functions are defined in the `nmdc-schema` Python package installed earlier.\n",
+ "\n",
+ "> Reminder: The \"origin\" database is **not** affected by this step."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "9c89c9dd3afe64e2",
+ "metadata": {
+ "scrolled": true
+ },
+ "source": [
+ "# Instantiate a MongoAdapter bound to the \"transformer\" database.\n",
+ "adapter = MongoAdapter(\n",
+ " database=transformer_mongo_client[cfg.transformer_mongo_database_name],\n",
+ " on_collection_created=lambda name: print(f'Created collection \"{name}\"'),\n",
+ " on_collection_renamed=lambda old_name, name: print(f'Renamed collection \"{old_name}\" to \"{name}\"'),\n",
+ " on_collection_deleted=lambda name: print(f'Deleted collection \"{name}\"'),\n",
+ ")\n",
+ "\n",
+ "# Instantiate a Migrator bound to that adapter.\n",
+ "logger = setup_logger()\n",
+ "migrator = Migrator(adapter=adapter, logger=logger)\n",
+ "\n",
+ "# Execute the Migrator's `upgrade` method to perform the migration.\n",
+ "migrator.upgrade()"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4c090068",
+ "metadata": {},
+ "source": [
+ "### Validate the transformed documents\n",
+ "\n",
+ "Now that we have transformed the database, validate each document in each collection in the \"transformer\" MongoDB server."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "e1c50b9911e02e70",
+ "metadata": {},
+ "source": [
+ "# Get the names of all collections.\n",
+ "collection_names: List[str] = get_collection_names_from_schema(schema_view)\n",
+ "\n",
+ "# Ensure that, if the (large) \"functional_annotation_agg\" collection is present in `collection_names`,\n",
+ "# it goes at the end of the list we process. That way, we can find out about validation errors in\n",
+ "# other collections without having to wait for that (large) collection to be validated.\n",
+ "ordered_collection_names = sorted(collection_names.copy())\n",
+ "large_collection_name = \"functional_annotation_agg\"\n",
+ "if large_collection_name in ordered_collection_names:\n",
+ " ordered_collection_names = list(filter(lambda n: n != large_collection_name, ordered_collection_names))\n",
+ " ordered_collection_names.append(large_collection_name) # puts it last\n",
+ "\n",
+ "# Process each collection.\n",
+ "for collection_name in ordered_collection_names:\n",
+ " collection = transformer_mongo_client[cfg.transformer_mongo_database_name][collection_name]\n",
+ " num_documents_in_collection = collection.count_documents({})\n",
+ " print(f\"Validating collection {collection_name} ({num_documents_in_collection} documents) [\", end=\"\") # no newline\n",
+ "\n",
+ " # Calculate how often we'll display a tick mark (i.e. a sign of life).\n",
+ " num_documents_per_tick = num_documents_in_collection * 0.10 # one tenth of the total\n",
+ " num_documents_since_last_tick = 0\n",
+ " \n",
+ " for document in collection.find():\n",
+ " # Validate the transformed document.\n",
+ " #\n",
+ " # Reference: https://github.com/microbiomedata/nmdc-schema/blob/main/src/docs/schema-validation.md\n",
+ " #\n",
+ " # Note: Dictionaries originating as Mongo documents include a Mongo-generated key named `_id`. However,\n",
+ " # the NMDC Schema does not describe that key and, indeed, data validators consider dictionaries\n",
+ " # containing that key to be invalid with respect to the NMDC Schema. So, here, we validate a\n",
+ " # copy (i.e. a shallow copy) of the document that lacks that specific key.\n",
+ " #\n",
+ " # Note: The reason we don't use a progress bar library such as `rich[jupyter]`, `tqdm`, or `ipywidgets`\n",
+ " # is that _PyCharm's_ Jupyter Notebook integration doesn't fully work with any of them. :(\n",
+ " #\n",
+ " schema_class_name = derive_schema_class_name_from_document(schema_view=schema_view, document=document)\n",
+ " document_without_underscore_id_key = {key: value for key, value in document.items() if key != \"_id\"}\n",
+ " validation_report: ValidationReport = validator.validate(document_without_underscore_id_key, schema_class_name)\n",
+ " if len(validation_report.results) > 0:\n",
+ " result_messages = [result.message for result in validation_report.results]\n",
+ " raise TypeError(f\"Document is invalid.\\n{result_messages=}\\n{document_without_underscore_id_key=}\")\n",
+ "\n",
+ " # Display a tick mark if we have validated enough documents since we last displayed one.\n",
+ " num_documents_since_last_tick += 1\n",
+ " if num_documents_since_last_tick >= num_documents_per_tick:\n",
+ " num_documents_since_last_tick = 0\n",
+ " print(\".\", end=\"\") # no newline\n",
+ " \n",
+ " print(\"]\")"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3edf77c7",
+ "metadata": {},
+ "source": [
+ "### Dump the collections from the \"transformer\" MongoDB server\n",
+ "\n",
+ "Now that the collections have been transformed and validated, dump them **from** the \"transformer\" MongoDB server **into** a local directory."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "db6e432d",
+ "metadata": {
+ "scrolled": true
+ },
+ "source": [
+ "# Dump the database from the \"transformer\" MongoDB server.\n",
+ "shell_command = f\"\"\"\n",
+ " {mongodump} {transformer_mongo_cli_base_options} \\\n",
+ " --db='{cfg.transformer_mongo_database_name}' \\\n",
+ " --out='{cfg.transformer_dump_folder_path}' \\\n",
+ " --gzip\n",
+ "\"\"\"\n",
+ "completed_process = subprocess.run(shell_command, shell=True)\n",
+ "print(f\"\\nReturn code: {completed_process.returncode}\") "
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "997fcb281d9d3222",
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ }
+ },
+ "source": [
+ "### Create a bookkeeper\n",
+ "\n",
+ "Create a `Bookkeeper` that can be used to document migration events in the \"origin\" server."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "dbbe706d",
+ "metadata": {},
+ "source": [
+ "bookkeeper = Bookkeeper(mongo_client=origin_mongo_client)"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1e0c8891",
+ "metadata": {},
+ "source": [
+ "### Indicate — on the \"origin\" server — that the migration is underway\n",
+ "\n",
+ "Add an entry to the migration log collection to indicate that this migration has started."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "ca49f61a",
+ "metadata": {},
+ "source": [
+ "bookkeeper.record_migration_event(migrator=migrator, event=MigrationEvent.MIGRATION_STARTED)"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9c253e6f",
+ "metadata": {},
+ "source": [
+ "### Drop the original collections from the \"origin\" MongoDB server\n",
+ "\n",
+ "This is necessary for situations where collections were renamed or deleted. (The `--drop` option of `mongorestore` would only drop collections that exist in the dump being restored, which would not include renamed or deleted collections.)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "0b26e434",
+ "metadata": {},
+ "source": [
+ "shell_command = f\"\"\"\n",
+ " {cfg.mongosh_path} {origin_mongo_cli_base_options} \\\n",
+ " --eval 'use {cfg.origin_mongo_database_name}' \\\n",
+ " --eval 'db.dropDatabase()'\n",
+ "\"\"\"\n",
+ "completed_process = subprocess.run(shell_command, shell=True)\n",
+ "print(f\"\\nReturn code: {completed_process.returncode}\")"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d84bdc11",
+ "metadata": {},
+ "source": [
+ "### Load the collections into the \"origin\" MongoDB server\n",
+ "\n",
+ "Load the transformed collections into the \"origin\" MongoDB server."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "1dfbcf0a",
+ "metadata": {
+ "scrolled": true
+ },
+ "source": [
+ "# Load the transformed collections into the origin server, replacing any same-named ones that are there.\n",
+ "shell_command = f\"\"\"\n",
+ " {mongorestore} {origin_mongo_cli_base_options} \\\n",
+ " --nsFrom='{cfg.transformer_mongo_database_name}.*' \\\n",
+ " --nsTo='{cfg.origin_mongo_database_name}.*' \\\n",
+ " --dir='{cfg.transformer_dump_folder_path}' \\\n",
+ " --stopOnError \\\n",
+ " --verbose \\\n",
+ " --drop \\\n",
+ " --gzip\n",
+ "\"\"\"\n",
+ "completed_process = subprocess.run(shell_command, shell=True)\n",
+ "print(f\"\\nReturn code: {completed_process.returncode}\") "
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "ca5ee89a79148499",
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ }
+ },
+ "source": [
+ "### Indicate that the migration is complete\n",
+ "\n",
+ "Add an entry to the migration log collection to indicate that this migration is complete."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "d1eaa6c92789c4f3",
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ }
+ },
+ "source": [
+ "bookkeeper.record_migration_event(migrator=migrator, event=MigrationEvent.MIGRATION_COMPLETED)"
+ ],
+ "outputs": [],
+ "execution_count": null
+ },
+ {
+ "cell_type": "markdown",
+ "id": "04c856a8",
+ "metadata": {},
+ "source": [
+ "### Restore access to the \"origin\" MongoDB server\n",
+ "\n",
+ "This effectively un-does the access revocation that we did earlier."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "id": "9aab3c7e",
+ "metadata": {},
+ "source": [
+ "shell_command = f\"\"\"\n",
+ " {cfg.mongosh_path} {origin_mongo_cli_base_options} \\\n",
+ " --file='mongosh-scripts/restore-privileges.mongo.js' \\\n",
+ " --quiet\n",
+ "\"\"\"\n",
+ "completed_process = subprocess.run(shell_command, shell=True)\n",
+ "print(f\"\\nReturn code: {completed_process.returncode}\")"
+ ],
+ "outputs": [],
+ "execution_count": null
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": ".venv",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/demo/metadata_migration/notebooks/migrate_7_7_2_to_7_8_0.ipynb b/db/migrations/notebooks/migrate_7_7_2_to_7_8_0.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_7_7_2_to_7_8_0.ipynb
rename to db/migrations/notebooks/migrate_7_7_2_to_7_8_0.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_7_8_0_to_8_0_0.ipynb b/db/migrations/notebooks/migrate_7_8_0_to_8_0_0.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_7_8_0_to_8_0_0.ipynb
rename to db/migrations/notebooks/migrate_7_8_0_to_8_0_0.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_8_0_0_to_8_1_2.ipynb b/db/migrations/notebooks/migrate_8_0_0_to_8_1_2.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_8_0_0_to_8_1_2.ipynb
rename to db/migrations/notebooks/migrate_8_0_0_to_8_1_2.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_8_1_2_to_9_0_4.ipynb b/db/migrations/notebooks/migrate_8_1_2_to_9_0_4.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_8_1_2_to_9_0_4.ipynb
rename to db/migrations/notebooks/migrate_8_1_2_to_9_0_4.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_9_0_4_to_9_1_0.ipynb b/db/migrations/notebooks/migrate_9_0_4_to_9_1_0.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_9_0_4_to_9_1_0.ipynb
rename to db/migrations/notebooks/migrate_9_0_4_to_9_1_0.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_9_1_0_to_9_2_0.ipynb b/db/migrations/notebooks/migrate_9_1_0_to_9_2_0.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_9_1_0_to_9_2_0.ipynb
rename to db/migrations/notebooks/migrate_9_1_0_to_9_2_0.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_9_3_2_to_10_0_0.ipynb b/db/migrations/notebooks/migrate_9_3_2_to_10_0_0.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_9_3_2_to_10_0_0.ipynb
rename to db/migrations/notebooks/migrate_9_3_2_to_10_0_0.ipynb
diff --git a/demo/metadata_migration/notebooks/migrate_A_B_C_to_X_Y_Z.ipynb b/db/migrations/notebooks/migrate_A_B_C_to_X_Y_Z.ipynb
similarity index 100%
rename from demo/metadata_migration/notebooks/migrate_A_B_C_to_X_Y_Z.ipynb
rename to db/migrations/notebooks/migrate_A_B_C_to_X_Y_Z.ipynb
diff --git a/demo/metadata_migration/notebooks/mongosh-scripts/restore-privileges.mongo.js b/db/migrations/notebooks/mongosh-scripts/restore-privileges.mongo.js
similarity index 100%
rename from demo/metadata_migration/notebooks/mongosh-scripts/restore-privileges.mongo.js
rename to db/migrations/notebooks/mongosh-scripts/restore-privileges.mongo.js
diff --git a/demo/metadata_migration/notebooks/mongosh-scripts/revoke-privileges.mongo.js b/db/migrations/notebooks/mongosh-scripts/revoke-privileges.mongo.js
similarity index 100%
rename from demo/metadata_migration/notebooks/mongosh-scripts/revoke-privileges.mongo.js
rename to db/migrations/notebooks/mongosh-scripts/revoke-privileges.mongo.js
diff --git a/demo/metadata_migration/notebooks/requirements.txt b/db/migrations/notebooks/requirements.txt
similarity index 100%
rename from demo/metadata_migration/notebooks/requirements.txt
rename to db/migrations/notebooks/requirements.txt
diff --git a/demo/metadata_migration/notebooks/stakeholders.md b/db/migrations/notebooks/stakeholders.md
similarity index 100%
rename from demo/metadata_migration/notebooks/stakeholders.md
rename to db/migrations/notebooks/stakeholders.md
| Implement migration notebook for December NMDC Release
Implement a migration notebook for the [December NMDC Release](https://github.com/microbiomedata/issues/issues/947).
| 2024-12-06T07:45:12 | 0.0 | [] | [] |
|||
DFKI-Interactive-Machine-Learning/multisensor-pipeline | DFKI-Interactive-Machine-Learning__multisensor-pipeline-83 | ea08dc0cd7d79fc8bb679d6c5d0268a8bbedb5ff | diff --git a/.coveragerc b/.coveragerc
deleted file mode 100644
index 5b120256..00000000
--- a/.coveragerc
+++ /dev/null
@@ -1,5 +0,0 @@
-[run]
-omit = *experimental*
-
-[report]
-omit = *experimental*
\ No newline at end of file
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
deleted file mode 100644
index ee4b9028..00000000
--- a/.github/workflows/ci.yml
+++ /dev/null
@@ -1,308 +0,0 @@
-on:
- push:
- pull_request:
-
-jobs:
- lint:
- name: lint
- runs-on: ubuntu-latest
- steps:
- - name: Set up Python
- uses: actions/setup-python@v2
- with:
- python-version: 3.9
-
- - name: Checkout Working Copy
- uses: actions/checkout@master
- with:
- fetch-depth: 1
-
- - name: Setup Virtual Environment
- run: |
- python${{ matrix.py }} -m venv venv
- venv/bin/python -m pip install --upgrade -r requirements.d/venv.txt
-
- - name: Lint
- run: |
- venv/bin/python -m tox -e flake8
-
- test:
- name: Test Python ${{ matrix.py }} on ${{ matrix.os }}
- needs: lint
- runs-on: ${{ matrix.os }}
- strategy:
- fail-fast: false
- matrix:
- os:
- - ubuntu-latest
- - windows-latest
- - macos-latest
- py:
- - 3.6
- - 3.7
- - 3.8
- - 3.9
- timeout-minutes: 30
-
- steps:
- - name: Set up Python ${{ matrix.py }}
- uses: actions/setup-python@v2
- with:
- python-version: ${{ matrix.py }}
-
- - name: Get pip cache directory
- id: pip-cache
- run: |
- echo "::set-output name=dir::$(pip cache dir)"
-
- - name: Cache pip packages
- uses: actions/cache@v2
- with:
- path: ${{ steps.pip-cache.outputs.dir }}
- key: ${{ runner.os }}-pip-${{ hashFiles('requirements.d/*') }}
- restore-keys: |
- ${{ runner.os }}-pip-
-
- - name: Pick environment to run (Ubuntu)
- if: startsWith( matrix.os, 'ubuntu' )
- run: |
- python${{ matrix.py }} -c """
- import platform; import os; import sys; import codecs
- cpy = platform.python_implementation() == 'CPython'
- base =('{}{}{}' if cpy else '{}{}').format('py' if cpy else 'pypy', *sys.version_info[0:2])
- env = f'TOXENV={base}-ubuntu'
- print('Picked {} for {}'.format(env, sys.version))
- with codecs.open(os.environ['GITHUB_ENV'], 'a', 'utf-8') as file_handler:
- file_handler.write(f'{env}\n')
- """
- shell: bash
-
- - name: Pick environment to run (macOS)
- if: startsWith( matrix.os, 'macos' )
- run: |
- python${{ matrix.py }} -c """
- import platform; import os; import sys; import codecs
- cpy = platform.python_implementation() == 'CPython'
- base =('{}{}{}' if cpy else '{}{}').format('py' if cpy else 'pypy', *sys.version_info[0:2])
- env = f'TOXENV={base}-macos'
- print('Picked {} for {}'.format(env, sys.version))
- with codecs.open(os.environ['GITHUB_ENV'], 'a', 'utf-8') as file_handler:
- file_handler.write(f'{env}\n')
- """
- shell: bash
-
- - name: Pick environment to run (Windows)
- if: startsWith( matrix.os, 'windows' ) &&
- startsWith( matrix.py, '3.6')
- run: |
- python -c """
- import platform; import os; import sys; import codecs
- cpy = platform.python_implementation() == 'CPython'
- base =('{}{}{}' if cpy else '{}{}').format('py' if cpy else 'pypy', *sys.version_info[0:2])
- env = f'TOXENV={base}-windows'
- print('Picked {} for {}'.format(env, sys.version))
- with codecs.open(os.environ['GITHUB_ENV'], 'a', 'utf-8') as file_handler:
- file_handler.write(f'{env}\n')
- """
- shell: bash
-
- - name: Install OS dependencies (macOS)
- if: startsWith( matrix.os, 'macos-' )
- run: |
- brew install portaudio
-
- - name: Install OS dependencies (Ubuntu)
- if: startsWith( matrix.os, 'ubuntu-' )
- run: |
- sudo apt update
- sudo apt install --yes --no-install-recommends \
- ffmpeg \
- gcc \
- libgl1-mesa-glx \
- portaudio19-dev \
- xvfb
-
- - name: Checkout Working Copy
- uses: actions/checkout@master
- with:
- fetch-depth: 1
-
- - name: Setup Virtual Environment (macOS | Ubuntu)
- if: startsWith( matrix.os, 'macos-' ) ||
- startsWith( matrix.os, 'ubuntu-' )
- run: |
- python --version
- python${{ matrix.py }} -m venv venv
- venv/bin/python -m pip install --upgrade -r requirements.d/venv.txt
-
- - name: Setup Virtual Environment (Windows)
- if: startsWith( matrix.os, 'windows-' ) &&
- startsWith( matrix.py, '3.6')
- # There seems to be no clean way to create a venv on windows.
- # So, do not actually install a virtual environment.
- # Instead, install the venv requirements to the system python.
- run: |
- python --version
- python -m pip install --upgrade -r requirements.d\venv.txt
-
- - name: setup-conda
- uses: s-weigand/setup-conda@v1
- if: startsWith( matrix.os, 'windows-' )
- with:
- activate-conda: false
- update-conda: true
- python-version: ${{ matrix.py }}
- conda-channels: ''
-
- - run: conda --version
- if: startsWith( matrix.os, 'windows-' ) &&
- startsWith( matrix.py, '3.6')
-
- - name: Install OS dependencies (Windows)
- if: startsWith( matrix.os, 'windows-' ) &&
- startsWith( matrix.py, '3.6')
- run: |
- conda install -c conda-forge portaudio
- shell: cmd
-
- # Take multiple attempts at running the tests
- - name: Test Attempt 1 (macOS | Ubuntu)
- continue-on-error: true
- id: test-attempt-1-macos_ubuntu
- if: startsWith( matrix.os, 'macos-' ) ||
- startsWith( matrix.os, 'ubuntu-' )
- run: |
- venv/bin/python -m tox -e $TOXENV
- timeout-minutes: 9 # About ten times the time of a successful step
- shell: bash
- - name: Test Attempt 2 (macOS | Ubuntu)
- continue-on-error: true
- id: test-attempt-2-macos_ubuntu
- if: (
- startsWith( matrix.os, 'macos-' ) ||
- startsWith( matrix.os, 'ubuntu-' )
- ) &&
- steps.test-attempt-1-macos_ubuntu.outcome=='failure'
- run: |
- venv/bin/python -m tox -e $TOXENV
- timeout-minutes: 9 # About ten times the time of a successful step
- shell: bash
- - name: Test Attempt 3 (macOS | Ubuntu)
- continue-on-error: true
- id: test-attempt-3-macos_ubuntu
- if: (
- startsWith( matrix.os, 'macos-' ) ||
- startsWith( matrix.os, 'ubuntu-' )
- ) &&
- steps.test-attempt-2-macos_ubuntu.outcome=='failure'
- run: |
- venv/bin/python -m tox -e $TOXENV
- timeout-minutes: 9 # About ten times the time of a successful step
- shell: bash
- - name: Test Attempt 4 (macOS | Ubuntu)
- continue-on-error: true
- id: test-attempt-4-macos_ubuntu
- if: (
- startsWith( matrix.os, 'macos-' ) ||
- startsWith( matrix.os, 'ubuntu-' )
- ) &&
- steps.test-attempt-3-macos_ubuntu.outcome=='failure'
- run: |
- venv/bin/python -m tox -e $TOXENV
- timeout-minutes: 9 # About ten times the time of a successful step
- shell: bash
- - name: Test Attempt 5 (macOS | Ubuntu)
- continue-on-error: true
- id: test-attempt-5-macos_ubuntu
- if: (
- startsWith( matrix.os, 'macos-' ) ||
- startsWith( matrix.os, 'ubuntu-' )
- ) &&
- steps.test-attempt-4-macos_ubuntu.outcome=='failure'
- run: |
- venv/bin/python -m tox -e $TOXENV
- timeout-minutes: 9 # About ten times the time of a successful step
- shell: bash
- - name: Check for Success (macOS | Ubuntu)
- if: startsWith( matrix.os, 'macos-' ) ||
- startsWith( matrix.os, 'ubuntu-' )
- run: |
- if ${{
- steps.test-attempt-1-macos_ubuntu.outcome=='success' ||
- steps.test-attempt-2-macos_ubuntu.outcome=='success' ||
- steps.test-attempt-3-macos_ubuntu.outcome=='success' ||
- steps.test-attempt-4-macos_ubuntu.outcome=='success' ||
- steps.test-attempt-5-macos_ubuntu.outcome=='success'
- }}; then
- echo "At least one attempt was successful!"
- else
- exit 1
- fi
- shell: bash
-
- # Take multiple attempts at running the tests
- - name: Test Attempt 1 (Windows)
- if: startsWith( matrix.os, 'windows-' ) &&
- startsWith( matrix.py, '3.6')
- continue-on-error: true
- id: test-attempt-1-windows
- run: |
- python -m tox -e $TOXENV
- timeout-minutes: 7 # About ten times the time of a successful step
- shell: bash
- - name: Test Attempt 2 (Windows)
- continue-on-error: true
- id: test-attempt-2-windows
- if: startsWith( matrix.os, 'windows-' ) &&
- steps.test-attempt-1-windows.outcome=='failure' &&
- startsWith( matrix.py, '3.6')
- run: |
- python -m tox -e $TOXENV
- timeout-minutes: 7 # About ten times the time of a successful step
- shell: bash
- - name: Test Attempt 3 (Windows)
- continue-on-error: true
- id: test-attempt-3-windows
- if: startsWith( matrix.os, 'windows-' ) &&
- steps.test-attempt-2-windows.outcome=='failure' &&
- startsWith( matrix.py, '3.6')
- run: |
- python -m tox -e $TOXENV
- timeout-minutes: 7 # About ten times the time of a successful step
- shell: bash
- - name: Test Attempt 4 (Windows)
- continue-on-error: true
- id: test-attempt-4-windows
- if: startsWith( matrix.os, 'windows-' ) &&
- steps.test-attempt-3-windows.outcome=='failure' &&
- startsWith( matrix.py, '3.6')
- run: |
- python -m tox -e $TOXENV
- timeout-minutes: 7 # About ten times the time of a successful step
- shell: bash
- - name: Test Attempt 5 (Windows)
- continue-on-error: true
- id: test-attempt-5-windows
- if: startsWith( matrix.os, 'windows-' ) &&
- steps.test-attempt-4-windows.outcome=='failure' &&
- startsWith( matrix.py, '3.6')
- run: |
- python -m tox -e $TOXENV
- timeout-minutes: 7 # About ten times the time of a successful step
- shell: bash
- - name: Check for Success (Windows)
- if: startsWith( matrix.os, 'windows-' ) &&
- startsWith( matrix.py, '3.6')
- run: |
- if ${{
- steps.test-attempt-1-windows.outcome=='success' ||
- steps.test-attempt-2-windows.outcome=='success' ||
- steps.test-attempt-3-windows.outcome=='success' ||
- steps.test-attempt-4-windows.outcome=='success' ||
- steps.test-attempt-5-windows.outcome=='success'
- }}; then
- echo "At least one attempt was successful!"
- else
- exit 1
- fi
- shell: bash
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 4fb605ca..2212dc26 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -1,4 +1,4 @@
-# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
+# This workflow will install Python dependencies, run tests_ci and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
name: Python package
diff --git a/.gitignore b/.gitignore
index 55cad511..26afabed 100644
--- a/.gitignore
+++ b/.gitignore
@@ -35,7 +35,7 @@ wheels/
pip-log.txt
pip-delete-this-directory.txt
-# Unit test / coverage reports
+# Unit tests_ci / coverage reports
htmlcov/
.tox/
.coverage
@@ -138,10 +138,10 @@ crashlytics.properties
crashlytics-build.properties
fabric.properties
-# Files created by running the test suite
+# Files created by running the tests_ci suite
/data/json_test.json
/data/output_av.mp4
-/multisensor_pipeline/tests/test_mic_to_wave_pipeline.wav
+/multisensor_pipeline/tests_ci/test_mic_to_wave_pipeline.wav
# Conda virtual environment
/.condaenv
diff --git a/README.md b/README.md
index 44e27795..73116e30 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
-# Multisensor Pipeline (MSP)
+# Multisensor Pipeline (MSP) [](https://github.com/DFKI-Interactive-Machine-Learning/multisensor-pipeline/actions/workflows/python-package.yml)
-The multisensor pipeline (`msp`) package enables stream and event processing with a small amount of dependencies. The main purpose of the `msp` pipeline is the development of research prototypes, but it can also be used for realizing small productive systems or demos that require an acquisition of multiple sensors or data streams (*source*), processing of this data(*processor*), and a utilization of the output (*sink*). The modules in a pipeline form a weakly connected directed graph. Sources and sinks are defined analogously to graph theory, processors are equivalent to internals (see this [Wikipedia article](https://en.wikipedia.org/wiki/Directed_graph#Indegree_and_outdegree)). A pipeline needs at least one source and one sink module. An `msp` pipeline can...
+The multisensor pipeline (`msp`) package enables stream and event processing with a small number of dependencies. The main purpose of the `msp` pipeline is the development of research prototypes, but it can also be used for realizing small productive systems or demos that require acquisition of samples from multiple sensor or data streams (via *source* modules), processing of these samples (via *processor* modules), and utilization of the output (via *sink* modules). Connected modules form a pipeline, which is represented as a weakly connected directed graph. Sources and sinks are defined analogously to graph theory; processors are equivalent to internals (see this [Wikipedia article](https://en.wikipedia.org/wiki/Directed_graph#Indegree_and_outdegree)). A pipeline needs at least one source and one sink module. An `msp` pipeline can...
- read/stream signals from any number of **source modules** like sensors, microphones, cameras, pens, eye trackers, etc.
- flexibly process incoming data with **processor modules** (e.g. signal filtering, manipulation, and classification; signal fusion).
@@ -8,45 +8,19 @@ The multisensor pipeline (`msp`) package enables stream and event processing wit
**What are the advantages of `msp`?**
-* It allows setting up flexible processing pipelines with any number of sources, processors and sinks.
+* It enables setting up flexible processing pipelines with any number of sources, processors and sinks.
* You can easily extend the pipeline by implementing [custom modules](#custom-modules).
* Each module runs in a separate thread to ensure responsiveness.
* Low number of dependencies = easy to integrate in your project.
-# Status
-
-[](https://github.com/DFKI-Interactive-Machine-Learning/multisensor-pipeline/actions/workflows/ci.yml)
-[](https://github.com/DFKI-Interactive-Machine-Learning/multisensor-pipeline/actions/workflows/python-package.yml)
-
## Installation
-We recommend using an Anaconda environment with Python 3.6 (x64) or greater. To install the `multisensor_pipeline`, activate your environment of choice and run the following command:
+We recommend using an Anaconda environment with Python >= 3.6. To install the `multisensor_pipeline`, activate your environment of choice and run the following command:
```shell
pip install multisensor-pipeline
```
-**System Requirements & Prerequisites**
-
-* Operating System: We currently support Linux, Windows and macOS.
-* Desktop Environments: Under Linux, we currently support the X Window System (X11), only.
-* Python Versions: We currently support Python 3.6, 3.7, 3.8, and 3.9.
-* Library dependency: This package depends on PyAudio, which are bindings for portaudio19 and their
-development headers:
-
-Under Linux:
-
-```shell
-sudo apt install --yes --no-install-recommends gcc portaudio19-dev
-```
-
-Under macOS:
-
-```shell
-brew install portaudio
-```
-
-
## Quick Start Example
```python
@@ -70,12 +44,11 @@ pipeline.connect(module=processor, successor=sink)
# (optional) add another edge to print all random numbers
pipeline.connect(module=source, successor=sink)
-# print mean of random numbers for 0.1 seconds
+# print means of random numbers for 0.1 seconds
pipeline.start()
sleep(.1)
pipeline.stop()
-# wait until all processes have stopped
-pipeline.join()
+pipeline.join()
```
The example initializes three modules, one source, one processor and one sink. The `RandomArraySource` generates numpy arrays (ndarray) with random numbers 60 times per second. Each array contains 50 random numbers (shape). The `ArrayManipulationProcessor` takes an array as input, computes the mean of it, and provides it to registered observers. The `ConsoleSink` prints all incoming messages to the console.
@@ -84,44 +57,54 @@ The example contains four major steps:
1. All modules are created and parametrized
2. The pipeline is created and all modules are added to it.
-3. The modules are connected to build the multisensor pipeline. This step defines what your pipeline is going to do and therefore is the most important step.
+3. The modules are connected to build the multisensor pipeline. This step defines what your pipeline is going to do and, therefore, is the most important step.
- *source >> processor*: the random arrays are sent to the array manipulator.
- - *processor >> sink*: the manipulated arrays, i.e., the means of them, are sent to the sink module which prints them to the console.
+ - *processor >> sink*: the manipulated arrays, i.e., their means, are sent to the sink module which prints them to the console.
- *source >> sink*: in addition, all random arrays are printed to the console.
-4. Starting and stopping the pipeline: `start()` is starting all modules of the pipeline, e.g., the source starts to generate arrays now. This loop runs infinitely long and has to be stopped from outside by calling the non-blocking `stop()` function of the pipeline instance. You can wait until the pipeline has stopped using its `join()` function.
+4. Starting and stopping the pipeline: `start()` is starting all modules of the pipeline, e.g., the source starts to generate arrays now. This loop runs infinitely long and has to be stopped from outside by calling the non-blocking `stop()` function of the pipeline. You can wait until the pipeline has stopped using its `join()` function.
## The MSPDataFrame Class
Instances of the `MSPDataFrame` class are used to transfer data and meta information from one module to the next.
-The only required parameter is `topic: Topic` which defines what kind of data the frame delivers. The best practice is to use the factory method for this: `self._generate_topic(self, name: str, dtype: type = None)`. The actual payload can be added using keyword arguments (kwargs) when initializing an instance of `MSPDataFrame` (see `value` in the examples above). Also, you can add key-value pairs after instantiation as with any Python `dict`, because `MSPDataFrame` inherits from `dict`.
+The `topic: Topic` of a dataframe defines what kind of data the frame delivers. It can have a `name: str` and a `dtype: type`. The payload is contained in the `data` field.
-In principle, `MSPDataFrame` can carry any data type. However, the `persistence` and `networking` package requires serialization and deserialization of data frames. Currently, we support all standard data types in Python and numpy arrays (`ndarray`). Support for pillow images (`PIL.Image`) will follow.
+In principle, `MSPDataFrame` can carry any data type in `data`. However, the `persistence` and `networking` packages require serialization and deserialization of dataframes. Currently, we support all standard data types of Python, numpy arrays (`ndarray`), and pillow images (`PIL.Image`) based on the *msgpack* library.
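+
+As a minimal sketch (using the `serialize`/`deserialize` helpers that `MSPDataFrame` provides), a dataframe can be created, serialized, and restored like this:
+
+```python
+from multisensor_pipeline.dataframe import MSPDataFrame, Topic
+
+# Create a dataframe carrying an integer payload.
+frame = MSPDataFrame(topic=Topic(name="random", dtype=int), data=42)
+
+# Round-trip it through the msgpack-based (de)serialization.
+restored = MSPDataFrame.deserialize(frame.serialize())
+assert restored.data == 42
+```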
## Custom Modules
-You can easily create custom modules by inheriting from one of the abstract module classes: `BaseSource`, `BaseProcessor`, and `BaseSink`. All modules offer and/or consume data streams frame-by-frame using the `MSPDataFrame` class as data structure.
+You can easily create custom modules by inheriting from one of the abstract module classes: `BaseSource`, `BaseProcessor`, and `BaseSink`. All modules offer and/or consume data streams frame-by-frame using the `MSPDataFrame` class as data structure. We provide a few simple examples and show how they work together in a pipeline:
### Inherit from _BaseSource_
```python
class RandomIntSource(BaseSource):
- """ Generate 50 random numbers per second. """
-
+ """Generate 50 random integer numbers per second."""
+
def on_update(self) -> Optional[MSPDataFrame]:
sleep(.02)
- topic = self._generate_topic(name="random", dtype=int)
- return MSPDataFrame(topic=topic, value=randint(0, 100))
+ return MSPDataFrame(topic=self.output_topics[0], data=randint(0, 100))
+
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+ return [Topic(name='random', dtype=int)]
```
### Inherit from _BaseProcessor_
```python
class ConstraintCheckingProcessor(BaseProcessor):
- """ Checks, if incoming values are greater than 50. """
+ """Checks, if incoming integer values are greater than 50."""
def on_update(self, frame: MSPDataFrame) -> Optional[MSPDataFrame]:
- topic = self._generate_topic(name="constraint_check", dtype=bool)
- return MSPDataFrame(topic=topic, value=frame["value"] > 50)
+ return MSPDataFrame(topic=self.output_topics[0], data=frame.data > 50)
+
+ @property
+ def input_topics(self) -> List[Topic]:
+ return [Topic(dtype=int)]
+
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+ return [Topic(name='constraint_check', dtype=bool)]
```
### Inherit from _BaseSink_
@@ -131,10 +114,11 @@ class ConsoleSink(BaseSink):
""" Prints incoming frames to the console. """
def on_update(self, frame: MSPDataFrame):
- print(frame)
+ if frame is not None:
+ print(f"{frame.timestamp}\t{frame.topic}\t{frame.data}")
```
-### Using your Modules
+### Using the Modules
```python
if __name__ == '__main__':
@@ -157,5 +141,36 @@ if __name__ == '__main__':
pipeline.join()
```
-You can now use your custom modules as part of a pipeline. This example connects the three sample modules using the `GraphPipeline` and executes it for 0.1 seconds. It prints the output of the `ConstraintCheckingProcessor` approximately 4 times: half of them show `value=True`, the other half shows `value=False`.
-More examples can be found in the `modules` and `tests` packages.
+You can now use the custom modules in a pipeline. The example above connects the three sample modules using the `GraphPipeline` and executes it for 0.1 seconds. The output of the `ConstraintCheckingProcessor` should look like this:
+
+```shell
+1639412295.2498586 <class 'bool'>:constraint_check False
+1639412295.2803597 <class 'bool'>:constraint_check False
+1639412295.3114836 <class 'bool'>:constraint_check False
+1639412295.342433 <class 'bool'>:constraint_check True
+```
+
+More examples can be found in the `tests` package.
+Please note that you can define input and output topics for a module. If you do so, the pipeline will automatically restrict traffic to supported topics. Setting the name or dtype of a Topic instance to None makes it act as a wildcard. For instance, the `ConstraintCheckingProcessor` will try to process all dataframes with `topic.dtype == int`; the topic name does not matter in this case.
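+
+As a minimal sketch of the matching rule (the topic offered by a source is compared against the topic accepted by a sink):
+
+```python
+from multisensor_pipeline.dataframe import Topic
+
+# A sink topic without a name accepts any topic with a matching dtype:
+assert Topic(name="random", dtype=int) == Topic(dtype=int)
+
+# A named sink topic only accepts topics with the same name:
+assert not (Topic(name="random", dtype=int) == Topic(name="other", dtype=int))
+```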
+
+## Cite Us
+Please cite the following paper when using the multisensor-pipeline in your project:
+
+```
+@inproceedings{barz_multisensor-pipeline_2021,
+ author = {Barz, Michael and Bhatti, Omair Shahzad and L\"{u}ers, Bengt and Prange, Alexander and Sonntag, Daniel},
+ title = {Multisensor-Pipeline: A Lightweight, Flexible, and Extensible Framework for Building Multimodal-Multisensor Interfaces},
+ year = {2021},
+ isbn = {9781450384711},
+ publisher = {Association for Computing Machinery},
+ address = {New York, NY, USA},
+ url = {https://doi.org/10.1145/3461615.3485432},
+ doi = {10.1145/3461615.3485432},
+ booktitle = {Companion Publication of the 2021 International Conference on Multimodal Interaction},
+ pages = {13–18},
+ numpages = {6},
+ keywords = {open source framework, stream processing, eye tracking, computer vision, prototyping, multimodal-multisensor interfaces},
+ location = {Montreal, QC, Canada},
+ series = {ICMI '21 Companion}
+}
+```
\ No newline at end of file
diff --git a/multisensor_pipeline/__init__.py b/multisensor_pipeline/__init__.py
index 8c07fa89..e5eb44c9 100644
--- a/multisensor_pipeline/__init__.py
+++ b/multisensor_pipeline/__init__.py
@@ -1,2 +1,2 @@
from multisensor_pipeline.modules.base import BaseSource, BaseProcessor, BaseSink
-from multisensor_pipeline.pipeline import GraphPipeline
\ No newline at end of file
+from multisensor_pipeline.pipeline import GraphPipeline
diff --git a/multisensor_pipeline/dataframe/__init__.py b/multisensor_pipeline/dataframe/__init__.py
index 7768bc7b..d00b71bd 100644
--- a/multisensor_pipeline/dataframe/__init__.py
+++ b/multisensor_pipeline/dataframe/__init__.py
@@ -1,2 +1,1 @@
-from .dataframe import Topic, MSPDataFrame, MSPEventFrame
-from .control import MSPControlMessage
+from .dataframe import Topic, MSPDataFrame, MSPControlMessage
diff --git a/multisensor_pipeline/dataframe/control.py b/multisensor_pipeline/dataframe/control.py
deleted file mode 100644
index 22261914..00000000
--- a/multisensor_pipeline/dataframe/control.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from multisensor_pipeline.dataframe import MSPDataFrame, Topic
-
-
-class MSPControlMessage(MSPDataFrame):
-
- END_OF_STREAM = "EOS"
-
- def __init__(self, message, source):
- topic = Topic(name="control", source_module=source.name, source_uuid=source.uuid)
- super(MSPControlMessage, self).__init__(topic=topic, message=message)
-
- @property
- def message(self):
- return self['message']
diff --git a/multisensor_pipeline/dataframe/dataframe.py b/multisensor_pipeline/dataframe/dataframe.py
index 13add7af..002adfda 100644
--- a/multisensor_pipeline/dataframe/dataframe.py
+++ b/multisensor_pipeline/dataframe/dataframe.py
@@ -1,25 +1,27 @@
-from typing import Any
+from typing import Optional, TypeVar, Generic, Any
import logging
-from time import time
-import json
+import io
+import time
+import msgpack
import numpy as np
+from PIL import Image
+
logger = logging.getLogger(__name__)
+T = TypeVar('T')
class Topic:
- def __init__(self, name: str, source_uuid: str, dtype: type = None, source_module: type = None):
+    def __init__(self, dtype: type = Any, name: Optional[str] = None):
"""
-
:param name:
:param dtype:
- :param source_module:
"""
self._name = name
self._dtype = dtype
- self._source = source_module
- self._uuid = source_uuid
+ if self.name is not None and dtype == Any:
+ logger.warning("If dtype is Any, topic.name has no effect.")
@property
def name(self) -> str:
@@ -30,115 +32,176 @@ def dtype(self) -> type:
return self._dtype
@property
- def source_module(self) -> type:
- return self._source
+ def uuid(self):
+ return f"{self.name}:{self.dtype if self.dtype is not None else None}"
@property
- def source_uuid(self) -> str:
- return self._uuid
+ def is_control_topic(self):
+ return self.dtype == MSPControlMessage.ControlTopic.ControlType
- @property
- def uuid(self):
- return f"{self.source_uuid}:{self.name}:{self.dtype.__name__}"
+ def __hash__(self):
+ return hash(self.uuid)
+
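+    # Note: __eq__ below implements a wildcard match (dtype Any, or a sink name of None),
+    # so two topics can compare equal while having different hashes.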
+ def __eq__(self, sink_topic):
+ """
+ Args:
+ sink_topic: Sink
+ TODO:
+ Returns:
+ """
+ if not isinstance(sink_topic, Topic):
+ return False #TODO: can we replace isinstance?
+
+ if self.dtype is Any or sink_topic.dtype is Any:
+ return True
+
+ dtype_matches = sink_topic.dtype == self.dtype
+
+ if sink_topic.name is not None:
+ name_matches = sink_topic.name == self.name
+ else:
+ name_matches = True
- def __eq__(self, other):
- if not isinstance(other, Topic):
- return False
- return self.dtype == other.dtype and self.name == other.name and self.source_uuid == other.source_uuid
+ return dtype_matches and name_matches
def __str__(self):
- return f"{self.source_module.__name__}:{self.name}:{self.dtype.__name__}"
+ return f"{self.dtype if self.dtype is not None else None}:{self.name}"
def __repr__(self):
- return f"Topic(name={self.name}, dtype={self.dtype}, source_module={self.source_module})"
-
-
-class MSPDataFrame(dict):
- class JsonEncoder(json.JSONEncoder):
-
- def default(self, obj: Any) -> Any:
- if isinstance(obj, np.integer):
- return int(obj)
- elif isinstance(obj, np.floating):
- return float(obj)
- elif isinstance(obj, np.ndarray):
- return {
- "_kind_": "ndarray",
- "_value_": obj.tolist()
- }
- if isinstance(obj, Topic):
- assert isinstance(obj, Topic)
- return {
- "_kind_": "topic",
- "_value_": {
- "name": obj.name,
- "dtype": str(obj.dtype),
- "source_module": str(obj.source_module),
- "source_uuid": obj.source_uuid
- }
- }
- return super(MSPDataFrame.JsonEncoder, self).default(obj)
-
- class JsonDecoder(json.JSONDecoder):
-
- def __init__(self, *args, **kwargs):
- json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)
-
- def object_hook(self, obj):
- if '_kind_' in obj:
- kind = obj['_kind_']
- if kind == 'ndarray':
- return np.array(obj['_value_'])
- elif kind == 'topic':
- return Topic(**obj['_value_'])
- # TODO: decode class types (#22)
- return obj
-
- def __init__(self, topic: Topic, timestamp: float = None, **kwargs):
- super(MSPDataFrame, self).__init__()
- if timestamp is None:
- self['timestamp'] = time()
- else:
- self['timestamp'] = timestamp
- self['topic'] = topic
+ return f"Topic(dtype={self.dtype if self.dtype is not None else None}, name={self.name})"
+
- if kwargs is not None:
- self.update(kwargs)
+class MSPDataFrame(Generic[T]):
+
+ def __init__(self, topic: Topic, timestamp: float = None, duration: float = 0, data: Optional[T] = None):
+ super(MSPDataFrame, self).__init__()
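+        # By default, the timestamp is taken from time.perf_counter(), a monotonic clock, not wall-clock (epoch) time.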
+ self._timestamp = time.perf_counter() if timestamp is None else timestamp
+ self._duration = duration
+ self._topic = topic
+ self._data = data
+ self._source_uuid = None
@property
def timestamp(self) -> float:
- return self['timestamp']
+ return self._timestamp
@timestamp.setter
- def timestamp(self, value: float):
- self['timestamp'] = value
+ def timestamp(self, timestamp: float):
+ self._timestamp = timestamp
@property
def topic(self) -> Topic:
- return self['topic']
+ return self._topic
@topic.setter
- def topic(self, value: Topic):
- self['topic'] = value
-
+ def topic(self, topic: Topic):
+ self._topic = topic
-class MSPEventFrame(MSPDataFrame):
+ @property
+ def source_uuid(self) -> str:
+ return self._source_uuid
- def __init__(self, value=None, duration: float = 0, **kwargs):
- super(MSPEventFrame, self).__init__(value=value, duration=duration, **kwargs)
+ @source_uuid.setter
+ def source_uuid(self, source_uuid: str):
+ self._source_uuid = source_uuid
@property
- def duration(self) -> float:
- return self['duration']
+ def data(self) -> T:
+ return self._data
- @duration.setter
- def duration(self, value: float):
- self['duration'] = value
+ @data.setter
+ def data(self, data: T):
+ self._data = data
@property
- def value(self) -> str:
- return self['value']
+ def duration(self) -> float:
+ return self._duration
- @value.setter
- def value(self, value: str):
- self['value'] = value
+ @duration.setter
+ def duration(self, duration: float):
+ self._duration = duration
+
+ @staticmethod
+ def msgpack_encode(obj):
+ if isinstance(obj, MSPDataFrame):
+ return {
+ "__dataframe__": True,
+ "topic": obj.topic,
+ "timestamp": obj.timestamp,
+ "duration": obj.duration,
+ "data": obj.data
+ }
+ if isinstance(obj, Topic):
+ return {
+ "__topic__": True,
+ "name": obj.name,
+ "dtype": str(obj.dtype) if obj.dtype is not None else None # TODO: how to encode a type?
+ }
+ if isinstance(obj, np.integer):
+ return int(obj)
+        if isinstance(obj, np.floating):
+ return float(obj)
+ if isinstance(obj, np.ndarray):
+ return {
+ "__ndarray__": True,
+ "data": obj.tolist(),
+ "shape": obj.shape,
+ "dtype": obj.dtype.name
+ }
+ if isinstance(obj, Image.Image):
+ buffer = io.BytesIO()
+ obj.save(buffer, format="jpeg", quality=90)
+ return {
+ "__jpeg__": True,
+ "bytes": buffer.getvalue()
+ }
+ return obj
+
+ def serialize(self) -> bytes:
+ return msgpack.packb(self, default=MSPDataFrame.msgpack_encode)
+
+ @staticmethod
+ def deserialize(frame: bytes):
+ return msgpack.unpackb(frame, object_hook=MSPDataFrame.msgpack_decode, raw=False)
+
+ @staticmethod
+ def msgpack_decode(obj):
+ if '__dataframe__' in obj:
+ obj = MSPDataFrame(
+ topic=obj["topic"],
+ timestamp=obj["timestamp"],
+ duration=obj["duration"],
+ data=obj["data"]
+ )
+ elif '__topic__' in obj:
+ obj = Topic(name=obj["name"], dtype=obj["dtype"])
+ elif '__ndarray__' in obj:
+ obj = np.array(
+ object=obj["data"],
+ # shape=obj["shape"],
+ dtype=obj["dtype"]
+ )
+ elif '__jpeg__' in obj:
+ obj = Image.open(io.BytesIO(obj["bytes"]))
+ return obj
+
+ @staticmethod
+ def get_msgpack_unpacker(filehandle) -> msgpack.Unpacker:
+ return msgpack.Unpacker(file_like=filehandle, object_hook=MSPDataFrame.msgpack_decode, raw=False)
+
+
+class MSPControlMessage(MSPDataFrame):
+
+ class ControlTopic(Topic):
+ class ControlType:
+ pass
+ name = None
+ dtype = ControlType
+
+ END_OF_STREAM = "EOS"
+ PASS = "PASS"
+
+ def __init__(self, message):
+ topic = self.ControlTopic()
+ super(MSPControlMessage, self).__init__(topic=topic)
+ self._data = message
\ No newline at end of file
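As a quick illustration of the msgpack round trip implemented above: a minimal sketch using only what this patch defines. Note that Topic.dtype is stringified on encode (see the TODO), so only the payload and the topic name survive the round trip unchanged.

    import numpy as np
    from multisensor_pipeline.dataframe import MSPDataFrame, Topic

    frame = MSPDataFrame(
        topic=Topic(name="random", dtype=np.ndarray),
        data=np.zeros((2, 2)),
    )
    payload = frame.serialize()                   # bytes, msgpack-encoded
    restored = MSPDataFrame.deserialize(payload)  # decoded via msgpack_decode
    assert np.array_equal(restored.data, frame.data)
    assert restored.topic.name == "random"        # dtype comes back as a string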
diff --git a/multisensor_pipeline/dataframe/eyetracking.py b/multisensor_pipeline/dataframe/eyetracking.py
deleted file mode 100644
index 891cbdf5..00000000
--- a/multisensor_pipeline/dataframe/eyetracking.py
+++ /dev/null
@@ -1,75 +0,0 @@
-from multisensor_pipeline.dataframe.dataframe import MSPDataFrame, MSPEventFrame
-
-
-class MSPGazeFrame(MSPDataFrame):
- """
- Data structure for gaze data. It enforces
- (1) that the origin of gaze coordinates is at the upper left, and
- (2) the gaze coordinates are normalized.
- """
-
- # see also https://pillow.readthedocs.io/en/stable/handbook/concepts.html#coordinate-system
- ORIGIN_BOTTOM_LEFT = "bl"
- ORIGIN_TOP_LEFT = "tl"
- ORIGIN_CENTER = "c"
-
- def __init__(self, gaze, max_width=1., max_height=1., normalized=True, origin="bl", **kwargs):
- scaled_gaze = self._scale_gaze(gaze, normalized, origin)
- super(MSPGazeFrame, self).__init__(max_width=max_width, max_height=max_height, gaze=scaled_gaze, **kwargs)
-
- def _scale_gaze(self, gaze, normalized, origin):
- if origin == self.ORIGIN_CENTER:
- raise NotImplementedError()
-
- x, y = tuple(gaze)
- if not normalized:
- x /= float(self.max_width)
- y /= float(self.max_height)
-
- if origin == self.ORIGIN_BOTTOM_LEFT:
- # convert to top-left coordinate
- y = 1. - y
-
- return x, y
-
- @property
- def x(self):
- return self['gaze'][0]
-
- @property
- def y(self):
- return self['gaze'][1]
-
- @property
- def gaze(self):
- return self['gaze']
-
- @property
- def x_scaled(self):
- return self.x * self.max_width
-
- @property
- def y_scaled(self):
- return self.y * self.max_height
-
- @property
- def gaze_scaled(self):
- return self.x_scaled, self.y_scaled
-
- @property
- def max_width(self):
- return self["max_width"]
-
- @property
- def max_height(self):
- return self["max_height"]
-
-
-class MSPFixationFrame(MSPEventFrame):
-
- def __init__(self, fixation_position: MSPGazeFrame = None, **kwargs):
- super(MSPFixationFrame, self).__init__(fixation_position=fixation_position, **kwargs)
-
- @property
- def fixation_position(self) -> MSPGazeFrame:
- return self["fixation_position"]
diff --git a/multisensor_pipeline/modules/__init__.py b/multisensor_pipeline/modules/__init__.py
index 10553fde..94d56e66 100644
--- a/multisensor_pipeline/modules/__init__.py
+++ b/multisensor_pipeline/modules/__init__.py
@@ -21,19 +21,6 @@ def on_update(self, frame: MSPDataFrame) -> Optional[MSPDataFrame]:
return frame
-class AttributeExtractionProcessor(BaseProcessor):
-
- def __init__(self, target_topic_name=None, key="timestamp"):
- super(AttributeExtractionProcessor, self).__init__()
- self._topic_name = target_topic_name
- self._key = key
-
- def on_update(self, frame: MSPDataFrame) -> Optional[MSPDataFrame]:
- if (self._topic_name is None or frame.topic.name == self._topic_name) and self._key in frame:
- _topic = self._generate_topic(name=f"{frame.topic.name}.{self._key}")
- return MSPDataFrame(topic=_topic, timestamp=frame['key'])
-
-
class ListSink(BaseSink):
def __init__(self):
@@ -76,13 +63,20 @@ class ConsoleSink(BaseSink):
def on_update(self, frame: MSPDataFrame):
if frame is not None:
- print(f"{frame.topic}:\t{frame}")
+ print(f"{frame.timestamp}\t{frame.topic}\t{frame.data}")
class TrashSink(BaseSink):
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._counter = 0
def on_update(self, frame: MSPDataFrame):
- pass
+ self._counter += 1
+
+ @property
+ def counter(self) -> int:
+ return self._counter
class SleepTrashSink(TrashSink):
@@ -93,3 +87,8 @@ def __init__(self, sleep_time: float, **kwargs):
def on_update(self, frame: MSPDataFrame):
sleep(self._sleep_time)
+ self._counter += 1
+
+ @property
+ def counter(self) -> int:
+ return self._counter
diff --git a/multisensor_pipeline/modules/audio/__init__.py b/multisensor_pipeline/modules/audio/__init__.py
index c35ab5ef..19a408ab 100644
--- a/multisensor_pipeline/modules/audio/__init__.py
+++ b/multisensor_pipeline/modules/audio/__init__.py
@@ -1,2 +1,2 @@
-from .microphone import Microphone
-from .wave import WaveFile
\ No newline at end of file
+from .microphone import MicrophoneSource
+from .file import AudioFileSink
\ No newline at end of file
diff --git a/multisensor_pipeline/modules/audio/file.py b/multisensor_pipeline/modules/audio/file.py
new file mode 100644
index 00000000..785d521c
--- /dev/null
+++ b/multisensor_pipeline/modules/audio/file.py
@@ -0,0 +1,42 @@
+from typing import List
+
+import numpy as np
+
+from multisensor_pipeline.modules.base import BaseSink
+from multisensor_pipeline.dataframe import MSPDataFrame, Topic
+import soundfile as sf
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class AudioFileSink(BaseSink):
+ """
+ AudioFileSink writes audio files. It supports formats that are supported by libsndfile.
+ """
+ def __init__(self, filename: str, channels: int = 2, samplerate: float = 44100., mode="w"):
+ """
+        Initialize the AudioFileSink
+ Args:
+ filename: path of the audio file
+ channels: Number of channels of the file
+ samplerate: The audio sampling rate
+            mode: "w" for overwriting existing files
+ """
+ super(AudioFileSink, self).__init__()
+ self._frames = []
+ self._wf = sf.SoundFile(filename, mode=mode, samplerate=int(samplerate), channels=channels)
+
+ def on_update(self, frame: MSPDataFrame):
+ self._wf.write(frame.data)
+
+ def on_stop(self):
+ """
+ Stops the AudioFileSink and closes the filestream
+ """
+ self._wf.close()
+
+ @property
+ def input_topics(self) -> List[Topic]:
+ return [Topic(name="audio", dtype=np.ndarray)]
+
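A minimal sketch of the sink in isolation, driving `on_update`/`on_stop` by hand (the pipeline normally does this); the filename and audio parameters are illustrative:

    import numpy as np
    from multisensor_pipeline.dataframe import MSPDataFrame, Topic
    from multisensor_pipeline.modules.audio.file import AudioFileSink

    sink = AudioFileSink("silence.wav", channels=1, samplerate=16000., mode="w")
    frame = MSPDataFrame(
        topic=Topic(name="audio", dtype=np.ndarray),
        data=np.zeros((16000, 1), dtype="float32"),  # one second of silence
    )
    sink.on_update(frame)  # appends the block via libsndfile
    sink.on_stop()         # closes the file handle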
diff --git a/multisensor_pipeline/modules/audio/microphone.py b/multisensor_pipeline/modules/audio/microphone.py
index db0c4186..b24cff88 100644
--- a/multisensor_pipeline/modules/audio/microphone.py
+++ b/multisensor_pipeline/modules/audio/microphone.py
@@ -1,54 +1,103 @@
from multisensor_pipeline.modules.base import BaseSource
-from multisensor_pipeline.dataframe import MSPDataFrame
-from typing import Optional
-import pyaudio
+from multisensor_pipeline.dataframe import MSPDataFrame, Topic
+from typing import Optional, List
+import sounddevice as sd
+import numpy as np
import logging
logger = logging.getLogger(__name__)
-class Microphone(BaseSource):
+class MicrophoneSource(BaseSource):
"""
Microphone Source for live audio recording of a connected microphone
"""
- def __init__(self, device: str, format=pyaudio.paInt16, channels: int = 2, sampling_rate: int = 44100,
- chunk_size: int = 1024):
+ class InputDevice:
+
+ def __init__(self, device_info: dict):
+ self._device_info = device_info
+
+ @property
+ def name(self) -> str:
+ return self._device_info["name"]
+
+ @property
+ def channels(self) -> int:
+ return self._device_info["max_input_channels"]
+
+ @property
+ def default_samplerate(self) -> float:
+ return self._device_info["default_samplerate"]
+
+ @property
+ def device_info(self) -> dict:
+ return self._device_info
+
+ def __str__(self):
+            return f"[{self.name}; channels={self.channels}; rate={self.default_samplerate}]"
+
+ @staticmethod
+ def available_input_devices():
+ devices = sd.query_devices()
+ return [MicrophoneSource.InputDevice(d) for d in devices if d["max_input_channels"] > 0]
+
+ def __init__(self, device: Optional[InputDevice] = None,
+ channels: Optional[int] = None,
+ samplerate: Optional[float] = None,
+ blocksize: Optional[int] = 1024):
"""
Initialize the Source
Args:
device: Device id of the microphone
- format: PyAudio format specification
channels: Number of channels of the device
- sampling_rate: The audio sampling rate
- chunk_size: Size of the chunks of the recordings
+ samplerate: The audio sampling rate
+ blocksize: Size of the chunks of the recordings
"""
- super(Microphone, self).__init__()
+ super(MicrophoneSource, self).__init__()
- self.device = device
- self.format = format
- self.channels = channels
- self.sampling_rate = sampling_rate
- self.chunk_size = chunk_size
+ if device is None:
+ device = MicrophoneSource.InputDevice(sd.query_devices(kind='input'))
+ self._device = device
+ self._samplerate = self._device.default_samplerate if samplerate is None else samplerate
+ self._channels = self._device.channels if channels is None else channels
+ self._blocksize = blocksize
+ self._stream = sd.InputStream(
+ samplerate=self._samplerate,
+ blocksize=self._blocksize,
+ device=self._device.name,
+ channels=self._channels
+ )
- self._mic = pyaudio.PyAudio()
- self._stream = self._mic.open(format=self.format,
- channels=self.channels,
- rate=self.sampling_rate,
- input=True,
- frames_per_buffer=self.chunk_size)
+ def on_start(self):
+ self._stream.start()
def on_update(self) -> Optional[MSPDataFrame]:
"""
Sends chunks of the audio recording
"""
- data = self._stream.read(self.chunk_size)
- return MSPDataFrame(topic=self._generate_topic(name="audio"), chunk=data)
+ t = self._stream.time
+ data, _ = self._stream.read(self._blocksize)
+ return MSPDataFrame(topic=self.output_topics[0], data=data.copy(), timestamp=t)
def on_stop(self):
"""
Stops the Microphone source and closes the stream
"""
- self._stream.stop_stream()
self._stream.close()
- self._mic.terminate()
+
+ @property
+ def device(self) -> InputDevice:
+ return self._device
+
+ @property
+ def channels(self) -> int:
+ return self._channels
+
+ @property
+ def samplerate(self) -> float:
+ return self._samplerate
+
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+ return [Topic(name="audio", dtype=np.ndarray)]
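A minimal live-recording sketch, assuming a working default input device; the modules connect via the ("audio", np.ndarray) topic, and the END_OF_STREAM message from the stopped source also stops the sink:

    import time
    from multisensor_pipeline.modules.audio import MicrophoneSource, AudioFileSink

    mic = MicrophoneSource()  # default device, samplerate and channels
    wav = AudioFileSink("recording.wav", channels=mic.channels,
                        samplerate=mic.samplerate, mode="w")
    mic.add_observer(wav)
    wav.start()
    mic.start()
    time.sleep(3.)  # record for roughly three seconds
    mic.stop()      # sends END_OF_STREAM; the sink stops itself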
diff --git a/multisensor_pipeline/modules/audio/wave.py b/multisensor_pipeline/modules/audio/wave.py
deleted file mode 100644
index 1add5757..00000000
--- a/multisensor_pipeline/modules/audio/wave.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from multisensor_pipeline.modules.base import BaseSink
-from multisensor_pipeline.dataframe import MSPDataFrame
-import wave
-import pyaudio
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-class WaveFile(BaseSink):
- """
- WaveFile Sink for .wav files
- """
- def __init__(self, filename: str, channels: int = 2, format: int = pyaudio.paInt16, rate: int = 44100):
- """
- Initialize the WaveFile Sink
- Args:
- filename: path of the wav file
- channels: Number of channels of the file
- format: PyAudio format specification
- rate: The audio sampling rate
- """
- super(WaveFile, self).__init__()
- self._frames = []
- self._wf = wave.open(filename, 'wb')
- self._wf.setnchannels(channels)
- self._wf.setsampwidth(pyaudio.get_sample_size(format))
- self._wf.setframerate(rate)
-
- def on_update(self, frame: MSPDataFrame):
- """
- Writes chunks of the .wav file
- """
- if frame.topic.name == "audio":
- self._wf.writeframes(frame["chunk"])
-
- def on_stop(self):
- """
- Stops the WaveFileSink and closes the filestream
- """
- self._wf.close()
diff --git a/multisensor_pipeline/modules/base/base.py b/multisensor_pipeline/modules/base/base.py
index 48ec7c7b..ac33a42d 100644
--- a/multisensor_pipeline/modules/base/base.py
+++ b/multisensor_pipeline/modules/base/base.py
@@ -1,13 +1,14 @@
-from abc import ABC
+from abc import ABC, abstractmethod
from threading import Thread
from queue import Queue
-from multiprocessing.queues import Queue as MPQueue
from multisensor_pipeline.dataframe.dataframe import MSPDataFrame, Topic
-from multisensor_pipeline.dataframe.control import MSPControlMessage
+from multisensor_pipeline.dataframe import MSPControlMessage
from multisensor_pipeline.modules.base.profiling import MSPModuleStats
-from typing import Union, Optional
+from multiprocessing.queues import Queue as MPQueue
+from typing import Union, Optional, List
import logging
import uuid
+from collections import defaultdict
logger = logging.getLogger(__name__)
@@ -38,17 +39,16 @@ def start(self):
self.on_start()
self._thread.start()
- def _generate_topic(self, name: str, dtype: type = None):
- return Topic(name=name, dtype=dtype, source_module=self.__class__, source_uuid=self.uuid)
-
def on_start(self):
""" Custom initialization """
pass
+ @abstractmethod
def _worker(self):
""" Main worker function (async) """
raise NotImplementedError()
+ @abstractmethod
def on_update(self):
""" Custom update routine. """
raise NotImplementedError()
@@ -88,6 +88,18 @@ def stats(self) -> MSPModuleStats:
""" Returns real-time profiling information. """
return self._stats
+ @property
+ def profiling(self) -> bool:
+        """ Profiling active/inactive """
+ return self._profiling
+
+ @profiling.setter
+ def profiling(self, value):
+ self._profiling = value
+
+ def __hash__(self):
+ return hash(self.uuid)
+
class BaseSource(BaseModule, ABC):
""" Base class for data sources. """
@@ -97,32 +109,57 @@ def __init__(self):
Initializes the worker thread and a queue list for communication with observers that listen to that source.
"""
super().__init__()
- self._sinks = []
+ self._sinks = defaultdict(list)
def _worker(self):
""" Source worker function: notify observer when source update function returns a DataFrame """
while self._active:
self._notify(self.on_update())
+ @abstractmethod
def on_update(self) -> Optional[MSPDataFrame]:
""" Custom update routine. """
raise NotImplementedError()
- def add_observer(self, sink):
+ def add_observer(self, sink, topics: Optional[Union[Topic, List[Topic]]] = None):
"""
Register a Sink or Queue as an observer.
Args:
+ topics:
sink: A thread-safe Queue object or Sink [or any class that implements put(tuple)]
"""
+ connected = False
+ if isinstance(topics, Topic):
+ topics = [topics]
+
if isinstance(sink, Queue) or isinstance(sink, MPQueue):
- self._sinks.append(sink)
+ if topics is None:
+ self._sinks[Topic()].append(sink)
+ connected = True
+ else:
+ for topic in topics:
+ self._sinks[topic].append(sink)
+ connected = True
return
assert isinstance(sink, BaseSink) or isinstance(sink, BaseProcessor)
- sink.add_source(self)
- self._sinks.append(sink)
- # TODO: check if types match -> raise error or warning
+ # case 1: if no topic filter is specified
+ if topics is None:
+ for topic in self.output_topics:
+ if topic in sink.input_topics:
+ self._sinks[topic].append(sink)
+ sink.add_source(self)
+ connected = True
+ # case 2: connection with specified topic
+ else:
+ for topic in topics:
+ matches_output = any([t == topic for t in self.output_topics])
+ if matches_output and topic in sink.input_topics:
+ self._sinks[topic].append(sink)
+ sink.add_source(self)
+ connected = True
+        assert connected, f"No connection could be established between {self.name} and {sink.name} with topic(s) {topics}"
def _notify(self, frame: Optional[MSPDataFrame]):
"""
@@ -134,10 +171,13 @@ def _notify(self, frame: Optional[MSPDataFrame]):
if frame is None:
return
- assert isinstance(frame, MSPDataFrame), "You must use a MSPDataFrame instance to wrap your data."
+ # assert isinstance(frame, MSPDataFrame), "You must use a MSPDataFrame instance to wrap your data."
+ frame.source_uuid = self.uuid
- for sink in self._sinks:
- sink.put(frame)
+ for topic, sinks in self._sinks.items():
+ if frame.topic.is_control_topic or frame.topic == topic:
+ for sink in sinks:
+ sink.put(frame)
if self._profiling:
self._stats.add_frame(frame, MSPModuleStats.Direction.OUT)
@@ -149,9 +189,14 @@ def stop(self, blocking: bool = True):
Args:
blocking:
"""
- self._notify(MSPControlMessage(message=MSPControlMessage.END_OF_STREAM, source=self))
+ self._notify(MSPControlMessage(message=MSPControlMessage.END_OF_STREAM))
super(BaseSource, self).stop(blocking=blocking)
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+ """ Returns outgoing topics that are provided by the source module at hand. """
+ return [Topic()]
+
class BaseSink(BaseModule, ABC):
""" Base class for data sinks. """
@@ -186,17 +231,19 @@ def _handle_control_message(self, frame: MSPDataFrame):
Args:
frame: frame containing MSPControlMessage
"""
- if isinstance(frame, MSPControlMessage):
- logger.debug(f"[CONTROL] {frame.topic.source_uuid} -> {frame.message} -> {self.uuid}")
- if frame.message == MSPControlMessage.END_OF_STREAM:
- if frame.topic.source_uuid in self._active_sources:
+ if frame.topic.is_control_topic:
+ if frame.data == MSPControlMessage.END_OF_STREAM:
+ if frame.source_uuid in self._active_sources:
+ logger.debug(f"[CONTROL] {frame.source_uuid} -> {frame.data} -> {self.uuid}")
# set source to inactive
- self._active_sources[frame.topic.source_uuid] = False
+ self._active_sources[frame.source_uuid] = False
# if no active source is left
if not any(self._active_sources.values()):
self.stop(blocking=False)
+ elif frame.data == MSPControlMessage.PASS:
+ pass
else:
- logger.warning(f"unhandled control message: {frame.message}")
+ logger.warning(f"[UNHANDLED CONTROL] {frame.source_uuid} -> {frame.data} -> {self.uuid}")
return True
return False
@@ -210,16 +257,18 @@ def _worker(self):
if self._handle_control_message(frame):
continue
- if self._profiling:
+ if self._profiling: # TODO: check profiling
self._stats.add_frame(frame, MSPModuleStats.Direction.IN)
self.on_update(frame)
+ @abstractmethod
def on_update(self, frame: MSPDataFrame):
""" Custom update routine. """
raise NotImplementedError()
def _perform_sample_dropout(self, frame_time) -> int:
+ # TODO: do this per topic (single queue)
if not self._dropout:
return 0
@@ -235,11 +284,17 @@ def _perform_sample_dropout(self, frame_time) -> int:
return num_skipped
def put(self, frame: MSPDataFrame):
+ # TODO: create a queue per topic and perform explicit sample synchronization
skipped_frames = self._perform_sample_dropout(frame.timestamp)
self._queue.put(frame)
if self._profiling:
self._stats.add_queue_state(qsize=self._queue.qsize(), skipped_frames=skipped_frames)
+ @property
+ def input_topics(self) -> List[Topic]:
+ """ Returns topics which can be handled by the sink module at hand. """
+ return [Topic()]
+
class BaseProcessor(BaseSink, BaseSource, ABC):
""" Base class for data processors. """
diff --git a/multisensor_pipeline/modules/base/profiling.py b/multisensor_pipeline/modules/base/profiling.py
index 564f0a56..bc1ade3c 100644
--- a/multisensor_pipeline/modules/base/profiling.py
+++ b/multisensor_pipeline/modules/base/profiling.py
@@ -1,7 +1,9 @@
-from multisensor_pipeline.dataframe import MSPDataFrame, MSPControlMessage
-from time import time
+import logging
+from typing import Optional
+from multisensor_pipeline.dataframe import MSPDataFrame, MSPControlMessage, Topic
from datetime import datetime
from collections import deque
+import time
class MSPModuleStats:
@@ -24,7 +26,7 @@ def _next_sma(self, rate: float):
# see https://en.wikipedia.org/wiki/Moving_average
self._samples.append(rate)
if self._num_samples >= self._k - 1:
- return self._sma + (rate - self._samples.popleft()) / self._k
+ return self._sma + (rate - self._samples.popleft()) / self._k
else:
return self._cma
@@ -37,18 +39,39 @@ def update(self, sample: float):
self._sma = self._next_sma(sample)
self._num_samples += 1
- class FrequencyStats(MovingAverageStats):
- """
- Implementation of FrequencyStats
- """
+ @property
+ def sma(self):
+ return self._sma
- _last_sample = None # timestamp of last frame (time of being received)
+ @property
+ def cma(self):
+ return self._cma
- def update(self, sample: float):
- if self._last_sample is not None:
- rate = 1. / (sample - self._last_sample)
- super(MSPModuleStats.FrequencyStats, self).update(rate)
- self._last_sample = sample
+ class RobustSamplerateStats(object):
+
+ def __init__(self, max_measurement_interval: float = 1. / 10):
+ super(MSPModuleStats.RobustSamplerateStats, self).__init__()
+ self._num_samples = 0
+ self._samplerate = 0.
+ self._t_start = None
+ self._t_last_update = None
+ self._measurement_interval = max_measurement_interval
+
+ def update(self, timestamp: float):
+ if self._t_start is None:
+ self._t_start = timestamp
+ self._t_last_update = timestamp
+ self._num_samples += 1
+
+ time_since_last_update = timestamp - self._t_last_update
+ if time_since_last_update >= self._measurement_interval:
+ measurement_time = timestamp - self._t_start
+ self._samplerate = float(self._num_samples - 1.) / measurement_time
+ self._t_last_update = timestamp
+
+ @property
+ def samplerate(self):
+ return self._samplerate
class Direction:
OUT = 0
@@ -64,30 +87,45 @@ def __init__(self):
self._in_stats = {}
self._out_stats = {}
self._queue_size = self.MovingAverageStats()
- self._skipped_frames = self.MovingAverageStats()
+ self._skipped_frames = self.RobustSamplerateStats()
- def get_stats(self, direction: Direction):
+ def get_stats(self, direction: Direction, topic: Optional[Topic] = None):
if direction == self.Direction.IN:
- return self._in_stats
+ if topic:
+ return self._in_stats[topic.uuid]
+ else:
+ return self._in_stats
elif direction == self.Direction.OUT:
+ if topic:
+ return self._out_stats[topic.uuid]
return self._out_stats
else:
raise NotImplementedError()
def add_frame(self, frame: MSPDataFrame, direction: Direction):
- time_received = time()
- if isinstance(frame, MSPControlMessage):
+ time_received = time.perf_counter()
+ if frame.topic.is_control_topic:
return
# per direction, topic -> update stats
stats = self.get_stats(direction)
- if frame.topic not in stats:
- stats[frame.topic] = self.FrequencyStats()
- stats[frame.topic].update(time_received)
+ if frame.topic.uuid not in stats:
+ stats[frame.topic.uuid] = self.RobustSamplerateStats()
+ stats[frame.topic.uuid].update(time_received)
def add_queue_state(self, qsize: int, skipped_frames: int):
+ time_received = time.perf_counter()
self._queue_size.update(qsize)
- self._skipped_frames.update(skipped_frames)
+ for i in range(skipped_frames):
+ self._skipped_frames.update(time_received)
+
+ @property
+ def frame_skip_rate(self):
+ return self._skipped_frames.samplerate
+
+ @property
+ def average_queue_size(self):
+ return self._queue_size.cma
def finalize(self):
self._stop_time = datetime.now()
diff --git a/multisensor_pipeline/modules/base/sampler.py b/multisensor_pipeline/modules/base/sampler.py
deleted file mode 100644
index 45c10328..00000000
--- a/multisensor_pipeline/modules/base/sampler.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from abc import ABC
-from multisensor_pipeline.modules import BaseSource
-from multisensor_pipeline.dataframe import MSPDataFrame, MSPControlMessage
-from time import time, sleep
-
-
-class BaseFixedRateSource(BaseSource, ABC):
-
- def __init__(self, sampling_rate: float = 1.):
- super().__init__()
- self._sampling_rate = sampling_rate
- self._sleep_time = 1. / self._sampling_rate
-
- self._last_frame_timestamp = None
-
- @property
- def sampling_rate(self):
- return self._sampling_rate
-
- def _notify(self, frame: MSPDataFrame):
- super(BaseFixedRateSource, self)._notify(frame)
- self._sleep(frame)
-
- def _sleep(self, frame: MSPDataFrame):
- if isinstance(frame, MSPControlMessage):
- return
- if self._sampling_rate == float("inf"):
- return
-
- if self._last_frame_timestamp is None:
- sleep(self._sleep_time)
- else:
- processing_duration = time() - self._last_frame_timestamp
- if processing_duration < self._sleep_time:
- sleep(self._sleep_time - processing_duration)
-
- self._last_frame_timestamp = time()
diff --git a/multisensor_pipeline/modules/base/sampling.py b/multisensor_pipeline/modules/base/sampling.py
new file mode 100644
index 00000000..42d99068
--- /dev/null
+++ b/multisensor_pipeline/modules/base/sampling.py
@@ -0,0 +1,38 @@
+from abc import ABC
+import logging
+from multisensor_pipeline.modules import BaseSource
+import time
+import sched
+
+logger = logging.getLogger(__name__)
+
+
+class BaseDiscreteSamplingSource(BaseSource, ABC):
+
+ def __init__(self, samplerate: float = 1.):
+ """
+ Args:
+ samplerate: set the intended samplerate in Hertz [Hz]
+ """
+ super().__init__()
+ self._samplerate = samplerate
+ self._period_time = 1. / self._samplerate
+
+ self._scheduler = sched.scheduler(time.perf_counter, time.sleep)
+
+ @property
+ def samplerate(self):
+ return self._samplerate
+
+ def _worker(self):
+ t = time.perf_counter()
+ self._notify(self.on_update())
+ while self._active:
+ t_next = t + self._period_time
+ _ = self._scheduler.enterabs(
+ time=t_next,
+ priority=0,
+ action=lambda: self._notify(self.on_update())
+ )
+ t = t_next
+ self._scheduler.run(blocking=True)
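A minimal subclass sketch (`CounterSource` is hypothetical) showing that only `on_update` and `output_topics` need to be provided; the scheduler above then drives the source at the requested rate:

    from typing import List, Optional
    from multisensor_pipeline.dataframe import MSPDataFrame, Topic
    from multisensor_pipeline.modules.base.sampling import BaseDiscreteSamplingSource

    class CounterSource(BaseDiscreteSamplingSource):
        """Hypothetical source emitting an increasing int at `samplerate` Hz."""

        def __init__(self, samplerate: float = 10.):
            super().__init__(samplerate=samplerate)
            self._i = 0

        @property
        def output_topics(self) -> Optional[List[Topic]]:
            return [Topic(name="counter", dtype=int)]

        def on_update(self) -> Optional[MSPDataFrame]:
            self._i += 1
            return MSPDataFrame(topic=self.output_topics[0], data=self._i)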
diff --git a/multisensor_pipeline/modules/image/__init__.py b/multisensor_pipeline/modules/image/__init__.py
index 0e253a88..2038f8e0 100644
--- a/multisensor_pipeline/modules/image/__init__.py
+++ b/multisensor_pipeline/modules/image/__init__.py
@@ -1,2 +1,2 @@
from .pillow import CropByPointerProcessor
-from .utils import roi_rect, scale_to_image_coordinate
+from .utils import roi_rect
diff --git a/multisensor_pipeline/modules/image/pillow.py b/multisensor_pipeline/modules/image/pillow.py
index 75cb7b9f..c3aaa5e8 100644
--- a/multisensor_pipeline/modules/image/pillow.py
+++ b/multisensor_pipeline/modules/image/pillow.py
@@ -1,65 +1,45 @@
from PIL import Image, ImageFile
from multisensor_pipeline.modules.base import BaseProcessor
-from .utils import roi_rect
-from multisensor_pipeline.dataframe.dataframe import MSPDataFrame
-from typing import Optional
+from .utils import crop
+from multisensor_pipeline.dataframe.dataframe import MSPDataFrame, Topic
+from typing import Optional, List, Tuple
+import numpy as np
ImageFile.LOAD_TRUNCATED_IMAGES = True
class CropByPointerProcessor(BaseProcessor):
- """
- Crops PillowImage on a give point with a definable crop size
- """
- def __init__(self, image_topic_name, pointer_topic_names, crop_size: int = 200, image_key: str = "image",
- point_key: str = "point"):
- """
- Initialize the Processor
- Args:
- image_topic_name: image topic names to be handled
- pointer_topic_names: pointer topic names to be handled
- crop_size: size of the crop AOI
- image_key: should always be "image" because it's the default for transferring images
- point_key: key of the point in the signal
- """
+ def __init__(self, crop_size: int = 200, pointer_topic_name: Optional[str] = None):
super(CropByPointerProcessor, self).__init__()
- self.crop_size = crop_size
- self._image = None
-
- # set topic names to be handled and dict keys to access the correct data fields
- # TODO: if a names are set to None, consider all topics that include the correct key
- self._image_topic_name = image_topic_name
- self._image_key = image_key # should always be "image" because it's the default for transferring images
- self._crop_signal_topic_names = pointer_topic_names
- self._crop_signal_key = point_key
-
- @staticmethod
- def crop(image: Image, point, crop_size: int):
- """
- Crops the PillowImage
- """
- if image is None:
- return None
- w, h = image.size
- # pos = scale_to_image_coordinate(point, w, h, flip_y=False)
- rect = roi_rect(width=w, height=h, center_x=point[0], center_y=point[1], size=crop_size)
- if rect is None:
- return None
- return image.crop(rect)
+ self._crop_size = crop_size
+ self._latest_image = None
+ self._latest_image_name = None
+ self._pointer_topic_name = pointer_topic_name
def on_update(self, frame: MSPDataFrame) -> Optional[MSPDataFrame]:
- # update internal temporary fields
- if frame.topic.name == self._image_topic_name:
- img = frame[self._image_key]
- self._image = img
- elif any([frame.topic.name == t for t in self._crop_signal_topic_names]):
- # for each crop signal -> crop image patch and notify observers
- point = frame[self._crop_signal_key]
- img_patch = self.crop(self._image, point, self.crop_size)
+ # store the latest image
+ if frame.topic.dtype == Image.Image:
+ self._latest_image = frame.data
+ self._latest_image_name = frame.topic.name
+
+ # crop an image patch for each point
+ elif frame.topic.dtype == Tuple[int, int] or frame.topic.dtype == np.ndarray:
+ img_patch = crop(self._latest_image, frame.data, self._crop_size)
if img_patch is None:
return None
- return MSPDataFrame(topic=self._generate_topic(name=f"{self._image_topic_name}.cropped"),
- timestamp=frame.timestamp, image=img_patch, base_topic=frame.topic,
- crop_size=self.crop_size)
+ return MSPDataFrame(
+ topic=Topic(dtype=Image.Image, name=f"{self._latest_image_name}.cropped"),
+ timestamp=frame.timestamp,
+ data=img_patch
+ )
+
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+ return [Topic(dtype=Image.Image)]
+
+ @property
+ def input_topics(self) -> List[Topic]:
+ return [Topic(dtype=Image.Image), Topic(name=self._pointer_topic_name, dtype=Tuple[int, int]),
+ Topic(name=self._pointer_topic_name, dtype=np.ndarray)]
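A minimal sketch of the two-step protocol above (image frames are cached, pointer frames trigger a crop); the topic names are illustrative:

    import numpy as np
    from PIL import Image
    from multisensor_pipeline.dataframe import MSPDataFrame, Topic
    from multisensor_pipeline.modules.image import CropByPointerProcessor

    proc = CropByPointerProcessor(crop_size=50)
    proc.on_update(MSPDataFrame(topic=Topic(name="webcam", dtype=Image.Image),
                                data=Image.new("RGB", (640, 480))))  # cache the image
    patch = proc.on_update(MSPDataFrame(topic=Topic(name="gaze", dtype=np.ndarray),
                                        data=np.array([320, 240])))  # crop at the point
    assert patch.data.size == (50, 50)
    assert patch.topic.name == "webcam.cropped"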
diff --git a/multisensor_pipeline/modules/image/utils.py b/multisensor_pipeline/modules/image/utils.py
index 41c105bb..560d857e 100644
--- a/multisensor_pipeline/modules/image/utils.py
+++ b/multisensor_pipeline/modules/image/utils.py
@@ -1,13 +1,9 @@
-def scale_to_image_coordinate(norm_pos, width, height, flip_y=False):
- """Scales normalized coordinates to the image coordinate system."""
- pos = [norm_pos[0] * width, norm_pos[1] * height]
- if flip_y:
- pos[1] = height - pos[1]
- return pos
+from typing import Tuple, Optional
+from PIL import Image
-def roi_rect(width, height, center_x, center_y, size):
- """Returns a tuple defining a box with edge size `size` around a center point"""
+def roi_rect(width: int, height: int, center_x: int, center_y: int, size: int) -> Optional[Tuple[int, int, int, int]]:
+ """ Returns a tuple defining a box with edge size `size` around a center point. """
s = int(.5 * size)
x, y = int(center_x) - s, int(center_y) - s
@@ -25,4 +21,15 @@ def roi_rect(width, height, center_x, center_y, size):
elif y + size > height - 1:
y = int(height - size - 1)
- return x, y, x+size, y+size
+ return x, y, x + size, y + size
+
+
+def crop(image: Image.Image, point: Tuple[int, int], crop_size: int) -> Optional[Image.Image]:
+ """ Crops an image patch from a pillow Image. """
+ if image is None:
+ return None
+ w, h = image.size
+ rect = roi_rect(width=w, height=h, center_x=point[0], center_y=point[1], size=crop_size)
+ if rect is None:
+ return None
+ return image.crop(rect)
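Two worked calls illustrating the clamping behaviour of `roi_rect` (the second relies on the border handling shown above):

    from multisensor_pipeline.modules.image.utils import roi_rect

    roi_rect(width=100, height=100, center_x=50, center_y=50, size=10)
    # -> (45, 45, 55, 55): box centered on the point
    roi_rect(width=100, height=100, center_x=99, center_y=99, size=10)
    # -> (89, 89, 99, 99): shifted inward at the bottom-right border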
diff --git a/multisensor_pipeline/modules/keyboard.py b/multisensor_pipeline/modules/keyboard.py
index c8da5eed..807256e1 100644
--- a/multisensor_pipeline/modules/keyboard.py
+++ b/multisensor_pipeline/modules/keyboard.py
@@ -1,14 +1,15 @@
import collections
+
from pynput import keyboard
from multisensor_pipeline.modules.base import BaseSource
-from multisensor_pipeline.dataframe import MSPEventFrame, MSPDataFrame
-from typing import Optional
+from multisensor_pipeline.dataframe import MSPDataFrame, Topic
+from typing import Optional, List
import logging
logger = logging.getLogger(__name__)
-class Keyboard(BaseSource):
+class KeyboardSource(BaseSource):
"""
Source for keyboard input. Can observe keyboard press and releases of button
"""
@@ -20,6 +21,8 @@ def __init__(self, press=True, release=False):
self.stop_listener = False
self.listener = None
self.queue = collections.deque()
+ self._keypress_topic = Topic(name="keyboard.press", dtype=str)
+ self._keyrelease_topic = Topic(name="keyboard.release", dtype=str)
def on_start(self):
args = {}
@@ -32,11 +35,11 @@ def on_start(self):
self.listener.start()
def on_press(self, key):
- frame = MSPEventFrame(topic=self._generate_topic(name="keyboard.press", dtype=str), chunk={"key": key})
+ frame = MSPDataFrame(topic=self._keypress_topic, data=key)
self.queue.append(frame)
def on_release(self, key):
- frame = MSPEventFrame(topic=self._generate_topic(name="keyboard.release", dtype=str), chunk={"key": key})
+ frame = MSPDataFrame(topic=self._keyrelease_topic, data=key)
self.queue.append(frame)
def on_update(self) -> Optional[MSPDataFrame]:
@@ -49,3 +52,8 @@ def on_stop(self):
self.stop_listener = True
self.listener.stop()
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+ return [self._keyrelease_topic, self._keypress_topic]
+
+
diff --git a/multisensor_pipeline/modules/mouse.py b/multisensor_pipeline/modules/mouse.py
index b39680dc..113abdac 100644
--- a/multisensor_pipeline/modules/mouse.py
+++ b/multisensor_pipeline/modules/mouse.py
@@ -1,8 +1,10 @@
import collections
+
+import typing
from pynput import mouse
from multisensor_pipeline.modules.base import BaseSource
-from multisensor_pipeline.dataframe import MSPEventFrame, MSPDataFrame
-from typing import Optional
+from multisensor_pipeline.dataframe import MSPDataFrame, Topic
+from typing import Optional, List, Tuple, Dict, Generic, Any
import logging
logger = logging.getLogger(__name__)
@@ -22,6 +24,9 @@ def __init__(self, move=True, click=False, scroll=False):
self.stop_listener = False
self.listener = None
self.queue = collections.deque()
+ self._mouse_scroll_topic = Topic(name="mouse.scroll", dtype=Tuple[float, float])
+ self._mouse_click_topic = Topic(name="mouse.click", dtype=Dict)
+        self._mouse_move_topic = Topic(name="mouse.coordinates", dtype=Tuple[float, float])
def on_start(self):
args = {}
@@ -36,17 +41,17 @@ def on_start(self):
self.listener.start()
def on_move(self, x, y):
- frame = MSPEventFrame(topic=self._generate_topic(name="mouse.coordinates", dtype=float), chunk={"x": x, "y": y})
+ frame = MSPDataFrame(topic=self._mouse_move_topic, data=(x, y))
self.queue.append(frame)
def on_click(self, x, y, button, pressed):
- frame = MSPEventFrame(topic=self._generate_topic(name="mouse.click", dtype=float),
- chunk={"x": x, "y": y, "button": button, "pressed": pressed})
+ frame = MSPDataFrame(topic=self._mouse_click_topic,
+ data={"point": (x, y), "button": button, "pressed": pressed})
self.queue.append(frame)
def on_scroll(self, x, y, dx, dy):
- frame = MSPEventFrame(topic=self._generate_topic(name="mouse.scroll", dtype=float),
- chunk={"x": x, "y": y, "scroll_x": dx, "scroll_y": dy})
+ frame = MSPDataFrame(topic=self._mouse_scroll_topic,
+ data=(dx, dy))
self.queue.append(frame)
def on_update(self) -> Optional[MSPDataFrame]:
@@ -58,3 +63,7 @@ def on_update(self) -> Optional[MSPDataFrame]:
def on_stop(self):
self.stop_listener = True
self.listener.stop()
+
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+ return [self._mouse_move_topic, self._mouse_click_topic, self._mouse_scroll_topic]
diff --git a/multisensor_pipeline/modules/multiprocess.py b/multisensor_pipeline/modules/multiprocess.py
index 4ded4ea2..7e731980 100644
--- a/multisensor_pipeline/modules/multiprocess.py
+++ b/multisensor_pipeline/modules/multiprocess.py
@@ -46,7 +46,7 @@ def on_start(self):
@staticmethod
@abstractmethod
- def _process_worker(module_cls: type, module_args: dict, init_event, start_event, stop_event, queue: mp.queues.Queue):
+ def _process_worker(module_cls: type, module_args: dict, init_event, start_event, stop_event, queue):
raise NotImplementedError()
@@ -59,7 +59,7 @@ def _init_process(self) -> mp.Process:
self._start_event, self._stop_event, self._queue_out))
@staticmethod
- def _process_worker(module_cls: type, module_args: dict, init_event, start_event, stop_event, queue_out: mp.queues.Queue):
+ def _process_worker(module_cls: type, module_args: dict, init_event, start_event, stop_event, queue_out):
module = initialize_module_and_wait_for_start(module_cls, module_args, init_event, start_event)
assert isinstance(module, BaseSource)
@@ -81,7 +81,8 @@ def stop(self, blocking=False):
""" Stops the module. """
self._stop_process()
super(MultiprocessModuleWrapper, self).stop(blocking=blocking)
- self._queue_out.put(MSPControlMessage(message=MSPControlMessage.END_OF_STREAM, source=self))
+ eof_msg = MSPControlMessage(message=MSPControlMessage.END_OF_STREAM)
+ self._queue_out.put(eof_msg)
class MultiprocessSinkWrapper(MultiprocessModuleWrapper, BaseSink):
@@ -93,7 +94,7 @@ def _init_process(self) -> mp.Process:
self._start_event, self._stop_event, self._queue_in))
@staticmethod
- def _process_worker(module_cls: type, module_args: dict, init_event, start_event, stop_event, queue_in: mp.queues.Queue):
+ def _process_worker(module_cls: type, module_args: dict, init_event, start_event, stop_event, queue_in):
module = initialize_module_and_wait_for_start(module_cls, module_args, init_event, start_event)
assert isinstance(module, BaseSink)
@@ -108,7 +109,8 @@ def _stop_process(self):
logger.debug("stopping: {}.{}".format(self.name, self._wrapped_module_cls.__name__))
# ask module process to stop
self._stop_event.set()
- self._queue_in.put(MSPControlMessage(message=MSPControlMessage.END_OF_STREAM, source=self))
+ eof_msg = MSPControlMessage(message=MSPControlMessage.END_OF_STREAM)
+ self._queue_in.put(eof_msg)
self._process.join()
def stop(self, blocking=False):
@@ -131,8 +133,7 @@ def on_update(self, frame: MSPDataFrame) -> Optional[MSPDataFrame]:
return self._queue_out.get()
@staticmethod
- def _process_worker(module_cls: type, module_args: dict, init_event, start_event, stop_event,
- queue_in: mp.queues.Queue, queue_out: mp.queues.Queue):
+ def _process_worker(module_cls: type, module_args: dict, init_event, start_event, stop_event, queue_in, queue_out):
module = initialize_module_and_wait_for_start(module_cls, module_args, init_event, start_event)
assert isinstance(module, BaseProcessor)
diff --git a/multisensor_pipeline/modules/network.py b/multisensor_pipeline/modules/network.py
index a895b8d6..9961c0e6 100644
--- a/multisensor_pipeline/modules/network.py
+++ b/multisensor_pipeline/modules/network.py
@@ -1,10 +1,9 @@
from multisensor_pipeline.modules.base import BaseSink, BaseSource
-from multisensor_pipeline.dataframe.dataframe import MSPDataFrame
-from typing import Optional
+from multisensor_pipeline.dataframe.dataframe import MSPDataFrame, Topic
+from typing import Optional, List
import zmq
import logging
-# import msgpack
-import json
+import msgpack
logger = logging.getLogger(__name__)
@@ -22,15 +21,16 @@ def __init__(self, protocol='tcp', url='*', port=5000):
self.socket.bind("{}://{}:{}".format(self.protocol, self.url, self.port))
def on_update(self, frame: MSPDataFrame):
- # payload = (frame.topic.name, msgpack.packb(frame, use_bin_type=True))
- # self.socket.send_multipart(payload)
- payload = json.dumps(frame, cls=MSPDataFrame.JsonEncoder)
- self.socket.send_json(payload)
+ self.socket.send(data=frame.serialize())
def on_stop(self):
self.socket.close()
self.context.term()
+ @property
+ def input_topics(self) -> List[Topic]:
+ return [Topic()]
+
class ZmqSubscriber(BaseSource):
@@ -48,12 +48,14 @@ def __init__(self, topic_filter='', protocol='tcp', url='127.0.0.1', port=5000):
self.socket.setsockopt_string(zmq.SUBSCRIBE, self.source_filter)
def on_update(self) -> Optional[MSPDataFrame]:
- # packet = self.socket.recv_multipart()
- # frame = msgpack.unpackb(packet[1], raw=False)
- payload = self.socket.recv_json()
- frame = MSPDataFrame(**json.loads(s=payload, cls=MSPDataFrame.JsonDecoder))
+ frame = MSPDataFrame.deserialize(self.socket.recv())
return frame
def on_stop(self):
self.socket.close()
self.context.term()
+
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+ return [Topic()]
+
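A minimal distribution sketch over the rewritten msgpack transport; `source` and `sink` are hypothetical placeholders for any compatible modules, and publisher and subscriber would normally run in different processes:

    from multisensor_pipeline.modules.network import ZmqPublisher, ZmqSubscriber

    pub = ZmqPublisher(protocol="tcp", url="*", port=5000)           # binds
    sub = ZmqSubscriber(protocol="tcp", url="127.0.0.1", port=5000)  # connects

    source.add_observer(pub)  # outgoing frames pass through MSPDataFrame.serialize
    sub.add_observer(sink)    # received bytes are deserialized back into frames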
diff --git a/multisensor_pipeline/modules/npy.py b/multisensor_pipeline/modules/npy.py
index 53308023..e19a0451 100644
--- a/multisensor_pipeline/modules/npy.py
+++ b/multisensor_pipeline/modules/npy.py
@@ -1,29 +1,32 @@
import numpy as np
from multisensor_pipeline.modules import BaseProcessor
-from multisensor_pipeline.modules.base.sampler import BaseFixedRateSource
-from multisensor_pipeline.dataframe import MSPDataFrame
-from typing import Optional
+from multisensor_pipeline.modules.base.sampling import BaseDiscreteSamplingSource
+from multisensor_pipeline.dataframe import MSPDataFrame, Topic
+from typing import Optional, List
-class RandomArraySource(BaseFixedRateSource):
+class RandomArraySource(BaseDiscreteSamplingSource):
- def __init__(self, shape=None, min: int = 0, max: int = 100, sampling_rate: float = 1., max_count=float("inf")):
- super(RandomArraySource, self).__init__(sampling_rate)
+ @property
+ def output_topics(self) -> List[Topic]:
+ return [Topic(name="random", dtype=int if self._shape is None else np.ndarray)]
+
+ def __init__(self, shape=None, min: int = 0, max: int = 100, samplerate: float = 1., max_count=float("inf")):
+ super(RandomArraySource, self).__init__(samplerate)
self._shape = shape
self._min = min
self._max = max
self.max_count = max_count
self.index = 0
- # define what is offered
- dtype = int if shape is None else np.ndarray
- self._topic = self._generate_topic(name="random", dtype=dtype)
-
def on_update(self) -> Optional[MSPDataFrame]:
if self.index < self.max_count:
self.index += 1
- return MSPDataFrame(topic=self._topic, value=np.random.randint(self._min, self._max, size=self._shape))
- return
+ return MSPDataFrame(
+ topic=self.output_topics[0],
+ data=np.random.randint(self._min, self._max, size=self._shape)
+ )
+ return None
class ArrayManipulationProcessor(BaseProcessor):
@@ -33,6 +36,14 @@ def __init__(self, numpy_operation):
self._op = numpy_operation
def on_update(self, frame: MSPDataFrame) -> Optional[MSPDataFrame]:
- value = self._op(frame['value'])
- topic = self._generate_topic(name=f"{frame.topic.name}.{self._op.__name__}", dtype=type(value))
- return MSPDataFrame(topic=topic, value=value)
+ data = self._op(frame.data)
+ topic = self.output_topics[0] if type(data) is int else self.output_topics[1]
+ return MSPDataFrame(topic=topic, data=data)
+
+ @property
+ def input_topics(self) -> List[Topic]:
+ return [Topic(dtype=int), Topic(dtype=np.ndarray)]
+
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+        return [Topic(name=self._op.__name__, dtype=int), Topic(name=self._op.__name__, dtype=np.ndarray)]
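Putting the two modules together: a minimal sketch that averages a 50-element random array at 10 Hz and prints the results until the source is stopped:

    import time
    import numpy as np
    from multisensor_pipeline.modules import ConsoleSink
    from multisensor_pipeline.modules.npy import RandomArraySource, ArrayManipulationProcessor

    source = RandomArraySource(shape=(50,), samplerate=10.)
    mean = ArrayManipulationProcessor(numpy_operation=np.mean)
    sink = ConsoleSink()
    source.add_observer(mean)
    mean.add_observer(sink)
    for module in (sink, mean, source):
        module.start()
    time.sleep(1.)
    source.stop()  # END_OF_STREAM propagates through the processor to the sink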
diff --git a/multisensor_pipeline/modules/persistence/__init__.py b/multisensor_pipeline/modules/persistence/__init__.py
index 1a2cd0d3..590ab08e 100644
--- a/multisensor_pipeline/modules/persistence/__init__.py
+++ b/multisensor_pipeline/modules/persistence/__init__.py
@@ -1,3 +1,3 @@
from .dataset import BaseDatasetSource
-from .recording import RecordingSink, JsonRecordingSink
-from .replay import JsonReplaySource
+from .recording import RecordingSink, DefaultRecordingSink
+from .replay import DefaultReplaySource
diff --git a/multisensor_pipeline/modules/persistence/dataset.py b/multisensor_pipeline/modules/persistence/dataset.py
index 5951b44f..28c60d41 100644
--- a/multisensor_pipeline/modules/persistence/dataset.py
+++ b/multisensor_pipeline/modules/persistence/dataset.py
@@ -1,8 +1,9 @@
from abc import ABC
-from multisensor_pipeline.dataframe import MSPControlMessage, MSPDataFrame
+from multisensor_pipeline.dataframe import MSPDataFrame
from multisensor_pipeline.modules import BaseSource
from typing import Optional
-from time import time, sleep
+import time
+import sched
class BaseDatasetSource(BaseSource, ABC):
@@ -13,12 +14,14 @@ def __init__(self, playback_speed: float = float("inf")):
"""
Initializes the BaseDatasetSource
Args:
- playback_speed: sets the playback speed. Default set to as fast as possible.
+            playback_speed: sets the playback speed (1.0 replays at the original speed). Defaults to as fast as possible.
"""
super(BaseDatasetSource, self).__init__()
- self._playback_speed = playback_speed
+ self._playback_speed = float(playback_speed)
self._last_frame_timestamp = None
self._last_playback_timestamp = None
+ if not self._playback_speed == float("inf"):
+ self._scheduler = sched.scheduler(time.perf_counter, time.sleep)
@property
def eof(self):
@@ -39,30 +42,58 @@ def _notify(self, frame: Optional[MSPDataFrame]):
"""
if frame is None:
self._auto_stop()
- else:
- self._sleep(frame)
+ return
+
+ if self._playback_speed == float("inf") or frame.topic.is_control_topic:
super(BaseDatasetSource, self)._notify(frame)
+ return
+
+ def _idle():
+ pass
+
+ # schedule the notification of the next dataframe
+ t_target = time.perf_counter()
+ if self._last_frame_timestamp is not None:
+ original_delta = frame.timestamp - self._last_frame_timestamp
+ target_delta = original_delta / self._playback_speed
+ t_target = self._last_playback_timestamp + target_delta
+ if time.perf_counter() < t_target:
+ _ = self._scheduler.enterabs(
+ time=t_target,
+ priority=0,
+ action=lambda: _idle() # do nothing, we just want to wait here
+ )
+ # wait until the dataframe shall be sent
+ self._scheduler.run(blocking=True)
+
+ # send the dataframe (will be done immediately for the first frame)
+ self._last_frame_timestamp = frame.timestamp
+ self._last_playback_timestamp = t_target
+ super(BaseDatasetSource, self)._notify(frame)
def _sleep(self, frame: MSPDataFrame):
"""
- Modifies the dataframe timestamp corresponding the playback speed. Sleeps if necessary to achieve correct
- playback speed
+        Throttles the replay according to the playback speed:
+        sleeps if necessary to achieve the configured rate.
Args:
frame: Dataframe
"""
- if isinstance(frame, MSPControlMessage):
- return
if self._playback_speed == float("inf"):
return
+ if frame.topic.is_control_topic:
+ return
+
+ t_now = time.perf_counter()
+ # if this is not the first frame
if self._last_frame_timestamp is not None:
original_delta = frame.timestamp - self._last_frame_timestamp
target_delta = original_delta / self._playback_speed
- actual_delta = time() - self._last_playback_timestamp
+ actual_delta = time.perf_counter() - self._last_playback_timestamp
if actual_delta < target_delta:
- sleep(target_delta - actual_delta)
+ time.sleep(target_delta - actual_delta)
+ # else: don't wait and return -> this will issue the first frame immediately and the next one will be scheduled
self._last_frame_timestamp = frame.timestamp
- self._last_playback_timestamp = time()
- frame['playback_timestamp'] = self._last_playback_timestamp
+ self._last_playback_timestamp = time.perf_counter()
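A worked example of the scheduling arithmetic above, with hypothetical numbers:

    # playback_speed = 2.0 replays a recording at double speed:
    original_delta = 0.5                  # seconds between two recorded frames
    target_delta = original_delta / 2.0   # -> 0.25 s between replayed frames
    # the next frame is due at t_target = last_playback_timestamp + target_delta;
    # playback_speed = float("inf") bypasses the scheduler entirely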
diff --git a/multisensor_pipeline/modules/persistence/recording.py b/multisensor_pipeline/modules/persistence/recording.py
index 974a8ea3..7e948f32 100644
--- a/multisensor_pipeline/modules/persistence/recording.py
+++ b/multisensor_pipeline/modules/persistence/recording.py
@@ -1,9 +1,8 @@
from abc import ABC
-from typing import List
+from typing import List, Optional
from multisensor_pipeline.modules.base import BaseSink
-from multisensor_pipeline.dataframe import MSPDataFrame
+from multisensor_pipeline.dataframe import MSPDataFrame, Topic
from pathlib import Path
-import json
class RecordingSink(BaseSink, ABC):
@@ -16,14 +15,14 @@ def target(self) -> Path:
return self._target
@property
- def topics(self) -> List[str]:
+ def topics(self) -> Optional[List[Topic]]:
return self._topics
@property
def override(self) -> bool:
return self._override
- def __init__(self, target, topics: List = None, override=False):
+ def __init__(self, target, topics: Optional[List[Topic]] = None, override=False):
"""
initializes RecordingSink
Args:
@@ -42,7 +41,7 @@ def __init__(self, target, topics: List = None, override=False):
# set override flag
self._override = override
- def check_topic(self, topic):
+ def check_topic(self, topic: Topic):
"""Check whether the given topic shall be captured."""
if self._topics is None:
return True
@@ -57,24 +56,23 @@ def write(self, frame):
raise NotImplementedError()
-class JsonRecordingSink(RecordingSink):
+class DefaultRecordingSink(RecordingSink):
"""
- JsonReplaySource replays a recorded json dataset
+ The DefaultRecordingSink enables recording of dataframes for all connected modules and topics.
+ It uses the default serialization based on msgpack.
"""
- _json_file = None
+ _file_handle = None
def on_start(self):
- """ Checks if file and file path is correct and override if exists"""
- assert self.target.suffix == ".json", f"The file extension must be json, but was {self.target.suffix}"
+        assert self.target.suffix == ".msgpack", f"The file extension must be msgpack, but was {self.target.suffix}"
if not self.override:
            assert not self.target.exists(), f"The file exists, but override is disabled ({self.target})"
- self._json_file = self.target.open(mode="w")
+
+ self._file_handle = self.target.open(mode="wb")
def write(self, frame):
- """ Writes the json file """
- self._json_file.write(json.dumps(obj=frame, cls=MSPDataFrame.JsonEncoder) + '\n')
+ self._file_handle.write(frame.serialize())
def on_stop(self):
- """ Stops tne Sink and closes the json file """
- self._json_file.close()
+ self._file_handle.close()
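A minimal recording sketch; `source` is a hypothetical upstream module, and the `.msgpack` suffix is enforced by the assertion above:

    from multisensor_pipeline.modules.persistence import DefaultRecordingSink

    rec = DefaultRecordingSink("session.msgpack", override=True)
    source.add_observer(rec)  # record everything the source emits
    rec.start()
    source.start()
    # ... later:
    source.stop()             # END_OF_STREAM also stops the recording sink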
diff --git a/multisensor_pipeline/modules/persistence/replay.py b/multisensor_pipeline/modules/persistence/replay.py
index d1360475..ee72f989 100644
--- a/multisensor_pipeline/modules/persistence/replay.py
+++ b/multisensor_pipeline/modules/persistence/replay.py
@@ -1,40 +1,47 @@
-import json
from multisensor_pipeline.dataframe import MSPDataFrame
from multisensor_pipeline.modules.persistence.dataset import BaseDatasetSource
from typing import Optional
+from pathlib import Path
-class JsonReplaySource(BaseDatasetSource):
+class DefaultReplaySource(BaseDatasetSource):
"""
- JsonReplaySource replays a recorded json dataset
+ The DefaultReplaySource loads a recorded dataset (from the DefaultRecordingSink) and replays it:
+ it simulates the recorded stream by sending all dataframes in the same order into a connected pipeline.
"""
def __init__(self, file_path: str, **kwargs):
"""
Initializes the source
Args:
- file_path: file path to the json file
+ file_path: file path to the recording
"""
- super(JsonReplaySource, self).__init__(**kwargs)
- self._file_path = file_path
+ super(DefaultReplaySource, self).__init__(**kwargs)
+ self._file_path = Path(file_path)
self._file_handle = None
+ self._unpacker = None
+
+ assert self._file_path.exists() and self._file_path.is_file()
+ assert self._file_path.suffix == ".msgpack"
def on_start(self):
- self._file_handle = open(self._file_path, mode="r")
+ self._file_handle = open(self._file_path, mode="rb")
+ self._unpacker = MSPDataFrame.get_msgpack_unpacker(self._file_handle)
def on_update(self) -> Optional[MSPDataFrame]:
"""
- Iterates over the entries in the json file and returns a dataframe. Stops if EOF is reached
+ Iterates over the entries in the recorded file and returns all dataframes. Stops if EOF is reached
"""
- line = self._file_handle.readline()
- if line != '':
- return MSPDataFrame(**json.loads(s=line, cls=MSPDataFrame.JsonDecoder))
+ try:
+ frame = next(self._unpacker)
+ except StopIteration:
+ frame = None
- # EOF is reached -> auto-stop (you can alternatively return None)
- self._auto_stop()
+ if frame is not None:
+ return frame
+ else:
+ # EOF is reached -> auto-stop (you can alternatively return None)
+ self._auto_stop()
def on_stop(self):
- """
- Stopping and cleanup
- """
self._file_handle.close()
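The matching replay side: a minimal sketch that streams the file recorded above into a ConsoleSink at the original speed:

    from multisensor_pipeline.modules import ConsoleSink
    from multisensor_pipeline.modules.persistence import DefaultReplaySource

    replay = DefaultReplaySource(file_path="session.msgpack", playback_speed=1.)
    sink = ConsoleSink()
    replay.add_observer(sink)
    sink.start()
    replay.start()  # auto-stops once the unpacker reaches EOF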
diff --git a/multisensor_pipeline/modules/signal/filtering.py b/multisensor_pipeline/modules/signal/filtering.py
index 09adb963..c1ac30f3 100644
--- a/multisensor_pipeline/modules/signal/filtering.py
+++ b/multisensor_pipeline/modules/signal/filtering.py
@@ -1,8 +1,9 @@
from multisensor_pipeline import BaseProcessor
-from multisensor_pipeline.dataframe import MSPDataFrame
+from multisensor_pipeline.dataframe import MSPDataFrame, Topic
from multisensor_pipeline.modules.signal.one_euro_filter import OneEuroFilter
-from typing import Optional
+from typing import Optional, List, Tuple
import logging
+import numpy as np
logger = logging.getLogger(__name__)
@@ -25,11 +26,9 @@ class OneEuroProcessor(BaseProcessor):
if slow speed jitter is a problem, decrease fcmin.
"""
- def __init__(self, signal_topic_name, signal_key, freq=30, fcmin=1.5, beta=.001, dcutoff=1):
+ def __init__(self, freq=30, fcmin=1.5, beta=.001, dcutoff=1):
super(OneEuroProcessor, self).__init__()
- self._signal_topic_name = signal_topic_name
- self._signal_key = signal_key
config = {
'freq': freq, # Hz
'mincutoff': fcmin,
@@ -47,9 +46,15 @@ def _filter(self, point, timestamp):
return self._filter_x(point[0], timestamp), self._filter_y(point[1], timestamp)
def on_update(self, frame: MSPDataFrame) -> Optional[MSPDataFrame]:
- if frame.topic.name == self._signal_topic_name:
- smoothed_point = self._filter(frame[self._signal_key], frame.timestamp)
- if smoothed_point is not None:
- frame[self._signal_key] = smoothed_point
- frame.topic = self._generate_topic(f"{frame.topic.name}.smoothed", frame.topic.dtype)
- return frame
+ smoothed_point = self._filter(frame.data, frame.timestamp)
+ if smoothed_point is not None:
+ return MSPDataFrame(topic=self.output_topics[0], data=smoothed_point)
+
+ @property
+ def input_topics(self) -> List[Topic]:
+ return [Topic(dtype=Tuple[float, float]),
+ Topic(dtype=np.ndarray)]
+
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+ return [Topic(name="smoothed", dtype=Tuple[float, float])]
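A minimal tuning sketch following the docstring's advice (raise `beta` against lag, lower `fcmin` against slow-speed jitter); `pointer_source` and `sink` are hypothetical modules emitting and consuming 2D points:

    from multisensor_pipeline.modules.signal.filtering import OneEuroProcessor

    smoother = OneEuroProcessor(freq=30, fcmin=1.5, beta=.001, dcutoff=1)
    pointer_source.add_observer(smoother)  # e.g. gaze or mouse coordinates
    smoother.add_observer(sink)            # receives the "smoothed" topic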
diff --git a/multisensor_pipeline/modules/signal/one_euro_filter.py b/multisensor_pipeline/modules/signal/one_euro_filter.py
index fcb4b4e9..4cc93af8 100644
--- a/multisensor_pipeline/modules/signal/one_euro_filter.py
+++ b/multisensor_pipeline/modules/signal/one_euro_filter.py
@@ -98,36 +98,3 @@ def __call__(self, x, timestamp=None):
cutoff = self.__mincutoff + self.__beta * math.fabs(edx)
# ---- filter the given value
return self.__x(x, timestamp, alpha=self.__alpha(cutoff))
-
-
-# ----------------------------------------------------------------------------
-
-if __name__ == "__main__":
-
- import random
-
- duration = 10.0 # seconds
-
- config = {
- 'freq': 120, # Hz
- 'mincutoff': 1.0, # FIXME
- 'beta': 1.0, # FIXME
- 'dcutoff': 1.0 # this one should be ok
- }
-
- print
- "#SRC OneEuroFilter.py"
- print
- "#CFG %s" % config
- print
- "#LOG timestamp, signal, noisy, filtered"
-
- f = OneEuroFilter(**config)
- timestamp = 0.0 # seconds
- while timestamp < duration:
- signal = math.sin(timestamp)
- noisy = signal + (random.random() - 0.5) / 5.0
- filtered = f(noisy, timestamp)
- print
- "{0}, {1}, {2}, {3}".format(timestamp, signal, noisy, filtered)
- timestamp += 1.0 / config["freq"]
\ No newline at end of file
diff --git a/multisensor_pipeline/modules/signal/sampling.py b/multisensor_pipeline/modules/signal/sampling.py
index 74f18326..43076713 100644
--- a/multisensor_pipeline/modules/signal/sampling.py
+++ b/multisensor_pipeline/modules/signal/sampling.py
@@ -1,19 +1,18 @@
from multisensor_pipeline import BaseProcessor
-from multisensor_pipeline.dataframe.dataframe import MSPDataFrame
-from typing import Optional
+from multisensor_pipeline.dataframe.dataframe import MSPDataFrame, Topic
+from typing import Optional, List
import logging
import numpy as np
-
+from copy import copy
logger = logging.getLogger(__name__)
class DownsamplingProcessor(BaseProcessor):
-
class DataFrameHistory:
- def __init__(self, topic_uid, fps_out, window_size=5, interpolation=None):
- self.topic_uid = topic_uid
+ def __init__(self, topic_uuid, fps_out, window_size=5, interpolation=None):
+ self.topic_uid = topic_uuid
self.target_fps = fps_out
self.window_size = window_size # TODO: expose window_size, mainly for interpolation
self.interpolation = interpolation # TODO: utilize interpolation for, e.g., averaging the data
@@ -86,35 +85,43 @@ def fps_in(self):
def period_time_in(self):
return (self.dataframes[-1].timestamp - self.dataframes[0].timestamp) / len(self.dataframes)
- def __init__(self, topic_names=None, sampling_rate=5):
+ def __init__(self, target_topics: Optional[List[Topic]] = None, samplerate: int = 5):
"""
Downsamples a signal to a given sampling_rate [Hz], if the original rate is higher.
Otherwise, the sampling rate stays the same (no upsampling).
- @param topic_names: the dtype to be resampled; if None, all incoming dtypes are resampled
- @param sampling_rate: the desired sampling rate [Hz]
+ @param target_topics: the dtype to be resampled; if None, all incoming dtypes are resampled
+ @param samplerate: the desired sampling rate [Hz]
"""
super(DownsamplingProcessor, self).__init__()
- self._topic_names = topic_names
- self._sampling_rate = sampling_rate
- self._period_time = 1. / sampling_rate
+ self._target_topics = target_topics if target_topics is not None else [Topic()]
+ self._sampling_rate = samplerate
+ self._period_time = 1. / samplerate
self._sample_hist = dict()
self._last_sent = dict()
self._last_received = dict()
- def _get_history(self, uid) -> DataFrameHistory:
- if uid not in self._sample_hist:
- self._sample_hist[uid] = self.DataFrameHistory(uid, fps_out=self._sampling_rate)
- return self._sample_hist[uid]
+ def _get_history(self, uuid) -> DataFrameHistory:
+ if uuid not in self._sample_hist:
+ self._sample_hist[uuid] = self.DataFrameHistory(uuid, fps_out=self._sampling_rate)
+ return self._sample_hist[uuid]
def on_update(self, frame: MSPDataFrame) -> Optional[MSPDataFrame]:
- if self._topic_names is None or frame.topic.name in self._topic_names:
+ if self._target_topics is None or frame.topic in self._target_topics:
hist = self._get_history(frame.topic.uuid)
hist.add(frame)
_frame = hist.get_dataframe()
if _frame is not None:
- _topic = self._generate_topic(name=f"{frame.topic.name}.{self._sampling_rate}Hz",
- dtype=frame.topic.dtype)
- _frame.topic = _topic
- return _frame
-
+ _topic = Topic(name=f"{frame.topic.name}.{self._sampling_rate}Hz",
+ dtype=frame.topic.dtype)
+ new_frame = copy(_frame)
+ new_frame.topic = _topic
+ return new_frame
+
+ @property
+ def input_topics(self) -> List[Topic]:
+ return self._target_topics
+
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+ return [Topic(name=f"{t.name}.{self._sampling_rate}Hz", dtype=t.dtype) for t in self._target_topics]
diff --git a/multisensor_pipeline/modules/video/__init__.py b/multisensor_pipeline/modules/video/__init__.py
index 2d5c3f72..391b36a1 100644
--- a/multisensor_pipeline/modules/video/__init__.py
+++ b/multisensor_pipeline/modules/video/__init__.py
@@ -1,2 +1,2 @@
-from .video import VideoSource
-from .webcam import WebCamSource
\ No newline at end of file
+from .video import VideoSource, VideoSink
+from .webcam import WebcamSource
diff --git a/multisensor_pipeline/modules/video/video.py b/multisensor_pipeline/modules/video/video.py
index 1d6709d0..018d77e6 100644
--- a/multisensor_pipeline/modules/video/video.py
+++ b/multisensor_pipeline/modules/video/video.py
@@ -1,54 +1,67 @@
-from typing import Optional
+from ..base import BaseSink
+from abc import ABC
+from typing import Optional, List
import av
-import cv2
-import numpy as np
+from PIL import Image
+from multisensor_pipeline.dataframe import Topic, MSPDataFrame, MSPControlMessage
+from multisensor_pipeline.modules.persistence import BaseDatasetSource
-from multisensor_pipeline import BaseSink, GraphPipeline
-from multisensor_pipeline.dataframe import MSPDataFrame
-from multisensor_pipeline.modules.persistence.dataset import BaseDatasetSource
+class PyAVSource(BaseDatasetSource, ABC):
-class VideoSource(BaseDatasetSource):
- """
- Source for video file input. Sends PIL frames.
- """
+ def __init__(
+ self, file: str, av_format: Optional[str] = None, av_options: Optional[dict] = None,
+ playback_speed: float = float("inf")
+ ):
+ super(PyAVSource, self).__init__(playback_speed=playback_speed)
- def __init__(self, file_path: str = "", **kwargs):
- """
- Args:
- file_path: video file path
- kwargs: kwargs for BaseDa
- """
- super(VideoSource, self).__init__(**kwargs)
- self.file_path = file_path
- self.video = None
- self.queue = None
+ self._file = file
+ self._av_format = av_format
+ self._av_options = av_options if av_options is not None else {}
+ self._frame_topic = Topic(name="frame", dtype=Image.Image)
+ self._container = None
def on_start(self):
- """
- Initialize video container with the provided path.
- """
- self.video = av.open(self.file_path)
+ """ Initialize the file/device handle. """
+ self._container = av.open(
+ file=self._file,
+ format=self._av_format,
+ options=self._av_options
+ )
+
+ def on_update(self) -> Optional[MSPDataFrame]:
+ try:
+ frame, frame_time = next(self.frame_gen())
+ return MSPDataFrame(topic=self._frame_topic, data=frame, timestamp=frame_time)
+ except av.error.EOFError as e:
+ return None
+ except av.error.BlockingIOError as e:
+ return MSPControlMessage(message=MSPControlMessage.PASS)
+ except av.error.ValueError as e:
+ return MSPControlMessage(message=MSPControlMessage.PASS)
def frame_gen(self):
"""
Generator for iterating over frames of the video file
"""
- stream = self.video.streams.video[0]
- for frame in self.video.decode(stream):
+ for frame in self._container.decode(video=0):
img = frame.to_image()
- yield img
+ yield img, frame.time
- def on_update(self) -> Optional[MSPDataFrame]:
- try:
- frame = next(self.frame_gen())
- return MSPDataFrame(topic=self._generate_topic(name="frame", dtype=str),
- chunk={"frame": frame})
- except av.error.EOFError as e:
- return
def on_stop(self):
- self.video.close()
+ """ Close the file/device handle. """
+ self._container.close()
+
+ @property
+ def output_topics(self) -> Optional[List[Topic]]:
+ return [self._frame_topic]
+
+
+class VideoSource(PyAVSource):
+
+ def __init__(self, file: str, playback_speed: float = 1.):
+ super(VideoSource, self).__init__(file=file, playback_speed=playback_speed)
class VideoSink(BaseSink):
@@ -56,8 +69,7 @@ class VideoSink(BaseSink):
Sink to export PIL-Images to a video file and/or show a live preview
"""
- def __init__(self, file_path: str = "output.mp4", live_preview: bool = True,
- topic_name: str = "frame", **kwargs):
+ def __init__(self, file_path: str = "output.mp4", **kwargs):
"""
Args:
file_path: path of the export video
@@ -66,27 +78,18 @@ def __init__(self, file_path: str = "output.mp4", live_preview: bool = True,
"""
super(VideoSink, self).__init__(**kwargs)
self.file_path = file_path
- self.live_preview = live_preview
- self.topic_name = topic_name
self.output = av.open(self.file_path, "w")
self.stream = self.output.add_stream('h264')
+ self._frame_topic = Topic(name="frame", dtype=Image.Image)
def on_update(self, frame: MSPDataFrame):
"""
Writes to the video file
"""
- if frame.topic.name == self.topic_name:
- pil_frame = frame["chunk"][self.topic_name]
- video_frame = av.VideoFrame.from_image(pil_frame)
- packet = self.stream.encode(video_frame)
- self.output.mux(packet)
- if self.live_preview:
- cv_img = np.array(pil_frame)
- cv_img = cv_img[:, :, ::-1]
- cv2.startWindowThread()
- cv2.namedWindow("preview")
- cv2.imshow("preview", cv_img)
- cv2.waitKey(1)
+ pil_frame = frame.data
+ video_frame = av.VideoFrame.from_image(pil_frame)
+ packet = self.stream.encode(video_frame)
+ self.output.mux(packet)
def on_stop(self):
"""
@@ -95,3 +98,6 @@ def on_stop(self):
self.output.mux(self.stream.encode())
self.output.close()
+ @property
+ def input_topics(self) -> List[Topic]:
+ return [self._frame_topic]
\ No newline at end of file
diff --git a/multisensor_pipeline/modules/video/webcam.py b/multisensor_pipeline/modules/video/webcam.py
index ec9ed4c8..2b4cc9ed 100644
--- a/multisensor_pipeline/modules/video/webcam.py
+++ b/multisensor_pipeline/modules/video/webcam.py
@@ -1,52 +1,69 @@
-from time import sleep
+import sys
+import os
from typing import Optional
-import av
-
-from multisensor_pipeline import BaseSource, GraphPipeline
-from multisensor_pipeline.dataframe import MSPDataFrame
-from multisensor_pipeline.modules import ConsoleSink
-
-
-class WebCamSource(BaseSource):
- """
- Source for webcam. Sends PIL frames.
- """
-
- def __init__(self, web_cam_format="avfoundation", web_cam_id: str = "0", options={'framerate': '30'}):
- """
- Initialize the Source
- Args:
- web_cam_format: See more information about which web_cam_format to use in https://ffmpeg.org/ffmpeg-devices.html#Input-Devices
- web_cam_id: ID of the webcam usually "0"
- options: Options is a dict and uses following format https://ffmpeg.org/ffmpeg.html#Video-Options
- """
- super(WebCamSource, self).__init__()
- self.web_cam_id = web_cam_id
- self.web_cam_format = web_cam_format
- self.video = None
- self.queue = None
- self.options = options
- self.video = av.open(format=self.web_cam_format, file=self.web_cam_id, options=self.options)
-
-
- def frame_gen(self):
- """
- Generator for iterating over frames of the webcam input
- """
- stream = self.video.streams.video[0]
- for frame in self.video.decode(stream):
- img = frame.to_image()
- yield img
-
- def on_update(self) -> Optional[MSPDataFrame]:
- try:
- frame = next(self.frame_gen())
- return MSPDataFrame(topic=self._generate_topic(name="frame", dtype=str),
- chunk={"frame": frame})
- except av.error.BlockingIOError as e:
- return
-
- def on_stop(self):
- self.video.close()
+from .video import PyAVSource
+def _get_device_list_linux():
+ devs = os.listdir('/dev')
+ vid_indices = [int(dev[-1]) for dev in devs
+ if dev.startswith('video')]
+ vid_indices = sorted(vid_indices)
+ return vid_indices
+
+def _get_device_list_mac():
+ import subprocess
+ cmd = "system_profiler SPCameraDataType | awk '/Unique ID:/ {print $3}'"
+ result = subprocess.run(cmd, stdout=subprocess.PIPE, shell=True, check=True)
+ serial_number = str(result.stdout.strip()).split("\\n")
+ return list(range(len(serial_number)))
+
+
+if sys.platform.startswith("win32"):
+ from windows_capture_devices import get_capture_devices
+elif sys.platform.startswith("linux"):
+ get_capture_devices = _get_device_list_linux
+elif sys.platform.startswith("darwin"):
+ get_capture_devices = _get_device_list_mac
+else:
+ raise NotImplementedError(f"WebcamSource is currently not supported for the platform {sys.platform}")
+
+
+def _get_av_format():
+ if sys.platform.startswith("win32"):
+ return "dshow"
+ elif sys.platform.startswith("linux"):
+ return "video4linux2"
+ elif sys.platform.startswith("darwin"):
+ return "avfoundation"
+ else:
+ raise NotImplementedError(f"WebcamSource is currently not supported for the platform {sys.platform}")
+
+
+def _get_av_file_from_webcam_id(webcam_id: str):
+ if sys.platform.startswith("win32"):
+ return f"video={webcam_id}"
+ elif sys.platform.startswith("linux"):
+ return f"/dev/video{webcam_id}"
+ elif sys.platform.startswith("darwin"):
+ return f"{webcam_id}"
+ else:
+ raise NotImplementedError(f"WebcamSource is currently not supported for the platform {sys.platform}")
+
+
+class WebcamSource(PyAVSource):
+
+ def __init__(self, webcam_id: str, framerate: int = 30, options: Optional[dict] = None):
+ self._webcam_id = webcam_id
+ self._options = options if options is not None else {}
+ self._options["framerate"] = str(framerate)
+ super(WebcamSource, self).__init__(
+ file=_get_av_file_from_webcam_id(webcam_id),
+ av_format=_get_av_format(),
+ av_options=self._options,
+ playback_speed=float("inf")
+ )
+
+ @staticmethod
+ def available_webcams():
+ return get_capture_devices()
diff --git a/multisensor_pipeline/pipeline/graph.py b/multisensor_pipeline/pipeline/graph.py
index bc877e4f..e547e963 100644
--- a/multisensor_pipeline/pipeline/graph.py
+++ b/multisensor_pipeline/pipeline/graph.py
@@ -1,9 +1,9 @@
from .base import PipelineBase
from multisensor_pipeline.modules.base import *
import networkx as nx
-from typing import Union, List
+from typing import Union, List, Optional
-from ..dataframe import MSPDataFrame
+from ..dataframe import MSPDataFrame, Topic
class GraphPipeline(PipelineBase):
@@ -12,7 +12,8 @@ class GraphPipeline(PipelineBase):
ROLE_PROCESSOR = "processor"
ROLE_SINK = "sink"
- def __init__(self):
+ def __init__(self, profiling=False):
+ self._profiling = profiling
self._graph = nx.DiGraph()
def add(self, modules: Union[BaseModule, List[BaseModule]]):
@@ -30,18 +31,24 @@ def add(self, modules: Union[BaseModule, List[BaseModule]]):
def add_source(self, source_module: BaseSource):
assert isinstance(source_module, BaseSource)
+ if self._profiling:
+ source_module.profiling = True
self._graph.add_node(source_module, role=self.ROLE_SOURCE)
def add_processor(self, processor_module: BaseProcessor):
assert isinstance(processor_module, BaseProcessor)
+ if self._profiling:
+ processor_module.profiling = True
self._graph.add_node(processor_module, role=self.ROLE_PROCESSOR)
def add_sink(self, sink_module: BaseSink):
assert isinstance(sink_module, BaseSink)
+ if self._profiling:
+ sink_module.profiling = True
self._graph.add_node(sink_module, role=self.ROLE_SINK)
- def connect(self, module, successor):
- module.add_observer(successor) # must be first, because it implicitly validates the connection
+ def connect(self, module, successor, topics: Optional[Union[Topic, List[Topic]]] = None):
+ module.add_observer(successor, topics) # must be called first -> it implicitly validates the connection
self._graph.add_edge(module, successor)
def add_connection(
diff --git a/requirements.d/flake8.txt b/requirements.d/flake8.txt
deleted file mode 100644
index 5aa0d966..00000000
--- a/requirements.d/flake8.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-attrs==21.2.0
-flake8==3.9.2
-flake8-bugbear==21.4.3
-flake8-docstrings==1.6.0
-flake8-polyfill==1.0.2
-flake8-typing-imports==1.10.1
-mccabe==0.6.1
-pep8-naming==0.11.1
-pip==21.2.4
-pycodestyle==2.7.0
-pydocstyle==6.0.0
-pyflakes==2.3.1
-setuptools==56.0.0
-snowballstemmer==2.1.0
-wheel==0.36.2
diff --git a/requirements.d/venv.txt b/requirements.d/venv.txt
deleted file mode 100644
index 42272530..00000000
--- a/requirements.d/venv.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-appdirs==1.4.4
-distlib==0.3.1
-filelock==3.0.12
-packaging==20.9
-pip==21.2.4
-pluggy==0.13.1
-py==1.10.0
-pyparsing==2.4.7
-setuptools==56.2.0
-six==1.16.0
-toml==0.10.2
-tox==3.23.1
-virtualenv==20.4.6
-wheel==0.36.2
diff --git a/setup.py b/setup.py
index 4456601c..78624542 100644
--- a/setup.py
+++ b/setup.py
@@ -1,33 +1,37 @@
-"""Packaging logic for Multisensor Pipeline."""
-
+import sys
from distutils.core import setup
from setuptools import find_packages
+install_requires = [
+ 'decorator<5.0.0', # For networkx
+ 'networkx>=2.5',
+ 'numpy>1.7.0',
+ 'Pillow>=8.3.2',
+ 'pynput>=1.7.3',
+ 'pyzmq>=20.0.0',
+ 'av>=8.0.1',
+ 'sounddevice>=0.4.3',
+ 'soundfile>=0.10.3',
+ 'msgpack>1.0.0'
+ ]
+if sys.platform.startswith("win32"):
+ install_requires.append('windows-capture-devices')
+
setup(
name='multisensor-pipeline',
- version='2.0.0',
+ version='2.1.0',
author='Michael Barz',
author_email='[email protected]',
license='CC BY-NC-SA 4.0',
packages=find_packages(
include=('multisensor_pipeline.*', 'multisensor_pipeline')
),
- url="https://github.com/" +
- "DFKI-Interactive-Machine-Learning/multisensor-pipeline",
+ url="https://github.com/DFKI-Interactive-Machine-Learning/multisensor-pipeline",
description="The core library of the DFKI multisensor pipeline framework.",
python_requires='>=3.6.0',
- install_requires=[
- 'decorator<5.0.0', # For networkx
- 'networkx',
- 'numpy<1.20.0', # For Python 3.6
- 'Pillow',
- 'PyAudio',
- 'pynput',
- 'pyzmq',
- ],
+ install_requires=install_requires,
keywords=[
- 'multimodality', 'streaming', 'multisensor', 'sensors',
- 'multimodal interaction', 'pipeline', 'stream processing',
- 'multiprocessing',
+ 'multimodality', 'streaming', 'multisensor', 'sensors', 'multimodal interaction',
+ 'pipeline', 'stream processing', 'multiprocessing'
]
)
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index f21d1c80..00000000
--- a/tox.ini
+++ /dev/null
@@ -1,35 +0,0 @@
-[tox]
-envlist = {py36, py37, py38, py39}-{macos, ubuntu, windows}, flake8
-
-[testenv]
-dev = true
-deps =
- ubuntu: -rrequirements.d/pytest-ubuntu.txt
- macos: -rrequirements.d/pytest-macos.txt
- windows: -rrequirements.d/pytest-windows.txt
-commands =
- pytest \
- --cov=multisensor_pipeline \
- --cov-config=.coveragerc \
- --durations=0 \
- --verbose
-
-[testenv:flake8]
-skip_install = true
-deps =
- -rrequirements.d/flake8.txt
-commands =
- # Strict requirements on the packaging code
- flake8 \
- setup.py
- # Less strict requirements on the package code
- flake8 \
- --extend-ignore=E501,D103,F403,N802,W292,D403,D204,D208,E501,E231,E271,F841,W391,E303,B006,D100,D101,D102,D104,D105,D106,D107,D200,D205,D210,D400,D401,F401,D405,F405 \
- --exclude=multisensor_pipeline/tests/* \
- --max-complexity=10 --max-line-length=127 \
- multisensor_pipeline/
- # Lax requirements on the test suite code
- flake8 \
- --extend-ignore=E225,F841,F811,B007,D210,F401,F821,W391,F821,D100,D101,D102,D103,D104,D107 \
- --max-complexity=10 --max-line-length=127 \
- multisensor_pipeline/tests/
| BaseFixedRateSource computes wrong sleep time
The sleep time computation needs to be fixed.
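A minimal sketch of the intended computation (class and method names here are illustrative assumptions, not the project's actual API) would subtract the per-frame processing time from the period, instead of sleeping for the full period on every iteration:
```python
import time


class FixedRateSleeper:
    """Illustrative sketch only; not the actual BaseFixedRateSource implementation."""

    def __init__(self, samplerate: float = 5.0):
        self._period = 1.0 / samplerate  # target time between frames, in seconds

    def sleep_until_next_frame(self, frame_started_at: float) -> None:
        # Sleep only for the remainder of the period, so that the time spent
        # producing a frame does not stretch the effective frame rate.
        elapsed = time.perf_counter() - frame_started_at
        time.sleep(max(0.0, self._period - elapsed))
```
Sleeping for the full `1 / samplerate` regardless of processing time would yield an effective rate below the requested one, which is the behavior hinted at above.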
Dashboard
Merge the `None` to `Topic()` change in the output and input topics.
| 2021-12-21T14:13:18 | 0.0 | [] | [] |
|||
mqtt-tools/mqttwarn | mqtt-tools__mqttwarn-681 | 29ea2f59f1acc71f99af9b69e15f3e5c7115c7b3 | diff --git a/docs/configure/mqttwarn.ini.md b/docs/configure/mqttwarn.ini.md
index 81010297..ee5d5f01 100644
--- a/docs/configure/mqttwarn.ini.md
+++ b/docs/configure/mqttwarn.ini.md
@@ -218,3 +218,29 @@ display them on all XBMC targets:
targets = log:error, xbmc
title = mqttwarn
```
+
+## Variables
+
+You can load option values either from environment variables or from file contents.
+To do this, replace the option's value with one of the following:
+
+- `${ENV:FOO}` - Replaces the option's value with the environment variable `FOO`.
+- `${FILE:/path/to/foo.txt}` - Replaces the option's value with the file contents of
+  `/path/to/foo.txt`. The file path can also be relative, like `${FILE:foo.txt}`,
+  in which case the file is loaded relative to the configuration file's location.
+
+The variable pattern can take either the form `$TYPE:NAME` or `${TYPE:NAME}`. The
+latter form is required when the variable name (`NAME`) contains characters that
+are not alphanumeric or underscores.
+
+For example:
+```ini
+[defaults]
+username = $ENV:MQTTWARN_USERNAME
+password = $ENV:MQTTWARN_PASSWORD
+
+[config:xxx]
+targets = {
+ 'targetname1': [ '${FILE:/run/secrets/address.txt}' ],
+ }
+```
diff --git a/mqttwarn/configuration.py b/mqttwarn/configuration.py
index 88225929..5c5f050b 100644
--- a/mqttwarn/configuration.py
+++ b/mqttwarn/configuration.py
@@ -4,9 +4,10 @@
import codecs
import logging
import os
+import re
import sys
import typing as t
-from configparser import NoOptionError, NoSectionError, RawConfigParser
+from configparser import Interpolation, NoOptionError, NoSectionError, RawConfigParser
from mqttwarn.util import load_functions
@@ -20,6 +21,72 @@
logger = logging.getLogger(__name__)
+def expand_vars(input: str, sources: t.Dict[str, t.Callable[[str], str]]) -> str:
+ """
+ Expand variables in `input` string with values from `sources` dict.
+
+ Variables may be in two forms, either $TYPE:KEY or ${TYPE:KEY}. The second form must be used when `KEY` contains
+ characters other than numbers, alphabets or underscore. Supported `TYPE`s depends on keys of `sources` dict.
+
+ The `sources` is a dict where key is name of `TYPE` in the pattern above and value is a function that takes `KEY`
+ as argument and returns contents of the variable to be expanded.
+
+ :return: Input string with variables expanded
+ """
+ expanded = ""
+ input_index = 0
+ match = None
+ # `input` may have multiple variables in form of $TYPE:KEY or ${TYPE:KEY} pattern, iterate through them
+ for match in re.finditer(r"\$(\w+):(\w+)|\$\{(\w+):([^}]+)\}", input):
+ var_type = match[1] if match[1] else match[3] # TYPE part in the variable pattern
+ var_key = match[2] if match[2] else match[4] # KEY part in the variable pattern
+
+ if var_type not in sources:
+ raise KeyError(f"{match[0]}: Variable type '{var_type}' not supported")
+ source = sources[var_type]
+
+ try:
+ value = source(var_key)
+ except Exception as ex:
+ raise KeyError(f"{match[0]}: {str(ex)}") from ex
+
+ match_start, match_end = match.span()
+ expanded += input[input_index:match_start] + value
+ input_index = match_end
+
+ if match:
+ return expanded + input[input_index:]
+ return input
+
+
+class VariableInterpolation(Interpolation):
+ def __init__(self, configuration_path):
+ self.configuration_path = configuration_path
+ self.sources = {
+ "ENV": self.get_env_variable,
+ "FILE": self.get_file_contents,
+ }
+
+ def before_get(self, parser, section, option, value, defaults):
+ return expand_vars(value, self.sources) if type(value) == str else value
+
+ def get_env_variable(self, name: str) -> str:
+ """
+ Get environment variable of `name` and return it
+ """
+ return os.environ[name]
+
+ def get_file_contents(self, filepath: str) -> str:
+ """
+ Get file contents from `filepath` and return it
+ """
+ if not os.path.isfile(filepath):
+ # Read file contents relative to path of configuration file if path is relative
+ filepath = os.path.join(self.configuration_path, filepath)
+ with open(filepath) as file:
+ return file.read()
+
+
class Config(RawConfigParser):
specials: t.Dict[str, t.Union[bool, None]] = {
@@ -34,13 +101,14 @@ def __init__(self, configuration_file: t.Optional[str] = None, defaults: t.Optio
self.configuration_path = None
- RawConfigParser.__init__(self)
+ configuration_path = os.path.dirname(configuration_file) if configuration_file else None
+ RawConfigParser.__init__(self, interpolation=VariableInterpolation(configuration_path))
if configuration_file is not None:
f = codecs.open(configuration_file, "r", encoding="utf-8")
self.read_file(f)
f.close()
- self.configuration_path = os.path.dirname(configuration_file)
+ self.configuration_path = configuration_path
""" set defaults """
self.hostname = "localhost"
| Environment variables for sensitive configuration file values
The configuration file contains sensitive data like passwords. Is it possible to pass values into the mqttwarn configuration via environment variables? That would allow me to, for example, share my configuration publicly without having to place passwords in the configuration file.
| I don't think this is yet possible, but it is a good idea, and maybe we could take this a step further.
Whenever a password is required in the service code, the service invokes a function `pass()` with the configured secret. If the secret begins with a special token, we obtain the clear-text password appropriately; otherwise, we use the verbatim password.
1. `pass("$ENV:SECRET")` would use the value from the environment's `$SECRET`
2. `pass("$FS:/etc/mysecret")` would use the value from the first white-space trimmed line in `/etc/mysecret`
3. `pass("bla17")` would use the password "bla17"
Using these special tokens would mean that, in the above example, a password must not begin with either `$ENV:` or `$FS:` (regex `^\$[A-Z]+:`), but I think we should be able to risk that.
This is also extensible; imagine `$ETCD:`, `$GPG:`, etc. Imagination can now run wild. A rough sketch of the token dispatch follows below.
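Such a dispatcher could look roughly as follows; the function name is made up for illustration, and this is not mqttwarn code:
```python
import os


def resolve_secret(value: str) -> str:
    """Resolve a configured secret, honoring the special $ENV:/$FS: tokens."""
    if value.startswith("$ENV:"):
        # Use the value of the named environment variable.
        return os.environ[value[len("$ENV:"):]]
    if value.startswith("$FS:"):
        # Use the first whitespace-trimmed line of the named file.
        with open(value[len("$FS:"):]) as f:
            return f.readline().strip()
    # Otherwise, use the verbatim password.
    return value
```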
This proposal sounds great; it would make it possible to store sensitive data in a different (secure) place than the non-sensitive configuration data.
Hi Tobias and JP,
I also believe this would make a sweet feature, kudos!
Because `ConfigParser` already has interpolation capabilities [1], with specific examples of how to expand environment variables [2,3], I would suggest building this feature on top of that in a generic way, instead of tweaking all service plugins by sprinkling calls to `getpass()` or similar; a rough sketch follows the references below.
Attaching to the suggestion by @jpmens to support multiple password backends, I would like to add [HashiCorp's Vault](https://www.vaultproject.io/) to the list. From the perspective of Salt, where I happen to have an example at hand, reading secrets from Vault (in this case, a TLS private key) looks like `salt['vault'].read_secret('acme/infra/files/www.example.org.key')`.
With kind regards,
Andreas.
[1] https://docs.python.org/3.6/library/configparser.html#interpolation-of-values
[2] https://newbedev.com/configparser-and-string-interpolation-with-env-variable
[3] https://gist.github.com/malexer/ee2f93b1973120925e8beb3f36b184b8
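To illustrate the interpolation approach, here is a minimal sketch; it is not mqttwarn code, and the token syntax is simply borrowed from the proposal above:
```python
import os
from configparser import ConfigParser, Interpolation


class EnvInterpolation(Interpolation):
    """Expand `$ENV:NAME` option values from the process environment."""

    def before_get(self, parser, section, option, value, defaults):
        if isinstance(value, str) and value.startswith("$ENV:"):
            return os.environ[value[len("$ENV:"):]]
        return value


parser = ConfigParser(interpolation=EnvInterpolation())
parser.read_string("[defaults]\npassword = $ENV:MQTTWARN_PASSWORD\n")
```
The same hook could dispatch to other backends (`$FS:`, `$ETCD:`, `$GPG:`, Vault, ...) without touching any service plugin.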
Hello again.
Did this function get shipped? I'm trying to push all keys etc to environment vars, so for example my pushover config, Id like to use the following:
```
'HomeAssistant' : [ '${PUSHOVER_KEY}', 'apjvopwx2jrqbdzaneh6tzvskq1b84', 'none'],
```
Here, PUSHOVER_KEY is an environment variable of the container, which I can see when I run `env` in the container.
Oddly, for the MQTT client name I have
clientid = 'mqttwarn-${HOSTNAME}'
which gets resolved properly.
Dear @psyciknz. Thank you for asking. We have not worked on this feature yet. | 2023-07-24T07:20:38 | 0.0 | [] | []
||
mqtt-tools/mqttwarn | mqtt-tools__mqttwarn-614 | dd9edba6e25df43ab00267b049a9f170a2a43e79 | diff --git a/Dockerfile b/Dockerfile
index fadbb527..8ea61450 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -17,6 +17,7 @@ RUN chown -R mqttwarn:mqttwarn /etc/mqttwarn
# Install mqttwarn
COPY . /src
+RUN pip install wheel
RUN pip install /src
# Make process run as "mqttwarn" user
diff --git a/Dockerfile.full b/Dockerfile.full
index a374a3fa..c30f0c40 100644
--- a/Dockerfile.full
+++ b/Dockerfile.full
@@ -21,6 +21,7 @@ RUN chown -R mqttwarn:mqttwarn /etc/mqttwarn
# Install mqttwarn
COPY . /src
+RUN pip install wheel
RUN pip install /src[all]
# Make process run as "mqttwarn" user
diff --git a/Dockerfile.mqttwarn-slack b/Dockerfile.mqttwarn-slack
index fb258b68..7319a5eb 100644
--- a/Dockerfile.mqttwarn-slack
+++ b/Dockerfile.mqttwarn-slack
@@ -13,6 +13,7 @@ FROM ghcr.io/jpmens/mqttwarn-standard:latest
USER root
# Install Slack SDK.
+RUN pip install wheel
RUN pip install mqttwarn[slack]
# Make process run as "mqttwarn" user
| Apprise: Disable the Apprise rate limiting subsystem
Apprise applies rate limiting with delays of ~4-5 seconds on subsequent message submissions. This is not desired for `mqttwarn`. Indirectly related: https://github.com/caronc/apprise/issues/735#issuecomment-1301595477.
@caronc: Maybe a future Apprise can be made configurable to only _optionally_ enable rate limiting?
| 2022-11-20T22:00:05 | 0.0 | [] | [] |
|||
mqtt-tools/mqttwarn | mqtt-tools__mqttwarn-610 | dd9edba6e25df43ab00267b049a9f170a2a43e79 | diff --git a/CHANGES.rst b/CHANGES.rst
index 62eaef8d..20de8c52 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -11,6 +11,8 @@ in progress
level for filtered messages. Thanks, @jlrgraham.
- CI and tests: Improvements and maintenance
- Documentation: Improve section about Apprise
+- Documentation: Notifications to ntfy via Apprise. Thanks, @binwiederhier, @particledecay,
+ and @caronc.
2022-10-05 0.30.0
=================
diff --git a/HANDBOOK.md b/HANDBOOK.md
index 35637970..80c3a6e8 100644
--- a/HANDBOOK.md
+++ b/HANDBOOK.md
@@ -378,6 +378,7 @@ _mqttwarn_ supports a number of services (listed alphabetically below):
* [mythtv](#mythtv)
* [nntp](#nntp)
* [nsca](#nsca)
+* [ntfy](#ntfy)
* [desktopnotify](#desktopnotify)
* [osxsay](#osxsay)
* [pastebinpub](#pastebinpub)
@@ -562,7 +563,7 @@ Apprise to E-Mail, an HTTP endpoint, and a Discord channel.
```ini
[defaults]
-launch = apprise-mail, apprise-json, apprise-discord
+launch = apprise-mail, apprise-json, apprise-discord, apprise-ntfy
[config:apprise-mail]
; Dispatch message as e-mail.
@@ -589,9 +590,16 @@ baseuri = 'json://localhost:1234/mqtthook'
module = 'apprise'
baseuri = 'discord://4174216298/JHMHI8qBe7bk2ZwO5U711o3dV_js'
+[config:apprise-ntfy]
+; Dispatch message to ntfy.
+; https://github.com/caronc/apprise/wiki/URLBasics
+; https://github.com/caronc/apprise/wiki/Notify_ntfy
+module = 'apprise_single'
+baseuri = 'ntfy://user:password/ntfy.example.org/topic1/topic2'
+
[apprise-single-test]
topic = apprise/single/#
-targets = apprise-mail:demo, apprise-json, apprise-discord
+targets = apprise-mail:demo, apprise-json, apprise-discord, apprise-ntfy
format = Alarm from {device}: {payload}
title = Alarm from {device}
```
@@ -624,6 +632,7 @@ module = 'apprise_multi'
targets = {
'demo-http' : [ { 'baseuri': 'json://localhost:1234/mqtthook' }, { 'baseuri': 'json://daq.example.org:5555/foobar' } ],
'demo-discord' : [ { 'baseuri': 'discord://4174216298/JHMHI8qBe7bk2ZwO5U711o3dV_js' } ],
+ 'demo-ntfy' : [ { 'baseuri': 'ntfy://user:password/ntfy.example.org/topic1/topic2' } ],
'demo-mailto' : [ {
'baseuri': 'mailtos://smtp_username:[email protected]',
'recipients': ['[email protected]', '[email protected]'],
@@ -634,7 +643,7 @@ targets = {
[apprise-multi-test]
topic = apprise/multi/#
-targets = apprise-multi:demo-http, apprise-multi:demo-discord, apprise-multi:demo-mailto
+targets = apprise-multi:demo-http, apprise-multi:demo-discord, apprise-multi:demo-mailto, apprise-multi:demo-ntfy
format = Alarm from {device}: {payload}
title = Alarm from {device}
```
@@ -2026,6 +2035,15 @@ def check_temperature(data):
Requires:
* [pynsca](https://github.com/djmitche/pynsca).
+
+### `ntfy`
+
+Support for [ntfy] is provided through Apprise, see [apprise_single](#apprise_single)
+and [apprise_multi](#apprise_multi).
+
+[ntfy]: https://ntfy.sh/
+
+
### `desktopnotify`
It invokes desktop notifications, using the fine
diff --git a/README.rst b/README.rst
index 9c2ea2e7..e3d0fbe7 100644
--- a/README.rst
+++ b/README.rst
@@ -189,6 +189,11 @@ you an idea how to pass relevant information on the command line using JSON::
pip install mqttwarn-contrib
mqttwarn --plugin=mqttwarn_contrib.services.cloudflare_zone --config='{"auth-email": "foo", "auth-key": "bar"}' --options='{"addrs": ["0815", "www.example.org", ""], "message": "192.168.0.1"}'
+ # Submit notification to "ntfy", using Apprise service plugin.
+ mqttwarn --plugin=apprise \
+ --config='{"baseuri": "ntfy://user:[email protected]/topic1/topic2"}' \
+ --options='{"addrs": [], "title": "Example notification", "message": "Hello world"}'
+
Also, the ``--config-file`` parameter can be used to optionally specify the
path to a configuration file.
| ntfy service
[ntfy](https://github.com/binwiederhier/ntfy) sends push notifications to your phone or desktop using PUT/POST.
This would be a nice candidate for mqttwarn. ntfy is fully FLOSS.
Takers for writing the code? :-)
| Sweet idea!
Apprise already has a corresponding plugin, contributed by @particledecay the other day - see [1]. It can either be used as a blueprint, or ntfy can be addressed through the existing Apprise mqttwarn service plugins [2,3] right away; a configuration sketch follows the references.
[1] https://github.com/caronc/apprise/blob/master/apprise/plugins/NotifyNtfy.py
[2] https://github.com/jpmens/mqttwarn/blob/main/HANDBOOK.md#apprise_single
[3] https://github.com/jpmens/mqttwarn/blob/main/HANDBOOK.md#apprise_multi
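For reference, a minimal configuration sketch using the Apprise plugin; the hostname, credentials, and topics below are placeholders:
```ini
[config:apprise-ntfy]
; Dispatch notifications to ntfy via Apprise.
module = 'apprise_single'
baseuri = 'ntfy://user:password/ntfy.example.org/topic1/topic2'
```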
I also vote for @particledecay. He's done good work with the apprise plugin. 😄 ❤️ | 2022-11-20T15:26:45 | 0.0 | [] | [] |
||
mqtt-tools/mqttwarn | mqtt-tools__mqttwarn-586 | 437220479a32aec6837075da6728c8ae3f139579 | diff --git a/mqttwarn/services/osxnotify.py b/mqttwarn/services/osxnotify.py
index 76314fe2..b87ce62c 100644
--- a/mqttwarn/services/osxnotify.py
+++ b/mqttwarn/services/osxnotify.py
@@ -27,8 +27,8 @@ def plugin(srv, item):
# Play Sound ?
playSound = True
- if config == dict and config['sound']:
- playSound = config['sound']
+ if isinstance(config, dict):
+ playSound = config.get('sound', True)
# Get Message
message = item.message
@@ -40,6 +40,7 @@ def plugin(srv, item):
"message": message
}
+ srv.logging.debug("Sending desktop notification")
try:
# Synchronous Notification (allows no callbacks in OSX)
# Asynchronous would require asyncio and require some changes to the plugin handler
diff --git a/pyproject.toml b/pyproject.toml
index 6408ae7e..677a005a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -19,7 +19,7 @@ markers = [
]
[tool.coverage.run]
-branch = true
+branch = false
source = ["mqttwarn"]
[tool.coverage.report]
diff --git a/setup.py b/setup.py
index ecd2847f..e1588072 100644
--- a/setup.py
+++ b/setup.py
@@ -149,6 +149,7 @@
extras["test"] = [
'pytest<8',
'pytest-cov<4',
+ 'pytest-mock<4',
'lovely.testlayers<1',
'tox<4',
'surrogate==0.1',
| Complete rewrite of osxnotify, using `desktop-notifier`
Use of desktop-notifier https://github.com/samschott/desktop-notifier
Currently synchronous, with limited options.
Moving to asynchronous notifications would open up callbacks, buttons, and more.
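A rough sketch of what the asynchronous path could look like with `desktop-notifier`; the parameter names follow my reading of the library's API and should be double-checked against its documentation:
```python
import asyncio

from desktop_notifier import DesktopNotifier


async def main() -> None:
    notifier = DesktopNotifier(app_name="mqttwarn")
    # send() is a coroutine, which is what would later enable
    # callbacks, buttons, and other interactive features.
    await notifier.send(title="mqttwarn", message="Hello from osxnotify")


asyncio.run(main())
```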
| Merged, thank you! | 2022-09-15T16:16:15 | 0.0 | [] | [] |
||
mqtt-tools/mqttwarn | mqtt-tools__mqttwarn-576 | bfd177b6cc175a15abfb51eff1dab8d963081a93 | diff --git a/CHANGES.rst b/CHANGES.rst
index abd2bc8b..e2dddda4 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -14,6 +14,8 @@ in progress
- Update dependencies: Use Jinja2 3.x; Remove configparser, it is built into Python 3
- Add support for Python 3.11
- Pushover service plugin: Enable passing of parameters ``html``, ``url``, ``url_title``
+- Add test for exercising configuration file scaffolding
+- Improve README regarding configuration file scaffolding on Windows 10/PowerShell
2021-10-31 0.28.1
diff --git a/README.rst b/README.rst
index 15b05f79..768900df 100644
--- a/README.rst
+++ b/README.rst
@@ -133,6 +133,12 @@ First, create configuration and custom Python starter files
# Create file for custom functions
mqttwarn make-samplefuncs > samplefuncs.py
+If you are using PowerShell on Windows 10, you may find the files to be written
+using the ``UTF-16`` charset encoding. However, ``mqttwarn`` works with ``UTF-8``.
+In order to switch to ``UTF-8``, please invoke this command beforehand::
+
+ $PSDefaultParameterValues['Out-File:Encoding'] = 'utf8'
+
*****
Usage
| Windows10 - UnicodeDecodeError: 'utf-8' codec can't decode byte 0xff in position 0: invalid start byte
Followed the steps to install mqttwarn on my Windows 10 machine. Everything seemed to work, including the `mqttwarn make-xxx` commands. Finally, I tried running mqttwarn by itself and got strange error messages. Any help?
``` shell
pip install --upgrade mqttwarn
mqttwarn make-config > mqttwarn.ini
cat mqttwarn.ini
mqttwarn make-samplefuncs > samplefuncs.py
```
```python
mqttwarn
Traceback (most recent call last):
File "D:\Program Files (D)\Python37\Scripts\mqttwarn-script.py", line 33, in <module>
sys.exit(load_entry_point('mqttwarn==0.28.1', 'console_scripts', 'mqttwarn')())
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\commands.py", line 93, in run
run_mqttwarn()
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\commands.py", line 132, in run_mqttwarn
config = load_configuration(name=scriptname)
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\configuration.py", line 196, in load_configuration
return Config(configfile, defaults=defaults)
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\configuration.py", line 38, in __init__
self.read_file(f)
File "d:\program files (d)\python37\lib\configparser.py", line 717, in read_file
self._read(f, source)
File "d:\program files (d)\python37\lib\configparser.py", line 1014, in _read
for lineno, line in enumerate(fp, start=1):
File "d:\program files (d)\python37\lib\codecs.py", line 714, in __next__
return next(self.reader)
File "d:\program files (d)\python37\lib\codecs.py", line 645, in __next__
line = self.readline()
File "d:\program files (d)\python37\lib\codecs.py", line 558, in readline
data = self.read(readsize, firstline=True)
File "d:\program files (d)\python37\lib\codecs.py", line 504, in read
newchars, decodedbytes = self.decode(data, self.errors)
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xff in position 0: invalid start byte
```
| Dear @symonjim,
thank you for writing in. It looks like this is a specific issue occurring when `mqttwarn` tries to open the generated configuration file `mqttwarn.ini`. Do you have any clues as to why `chr(255)` could be the first character in the generated file on Windows 10?
For the sake of getting the whole picture, may I ask if you are using the `cmd.exe` shell, or PowerShell?
With kind regards,
Andreas.
That sounds a bit like a (broken) [BOM](https://en.wikipedia.org/wiki/Byte_order_mark). Which text editor have you been using, @symonjim, for editing the `mqttwarn.ini` file?
> Do you have any clues why chr(255) could be the first character in the generated file on Windows?
I see, https://github.com/PyCQA/pylint/issues/1663 has many insights into the problem and also offers a solution.
> **Windows 10 produces a file with UTF-16 encoding when using the default redirection operator, which causes this problem when trying to read it back with assuming UTF-8.**
When using PowerShell, you might try this command to generate the default configuration file:
```
mqttwarn make-config | Out-File -Encoding utf8 mqttwarn.ini
```
Another proposed solution in the discussion is to invoke `chcp 65001` before running `mqttwarn make-config > mqttwarn.ini`, using either `cmd.exe` or PowerShell.
It would be sweet if you could evaluate both proposed solutions and report back to us, so that we can improve the `mqttwarn` documentation accordingly.
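For completeness, a BOM-tolerant read on the mqttwarn side (just a sketch, not current behavior) would use the `utf-8-sig` codec:
```python
import codecs

# "utf-8-sig" transparently strips a leading "\ufeff" (UTF-8 BOM),
# which plain "utf-8" passes through, making configparser choke on it.
# Note: files written as UTF-16 would still need separate conversion.
with codecs.open("mqttwarn.ini", "r", encoding="utf-8-sig") as f:
    content = f.read()
```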
Notepad.
That doesn't seem to help. At the end of the error output it says the following. Could it be that the `mqttwarn.ini` is missing something at the top?
```
configparser.MissingSectionHeaderError: File contains no section headers.
file: 'mqttwarn.ini', line: 1
'\ufeff# -*- coding: utf-8 -*-\r\n'
```
I'll attach my `mqttwarn.ini` file generated with that suggestion of piping it into `Out-File`.
You sent the info about a similar issue with a Python installation. Installing or using mqttwarn involves the use of Python, correct? Could it be that my Python installation has the problem rather than the mqttwarn installation?
Thanks,
Jim
### Exception
```powershell
PS D:\Program Files (D)\mqttwarn-main\mqttwarn-main> mqttwarn make-config | Out-File -Encoding utf8 mqttwarn.ini
PS D:\Program Files (D)\mqttwarn-main\mqttwarn-main> mqttwarn
Traceback (most recent call last):
  File "D:\Program Files (D)\Python37\Scripts\mqttwarn-script.py", line 33, in <module>
    sys.exit(load_entry_point('mqttwarn==0.28.1', 'console_scripts', 'mqttwarn')())
  File "d:\program files (d)\python37\lib\site-packages\mqttwarn\commands.py", line 93, in run
    run_mqttwarn()
  File "d:\program files (d)\python37\lib\site-packages\mqttwarn\commands.py", line 132, in run_mqttwarn
    config = load_configuration(name=scriptname)
  File "d:\program files (d)\python37\lib\site-packages\mqttwarn\configuration.py", line 196, in load_configuration
    return Config(configfile, defaults=defaults)
  File "d:\program files (d)\python37\lib\site-packages\mqttwarn\configuration.py", line 38, in __init__
    self.read_file(f)
  File "d:\program files (d)\python37\lib\configparser.py", line 717, in read_file
    self._read(f, source)
  File "d:\program files (d)\python37\lib\configparser.py", line 1079, in _read
    raise MissingSectionHeaderError(fpname, lineno, line)
configparser.MissingSectionHeaderError: File contains no section headers.
file: 'mqttwarn.ini', line: 1
'\ufeff# -*- coding: utf-8 -*-\r\n'
```
### My configuration files
```
# -*- coding: utf-8 -*-
# (c) 2014-2018 The mqttwarn developers
#
# mqttwarn example configuration file "mqttwarn.ini"
#
; ------------------------------------------
; Base configuration
; ------------------------------------------
[defaults]
; ----
; MQTT
; ----
hostname = 'localhost'
port = 1883
username = None
password = None
clientid = 'mqttwarn'
lwt = 'clients/mqttwarn'
skipretained = False
cleansession = False
# MQTTv31 = 3 (default)
# MQTTv311 = 4
protocol = 3
; -------
; Logging
; -------
; Send log output to STDERR
logfile = 'stream://sys.stderr'
; Send log output to file
;logfile = 'mqttwarn.log'
; one of: CRITICAL, DEBUG, ERROR, INFO, WARN
loglevel = DEBUG
;logformat = '%(asctime)-15s %(levelname)-8s [%(name)-25s] %(message)s'
; --------
; Services
; --------
; path to file containing self-defined functions for formatmap and datamap
functions = 'samplefuncs.py'
; name the service providers you will be using.
launch = file, log
; Publish mqttwarn status information (retained)
status_publish = True
; status_topic = mqttwarn/$SYS
; -------
; Targets
; -------
[config:file]
append_newline = True
targets = {
'f01' : ['/tmp/f.01'],
'log-me' : ['/tmp/log.me'],
'mqttwarn' : ['/tmp/mqttwarn.err'],
}
[config:log]
targets = {
'debug' : [ 'debug' ],
'info' : [ 'info' ],
'warn' : [ 'warn' ],
'crit' : [ 'crit' ],
'error' : [ 'error' ]
}
; special config for 'failover' events
[failover]
targets = log:error, file:mqttwarn
; ------------------------------------------
; Basic
; ------------------------------------------
[hello/1]
; echo '{"name": "temperature", "number": 42.42}' | mosquitto_pub -h localhost -t hello/1 -l
targets = log:info
format = u'{name}: {number} => {_dthhmm}'
; ------------------------------------------
; OwnTracks
; ------------------------------------------
[owntracks-location]
topic = owntracks/+/+
targets = log:info, file:f01
datamap = OwnTracksTopic2Data()
format = OwnTracksConvert()
[owntracks-battery]
topic = owntracks/+/+
targets = log:info, file:f01
datamap = OwnTracksTopic2Data()
filter = OwnTracksBattFilter()
format = {username}'s phone battery is getting low ({batt}%)
; ------------------------------------------
; Dynamic targets
; ------------------------------------------
[robustness-1]
; even if "foo" is considered an invalid service or
; "log:baz" is considered an invalid service target,
; mqttwarn should keep calm and carry on
topic = test/robustness-1
targets = foo:bar, log:baz
[topic-targets-dynamic]
; interpolate transformation data values into topic target, example:
; mosquitto_pub -t test/topic-targets-dynamic -m '{"loglevel": "crit", "message": "Nur Döner macht schöner!"}'
topic = test/topic-targets-dynamic
format = Something {loglevel} happened! {message}
targets = log:{loglevel}
[topic-targets-func]
; use functions for computing topic targets, example:
; mosquitto_pub -t test/topic-targets-func -m '{"condition": "sunny", "remark": "This should go to a file"}'
; mosquitto_pub -t test/topic-targets-func -m '{"condition": "rainy", "remark": "This should go to the log"}'
topic = test/topic-targets-func
format = Weather conditions changed: It's {condition}. Remark: {remark}
targets = TopicTargetList()
; ------------------------------------------
; Periodic tasks
; ------------------------------------------
[cron]
; Demonstrate periodic task feature.
; Define a function for publishing your public ip address to the MQTT bus each minute.
; mosquitto_sub -t 'test/ip/#' -v
#publish_public_ip_address = 60; now=true
```
Oops. That long file inclusion was not a good idea.
I used Notepad to rewrite the ini file, making sure it was saved as UTF-8. Ran mqttwarn and got the following new error messages.
```powershell
PS D:\Program Files (D)\mqttwarn-main\mqttwarn-main> mqttwarn
Traceback (most recent call last):
File "D:\Program Files (D)\Python37\Scripts\mqttwarn-script.py", line 33, in <module>
sys.exit(load_entry_point('mqttwarn==0.28.1', 'console_scripts', 'mqttwarn')())
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\commands.py", line 93, in run
run_mqttwarn()
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\commands.py", line 132, in run_mqttwarn
config = load_configuration(name=scriptname)
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\configuration.py", line 196, in load_configuration
return Config(configfile, defaults=defaults)
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\configuration.py", line 106, in __init__
self.functions = load_functions(functions_file)
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\util.py", line 219, in load_functions
py_mod = imp.load_source(mod_name, filepath)
File "d:\program files (d)\python37\lib\imp.py", line 171, in load_source
module = _load(spec)
File "<frozen importlib._bootstrap>", line 696, in _load
File "<frozen importlib._bootstrap>", line 677, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 724, in exec_module
File "<frozen importlib._bootstrap_external>", line 860, in get_code
File "<frozen importlib._bootstrap_external>", line 791, in source_to_code
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
ValueError: source code string cannot contain null bytes
PS D:\Program Files (D)\mqttwarn-main\mqttwarn-main>
```
I see. Thanks for sharing your progress.
Now, it looks like mqttwarn is loading its configuration file just fine, but croaks at "load_functions" when trying to load the Python code file containing the runtime extension functions.
So, I would recommend to apply the same procedure to the .py file as well and create it manually with an editor of your choice, saving it using the utf-8 charset encoding.
I am confident it will get you started with finally getting mqttwarn running. Good luck with that.
If that succeeds, we can get back to investigating and resolving the original problem with the configuration blueprint scaffolding, which apparently goes south on Windows10.
I did that with the .py files. I opened them in Notepad and immediately did a "save as", choosing UTF-8 and overwriting the original. Interestingly, all the files I had worked with said they were going to save as UTF-8, until I got to samplefuncs.py, which said it would save as UTF-16. I changed that to UTF-8 and... presto! It works!
```powershell
PS D:\Program Files (D)\mqttwarn-main\mqttwarn-main> mqttwarn
2022-08-17 18:28:26,596 INFO [mqttwarn.commands ] Starting mqttwarn
2022-08-17 18:28:26,597 INFO [mqttwarn.commands ] Log level is DEBUG
2022-08-17 18:28:26,597 DEBUG [mqttwarn.core ] Trying to load built-in service "file" from "file"
2022-08-17 18:28:26,598 DEBUG [mqttwarn.core ] Trying to load service "file" from file "d:\program files (d)\python37\lib\site-packages\mqttwarn\services\file.py"
2022-08-17 18:28:26,599 INFO [mqttwarn.core ] Successfully loaded service "file"
2022-08-17 18:28:26,599 DEBUG [mqttwarn.core ] Trying to load built-in service "log" from "log"
2022-08-17 18:28:26,600 DEBUG [mqttwarn.core ] Trying to load service "log" from file "d:\program files (d)\python37\lib\site-packages\mqttwarn\services\log.py"
2022-08-17 18:28:26,601 INFO [mqttwarn.core ] Successfully loaded service "log"
2022-08-17 18:28:26,601 DEBUG [mqttwarn.core ] Attempting connection to MQTT broker localhost:1883...
2022-08-17 18:28:26,601 DEBUG [mqttwarn.core ] Setting LWT to clients/mqttwarn...
2022-08-17 18:28:28,672 INFO [mqttwarn.core ] Publishing status information to mqttwarn/$SYS
2022-08-17 18:28:28,673 INFO [mqttwarn.core ] Starting 1 worker threads
2022-08-17 18:28:28,678 DEBUG [mqttwarn.core ] Job queue has 0 items to process
2022-08-17 18:28:28,679 DEBUG [mqttwarn.core ] Connected to MQTT broker, subscribing to topics...
2022-08-17 18:28:28,680 DEBUG [mqttwarn.core ] Cleansession==False; previous subscriptions for clientid mqttwarn remain active on broker
2022-08-17 18:28:28,682 DEBUG [mqttwarn.core ] Subscribing to hello/1 (qos=0)
2022-08-17 18:28:28,682 DEBUG [mqttwarn.core ] Subscribing to owntracks/+/+ (qos=0)
2022-08-17 18:28:28,683 DEBUG [mqttwarn.core ] Subscribing to test/robustness-1 (qos=0)
2022-08-17 18:28:28,683 DEBUG [mqttwarn.core ] Subscribing to test/topic-targets-dynamic (qos=0)
2022-08-17 18:28:28,684 DEBUG [mqttwarn.core ] Subscribing to test/topic-targets-func (qos=0)
```
and mosquitto seemed to react. Now I just need to make some adjustments for my setup and tell it what to do when it gets a message.
Thanks for all your help.
Jim
It worked fine until I added my smtp config and topic to the mqttwarn.ini file. Then it gave me another esoteric error message about int having no length??
I added:
```ini
[config:smtp]
server = 'localhost:25'
sender = "mqttwarn <symonjim@localhost>"
starttls = False
targets = {
'symonjim' : ['[email protected]']
}
```
and:
```ini
[IOTgadgettopic]
topic = IOTgadgettopic
targets = smtp:symonjim
```
and got this when it failed:
```
2022-08-17 22:04:55,000 DEBUG [mqttwarn.core ] Successfully loaded service "smtp"
2022-08-17 22:04:55,761 DEBUG [mqttwarn.core ] Attempting connection to MQTT broker localhost:1883...
2022-08-17 22:04:55,762 DEBUG [mqttwarn.core ] Setting LWT to clients/mqttwarn...
2022-08-17 22:04:57,833 ERROR [mqttwarn.core ] Cannot connect to MQTT broker at localhost:1883: object of type 'int' has no len()
```
hmm...
Hi Jim,
> got this when it failed:
> `Cannot connect to MQTT broker at localhost:1883: object of type 'int' has no len()`
That's weird, but I am confident that we will also find out about the root cause. The corresponding place in the code where this is happening is:
https://github.com/jpmens/mqttwarn/blob/f4e6e0888a2345bb3b0bf7a490081b1e48185609/mqttwarn/core.py#L629-L634
I am assuming that again something might be going wrong with correctly parsing and interpreting the configuration file on your end. I can confirm that, when inserting the snippets you shared into the `mqttwarn.ini` on my machine, I don't observe such an error.
Currently, I have no idea what exactly might be going wrong on your end. Two things come to mind how to proceed:
a) Maybe the culprit is different newline characters on Windows after saving the configuration file? It is hard to believe, because you already edited and saved it manually beforehand, right?
b) Can you verify everything works flawlessly again, when you remove the snippets you added?
With kind regards,
Andreas.
I found it. It had nothing to do with the smtp stuff I had inserted; I had also put in these lines without single quotes. Single quotes fixed it.
Now I just need to figure out how to specify qos=1.
Fails with that odd error message:
username = mqttwarn
password = 123456789
Works fine:
username = 'mqttwarn'
password = '123456789'
Got that. Now this. I guess I need to just plug away at it and not keep clogging up this site with each of my steps.
```
2022-08-18 10:45:37,047 DEBUG [mqttwarn.core ] Message received on IOTgadgettopic: "This is MQTTX1 with gadget topic with another try"
2022-08-18 10:45:37,048 DEBUG [mqttwarn.core ] Section [IOTgadgettopic] matches message on IOTgadgettopic. Processing...
2022-08-18 10:45:37,051 DEBUG [mqttwarn.core ] Cannot decode JSON object, payload= "This is MQTTX1 with gadget topic with another try": dictionary update sequence element #0 has length 1; 2 is required
2022-08-18 10:45:37,053 DEBUG [mqttwarn.core ] Message on IOTgadgettopic going to smtp:mqttwarn
2022-08-18 10:45:37,053 DEBUG [mqttwarn.core ] New `smtp:mqttwarn' job: IOTgadgettopic
2022-08-18 10:45:37,054 DEBUG [mqttwarn.core ] Processor #0 is handling: `smtp' for mqttwarn
2022-08-18 10:45:37,054 INFO [mqttwarn.core ] Invoking service plugin for `smtp'
2022-08-18 10:45:37,056 DEBUG [mqttwarn.services.smtp ] *** MODULE=d:\program files (d)\python37\lib\site-packages\mqttwarn\services\smtp.py: service=smtp, target=mqttwarn
2022-08-18 10:45:37,056 ERROR [mqttwarn.core ] Cannot invoke service for `smtp'
Traceback (most recent call last):
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\core.py", line 517, in processor
notified = timeout(module.plugin, (srv, st))
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\util.py", line 133, in timeout
raise it.exception
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\util.py", line 121, in run
self.result = func(*args, **kwargs)
File "d:\program files (d)\python37\lib\site-packages\mqttwarn\services\smtp.py", line 24, in plugin
username = item.config['username']
KeyError: 'username'
2022-08-18 10:45:37,063 WARNING [mqttwarn.core ] Notification of smtp for `IOTgadgettopic' FAILED or TIMED OUT
2022-08-18 10:45:37,066 DEBUG [mqttwarn.core ] Job queue has 0 items to process
```
Hi again,
I am glad that the issues with the configuration files seem to be resolved now.
> not keep clogging up this site with each of my steps
Don't worry, please keep reporting. Every detail counts to improve the software and/or its documentation in one way or another. On the very last detail you reported, I've just created #573.
Cheers,
Andreas.
| 2022-08-21T15:55:46 | 0.0 | [] | [] |
||
mqtt-tools/mqttwarn | mqtt-tools__mqttwarn-557 | 338477e53bacad50ff2ed888ceb10a80513ee663 | diff --git a/CHANGES.rst b/CHANGES.rst
index 3aaac642..428a2004 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -6,6 +6,13 @@ mqttwarn changelog
in progress
===========
+- Allow dispatching of messages with no target address information.
+ This helps for service plugins like Apprise to make the configuration
+ snippet more compact. Now, service configurations can omit the ``targets``
+ option altogether.
+- Apprise service: Accept omitted/empty `addrs` attribute.
+- Apprise service: Improve query parameter serialization.
+
2021-10-17 0.27.0
=================
diff --git a/HANDBOOK.md b/HANDBOOK.md
index 51bc4d6e..9ae25ecd 100644
--- a/HANDBOOK.md
+++ b/HANDBOOK.md
@@ -529,9 +529,11 @@ This configuration snippet will activate two service plugins
```ini
[defaults]
-launch = apprise-mail, apprise-json
+launch = apprise-mail, apprise-json, apprise-discord
[config:apprise-mail]
+; Submit emails for notifying users.
+; https://github.com/caronc/apprise/wiki/Notify_email
module = 'apprise'
baseuri = 'mailtos://smtp_username:[email protected]'
sender = '[email protected]'
@@ -541,16 +543,22 @@ targets = {
}
[config:apprise-json]
+; Post message to HTTP endpoint, in JSON format.
+; https://github.com/caronc/apprise/wiki/Notify_Custom_JSON
module = 'apprise'
baseuri = 'json://localhost:1234/mqtthook'
-; Surrogate for satisfying machinery.
-targets = {
- 'n/a' : [''],
- }
+
+[config:apprise-discord]
+; Post message to Discord channel, via Webhook.
+; https://github.com/caronc/apprise/wiki/Notify_discord
+; https://discord.com/developers/docs/resources/webhook
+; discord://{WebhookID}/{WebhookToken}/
+module = 'apprise'
+baseuri = 'discord://4174216298/JHMHI8qBe7bk2ZwO5U711o3dV_js'
[apprise-test]
topic = apprise/#
-targets = apprise-mail:demo, apprise-json
+targets = apprise-mail:demo, apprise-json, apprise-discord
format = Alarm from {device}: {payload}
title = Alarm from {device}
```
diff --git a/Makefile b/Makefile
index 5d2909bc..b7b3e7cb 100644
--- a/Makefile
+++ b/Makefile
@@ -29,15 +29,15 @@ setup-virtualenv:
# Run the main test suite
test:
@test -e $(pytest) || $(MAKE) install-tests
- @$(pytest) tests -m 'not slow'
+ @$(pytest) -vvv tests -m 'not slow'
test-refresh: install-tests test
test-junit: install-tests
- @$(pytest) tests --junit-xml .pytest_results/pytest.xml
+ @$(pytest) -vvv tests --junit-xml .pytest_results/pytest.xml
test-coverage: install-tests
- @$(pytest) tests \
+ @$(pytest) -vvv tests \
--junit-xml .pytest_results/pytest.xml \
--cov mqttwarn --cov-branch \
--cov-report term-missing \
diff --git a/mqttwarn/context.py b/mqttwarn/context.py
index ca75dc36..e74a78dd 100644
--- a/mqttwarn/context.py
+++ b/mqttwarn/context.py
@@ -102,17 +102,12 @@ def get_service_config(self, service):
return dict(config)
def get_service_targets(self, service):
+ # Be more graceful with jobs w/o any target address information (2021-10-18 [amo]).
try:
- targets = self.config.getdict('config:' + service, 'targets')
- if type(targets) != dict:
- logger.error("No targets for service `%s'" % service)
+ targets = self.config.getdict('config:' + service, 'targets') or [None]
+ return targets
except:
- logger.error("No targets for service `%s'" % service)
-
- if targets is None:
- return {}
-
- return dict(targets)
+ logger.exception("Unable to access targets for service `%s'" % service)
@attr.s
diff --git a/mqttwarn/core.py b/mqttwarn/core.py
index dbfdce33..e890db4e 100644
--- a/mqttwarn/core.py
+++ b/mqttwarn/core.py
@@ -454,12 +454,18 @@ def processor(worker_id=None):
q_in.task_done()
continue
+ # Be more graceful with jobs w/o any target address information (2021-10-18 [amo]).
+ if target is None:
+ addrs = []
+ else:
+ addrs = service_targets[target]
+
item = {
'service' : service,
'section' : section,
'target' : target,
'config' : service_config,
- 'addrs' : service_targets[target],
+ 'addrs' : addrs,
'topic' : topic,
'payload' : job.payload,
'data' : None,
diff --git a/mqttwarn/model.py b/mqttwarn/model.py
index 9b5d56e1..04970d55 100644
--- a/mqttwarn/model.py
+++ b/mqttwarn/model.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# (c) 2021 The mqttwarn developers
-from dataclasses import dataclass
+from dataclasses import dataclass, field
from typing import Dict, List, Union
@@ -13,7 +13,7 @@ class ProcessorItem:
service: str = None
target: str = None
config: Dict = None
- addrs: List[str] = None
+ addrs: List[str] = field(default_factory=list)
priority: int = None
topic: str = None
title: str = None
diff --git a/mqttwarn/services/apprise.py b/mqttwarn/services/apprise.py
index 1f31389b..23cb8e09 100644
--- a/mqttwarn/services/apprise.py
+++ b/mqttwarn/services/apprise.py
@@ -1,10 +1,13 @@
# -*- coding: utf-8 -*-
__author__ = 'Andreas Motl <[email protected]>'
-__copyright__ = 'Copyright 2020 Andreas Motl'
+__copyright__ = 'Copyright 2020-2021 Andreas Motl'
__license__ = 'Eclipse Public License - v 1.0 (http://www.eclipse.org/legal/epl-v10.html)'
# https://github.com/caronc/apprise#developers
+from urllib.parse import urlencode
+from collections import OrderedDict
+
import apprise
@@ -13,30 +16,33 @@ def plugin(srv, item):
srv.logging.debug("*** MODULE=%s: service=%s, target=%s", __file__, item.service, item.target)
- addresses = item.addrs
-
- if not addresses:
- srv.logging.warning("Skipped sending notification to Apprise %s, "
- "no addresses configured" % (item.target))
- return False
-
sender = item.config.get('sender')
sender_name = item.config.get('sender_name')
baseuri = item.config['baseuri']
+ addresses = item.addrs
title = item.title
body = item.message
try:
- srv.logging.debug("Sending notification to Apprise %s, addresses: %s" % (item.target, addresses))
+ srv.logging.debug("Sending notification to Apprise. target=%s, addresses=%s" % (item.target, addresses))
to = ','.join(addresses)
# Create an Apprise instance.
apobj = apprise.Apprise(asset=apprise.AppriseAsset(async_mode=False))
- # Add notification services by server url.
- uri = '{baseuri}?from={sender}&to={to}'.format(baseuri=baseuri, sender=sender, to=to)
+ # Collect URL parameters.
+ params = OrderedDict()
+ if sender:
+ params["from"] = sender
+ if to:
+ params["to"] = to
if sender_name:
- uri += '&name={sender_name}'.format(sender_name=sender_name)
+ params["name"] = sender_name
+
+ # Add notification services by server url.
+ uri = baseuri
+ if params:
+ uri += '?' + urlencode(params)
apobj.add(uri)
# Submit notification.
| Streamline configuration for services w/o `targets` attribute
Hi there,
when using the Apprise service plugin, more often than not, the `targets` attribute might not be required at all. Two examples are:
```
[config:apprise-json]
module = 'apprise'
baseuri = 'json://localhost:1234/mqtthook'
; Surrogate for satisfying machinery.
targets = {
'n/a' : [''],
}
```
-- https://github.com/jpmens/mqttwarn/blob/main/HANDBOOK.md#L543-L549
```
[config:apprise-discord]
# discord://{WebhookID}/{WebhookToken}/
module = 'apprise'
baseuri = 'discord://4174216298/JHMHI8qBe7bk2ZwO5U711o3dV_js'
; Surrogate for satisfying machinery.
targets = {
'n/a' : [''],
}
```
-- https://github.com/jpmens/mqttwarn/issues/555#issuecomment-945959534
It would be sweet if we could skip the `targets` configuration attribute altogether instead of having to supply this surrogate, i.e. making it an optional attribute.
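For illustration, the first example would then shrink to something like this. A sketch of the desired syntax only, assuming `targets` becomes optional:
```
[config:apprise-json]
module = 'apprise'
baseuri = 'json://localhost:1234/mqtthook'
```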
With kind regards,
Andreas.
| 2021-10-18T19:52:26 | 0.0 | [] | [] |
|||
mqtt-tools/mqttwarn | mqtt-tools__mqttwarn-546 | 8c3fc7892d80600f587608c2ddf351ca738a35d6 | diff --git a/CHANGES.rst b/CHANGES.rst
index 0f4ed0db..5b761300 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -9,6 +9,9 @@ in progress
- IRCcat: Fix and improve service. Thanks, @JanKoppe.
- IRCcat: Add newline character after message. Thanks again, @JanKoppe.
- CI: Run tests on Python 3.10
+- Allow parameters of the ``file`` service to be defined on a per-file basis.
+ Thanks, @Gulaschcowboy!
+- Add software tests for ``file`` service.
2021-09-29 0.26.2
diff --git a/HANDBOOK.md b/HANDBOOK.md
index d71a409e..0be42e28 100644
--- a/HANDBOOK.md
+++ b/HANDBOOK.md
@@ -814,9 +814,12 @@ Requires:
### `file`
-The `file` service can be used for logging incoming topics, archiving, etc. Each message is written to a path specified in the targets list. Note that files are opened for appending and then closed on each notification.
+The `file` service can be used for logging incoming topics, archiving, etc.
+Each message is written to a path specified in the targets list. Note that
+files are opened for appending and then closed on each notification.
-Supposing we wish to archive all incoming messages to the branch `arch/#` to a file `/data/arch`, we could configure the following:
+Supposing we wish to archive all incoming messages to the branch `arch/#`
+to a file `/data/arch`, we could configure the following:
```ini
[config:file]
@@ -827,8 +830,21 @@ targets = {
}
```
-If `append_newline` is True, a newline character is unconditionally appended to the string written to the file. If `overwrite` is True, the file is opened for truncation upon writing (i.e. the file will contain the last message only).
+If `append_newline` is `True`, a newline character is unconditionally appended
+to the string written to the file. If `overwrite` is `True`, the file is opened
+for truncation upon writing (i.e. the file will contain the last message only).
+Both parameters can also be specified on a per-file basis. In order to do that,
+the corresponding configuration snippet would look like this:
+
+```ini
+[config:file]
+targets = {
+ 'log-me' : {'path': '/data/arch', 'append_newline': True, 'overwrite': False},
+ }
+```
+
+Per-item parameters take precedence over global parameters.
### `freeswitch`
diff --git a/mqttwarn/services/file.py b/mqttwarn/services/file.py
index fa385dc3..9d77f605 100644
--- a/mqttwarn/services/file.py
+++ b/mqttwarn/services/file.py
@@ -18,13 +18,30 @@ def plugin(srv, item):
# item.config is brought in from the configuration file
config = item.config
- # addrs is a list[] associated with a particular target.
- # While it may contain more than one item (e.g. pushover)
- # the `file' service carries one only, i.e. a path name
- filename = item.addrs[0].format(**item.data)
+ # Evaluate global parameters.
+ newline = False
+ overwrite = False
+ if type(config) == dict and 'append_newline' in config and config['append_newline']:
+ newline = True
+ if type(config) == dict and 'overwrite' in config and config['overwrite']:
+ overwrite = True
+
+ # `item.addrs` is either a dict or a list associated with a particular target.
+ # While lists may contain more than one item (e.g., for the pushover target),
+ # the `file` service only allows for single items, the path name.
+ # When it's a dict, additional parameters can be obtained to augment the
+ # behavior of the write operation on a per-file basis.
+ if isinstance(item.addrs, dict):
+ filename = item.addrs['path'].format(**item.data)
+ # Evaluate per-file parameters.
+ newline = item.addrs.get('append_newline', newline)
+ overwrite = item.addrs.get('overwrite', overwrite)
+ else:
+ filename = item.addrs[0].format(**item.data)
# Interpolate some variables into filename.
- filename = filename.replace("$TMPDIR", tempfile.gettempdir())
+ if "$TMPDIR" in filename:
+ filename = filename.replace("$TMPDIR", tempfile.gettempdir())
srv.logging.info("Writing to file `%s'" % (filename))
@@ -32,9 +49,9 @@ def plugin(srv, item):
# else the original payload
text = item.message
- if type(config) == dict and 'append_newline' in config and config['append_newline']:
+ if newline:
text += "\n"
- if type(config) == dict and 'overwrite' in config and config['overwrite']:
+ if overwrite:
mode = "w"
try:
diff --git a/setup.py b/setup.py
index 4518a1a1..b6d9e6db 100644
--- a/setup.py
+++ b/setup.py
@@ -156,6 +156,7 @@
'dataclasses; python_version<"3.7"',
'requests-toolbelt>=0.9.1,<1',
'responses>=0.13.3,<1',
+ 'pyfakefs>=4.5,<5',
]
| [config:file] config params like overwrite are global only
Hi there,
it seems that the config params of the file plugin are global only. So this config
```ini
[config:file]
append_newline = True
overwrite = True
targets = {
'f01' : ['/tmp/f.01'],
'log-me' : ['/tmp/log.me'],
'mqttwarn' : ['/tmp/mqttwarn.err'],
}
```
would set those params for all file targets.
It doesn't seem to be possible to configure those options on a per-target level. Wouldn't this be a nice feature? ;-)
Thank you very much!
| Dear Alexander,
thank you for writing in. I propose to add the possibility to write down this configuration section like that:
```ini
[config:file]
targets = {
'f01' : {'path': '/tmp/f.01', 'append_newline': True, 'overwrite': True},
'log-me' : ['/tmp/log.me'],
'mqttwarn' : ['/tmp/mqttwarn.err'],
}
```
Around https://github.com/jpmens/mqttwarn/blob/0.26.2/mqttwarn/services/file.py#L21-L24, the code would then be like:
```python
# `item.addrs` is either a dict or a list associated with a particular target.
# While lists may contain more than one item (e.g., for the pushover target),
# the `file` service only allows for single items, the path name.
# When it's a dict, additional parameters can be obtained to augment the
# behavior of the write operation on a per-file basis.
newline = False
overwrite = False
if isinstance(item.addrs, dict):
filename = item.addrs['path'].format(**item.data)
newline = item.addrs.get('append_newline', False)
overwrite = item.addrs.get('overwrite', False)
else:
filename = item.addrs[0].format(**item.data)
```
Around https://github.com/jpmens/mqttwarn/blob/0.26.2/mqttwarn/services/file.py#L35-L38, it would be like:
```python
if type(config) == dict and 'append_newline' in config and config['append_newline']:
newline = True
if type(config) == dict and 'overwrite' in config and config['overwrite']:
overwrite = True
if newline:
text += "\n"
if overwrite:
mode = "w"
```
In this manner, we can implement your requirement while completely retaining backward compatibility.
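Putting both snippets together, the resolution logic could be factored like this. A sketch only: `resolve_file_parameters` is a hypothetical helper, and per-file settings default to the global values so that they take precedence:
```python
def resolve_file_parameters(config, addrs, data):
    """Resolve `append_newline`/`overwrite`: global settings come first,
    per-file settings (dict-style targets) take precedence."""
    # Global parameters from the [config:file] section.
    newline = isinstance(config, dict) and bool(config.get('append_newline'))
    overwrite = isinstance(config, dict) and bool(config.get('overwrite'))
    if isinstance(addrs, dict):
        # Dict-style target: per-file parameters override the globals.
        filename = addrs['path'].format(**data)
        newline = addrs.get('append_newline', newline)
        overwrite = addrs.get('overwrite', overwrite)
    else:
        # List-style target: backward-compatible, path name only.
        filename = addrs[0].format(**data)
    return filename, newline, overwrite
```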
With kind regards,
Andreas.
| 2021-10-03T13:38:27 | 0.0 | [] | [] |
||
mqtt-tools/mqttwarn | mqtt-tools__mqttwarn-539 | d360c98de3f370a2e10678f1d26e791fa10e3516 | diff --git a/.gitignore b/.gitignore
index 3f864c2f..89d5fe77 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,10 +13,9 @@ services/winscp.rnd
.vs/VSWorkspaceState.json
.vs/ProjectSettings.json
f.01
-.vscode/settings.json
funcs.py
.pytest_cache
.pytest_results
-.coverage
+.coverage*
coverage.xml
.tox
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000..c619bb6b
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,5 @@
+{
+ "python.pythonPath": "${workspaceFolder}/.venv/bin/python",
+ "python.testing.pytestEnabled": true,
+ "python.testing.pytestPath": "${workspaceFolder}/.venv/bin/pytest"
+}
\ No newline at end of file
diff --git a/CHANGES.rst b/CHANGES.rst
index e2dddda4..1b96d1c4 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -16,6 +16,8 @@ in progress
- Pushover service plugin: Enable passing of parameters ``html``, ``url``, ``url_title``
- Add test for exercising configuration file scaffolding
- Improve README regarding configuration file scaffolding on Windows 10/PowerShell
+- Improve support for VSCode
+- Improve ``mqttwarn`` command line entry point testing
2021-10-31 0.28.1
diff --git a/doc/sandbox.rst b/doc/sandbox.rst
index f02f1900..c1f915e2 100644
--- a/doc/sandbox.rst
+++ b/doc/sandbox.rst
@@ -46,6 +46,17 @@ will be able to launch the ``mqttwarn`` entrypoint without further ado.
Otherwise, setup the virtualenv manually by invoking those commands::
+ # On Linux
python3 -m venv .venv
source .venv/bin/activate
+
+ # On Windows
+ python -m venv .venv
+ .venv/Scripts/activate
+
pip install --editable=.[test] --upgrade
+
+For properly configuring a virtualenv, please also read those fine resources:
+
+- https://code.visualstudio.com/docs/python/environments
+- https://medium.com/@kylehayes/using-a-python-virtualenv-environment-with-vscode-b5f057f44c6a
| Running mqttwarn from VSCode
I moved to the official image, as you know, and have found one of my items not working (in the pushover service I think there's a Python 2.7 → 3.9 incompatibility).
I think I have a fix for it, but I just want to run it in debug mode in Visual Studio Code. The structure has changed significantly since I last ran it.
What Python file do I execute?
And where should the mqttwarn.ini file reside to run?
| Dear @psyciknz,
thank you for your report.
> I found one of my items not working: in the pushover service I think there's a Python 2.7 → 3.9 incompatibility.
That could well have happened, apologies. We made the transition to Python 3 the other day, but haven't been able to keep up with migrating and verifying all service plugins; we just take care of them as we go. We would appreciate any patches for the pushover service plugin and beyond.
> What Python file do I execute?
The main program file entrypoint is just called `mqttwarn`; it will invoke `mqttwarn.commands:run`.
> Where should the `mqttwarn.ini` file reside to run?
By default, it will use a file `mqttwarn.ini` from the current working directory. Otherwise, you can adjust the `MQTTWARNINI` environment variable to point to a different configuration file.
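If VSCode insists on being pointed at a plain `.py` file, a minimal launcher shim would be enough. Just a sketch; the file name is arbitrary:
```python
# run_mqttwarn.py -- hypothetical file name.
# Equivalent to invoking the `mqttwarn` console script.
from mqttwarn.commands import run

if __name__ == '__main__':
    run()
```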
With kind regards,
Andreas.
> > What Python file do I execute?
>
> The main program file entrypoint is just called `mqttwarn`; it will invoke `mqttwarn.commands:run`.
>
If you were running it on Linux, what Python command (against which file) would you execute?
`python mqttwarn/core.py`? As I don't see a mqttwarn.py file in the file system any more.
I think I've managed to get around it by creating a run.py module that calls `mqttwarn.commands.run()`.
Hi @psyciknz,
> If you were running it on Linux, what Python command (against which file) would you execute?
The command entrypoint `mqttwarn` is actually a Python file; it just does not have a `.py` extension. When installing the package in development mode into a virtualenv (like, `pip install --editable=.`), the `mqttwarn` program will be located within its `bin/` directory, e.g. `.venv/bin/mqttwarn`.
I hope VSC will be able to invoke that program somehow?
With kind regards,
Andreas.
Hi again,
by following the documentation at [1], I am able to confirm that those commands seem to work:
```shell
source .venv/bin/activate
pip install debugpy
python -m debugpy --listen 5678 .venv/bin/mqttwarn
```
With kind regards,
Andreas.
P.S.: If you don't want to create a virtualenv by hand, you can simply invoke `make test` in the toplevel directory of the repository's working tree; the process will automatically create a virtualenv in the `.venv` directory, so you should be good to go.
[1] https://code.visualstudio.com/docs/python/debugging#_command-line-debugging
Hi again,
have you been able to resolve this issue? Shall we just close the ticket or add anything valuable to the documentation?
With kind regards,
Andreas.
I think I've partially been able to resolve it. I may have added some VSCode-specific files to the repo for runtime.
I've got one other module it's complaining about when running, but I'm hoping that's due to the mqttwarn.ini file I'm using, which is based off my main one. I'll cut that down.
But what I might look at, and is probably a better direction for the project, is to create, in this case, a test_pushover.py test class for testing/debugging.
> I think I've partially been able to resolve it. I may have added some VSCode-specific files to the repo for runtime.
That is really sad. Don't you see any way to make VSCode just invoke the main program `mqttwarn` or its Python entrypoint `mqttwarn.commands:run`? According to [1,2], you might be able to configure `mqttwarn` in your `launch.json`?
[1] https://code.visualstudio.com/docs/python/debugging
[2] https://stackoverflow.com/questions/48164843/how-to-run-python-in-visual-studio-code-as-a-main-module
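A corresponding `launch.json` entry might look roughly like this. An untested sketch; the `program` path assumes a virtualenv in `.venv`, so please adjust it to your environment:
```json
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "mqttwarn",
            "type": "python",
            "request": "launch",
            "program": "${workspaceFolder}/.venv/bin/mqttwarn",
            "console": "integratedTerminal",
            "env": {"MQTTWARNINI": "${workspaceFolder}/mqttwarn.ini"}
        }
    ]
}
```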
> I've got one other module it's complaining about when running, but I'm hoping that's due to the mqttwarn.ini file I'm using, which is based off my main one. I'll cut that down.
All right. Let us know about the outcome.
> But what I might look at, and is probably a better direction for the project, is to create a test_pushover.py test class for testing/debugging.
I carried a discussion about that topic forward to #526.
> > I think I've partially been able to resolve it. I may have added some VSCode-specific files to the repo for runtime.
>
> That is really sad. Don't you see any way to make VSCode just invoke the main program `mqttwarn` or its Python entrypoint `mqttwarn.commands:run`? According to [1,2], you might be able to configure `mqttwarn` in your `launch.json`?
>
> [1] https://code.visualstudio.com/docs/python/debugging
> [2] https://stackoverflow.com/questions/48164843/how-to-run-python-in-visual-studio-code-as-a-main-module
>
Yeah, I couldn't figure out what I needed to run it as a module, which I think is the terminology.
> > I've got one other module it's complaining about when running, but I'm hoping that's due to the mqttwarn.ini file I'm using, which is based off my main one. I'll cut that down.
>
> All right. Let us know about the outcome.
Yep, sorted that. I think it was a lack of a `functions` directive in the ini file, so I added the entry and put in a basic samplefuncs.py.
>
> > But what I might look at, and is probably a better direction for the project, is to create, in this case, a test_pushover.py test class for testing/debugging.
>
> I carried a discussion about that topic forward to #526.
OK, will continue there.
Dear @psyciknz,
thank you for your contribution at #529. However, I was looking for a less invasive patch on that matter. So, can you now please check whether the VSCode `launch.json` configuration coming from #530 can satisfy your VSCode installation already? On my macOS machine, using [VSCodium](https://vscodium.com/), it worked like a charm and I was able to interactively set breakpoints and use the debugger successfully.
May I also humbly ask which operating system you are using? On the detail of how to set up the virtualenv stuff (see [1]), we might have to adjust the documentation when it comes to, for example, running that procedure on Windows. In that case, I am kindly asking for corresponding contributions to those guidelines.
With kind regards,
Andreas.
[1] https://github.com/jpmens/mqttwarn/blob/main/doc/sandbox.rst
> Dear @psyciknz,
>
> thank you for your contribution at #529. However, I was looking for a less invasive patch on that matter. So, can you now please check whether the VSCode `launch.json` configuration coming from #530 can satisfy your VSCode installation already? On my macOS machine, using [VSCodium](https://vscodium.com/), it worked like a charm and I was able to interactively set breakpoints and use the debugger successfully.
>
> May I also humbly ask which operating system you are using? On the detail of how to set up the virtualenv stuff (see [1]), we might have to adjust the documentation when it comes to, for example, running that procedure on Windows. In that case, I am kindly asking for corresponding contributions to those guidelines.
>
> With kind regards,
> Andreas.
>
> [1] https://github.com/jpmens/mqttwarn/blob/main/doc/sandbox.rst
I'm on windows, with Visusul studio code.
I don't tend to run venv for any python projects. I've pulled from main back into my master to re-do the publish version changes, and so took the oppurtunity to try the launch.json that was merged into master. I get the following upon running:
```
Try the new cross-platform PowerShell https://aka.ms/pscore6
PS C:\Users\d_col\Code\Git\mqttwarn> & 'c:\Users\d_col\AppData\Local\Programs\Python\Python39\python.exe' 'c:\Users\d_col\.vscode\extensions\ms-python.python-2021.5.926500501\pythonFiles\lib\python\debugpy\launcher' '53840' '--' 'mqttwarn'
pydev debugger: critical: unable to get real case for file. Details:
filename: mqttwarn
drive:
parts: ['mqttwarn']
(please create a ticket in the tracker to address this).
Traceback (most recent call last):
File "c:\Users\d_col\.vscode\extensions\ms-python.python-2021.5.926500501\pythonFiles\lib\python\debugpy\_vendored\pydevd\pydevd_file_utils.py", line 221, in _get_path_with_real_case
return _resolve_listing(drive, iter(parts))
File "c:\Users\d_col\.vscode\extensions\ms-python.python-2021.5.926500501\pythonFiles\lib\python\debugpy\_vendored\pydevd\pydevd_file_utils.py", line 184, in _resolve_listing
dir_contents = cache[resolved_lower] = os.listdir(resolved)
FileNotFoundError: [WinError 3] The system cannot find the path specified: ''
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "c:\Users\d_col\.vscode\extensions\ms-python.python-2021.5.926500501\pythonFiles\lib\python\debugpy\_vendored\pydevd\pydevd_file_utils.py", line 226, in _get_path_with_real_case
return _resolve_listing(drive, iter(parts))
File "c:\Users\d_col\.vscode\extensions\ms-python.python-2021.5.926500501\pythonFiles\lib\python\debugpy\_vendored\pydevd\pydevd_file_utils.py", line 184, in _resolve_listing
dir_contents = cache[resolved_lower] = os.listdir(resolved)
FileNotFoundError: [WinError 3] The system cannot find the path specified: ''
pydev debugger: critical: unable to get real case for file. Details:
filename: mqttwarn
drive:
parts: ['mqttwarn']
(please create a ticket in the tracker to address this).
Traceback (most recent call last):
File "c:\Users\d_col\.vscode\extensions\ms-python.python-2021.5.926500501\pythonFiles\lib\python\debugpy\_vendored\pydevd\pydevd_file_utils.py", line 221, in _get_path_with_real_case
return _resolve_listing(drive, iter(parts))
File "c:\Users\d_col\.vscode\extensions\ms-python.python-2021.5.926500501\pythonFiles\lib\python\debugpy\_vendored\pydevd\pydevd_file_utils.py", line 184, in _resolve_listing
dir_contents = cache[resolved_lower] = os.listdir(resolved)
FileNotFoundError: [WinError 3] The system cannot find the path specified: ''
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "c:\Users\d_col\.vscode\extensions\ms-python.python-2021.5.926500501\pythonFiles\lib\python\debugpy\_vendored\pydevd\pydevd_file_utils.py", line 226, in _get_path_with_real_case
return _resolve_listing(drive, iter(parts))
File "c:\Users\d_col\.vscode\extensions\ms-python.python-2021.5.926500501\pythonFiles\lib\python\debugpy\_vendored\pydevd\pydevd_file_utils.py", line 184, in _resolve_listing
dir_contents = cache[resolved_lower] = os.listdir(resolved)
FileNotFoundError: [WinError 3] The system cannot find the path specified: ''
```
I attempted to follow the instructions from https://github.com/jpmens/mqttwarn/blob/main/doc/sandbox.rst
But none of them seem to work on Windows:
```
PS C:\Users\d_col\Code\Git\mqttwarn> make test
make : The term 'make' is not recognized as the name of a cmdlet, function, script file, or operable program. Check the spelling of the name, or if a path was included, verify
that the path is correct and try again.
At line:1 char:1
+ make test
+ ~~~~
+ CategoryInfo : ObjectNotFound: (make:String) [], CommandNotFoundException
+ FullyQualifiedErrorId : CommandNotFoundException
PS C:\Users\d_col\Code\Git\mqttwarn> python3 -m venv .venv
Python was not found; run without arguments to install from the Microsoft Store, or disable this shortcut from Settings > Manage App Execution Aliases.
PS C:\Users\d_col\Code\Git\mqttwarn> source .venv/bin/activate
source : The term 'source' is not recognized as the name of a cmdlet, function, script file, or operable program. Check the spelling of the name, or if a path was included,
verify that the path is correct and try again.
At line:1 char:1
+ source .venv/bin/activate
+ ~~~~~~
+ CategoryInfo : ObjectNotFound: (source:String) [], CommandNotFoundException
+ FullyQualifiedErrorId : CommandNotFoundException
PS C:\Users\d_col\Code\Git\mqttwarn>
```
I see you have added a main.py. This does look similar to my .vscode\run.py, but you'll note I had to include more imports. Perhaps because I am not in a virtual environment?
```
import sys

# To use this run.py, a .env file is needed in the .vscode directory
# with the location of the Python path and the name/location of the RUNINI file, e.g.:
# PYTHONPATH=C:\\Users\\default\\AppData\\Local\\Programs\\Python\\Python39\\Lib\\site-packages
# RUNINI=.vscode\\run.ini
sys.path.append('..\\mqttwarn')
sys.path.append('.vscode')

# Extra imports that appear to be required when running outside a virtualenv.
import attr
from docopt import docopt
# from ..mqttwarn import mqttwarn
import mqttwarn.core
import mqttwarn.util
import mqttwarn.context
import mqttwarn.services
import mqttwarn.commands

mqttwarn.commands.run()
``` | 2021-06-19T23:10:56 | 0.0 | [] | [] |
||
alteryx/nlp_primitives | alteryx__nlp_primitives-234 | b83c5ad19ec119b352dd879156a489901c316b3a | diff --git a/nlp_primitives/mean_characters_per_sentence.py b/nlp_primitives/mean_characters_per_sentence.py
index 201fe8df..9189e56c 100644
--- a/nlp_primitives/mean_characters_per_sentence.py
+++ b/nlp_primitives/mean_characters_per_sentence.py
@@ -31,12 +31,10 @@ def get_function(self):
def _mean_characters_per_sentence(text):
if not isinstance(text, str):
return np.nan
- if len(text) == 0:
- return 0
sentences = sent_tokenize(text)
- total = 0.0
- for s in sentences:
- total += len(s)
+ if not len(sentences):
+ return 0.0
+ total = sum(len(s) for s in sentences)
return total / len(sentences)
def mean_characters_per_sentence(array):
diff --git a/release_notes.rst b/release_notes.rst
index 8a2283fb..683e2805 100644
--- a/release_notes.rst
+++ b/release_notes.rst
@@ -8,6 +8,7 @@ Future Release
* Add conda create feedstock pull request workflow (:pr:`220`)
* Improve ``PartOfSpeech`` docstring (:pr:`224`)
* Fixes
+ * Fix handling of all-whitespace strings in ``MeanCharactersPerSentence`` (:pr:`234`)
* Changes
* Update workflow_dispatch to release workflow (:pr:`221`)
* Documentation Changes
| MeanCharactersPerSentence fails on just whitespace text
When the text is only whitespace, it passes through the edge-case checks in `_mean_characters_per_sentence`, and `len(sentences)` ends up being zero, which causes a divide-by-zero error.
```
Exception has occurred: ZeroDivisionError (note: full exception trace is shown but execution is paused at: _run_module_as_main)
float division by zero
File "/Users/odembowski/Repos/nlp_primitives/nlp_primitives/mean_characters_per_sentence.py", line 42, in _mean_characters_per_sentence
return total / len(sentences)
File "/Users/odembowski/.pyenv/versions/feature_tools/lib/python3.8/site-packages/pandas/_libs/lib.pyx", line 2870, in pandas._libs.lib.map_infer
val = f(arr[i])
File "/Users/odembowski/.pyenv/versions/feature_tools/lib/python3.8/site-packages/pandas/core/apply.py", line 1143, in apply_standard
mapped = lib.map_infer(
File "/Users/odembowski/.pyenv/versions/feature_tools/lib/python3.8/site-packages/pandas/core/apply.py", line 1088, in apply
return self.apply_standard()
File "/Users/odembowski/.pyenv/versions/feature_tools/lib/python3.8/site-packages/pandas/core/series.py", line 4433, in apply
return SeriesApply(self, func, convert_dtype, args, kwargs).apply()
File "/Users/odembowski/Repos/nlp_primitives/nlp_primitives/mean_characters_per_sentence.py", line 45, in mean_characters_per_sentence
return array.apply(_mean_characters_per_sentence)
File "/Users/odembowski/Repos/featuretools/rec_prims.py", line 45, in <module>
answer = get_fun(es["df"]["text"])
File "/Users/odembowski/.pyenv/versions/3.8.12_x86/lib/python3.8/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/Users/odembowski/.pyenv/versions/3.8.12_x86/lib/python3.8/runpy.py", line 194, in _run_module_as_main (Current frame)
return _run_code(code, main_globals, None,
```
along with these checks
```
if not isinstance(text, Iterable):
return np.nan
if len(text) == 0:
return 0
```
we may need to check for whitespace-only text and return 0 or NaN, or simply return 0 if the `sent_tokenize` tokenizer doesn't return anything.
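A minimal sketch of that last option, returning 0 as soon as the tokenizer yields no sentences:
```python
import numpy as np
from nltk.tokenize import sent_tokenize

def _mean_characters_per_sentence(text):
    if not isinstance(text, str):
        return np.nan
    sentences = sent_tokenize(text)
    # All-whitespace (or empty) input produces no sentences,
    # so bail out before dividing by len(sentences).
    if not sentences:
        return 0.0
    return sum(len(s) for s in sentences) / len(sentences)
```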
| 2022-12-20T19:57:38 | 0.0 | [] | [] |
|||
microsoft/vision-datasets | microsoft__vision-datasets-34 | 49fdb5033f093b917b130a25457d5cd1a7372251 | diff --git a/setup.py b/setup.py
index 83f0874..2c8ac4e 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
import setuptools
from os import path
-VERSION = '0.2.12'
+VERSION = '0.2.13'
# Get the long description from the README file
here = path.abspath(path.dirname(__file__))
diff --git a/vision_datasets/common/data_manifest.py b/vision_datasets/common/data_manifest.py
index f22c3af..50a3d5a 100644
--- a/vision_datasets/common/data_manifest.py
+++ b/vision_datasets/common/data_manifest.py
@@ -3,8 +3,9 @@
import json
import logging
import os
+import pathlib
import random
-from typing import List, Dict
+from typing import List, Dict, Union
from urllib import parse as urlparse
from PIL import Image
import numpy as np
@@ -16,9 +17,12 @@
logger = logging.getLogger(__name__)
-def _unix_path(path):
+def _unix_path(path: Union[pathlib.Path, str]):
assert path is not None
+ if isinstance(path, pathlib.Path):
+ path = path.as_posix()
+
return path.replace('\\', '/')
@@ -35,7 +39,9 @@ def _construct_full_path_generator(dirs: List[str]):
dirs = [x for x in dirs if x]
if dirs:
- def full_path_func(path):
+ def full_path_func(path: Union[pathlib.Path, str]):
+ if isinstance(path, pathlib.Path):
+ path = path.as_posix()
to_join = [x for x in dirs + [path] if x]
return _unix_path(os.path.join(*to_join))
else:
@@ -529,8 +535,9 @@ def merge(*args, flavor: int = 0):
"""
assert len(args) >= 1, 'less than one manifests provided, not possible to merged.'
- assert all(args), '"None" manifest found'
+ assert all([arg is not None for arg in args]), '"None" manifest found'
+ args = [arg for arg in args if arg]
if len(args) == 1:
logger.warning('Only one manifest provided. Nothing to be merged.')
return args[0]
@@ -682,7 +689,7 @@ def create_dataset_manifest(dataset_info, usage: str, container_sas_or_root_dir:
logger.warning(f'{dataset_info.name}: labelmap is missing!')
else:
# read tag names
- with file_reader.open(get_full_sas_or_path(dataset_info.labelmap)) as file_in:
+ with file_reader.open(get_full_sas_or_path(dataset_info.labelmap), encoding='utf-8') as file_in:
labelmap = [IrisManifestAdaptor._purge_line(line) for line in file_in if IrisManifestAdaptor._purge_line(line) != '']
# read image width and height
@@ -764,10 +771,10 @@ class CocoManifestAdaptor:
"""
@staticmethod
- def create_dataset_manifest(coco_file_path_or_url, data_type, container_sas_or_root_dir: str = None):
+ def create_dataset_manifest(coco_file_path_or_url: Union[str, dict, pathlib.Path], data_type, container_sas_or_root_dir: str = None):
""" construct a dataset manifest out of coco file
Args:
- coco_file_path_or_url (str or dict): path or url to coco file. dict if multitask
+ coco_file_path_or_url (str or pathlib.Path or dict): path or url to coco file. dict if multitask
data_type (str or dict): type of dataset. dict if multitask
container_sas_or_root_dir (str): container sas if resources are store in blob container, or a local dir
"""
@@ -789,7 +796,7 @@ def create_dataset_manifest(coco_file_path_or_url, data_type, container_sas_or_r
file_reader = FileReader()
# read image index files
coco_file_path_or_url = coco_file_path_or_url if is_url(coco_file_path_or_url) else get_full_sas_or_path(coco_file_path_or_url)
- with file_reader.open(coco_file_path_or_url) as file_in:
+ with file_reader.open(coco_file_path_or_url, encoding='utf-8') as file_in:
coco_manifest = json.load(file_in)
file_reader.close()
diff --git a/vision_datasets/common/util.py b/vision_datasets/common/util.py
index 4d7161b..3f2e0ea 100644
--- a/vision_datasets/common/util.py
+++ b/vision_datasets/common/util.py
@@ -16,6 +16,9 @@ def is_url(candidate: str):
"""
try:
+ if not isinstance(candidate, str):
+ return False
+
result = urlparse.urlparse(candidate)
return result.scheme and result.netloc
except ValueError:
@@ -53,7 +56,7 @@ class FileReader:
def __init__(self):
self.zip_files = {}
- def open(self, name, mode='r'):
+ def open(self, name, mode='r', encoding=None):
# read file from url
if is_url(name):
return urlopen(self._encode_non_ascii(name))
@@ -66,7 +69,7 @@ def open(self, name, mode='r'):
return self.zip_files[zip_path].open(file_path)
# read file from local dir
- return open(name, mode)
+ return open(name, mode, encoding=encoding)
def close(self):
for zip_file in self.zip_files.values():
| first commit
first commit to open source the dataset access and management repo
| 2022-06-29T22:14:50 | 0.0 | [] | [] |
|||
microsoft/vision-datasets | microsoft__vision-datasets-32 | ae299d06c9f43997e4046a501169a043bf051b75 | diff --git a/COCO_DATA_FORMAT.md b/COCO_DATA_FORMAT.md
index b2554b2..1d791e8 100644
--- a/COCO_DATA_FORMAT.md
+++ b/COCO_DATA_FORMAT.md
@@ -1,13 +1,15 @@
# Coco format
+In coco, we use `file_name` and `zip_file` to construct the file_path in `ImageDataManifest` mentioned in `README.md`. If `zip_file` is present, it means that the image is zipped into a zip file for storage and access and the image path would be `{zip_file}@{file_name}`. If `zip_file` is not present, the image path would just be `file_name`.
+
## Image classification (multiclass and multilabel)
Here is one example of the train.json, val.json, or test.json in the `DatasetInfo` above. Note that the `"id"` for `images`, `annotations` and `categories` should be consecutive integers, **starting from 1**. Note that our lib might work with id starting from 0, but many tools like [CVAT](https://github.com/openvinotoolkit/cvat/issues/2085) and official [COCOAPI](https://github.com/cocodataset/cocoapi/issues/507) will fail.
``` {json}
{
- "images": [{"id": 1, "width": 224.0, "height": 224.0, "file_name": "[email protected]"},
- {"id": 2, "width": 224.0, "height": 224.0, "file_name": "train_images.zip@kitten 3.jpg"}],
+ "images": [{"id": 1, "width": 224.0, "height": 224.0, "file_name": "train_images/siberian-kitten.jpg", "zip_file": "train_images.zip"},
+ {"id": 2, "width": 224.0, "height": 224.0, "file_name": "train_images/kitten 3.jpg", "zip_file": "train_images.zip"}],
// file_name is the image path, which supports three formats as described in previous section.
"annotations": [
{"id": 1, "category_id": 1, "image_id": 1},
@@ -22,8 +24,8 @@ Here is one example of the train.json, val.json, or test.json in the `DatasetInf
``` {json}
{
- "images": [{"id": 1, "width": 224.0, "height": 224.0, "file_name": "[email protected]"},
- {"id": 2, "width": 224.0, "height": 224.0, "file_name": "train_images.zip@kitten 3.jpg"}],
+ "images": [{"id": 1, "width": 224.0, "height": 224.0, "file_name": "train_images/siberian-kitten.jpg", "zip_file": "train_images.zip"},
+ {"id": 2, "width": 224.0, "height": 224.0, "file_name": "train_images/kitten 3.jpg", "zip_file": "train_images.zip"}],
"annotations": [
{"id": 1, "category_id": 1, "image_id": 1, "bbox": [10, 10, 100, 100]},
{"id": 2, "category_id": 1, "image_id": 2, "bbox": [100, 100, 200, 200]},
@@ -46,8 +48,8 @@ Here is one example of the json file for image caption task.
``` {json}
{
- "images": [{"id": 1, "file_name": "[email protected]"},
- {"id": 2, "file_name": "[email protected]"}],
+ "images": [{"id": 1, "file_name": "train_images/honda.jpg", "zip_file": "train_images.zip"},
+ {"id": 2, "file_name": "train_images/kitchen.jpg", "zip_file": "train_images.zip"}],
"annotations": [
{"id": 1, "image_id": 1, "caption": "A black Honda motorcycle parked in front of a garage."},
{"id": 2, "image_id": 1, "caption": "A Honda motorcycle parked in a grass driveway."},
@@ -62,8 +64,8 @@ Here is one example of the json file for image text matching task. `match: 1` in
``` {json}
{
- "images": [{"id": 1, "file_name": "[email protected]"},
- {"id": 2, "file_name": "[email protected]"}],
+ "images": [{"id": 1, "file_name": "train_images/honda.jpg", "zip_file": "train_images.zip"},
+ {"id": 2, "file_name": "train_images/kitchen.jpg", "zip_file": "train_images.zip"}],
"annotations": [
{"id": 1, "image_id": 1, "text": "A black Honda motorcycle parked in front of a garage.", "match": 0},
{"id": 2, "image_id": 1, "text": "A Honda motorcycle parked in a grass driveway.", "match": 0},
@@ -84,11 +86,11 @@ Specifically, **only** image files are supported for the label files. The ground
``` {json}
{
- "images": [{"id": 1, "file_name": "train_images.zip@image/test_1.jpg"},
- {"id": 2, "file_name": "train_images.zip@image/test_2.jpg"}],
+ "images": [{"id": 1, "file_name": "train_images/image/test_1.jpg", "zip_file": "train_images.zip"},
+ {"id": 2, "file_name": "train_images/image/test_2.jpg", "zip_file": "train_images.zip"}],
"annotations": [
- {"id": 1, "image_id": 1, "label": "image_matting_label.zip@mask/test_1.png"},
- {"id": 2, "image_id": 2, "label": "image_matting_label.zip@mask/test_2.png"},
+ {"id": 1, "image_id": 1, "label": "image_matting_label/mask/test_1.png", "zip_file": "image_matting_label.zip"},
+ {"id": 2, "image_id": 2, "label": "image_matting_label/mask/test_2.png", "zip_file": "image_matting_label.zip"},
]
}
-```
\ No newline at end of file
+```
diff --git a/README.md b/README.md
index 53134ba..f266d9b 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,7 @@
This repo
- defines unified contract for dataset for purposes such as training, visualization, and exploration, via `DatasetManifest` and `ImageDataManifest`.
+- provides many commonly used dataset operation, such as sample dataset by categories, sample few-shot sub-dataset, sample dataset by ratios, train-test split, merge dataset, etc. (See here [Link](vision_datasets/common/data_manifest.py) for available utilities)
- provides API for organizing and accessing datasets, via `DatasetHub`
Currently, six `basic` types of data are supported:
@@ -130,11 +131,9 @@ Training with PyTorch is easy. After instantiating a `ManifestDataset`, simply p
### Managing datasets with DatasetHub on cloud storage
-If you are using `DatasetHub` to manage datasets in cloud storage, we recommend zipping (with uncompressed mode) the images into one or multiple zip files before uploading it and update the file path in index files to be like `[email protected]` from `train\1.jpg`. You can do it with `7zip` (set compression level to 'store') on Windows or [zip](https://superuser.com/questions/411394/zip-files-without-compression) command on Linux.
+If you are using `DatasetHub` to manage datasets in cloud storage, we recommend zipping (with uncompressed mode) the images into one or multiple zip files before uploading it and update the file path in index files to be like `train.zip@train/1.jpg` from `train/1.jpg`. For coco format, you can specify `"file_name": "train/1.jpg"` and `"zip_file": "train.zip"`. You can do it with `7zip` (set compression level to 'store') on Windows or [zip](https://superuser.com/questions/411394/zip-files-without-compression) command on Linux.
If you upload folders of images directly to cloud storage:
- you will need to list all images in `"files_for_local_usage"`, which can be millions of entries
- downloading images one by one (even with multithreading) is much slower than downloading a few zip files
-
-One more thing is that sometimes when you create a zip file `train.zip`, you might find out that there is only one `train` folder in the zip. This will fail the file loading if the path is `[email protected]`, as the image is actually at `train.zip@train\1.jpg`. It is ok to have this extra layer but please make sure the path is correct.
diff --git a/setup.py b/setup.py
index 1e0cccc..83f0874 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
import setuptools
from os import path
-VERSION = '0.2.11'
+VERSION = '0.2.12'
# Get the long description from the README file
here = path.abspath(path.dirname(__file__))
diff --git a/vision_datasets/common/data_manifest.py b/vision_datasets/common/data_manifest.py
index 955870e..828081a 100644
--- a/vision_datasets/common/data_manifest.py
+++ b/vision_datasets/common/data_manifest.py
@@ -6,25 +6,17 @@
import random
from typing import List, Dict
from urllib import parse as urlparse
-
from PIL import Image
import numpy as np
-from .constants import DatasetTypes, Formats, BBoxFormat
+from .constants import BBoxFormat, DatasetTypes, Formats
from .dataset_info import MultiTaskDatasetInfo
from .util import is_url, FileReader
logger = logging.getLogger(__name__)
-def purge_line(line):
- if not isinstance(line, str):
- line = line.decode('utf-8')
-
- return line.strip()
-
-
-def _purge_path(path):
+def _unix_path(path):
assert path is not None
return path.replace('\\', '/')
@@ -45,38 +37,39 @@ def _construct_full_path_generator(dirs: List[str]):
if dirs:
def full_path_func(path):
to_join = [x for x in dirs + [path] if x]
- return _purge_path(os.path.join(*to_join))
+ return _unix_path(os.path.join(*to_join))
else:
- full_path_func = _purge_path
+ full_path_func = _unix_path
return full_path_func
-def _add_path_to_sas(sas, path_or_dir):
- assert sas
- if not path_or_dir:
- return sas
+def _construct_full_url_generator(container_sas: str):
+ if not container_sas:
+ return _unix_path
- parts = urlparse.urlparse(sas)
- path = _purge_path(os.path.join(parts[2], path_or_dir))
- path = path.replace('.zip@', '/') # cannot read from zip file with path targeting a url
- url = urlparse.urlunparse((parts[0], parts[1], path, parts[3], parts[4], parts[5]))
- return url
+ def add_path_to_url(url, path_or_dir):
+ assert url
+ if not path_or_dir:
+ return url
-def _construct_full_sas_generator(container_sas: str):
- if not container_sas:
- return _purge_path
+ parts = urlparse.urlparse(url)
+ path = _unix_path(os.path.join(parts[2], path_or_dir))
+ url = urlparse.urlunparse((parts[0], parts[1], path, parts[3], parts[4], parts[5]))
+
+ return url
def func(file_path):
- return _add_path_to_sas(container_sas, file_path)
+ file_path = file_path.replace('.zip@', '/') # cannot read from zip file with path targeting a url
+ return add_path_to_url(container_sas, file_path)
return func
-def _construct_full_sas_or_path_generator(container_sas_or_root_dir, prefix_dir=None):
+def _construct_full_url_or_path_generator(container_sas_or_root_dir, prefix_dir=None):
if container_sas_or_root_dir and is_url(container_sas_or_root_dir):
- return lambda path: _construct_full_sas_generator(container_sas_or_root_dir)(_construct_full_path_generator([prefix_dir])(path))
+ return lambda path: _construct_full_url_generator(container_sas_or_root_dir)(_construct_full_path_generator([prefix_dir])(path))
else:
return lambda path: _construct_full_path_generator([container_sas_or_root_dir, prefix_dir])(path)
@@ -89,7 +82,7 @@ class ImageDataManifest:
label_file_paths is a list of paths that have the same format with img_path
"""
- def __init__(self, id, img_path, width, height, labels, label_file_paths=None):
+ def __init__(self, id, img_path, width, height, labels, label_file_paths=None, labels_extra_info: dict = None):
"""
Args:
id (int or str): image id
@@ -104,13 +97,17 @@ def __init__(self, id, img_path, width, height, labels, label_file_paths=None):
multitask: dict[task, labels];
image_matting: [mask1, mask2, ...], each mask is a 2D numpy array that has the same width and height with the image.
label_file_paths (list): list of paths of the image label files. "label_file_paths" only works for image matting task.
+ labels_extra_info (dict[string, list]]): extra information about this image's labels
+ Examples: 'iscrowd'
"""
+
self.id = id
self.img_path = img_path
self.width = width
self.height = height
self._labels = labels
self.label_file_paths = label_file_paths
+ self.labels_extra_info = labels_extra_info or {}
@property
def labels(self):
@@ -163,18 +160,22 @@ def create_dataset_manifest(dataset_info, usage: str, container_sas_or_root_dir:
if dataset_info.data_format == Formats.IRIS:
return IrisManifestAdaptor.create_dataset_manifest(dataset_info, usage, container_sas_or_root_dir)
if dataset_info.data_format == Formats.COCO:
- get_full_sas_or_path = _construct_full_sas_or_path_generator(container_sas_or_root_dir, dataset_info.root_folder)('')
+ container_sas_or_root_dir = _construct_full_url_or_path_generator(container_sas_or_root_dir, dataset_info.root_folder)('')
if dataset_info.type == DatasetTypes.MULTITASK:
coco_file_by_task = {k: sub_taskinfo.index_files.get(usage) for k, sub_taskinfo in dataset_info.sub_task_infos.items()}
data_type_by_task = {k: sub_taskinfo.type for k, sub_taskinfo in dataset_info.sub_task_infos.items()}
- return CocoManifestAdaptor.create_dataset_manifest(coco_file_by_task, data_type_by_task, get_full_sas_or_path)
+ return CocoManifestAdaptor.create_dataset_manifest(coco_file_by_task, data_type_by_task, container_sas_or_root_dir)
- return CocoManifestAdaptor.create_dataset_manifest(dataset_info.index_files.get(usage), dataset_info.type, get_full_sas_or_path)
+ return CocoManifestAdaptor.create_dataset_manifest(dataset_info.index_files.get(usage), dataset_info.type, container_sas_or_root_dir)
raise RuntimeError(f'{dataset_info.data_format} not supported yet.')
@property
def is_multitask(self):
+ """
+ is this dataset multi-task dataset or not
+ """
+
return isinstance(self.data_type, dict)
def __len__(self):
@@ -463,6 +464,7 @@ def sample_few_shots_subset_greedy(self, num_min_samples_per_class, random_seed=
Raises:
RuntimeError if it couldn't find num_min_samples_per_class samples for all classes
"""
+
assert num_min_samples_per_class > 0
images = list(self.images)
rng = random.Random(random_seed)
@@ -643,7 +645,7 @@ def create_dataset_manifest(dataset_info, usage: str, container_sas_or_root_dir:
file_reader = FileReader()
dataset_info = copy.deepcopy(dataset_info)
- get_full_sas_or_path = _construct_full_sas_or_path_generator(container_sas_or_root_dir, dataset_info.root_folder)
+ get_full_sas_or_path = _construct_full_url_or_path_generator(container_sas_or_root_dir, dataset_info.root_folder)
max_index = 0
labelmap = None
@@ -652,7 +654,7 @@ def create_dataset_manifest(dataset_info, usage: str, container_sas_or_root_dir:
else:
# read tag names
with file_reader.open(get_full_sas_or_path(dataset_info.labelmap)) as file_in:
- labelmap = [purge_line(line) for line in file_in if purge_line(line) != '']
+ labelmap = [IrisManifestAdaptor._purge_line(line) for line in file_in if IrisManifestAdaptor._purge_line(line) != '']
# read image width and height
img_wh = None
@@ -663,7 +665,7 @@ def create_dataset_manifest(dataset_info, usage: str, container_sas_or_root_dir:
images = []
with file_reader.open(get_full_sas_or_path(dataset_info.index_files[usage])) as file_in:
for line in file_in:
- line = purge_line(line)
+ line = IrisManifestAdaptor._purge_line(line)
if not line:
continue
parts = line.rsplit(' ', maxsplit=1) # assumption: only the image file path can have spaces
@@ -692,7 +694,7 @@ def _load_img_width_and_height(file_reader, file_path):
img_wh = dict()
with file_reader.open(file_path) as file_in:
for line in file_in:
- line = purge_line(line)
+ line = IrisManifestAdaptor._purge_line(line)
if line == '':
continue
location, w, h = line.split()
@@ -704,7 +706,7 @@ def _load_img_width_and_height(file_reader, file_path):
def _load_detection_labels_from_file(file_reader, image_label_file_path):
with file_reader.open(image_label_file_path) as label_in:
- label_lines = [purge_line(line) for line in label_in]
+ label_lines = [IrisManifestAdaptor._purge_line(line) for line in label_in]
img_labels = []
for label_line in label_lines:
@@ -717,6 +719,13 @@ def _load_detection_labels_from_file(file_reader, image_label_file_path):
return img_labels
+ @staticmethod
+ def _purge_line(line):
+ if not isinstance(line, str):
+ line = line.decode('utf-8')
+
+ return line.strip()
+
class CocoManifestAdaptor:
"""
@@ -733,6 +742,7 @@ def create_dataset_manifest(coco_file_path_or_url, data_type, container_sas_or_r
data_type (str or dict): type of dataset. dict if multitask
container_sas_or_root_dir (str): container sas if resources are store in blob container, or a local dir
"""
+
if not coco_file_path_or_url:
return None
@@ -745,34 +755,39 @@ def create_dataset_manifest(coco_file_path_or_url, data_type, container_sas_or_r
return _generate_multitask_dataset_manifest(dataset_manifest_by_task)
- get_full_sas_or_path = _construct_full_sas_or_path_generator(container_sas_or_root_dir)
+ get_full_sas_or_path = _construct_full_url_or_path_generator(container_sas_or_root_dir)
file_reader = FileReader()
# read image index files
- with file_reader.open(coco_file_path_or_url if is_url(coco_file_path_or_url) else get_full_sas_or_path(coco_file_path_or_url)) as file_in:
+ coco_file_path_or_url = coco_file_path_or_url if is_url(coco_file_path_or_url) else get_full_sas_or_path(coco_file_path_or_url)
+ with file_reader.open(coco_file_path_or_url) as file_in:
coco_manifest = json.load(file_in)
file_reader.close()
- images_by_id = {img['id']: ImageDataManifest(img['id'], get_full_sas_or_path(img['file_name']), img.get('width'), img.get('height'), []) for img in coco_manifest['images']}
- if data_type == DatasetTypes.IMCAP:
- for annotation in coco_manifest['annotations']:
- images_by_id[annotation['image_id']].labels.append(annotation['caption'])
- images = [x for x in images_by_id.values()]
- return DatasetManifest(images, None, data_type)
+ def get_file_path(info_dict: dict, file_name):
+ zip_prefix = info_dict.get('zip_file', '')
+ if zip_prefix:
+ zip_prefix += '@'
- if data_type == DatasetTypes.IMAGE_TEXT_MATCHING:
- for annotation in coco_manifest['annotations']:
- images_by_id[annotation['image_id']].labels.append((annotation['text'], annotation['match']))
- images = [x for x in images_by_id.values()]
- return DatasetManifest(images, None, data_type)
+ return get_full_sas_or_path(zip_prefix + file_name)
- if data_type == DatasetTypes.IMAGE_MATTING:
+ images_by_id = {img['id']: ImageDataManifest(img['id'], get_file_path(img, img['file_name']), img.get('width'), img.get('height'), [], {}) for img in coco_manifest['images']}
+ process_labels_without_categories = None
+ if data_type == DatasetTypes.IMCAP:
+ def process_labels_without_categories(image):
+ image.labels.append(annotation['caption'])
+ elif data_type == DatasetTypes.IMAGE_TEXT_MATCHING:
+ def process_labels_without_categories(image):
+ image.labels.append((annotation['text'], annotation['match']))
+ elif data_type == DatasetTypes.IMAGE_MATTING:
+ def process_labels_without_categories(image):
+ image.label_file_paths = image.label_file_paths or []
+ image.label_file_paths.append(get_file_path(annotation, annotation['label']))
+
+ if process_labels_without_categories:
for annotation in coco_manifest['annotations']:
- if images_by_id[annotation['image_id']].label_file_paths:
- images_by_id[annotation['image_id']].label_file_paths.append(get_full_sas_or_path(annotation['label']))
- else:
- images_by_id[annotation['image_id']].label_file_paths = [get_full_sas_or_path(annotation['label'])]
+ process_labels_without_categories(images_by_id[annotation['image_id']])
images = [x for x in images_by_id.values()]
return DatasetManifest(images, None, data_type)
@@ -786,14 +801,19 @@ def create_dataset_manifest(coco_file_path_or_url, data_type, container_sas_or_r
for annotation in coco_manifest['annotations']:
c_id = label_id_to_pos[annotation['category_id']]
+ img = images_by_id[annotation['image_id']]
if 'bbox' in annotation:
bbox = annotation['bbox']
if bbox_format == BBoxFormat.LTWH:
bbox = [bbox[0], bbox[1], bbox[0] + bbox[2], bbox[1] + bbox[3]]
label = [c_id] + bbox
+ img.labels_extra_info['iscrowd'] = img.labels_extra_info.get('iscrowd', [])
+ img.labels_extra_info['iscrowd'].append(annotation.get('iscrowd', 0))
else:
label = c_id
- images_by_id[annotation['image_id']].labels.append(label)
+
+ img.labels.append(label)
+
images = [x for x in images_by_id.values()]
images.sort(key=lambda x: x.id)
diff --git a/vision_datasets/common/manifest_dataset.py b/vision_datasets/common/manifest_dataset.py
index d4196fb..ba450bf 100644
--- a/vision_datasets/common/manifest_dataset.py
+++ b/vision_datasets/common/manifest_dataset.py
@@ -30,6 +30,7 @@ def __init__(self, dataset_info: BaseDatasetInfo, dataset_manifest: DatasetManif
coordinates (str): 'relative' or 'absolute', indicating the desired format of the bboxes returned.
dataset_resources (str): disposable resources associated with this dataset
"""
+
assert dataset_manifest is not None
assert coordinates in ['relative', 'absolute']
@@ -53,7 +54,7 @@ def _get_single_item(self, index):
target = image_manifest.labels
if self.coordinates == 'relative':
w, h = image.size
- target = ManifestDataset._box_convert_to_relative(image_manifest.labels, w, h, self.dataset_info)
+ target = ManifestDataset._convert_box_to_relative(image_manifest.labels, w, h, self.dataset_info)
return image, target, str(index)
@@ -70,11 +71,12 @@ def _load_image(self, filepath):
raise
@staticmethod
- def _box_convert_to_relative(target, w, h, dataset_info):
+ def _convert_box_to_relative(target, w, h, dataset_info):
# Convert absolute coordinates to relative coordinates.
# Example: for image with size (200, 200), (1, 100, 100, 200, 200) => (1, 0.5, 0.5, 1.0, 1.0)
if dataset_info.type == DatasetTypes.MULTITASK:
- return {task_name: ManifestDataset._box_convert_to_relative(task_target, w, h, dataset_info.sub_task_infos[task_name]) for task_name, task_target in target.items()}
+ return {task_name: ManifestDataset._convert_box_to_relative(task_target, w, h, dataset_info.sub_task_infos[task_name]) for task_name, task_target in target.items()}
+
if dataset_info.type == DatasetTypes.OD:
return [[t[0], t[1] / w, t[2] / h, t[3] / w, t[4] / h] for t in target]
diff --git a/vision_datasets/pytorch/torch_dataset.py b/vision_datasets/pytorch/torch_dataset.py
index 5620dc9..4588ef8 100644
--- a/vision_datasets/pytorch/torch_dataset.py
+++ b/vision_datasets/pytorch/torch_dataset.py
@@ -1,5 +1,7 @@
import logging
+from ..common.manifest_dataset import ManifestDataset
+
from .dataset import Dataset
logger = logging.getLogger(__name__)
@@ -10,7 +12,7 @@ class TorchDataset(Dataset):
Dataset class used for pytorch training
"""
- def __init__(self, manifest_dataset, transform=None):
+ def __init__(self, manifest_dataset: ManifestDataset, transform=None):
Dataset.__init__(self, transform)
self.dataset = manifest_dataset
diff --git a/vision_datasets/resources/dataset_hub.py b/vision_datasets/resources/dataset_hub.py
index 078c47c..a44889d 100644
--- a/vision_datasets/resources/dataset_hub.py
+++ b/vision_datasets/resources/dataset_hub.py
@@ -25,7 +25,7 @@ def __init__(self, dataset_json_str: Union[str, list]):
self.dataset_registry = DatasetRegistry(dataset_json_str)
- def create_manifest_dataset(self, container_sas: str, local_dir: str, name: str, version: int = None, usage: str = Usages.TRAIN_PURPOSE, coordinates: str = 'relative',
+ def create_manifest_dataset(self, container_sas: str, local_dir: str, name: str, version: int = None, usage: Union[str, List] = Usages.TRAIN_PURPOSE, coordinates: str = 'relative',
few_shot_samples_per_class=None, rnd_seed=0):
"""Create manifest dataset.
If local_dir is provided, manifest_dataset consumes data from local disk. If data not present on local disk, it will be automatically downloaded.
| first commit
first commit to open source the dataset access and management repo
| 2022-06-15T19:08:34 | 0.0 | [] | [] |
|||
microsoft/vision-datasets | microsoft__vision-datasets-31 | f690b31ac8d3bdff5acfa7d93c34d0e96a52bd11 | diff --git a/setup.py b/setup.py
index 777645b..1e0cccc 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
import setuptools
from os import path
-VERSION = '0.2.10'
+VERSION = '0.2.11'
# Get the long description from the README file
here = path.abspath(path.dirname(__file__))
diff --git a/vision_datasets/commands/dataset_check.py b/vision_datasets/commands/dataset_check.py
index 9870fb8..8bbce7c 100644
--- a/vision_datasets/commands/dataset_check.py
+++ b/vision_datasets/commands/dataset_check.py
@@ -30,7 +30,7 @@ def check_dataset(dataset):
for idx in random.sample(range(len(dataset)), min(10, len(dataset))):
show_img(dataset[idx])
- if dataset.dataset_info.type in [DatasetTypes.IMCAP, DatasetTypes.MULTITASK]:
+ if dataset.dataset_info.type in [DatasetTypes.IMCAP, DatasetTypes.MULTITASK, DatasetTypes.IMAGE_TEXT_MATCHING, DatasetTypes.IMAGE_MATTING] or not dataset.labels:
return
n_imgs_by_class = {x: 0 for x in range(len(dataset.labels))}
diff --git a/vision_datasets/common/__init__.py b/vision_datasets/common/__init__.py
index e12065b..a5bafb4 100644
--- a/vision_datasets/common/__init__.py
+++ b/vision_datasets/common/__init__.py
@@ -1,7 +1,8 @@
-from .manifest_dataset import ManifestDataset
+from .manifest_dataset import ManifestDataset, DetectionAsClassificationByCroppingDataset, DetectionAsClassificationIgnoreBoxesDataset, VisionAsImageTextDataset
from .dataset_registry import DatasetRegistry
from .dataset_info import BaseDatasetInfo, DatasetInfo, MultiTaskDatasetInfo
from .data_manifest import DatasetManifest, CocoManifestAdaptor, IrisManifestAdaptor
from .constants import Usages, DatasetTypes
-__all__ = ['ManifestDataset', 'DatasetRegistry', 'BaseDatasetInfo', 'DatasetInfo', 'MultiTaskDatasetInfo', 'Usages', 'DatasetTypes', 'DatasetManifest', 'CocoManifestAdaptor', 'IrisManifestAdaptor']
+__all__ = ['ManifestDataset', 'DatasetRegistry', 'BaseDatasetInfo', 'DatasetInfo', 'MultiTaskDatasetInfo', 'Usages', 'DatasetTypes', 'DatasetManifest', 'CocoManifestAdaptor', 'IrisManifestAdaptor',
+ 'DetectionAsClassificationIgnoreBoxesDataset', 'DetectionAsClassificationByCroppingDataset', 'VisionAsImageTextDataset']
diff --git a/vision_datasets/common/manifest_dataset.py b/vision_datasets/common/manifest_dataset.py
index fe698f1..d4196fb 100644
--- a/vision_datasets/common/manifest_dataset.py
+++ b/vision_datasets/common/manifest_dataset.py
@@ -331,3 +331,57 @@ def zoom_box(left, t, r, b, img_w, img_h, ratio_lower_b, ratio_upper_b, rnd: ran
logger.log(level, f'[zoom_box] new box {left}, {t}, {r}, {b}, with {w_ratio}, {h_ratio}, out of ({img_w}, {img_h})')
return left, t, r, b
+
+
+class VisionAsImageTextDataset(BaseDataset):
+ """
+    Consume traditional vision datasets of type [DatasetTypes.IC_MULTICLASS, DatasetTypes.IC_MULTILABEL, DatasetTypes.OD] as a DatasetTypes.IMAGE_TEXT_MATCHING dataset.
+ For a certain image, negative image-text pairs are generated from the labels that this image does not possess.
+ """
+
+ def __init__(self, dataset: BaseDataset, neg_to_pos_ratio=0, text_aug=None, rnd_seed=0):
+ """
+ Args:
+ dataset: dataset of expected type
+ neg_to_pos_ratio: ratio of negative against positive image text pairs
+ text_aug: a func that augments a string, i.e., a class name, e.g. dog => a photo of dog
+ rnd_seed: random seed for choosing negative class names for negative image text pairs
+ """
+ assert dataset is not None
+ assert dataset.dataset_info.type in [DatasetTypes.IC_MULTICLASS, DatasetTypes.IC_MULTILABEL, DatasetTypes.OD]
+ assert neg_to_pos_ratio >= 0
+ dataset_info = deepcopy(dataset.dataset_info)
+ dataset_info.type = DatasetTypes.IMAGE_TEXT_MATCHING
+
+ super().__init__(dataset_info)
+ self._dataset = dataset
+ self._negative_pair_ratio = neg_to_pos_ratio
+ self._text_aug = text_aug or (lambda x: x)
+ self._rand = random.Random(rnd_seed)
+
+ @property
+ def labels(self):
+ return None
+
+ def __len__(self):
+ return len(self._dataset)
+
+ def _get_single_item(self, index):
+ img, target, _ = self._dataset[index]
+ pos_class_indices = [x[0] for x in target] if self._dataset.dataset_info.type == DatasetTypes.OD else target
+ pos_class_names = [self._dataset.labels[x] for x in pos_class_indices]
+ labels = [(self._text_aug(class_name), 1) for class_name in pos_class_names]
+ if self._negative_pair_ratio > 0:
+ neg_class_indices = set(range(len(self._dataset.labels))) - set(pos_class_indices)
+ neg_class_names = [self._dataset.labels[x] for x in neg_class_indices]
+ if neg_class_names:
+ down_sample_ratio = self._negative_pair_ratio * len(pos_class_names) / len(neg_class_names)
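+                # Bernoulli down-sampling: each negative is kept with probability down_sample_ratio, so the expected number of negatives is ~ neg_to_pos_ratio * len(pos_class_names)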
+ if down_sample_ratio < 1:
+ neg_class_names = [ncn for ncn in neg_class_names if self._rand.random() < down_sample_ratio]
+
+ neg_labels = [(self._text_aug(class_name), 0) for class_name in neg_class_names]
+ labels += neg_labels
+ return img, labels, str(index)
+
+ def close(self):
+ self._dataset.close()
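A usage sketch for the wrapper above (illustrative only; `od_dataset` stands in for any existing `ManifestDataset` of type `object_detection`):

```python
itm_dataset = VisionAsImageTextDataset(
    od_dataset,
    neg_to_pos_ratio=3,                          # ~3 negative texts per positive, in expectation
    text_aug=lambda name: f'a photo of {name}',  # class name -> prompt
    rnd_seed=42,
)
img, labels, _ = itm_dataset[0]  # labels like [('a photo of dog', 1), ('a photo of cat', 0), ...]
```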
| first commit
first commit to open source the dataset access and management repo
| 2022-04-15T21:40:39 | 0.0 | [] | [] |
|||
microsoft/vision-datasets | microsoft__vision-datasets-30 | f5b3d6fbd8c64d623c3fef526d51abc1275ccecd | diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 3c73c56..1098da5 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -16,7 +16,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.7", "3.8", "3.9", "3.10"]
+ python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
steps:
- uses: actions/checkout@v3
@@ -33,8 +33,8 @@ jobs:
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
- # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
- flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+ # exit-zero treats all errors as warnings.
+ flake8 . --count --exit-zero --max-complexity=10 --statistics
- name: Test with pytest
run: |
pytest
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..07bdcb2
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,6 @@
+numpy>=1.18.3
+Pillow>=6.2.2
+requests>=2.23.0
+tenacity>=6.2.0
+tqdm
+torch>=1.7.1
diff --git a/setup.py b/setup.py
index e9a6b4e..a6c3334 100644
--- a/setup.py
+++ b/setup.py
@@ -35,6 +35,7 @@
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
+ 'Programming Language :: Python :: 3.10',
],
extras_require={'run': ['torch>=1.6.0', 'torchvision>=0.7.0']},
entry_points={
| first commit
first commit to open source the dataset access and management repo
| 2022-04-04T22:09:58 | 0.0 | [] | [] |
|||
microsoft/vision-datasets | microsoft__vision-datasets-28 | 74419835ea9ebb63012775df6fbc879a34fc8d09 | diff --git a/setup.py b/setup.py
index ac9b62c..e9a6b4e 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
import setuptools
from os import path
-VERSION = '0.2.8'
+VERSION = '0.2.9'
# Get the long description from the README file
here = path.abspath(path.dirname(__file__))
diff --git a/vision_datasets/common/manifest_dataset.py b/vision_datasets/common/manifest_dataset.py
index e98eb3b..fe698f1 100644
--- a/vision_datasets/common/manifest_dataset.py
+++ b/vision_datasets/common/manifest_dataset.py
@@ -164,9 +164,8 @@ def __init__(self, detection_dataset: ManifestDataset, box_aug_params: dict = No
self._n_booxes = 0
self._box_abs_id_to_img_rel_id = {}
- for img_id, x in enumerate(self._dataset):
- boxes = x[1]
- for i in range(len(boxes)):
+ for img_id, x in enumerate(self._dataset.dataset_manifest.images):
+ for i in range(len(x.labels)):
self._box_abs_id_to_img_rel_id[self._n_booxes] = (img_id, i)
self._n_booxes += 1
self._box_aug_params = box_aug_params
@@ -184,10 +183,7 @@ def _get_single_item(self, index):
c_id, left, t, r, b = boxes[box_rel_idx]
if self._dataset.coordinates == 'relative':
w, h = img.size
- left *= w
- t *= h
- r *= w
- b *= h
+ left, t, r, b = left * w, t * h, r * w, b * h
box_img = DetectionAsClassificationByCroppingDataset.crop(img, left, t, r, b, self._box_aug_params, self._box_aug_rnd)
return box_img, [c_id], str(index)
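The fix above can be sanity-checked with a tiny worked example (numbers are illustrative):

```python
w, h = 640, 480                        # image size
left, t, r, b = 0.25, 0.5, 0.75, 1.0   # box in relative coordinates
left, t, r, b = left * w, t * h, r * w, b * h
# -> (160.0, 240.0, 480.0, 480.0): the absolute pixel box passed to crop()
```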
| first commit
first commit to open source the dataset access and management repo
| 2022-03-24T16:16:34 | 0.0 | [] | [] |
|||
microsoft/vision-datasets | microsoft__vision-datasets-27 | 61a7ad67d303b1ff057dd8a71fab912d7e028bef | diff --git a/.gitignore b/.gitignore
index ddb8437..b6e8afa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -85,7 +85,7 @@ celerybeat-schedule
.env
.venv
env/
-venv/
+*venv/
ENV/
env.bak/
venv.bak/
diff --git a/setup.py b/setup.py
index 7333211..ac9b62c 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
import setuptools
from os import path
-VERSION = '0.2.7'
+VERSION = '0.2.8'
# Get the long description from the README file
here = path.abspath(path.dirname(__file__))
diff --git a/tox.ini b/tox.ini
index 6c15c4c..170c523 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[flake8]
-exclude = .git,build,dist,venv,my_venv
+exclude = .git,build,dist,*venv
max-line-length = 200
[pytest]
diff --git a/vision_datasets/commands/converter_od_to_ic.py b/vision_datasets/commands/converter_od_to_ic.py
index 5809ef1..5fe7a6e 100644
--- a/vision_datasets/commands/converter_od_to_ic.py
+++ b/vision_datasets/commands/converter_od_to_ic.py
@@ -3,10 +3,10 @@
import multiprocessing
import os
import pathlib
+import shutil
-from tqdm import tqdm
from vision_datasets import DatasetHub, Usages
-from vision_datasets.common.manifest_dataset import DetectionAsClassificationDataset
+from vision_datasets.common.manifest_dataset import DetectionAsClassificationByCroppingDataset
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
@@ -25,47 +25,34 @@ def create_arg_parser():
help='lower and bound of the ratio that box height and width can expand (>1) or shrink (0-1), during cropping, e.g, 0.8/1.2')
parser.add_argument('-sb', '--shift_relative_bounds', type=str, required=False,
help='lower/upper bounds of relative ratio wrt box width and height that a box can shift, during cropping, e.g., "-0.3/0.1"')
+ parser.add_argument('-np', '--n_copies', type=int, required=False, default=1, help='number of copies per bbox')
+    parser.add_argument('-s', '--rnd_seed', type=int, required=False, help='random seed for box expansion/shrink/shifting.', default=0)
+ parser.add_argument('--zip', dest='zip', action='store_true', help='Flag to add zip prefix to the image paths.')
return parser
def process_phase(params):
args, aug_params, phase = params
- categories = None
- images = []
- annotations = []
logger.info(f'download dataset manifest for {args.name}...')
dataset_resources = DatasetHub(pathlib.Path(args.reg_json_path).read_text())
dataset = dataset_resources.create_manifest_dataset(args.sas, args.local_folder, args.name, usage=phase, coordinates='absolute')
if not dataset:
- logger.info(f'Skipping phase {phase}.')
+ logger.info(f'Skipping non-existent phase {phase}.')
return
- img_folder = os.path.join(args.output_folder, phase)
- if not os.path.exists(img_folder):
- os.mkdir(img_folder)
-
- if not categories:
- categories = []
- for c_name in dataset.labels:
- categories.append({'id': len(categories) + 1, 'name': c_name})
-
logger.info(f'start conversion for {args.name}...')
- ic_dataset = DetectionAsClassificationDataset(dataset, aug_params)
-
- for img, labels, idx in tqdm(ic_dataset, desc=f'convert for {phase}'):
- img_id = int(idx) + 1
- file_name = f'{idx}.{img.format}'
- img.save(os.path.join(img_folder, file_name), img.format)
- logger.log(logging.DEBUG, f'Saving to {os.path.join(img_folder, file_name)}')
- file_name = f'{phase}.zip@{phase}/{file_name}'
- images.append({'id': img_id, 'file_name': file_name, 'width': img.width, 'height': img.height})
- annotations.append({'id': len(annotations) + 1, 'image_id': img_id, 'category_id': labels[0] + 1})
+ ic_dataset = DetectionAsClassificationByCroppingDataset(dataset, aug_params)
+ manifest = ic_dataset.generate_manifest(dir=phase, n_copies=args.n_copies)
+ if args.zip:
+ for img in manifest.images:
+ img.img_path = f'{phase}.zip@{img.img_path}'
+ coco = manifest.generate_coco_annotations()
with open(f'{args.output_folder}/{phase}.json', 'w') as coco_out:
- coco_out.write(json.dumps({'images': images, 'categories': categories, 'annotations': annotations}, indent=2))
+ coco_out.write(json.dumps(coco, indent=2))
+ shutil.move(f'{phase}', f'{args.output_folder}/', copy_function=shutil.copytree)
def main():
diff --git a/vision_datasets/common/dataset_info.py b/vision_datasets/common/dataset_info.py
index 23cfe8c..7134d45 100644
--- a/vision_datasets/common/dataset_info.py
+++ b/vision_datasets/common/dataset_info.py
@@ -20,9 +20,9 @@ class BaseDatasetInfo:
def __init__(self, dataset_info_dict):
self.name = dataset_info_dict['name']
- self.version = dataset_info_dict['version']
+ self.version = dataset_info_dict.get('version', 1)
self.type = dataset_info_dict['type']
- self.root_folder = dataset_info_dict['root_folder']
+ self.root_folder = dataset_info_dict.get('root_folder')
self.description = dataset_info_dict.get('description', '')
self.data_format = dataset_info_dict.get('format', Formats.IRIS)
@@ -30,7 +30,8 @@ def __init__(self, dataset_info_dict):
class DatasetInfo(BaseDatasetInfo):
def __init__(self, dataset_info_dict):
- assert dataset_info_dict['type'] in DatasetTypes.VALID_TYPES
+
+ assert dataset_info_dict.get('type') in DatasetTypes.VALID_TYPES
assert not DatasetInfoFactory.is_multitask(dataset_info_dict['type'])
super(DatasetInfo, self).__init__(dataset_info_dict)
@@ -39,9 +40,7 @@ def __init__(self, dataset_info_dict):
for usage in [Usages.TRAIN_PURPOSE, Usages.VAL_PURPOSE, Usages.TEST_PURPOSE]:
if usage in dataset_info_dict:
self.index_files[usage] = dataset_info_dict[usage]['index_path']
- self.files_for_local_usage[usage] = dataset_info_dict[usage]['files_for_local_usage']
-
- self.box_out_of_image = dataset_info_dict.get('box_out_of_image', False)
+ self.files_for_local_usage[usage] = dataset_info_dict[usage].get('files_for_local_usage', [])
# Below are needed for iris format only. As both image h and w and labelmaps are included in the coco annotation files
self.labelmap = dataset_info_dict.get('labelmap')
diff --git a/vision_datasets/common/manifest_dataset.py b/vision_datasets/common/manifest_dataset.py
index c7f553e..e98eb3b 100644
--- a/vision_datasets/common/manifest_dataset.py
+++ b/vision_datasets/common/manifest_dataset.py
@@ -1,14 +1,16 @@
import logging
import os.path
import pathlib
+from abc import ABC, abstractmethod
from copy import deepcopy
import random
from PIL import Image
+from tqdm import tqdm
from .base_dataset import BaseDataset
from .constants import DatasetTypes
from .dataset_info import BaseDatasetInfo
-from .data_manifest import DatasetManifest
+from .data_manifest import DatasetManifest, ImageDataManifest
from .image_loader import PILImageLoader
from .util import FileReader
@@ -28,7 +30,7 @@ def __init__(self, dataset_info: BaseDatasetInfo, dataset_manifest: DatasetManif
coordinates (str): 'relative' or 'absolute', indicating the desired format of the bboxes returned.
dataset_resources (str): disposable resources associated with this dataset
"""
- assert dataset_manifest
+ assert dataset_manifest is not None
assert coordinates in ['relative', 'absolute']
super().__init__(dataset_info)
@@ -79,31 +81,87 @@ def _box_convert_to_relative(target, w, h, dataset_info):
return target
-class DetectionAsClassificationDataset(BaseDataset):
+class DetectionAsClassificationBaseDataset(BaseDataset, ABC):
+ def __init__(self, detection_dataset: ManifestDataset, dataset_type: DatasetTypes):
+ """
+ Args:
+ detection_dataset: the detection dataset where images are cropped as classification samples
+ """
+
+ assert detection_dataset is not None
+ assert detection_dataset.dataset_info.type == DatasetTypes.OD
+ assert DatasetTypes.is_classification(dataset_type)
+
+ dataset_info = deepcopy(detection_dataset.dataset_info)
+ dataset_info.type = dataset_type
+ super().__init__(dataset_info)
+
+ self._dataset = detection_dataset
+
+ def close(self):
+ self._dataset.close()
+
+ @property
+ def labels(self):
+ return self._dataset.labels
+
+ @abstractmethod
+ def generate_manifest(self, **kwargs):
+ pass
+
+
+class DetectionAsClassificationIgnoreBoxesDataset(DetectionAsClassificationBaseDataset):
+ """
+    Consume a detection dataset as a multilabel classification dataset by simply ignoring the boxes. Duplicate classes for an image are merged into one, i.e., after conversion it does not matter
+    whether an image possesses 1 bbox of category 1 or 100 bboxes of category 1.
+ """
+
+ def __init__(self, detection_dataset: ManifestDataset):
+ super(DetectionAsClassificationIgnoreBoxesDataset, self).__init__(detection_dataset, DatasetTypes.IC_MULTILABEL)
+
+ def __len__(self):
+ return len(self._dataset)
+
+ def _get_single_item(self, index):
+ img, labels, idx_str = self._dataset[index]
+ labels = DetectionAsClassificationIgnoreBoxesDataset._od_to_ic_labels(labels)
+ return img, labels, idx_str
+
+ def generate_manifest(self, **kwargs):
+ """
+ Generate dataset manifest for the multilabel classification dataset converted from detection dataset by ignoring the bbox. Manifest will re-use the existing image paths
+ """
+
+ images = []
+ for img in self._dataset.dataset_manifest.images:
+ labels = DetectionAsClassificationIgnoreBoxesDataset._od_to_ic_labels(img.labels)
+ ic_img = ImageDataManifest(len(images) + 1, img.img_path, img.width, img.height, labels)
+ images.append(ic_img)
+ return DatasetManifest(images, self._dataset.labels, DatasetTypes.IC_MULTILABEL)
+
+ @staticmethod
+ def _od_to_ic_labels(labels):
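+        # e.g., detection labels [[2, 10, 10, 50, 50], [0, 5, 5, 20, 20], [2, 30, 30, 60, 60]] -> [0, 2]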
+ return sorted(list(set([label[0] for label in labels])))
+
+
+class DetectionAsClassificationByCroppingDataset(DetectionAsClassificationBaseDataset):
"""
Consume detection dataset as a classification dataset, i.e., sample from this dataset is a crop wrt a bbox in the detection dataset.
+
+ When box_aug_params is provided, different crops with randomness will be generated for the same bbox
"""
- def __init__(self, detection_dataset: ManifestDataset, box_aug_params: dict = None, local_cache_params: dict = None):
+ def __init__(self, detection_dataset: ManifestDataset, box_aug_params: dict = None):
"""
Args:
detection_dataset: the detection dataset where images are cropped as classification samples
box_aug_params (dict): params controlling box crop augmentation,
'zoom_ratio_bounds': the lower/upper bound of box zoom ratio wrt box width and height, e.g., (0.3, 1.5)
'shift_relative_bounds': lower/upper bounds of relative ratio wrt box width and height that a box can shift, e.g., (-0.3, 0.1)
- 'rnd_seed': rnd seed used for box crop zoom and shift
- local_cache_params(dict): params controlling local cache for crop access:
- 'dir': local dir for caching crops, it will be auto-created if not exist
- 'n_max_copies': max number of crops cached for each bbox
+ 'rnd_seed' [optional]: rnd seed used for box crop zoom and shift, default being 0
"""
- assert detection_dataset
- assert detection_dataset.dataset_info.type == DatasetTypes.OD
+ super().__init__(detection_dataset, DatasetTypes.IC_MULTICLASS)
- dataset_info = deepcopy(detection_dataset.dataset_info)
- dataset_info.type = DatasetTypes.IC_MULTICLASS
- super().__init__(dataset_info)
-
- self._dataset = detection_dataset
self._n_booxes = 0
self._box_abs_id_to_img_rel_id = {}
for img_id, x in enumerate(self._dataset):
@@ -113,30 +171,14 @@ def __init__(self, detection_dataset: ManifestDataset, box_aug_params: dict = No
self._n_booxes += 1
self._box_aug_params = box_aug_params
- self._box_aug_rnd = random.Random(self._box_aug_params['rnd_seed']) if box_aug_params else None
+ self._box_aug_rnd = random.Random(self._box_aug_params.get('rnd_seed', 0)) if box_aug_params else None
self._box_pick_rnd = random.Random(0)
- self._local_cache_params = local_cache_params
- if self._local_cache_params and not os.path.exists(self._local_cache_params['dir']):
- os.makedirs(self._local_cache_params['dir'])
-
- @property
- def labels(self):
- return self._dataset.labels
def __len__(self):
return self._n_booxes
def _get_single_item(self, index):
- local_img_path = None
img_idx, box_rel_idx = self._box_abs_id_to_img_rel_id[index]
- if self._local_cache_params:
- box_copy_idx = self._box_pick_rnd.randint(0, self._local_cache_params["max_n_copies"] - 1)
- box_img_id = f'{index}-{box_copy_idx}' if self._box_aug_params else str(index)
- local_img_path = pathlib.Path(self._local_cache_params['dir']) / box_img_id
- if os.path.exists(local_img_path):
- logger.log(logging.DEBUG, f'Found local cache for crop {index}! {box_copy_idx}')
- c_id = self._dataset.dataset_manifest.images[img_idx].labels[box_rel_idx][0]
- return Image.open(local_img_path), [c_id], str(index)
img, boxes, _ = self._dataset[img_idx]
c_id, left, t, r, b = boxes[box_rel_idx]
@@ -147,14 +189,9 @@ def _get_single_item(self, index):
r *= w
b *= h
- box_img = DetectionAsClassificationDataset.crop(img, left, t, r, b, self._box_aug_params, self._box_aug_rnd)
- if local_img_path:
- box_img.save(local_img_path, box_img.format)
+ box_img = DetectionAsClassificationByCroppingDataset.crop(img, left, t, r, b, self._box_aug_params, self._box_aug_rnd)
return box_img, [c_id], str(index)
- def close(self):
- self._dataset.close()
-
@staticmethod
def crop(img, left, t, r, b, aug_params=None, rnd: random.Random = None):
if aug_params:
@@ -172,6 +209,92 @@ def crop(img, left, t, r, b, aug_params=None, rnd: random.Random = None):
return crop_img
+ def generate_manifest(self, **kwargs):
+ """
+ Generate dataset manifest for the multiclass classification dataset converted from detection dataset by cropping bboxes as classification samples.
+ Crops will be saved into 'dir' for generating the manifest
+ Args:
+ 'dir'(str): directory where cropped images will be saved
+ 'n_copies'(int): number of image copies generated for each bbox
+ """
+
+ local_cache_params = {'dir': kwargs.get('dir', f'{self.dataset_info.name}-cropped-ic'), 'n_copies': kwargs.get('n_copies')}
+ cache_decor = LocalFolderCacheDecorator(self, local_cache_params)
+ return cache_decor.generate_manifest()
+
+
+class LocalFolderCacheDecorator(BaseDataset):
+ """
+ Decorate a dataset by caching data in a local folder, in local_cache_params['dir'].
+
+ """
+
+ def __init__(self, dataset: BaseDataset, local_cache_params: dict):
+ """
+ Args:
+ dataset: dataset that requires cache
+ local_cache_params(dict): params controlling local cache for image access:
+ 'dir': local dir for caching crops, it will be auto-created if not exist
+ [optional] 'n_copies': default being 1. if n_copies is greater than 1, then multiple copies will be cached and dataset will be n_copies times bigger
+ """
+
+ assert dataset is not None
+ assert local_cache_params
+ assert local_cache_params.get('dir')
+        local_cache_params['n_copies'] = local_cache_params.get('n_copies') or 1  # treat an absent or None value as a single copy
+ assert local_cache_params['n_copies'] >= 1, 'n_copies must be equal or greater than 1.'
+
+ super().__init__(dataset.dataset_info)
+
+ self._dataset = dataset
+ self._local_cache_params = local_cache_params
+ if not os.path.exists(self._local_cache_params['dir']):
+ os.makedirs(self._local_cache_params['dir'])
+
+ self._annotations = {}
+ self._paths = {}
+
+ @property
+ def labels(self):
+ return self._dataset.labels
+
+ def __len__(self):
+ return len(self._dataset) * self._local_cache_params['n_copies']
+
+ def _get_single_item(self, index):
+ annotations = self._annotations.get(index)
+ if annotations:
+ return Image.open(self._paths[index]), annotations, str(index)
+
+ idx_in_epoch = index % len(self._dataset)
+ img, annotations, _ = self._dataset[idx_in_epoch]
+ local_img_path = self._construct_local_image_path(index, img.format)
+ img.save(local_img_path, img.format)
+ self._annotations[index] = annotations
+ self._paths[index] = local_img_path
+
+ return img, annotations, str(index)
+
+ def _construct_local_image_path(self, img_idx, img_format):
+ return pathlib.Path(self._local_cache_params['dir']) / f'{img_idx}.{img_format}'
+
+ def generate_manifest(self):
+ """
+ Generate dataset manifest for the cached dataset.
+ """
+
+ images = []
+ for idx in tqdm(range(len(self)), desc='Generating manifest...'):
+            img, labels, _ = self._get_single_item(idx)  # make sure the item is generated and cached locally before its path and size are read below
+ width, height = img.size
+ image = ImageDataManifest(len(images) + 1, str(self._paths[idx].as_posix()), width, height, labels)
+ images.append(image)
+
+ return DatasetManifest(images, self.labels, self._dataset.dataset_info.type)
+
+ def close(self):
+ self._dataset.close()
+
class BoxAlteration:
@staticmethod
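A short sketch tying the classes above together (`od_dataset` is a hypothetical `ManifestDataset` of type `object_detection`; parameter names follow the docstrings above):

```python
ic_dataset = DetectionAsClassificationByCroppingDataset(
    od_dataset,
    box_aug_params={'zoom_ratio_bounds': (0.8, 1.2), 'shift_relative_bounds': (-0.3, 0.1)},
)
# cache two jittered crops per bbox under ./crops and emit a classification manifest
manifest = ic_dataset.generate_manifest(dir='crops', n_copies=2)
```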
| first commit
first commit to open source the dataset access and management repo
| 2022-03-21T19:19:05 | 0.0 | [] | [] |
|||
microsoft/vision-datasets | microsoft__vision-datasets-24 | e5279ebcbe0c560b070b27a86c2f3850de902b38 | diff --git a/COCO_DATA_FORMAT.md b/COCO_DATA_FORMAT.md
new file mode 100644
index 0000000..688dc7d
--- /dev/null
+++ b/COCO_DATA_FORMAT.md
@@ -0,0 +1,73 @@
+# Coco format
+
+## Image classification (multiclass and multilabel)
+
+Here is an example of a train.json, val.json, or test.json referenced by a `DatasetInfo`. Note that the `"id"` fields for `images`, `annotations` and `categories` should be consecutive integers, **starting from 1**: this library might work with ids starting from 0, but many tools like [CVAT](https://github.com/openvinotoolkit/cvat/issues/2085) and the official [COCOAPI](https://github.com/cocodataset/cocoapi/issues/507) will fail.
+
+``` {json}
+{
+ "images": [{"id": 1, "width": 224.0, "height": 224.0, "file_name": "[email protected]"},
+ {"id": 2, "width": 224.0, "height": 224.0, "file_name": "train_images.zip@kitten 3.jpg"}],
+ // file_name is the image path, which supports three formats as described in previous section.
+ "annotations": [
+ {"id": 1, "category_id": 1, "image_id": 1},
+ {"id": 2, "category_id": 1, "image_id": 2},
+ {"id": 3, "category_id": 2, "image_id": 2}
+ ],
+ "categories": [{"id": 1, "name": "cat"}, {"id": 2, "name": "dog"}]
+}
+```
+
+## Object detection
+
+``` {json}
+{
+ "images": [{"id": 1, "width": 224.0, "height": 224.0, "file_name": "[email protected]"},
+ {"id": 2, "width": 224.0, "height": 224.0, "file_name": "train_images.zip@kitten 3.jpg"}],
+ "annotations": [
+ {"id": 1, "category_id": 1, "image_id": 1, "bbox": [10, 10, 100, 100]},
+ {"id": 2, "category_id": 1, "image_id": 2, "bbox": [100, 100, 200, 200]},
+ {"id": 3, "category_id": 2, "image_id": 2, "bbox": [20, 20, 200, 200]}
+ ],
+ "categories": [{"id": 1, "name": "cat"}, {"id": 2, "name": "dog"}]
+}
+```
+
+bbox format should be **absolute** pixel position following either `ltwh: [left, top, width, height]` or `ltrb: [left, top, right, bottom]`. `ltwh` is the default format. To work with `ltrb`, please specify `bbox_format` to be `ltrb` in coco json file.
+
+Note that
+
+- `ltrb` used to be the default. If your coco annotations were prepared to work with this repo before version 0.1.2, please add `"bbox_format": "ltrb"` to your coco file.
+- Regardless of the format bboxes are stored in the coco file, when annotations are transformed into `ImageDataManifest`, the bbox is unified into `ltrb: [left, top, right, bottom]` (a conversion sketch follows below).
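+
+For illustration, a minimal sketch (not part of the library) that normalizes a bbox to `ltrb`:
+
+``` {python}
+def to_ltrb(bbox, bbox_format='ltwh'):
+    left, top, a, b = bbox
+    # ltwh stores width/height in the last two slots; ltrb already stores right/bottom
+    return [left, top, left + a, top + b] if bbox_format == 'ltwh' else [left, top, a, b]
+
+to_ltrb([10, 10, 100, 100])           # ltwh -> [10, 10, 110, 110]
+to_ltrb([10, 10, 100, 100], 'ltrb')   # unchanged: [10, 10, 100, 100]
+```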
+
+## Image caption
+
+Here is an example of the json file for the image caption task.
+
+``` {json}
+{
+ "images": [{"id": 1, "file_name": "[email protected]"},
+ {"id": 2, "file_name": "[email protected]"}],
+ "annotations": [
+ {"id": 1, "image_id": 1, "caption": "A black Honda motorcycle parked in front of a garage."},
+ {"id": 2, "image_id": 1, "caption": "A Honda motorcycle parked in a grass driveway."},
+ {"id": 3, "image_id": 2, "caption": "A black Honda motorcycle with a dark burgundy seat."},
+ ],
+}
+```
+
+## Image text matching
+
+Here is an example of the json file for the image text matching task. `"match": 1` indicates that the image and text match.
+
+``` {json}
+{
+ "images": [{"id": 1, "file_name": "[email protected]"},
+ {"id": 2, "file_name": "[email protected]"}],
+ "annotations": [
+ {"id": 1, "image_id": 1, "text": "A black Honda motorcycle parked in front of a garage.", "match": 0},
+ {"id": 2, "image_id": 1, "text": "A Honda motorcycle parked in a grass driveway.", "match": 0},
+ {"id": 3, "image_id": 2, "text": "A black Honda motorcycle with a dark burgundy seat.", "match": 1},
+ ],
+}
+```
diff --git a/IRIS_DATA_FORMAT.md b/IRIS_DATA_FORMAT.md
new file mode 100644
index 0000000..e00767f
--- /dev/null
+++ b/IRIS_DATA_FORMAT.md
@@ -0,0 +1,136 @@
+# Iris format
+
+Here is an example with explanation of what a `DatasetInfo` looks like for `iris` format:
+
+```{json}
+ {
+ "name": "sampled-ms-coco",
+ "version": 1,
+ "description": "A sampled ms-coco dataset.",
+ "type": "object_detection",
+ "root_folder": "detection/coco2017_20200401",
+ "format": "iris", // indicating the annotation data are stored in iris format
+ "train": {
+ "index_path": "train_images.txt", // index file for images and labels for training, example can be found in next section
+ "files_for_local_usage": [
+ "train_images.zip",
+ "train_labels.zip"
+ ],
+ },
+ "val": {
+ "index_path": "val_images.txt",
+ "files_for_local_usage": [
+ "val_images.zip",
+ "val_labels.zip"
+ ],
+ },
+ "test": {
+ "index_path": "test_images.txt",
+ "files_for_local_usage": [
+ "test_images.zip",
+ "test_labels.zip"
+ ],
+ },
+ "labelmap": "labels.txt", // includes tag names
+ "image_metadata_path": "image_meta_info.txt", // includes info about image width and height
+ },
+```
+
+## Iris image classification format
+
+Each row in the index file (`index_path`) is:
+
+``` {txt}
+<image_filepath> <comma-separated-label-indices>
+```
+
+Note that the class/label index should start from zero.
+
+Example:
+
+``` {txt}
+train_images.zip@1.jpg 0,1,2
+train_images.zip@2.jpg 2,3
+...
+```
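+
+A minimal parsing sketch (not the library implementation); note that image paths may contain spaces while the label indices never do, hence the right split:
+
+``` {python}
+with open('train_images.txt') as f:
+    for line in f:
+        img_path, label_str = line.rstrip('\n').rsplit(' ', 1)
+        labels = [int(x) for x in label_str.split(',')]
+```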
+
+## Iris object detection format
+
+The index file for OD is slightly different from IC. Each row in the index file is:
+
+``` {txt}
+<image_filepath> <label_filepath>
+```
+
+As with classification, the class/label index should start from 0.
+
+Example for `train_images.txt`:
+
+``` {txt}
+train_images.zip@1.jpg train_labels.zip@1.txt
+train_images.zip@2.jpg train_labels.zip@2.txt
+...
+```
+
+Formats and example for a label file like `train_labels.zip@1.txt`:
+
+``` {txt}
+class_index left top right bottom
+```
+
+``` {txt}
+3 200 300 600 1200 // class_id, left, top, right, bottom
+4 100 100 200 200
+...
+```
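+
+A sketch of reading one such label file (illustrative only; `1.txt` stands for a file extracted from `train_labels.zip`):
+
+``` {python}
+boxes = []
+with open('1.txt') as f:
+    for line in f:
+        c, left, top, right, bottom = line.split()[:5]
+        boxes.append([int(c), float(left), float(top), float(right), float(bottom)])
+```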
+
+## Multitask DatasetInfo
+
+The `DatasetInfo` for multitask is not very different from single task. A `'tasks'` section will be found in the json and the `'type'` of the dataset is `'multitask'`. Each task entry wraps the
+info specific to that task.
+
+Below is an example for `'iris'` format, but the general idea applies to `'coco'` format as well.
+
+```{json}
+{
+ "name": "coco-vehicle-multitask",
+ "version": 1,
+ "type": "multitask",
+ "root_folder": "classification/coco_vehicle_multitask_20210202",
+ "format": "iris",
+ "tasks": {
+ "vehicle_color": {
+ "type": "classification_multiclass",
+ "train": {
+ "index_path": "train_images_VehicleColor.txt",
+ "files_for_local_usage": [
+ "train_images.zip"
+ ]
+ },
+ "test": {
+ "index_path": "test_images_VehicleColor.txt",
+ "files_for_local_usage": [
+ "test_images.zip"
+ ]
+ },
+ "labelmap": "labels_VehicleColor.txt"
+ },
+ "vehicle_type": {
+ "type": "classification_multiclass",
+ "train": {
+ "index_path": "train_images_VehicleType.txt",
+ "files_for_local_usage": [
+ "train_images.zip"
+ ]
+ },
+ "test": {
+ "index_path": "test_images_VehicleType.txt",
+ "files_for_local_usage": [
+ "test_images.zip"
+ ]
+ },
+ "labelmap": "labels_VehicleType.txt"
+ }
+ }
+}
+```
diff --git a/README.md b/README.md
index 97f024a..9d1db0b 100644
--- a/README.md
+++ b/README.md
@@ -4,8 +4,18 @@
This repo
-- defines the contract for dataset for purposes such as training, visualization, and exploration
-- provides API for organizing and accessing datasets: `DatasetHub`
+- defines a unified contract for datasets, for purposes such as training, visualization, and exploration, via `DatasetManifest` and `ImageDataManifest`.
+- provides an API for organizing and accessing datasets, via `DatasetHub`
+
+Currently, five `basic` types of data are supported:
+- `classification_multiclass`: each image has exactly one label.
+- `classification_multilabel`: each image can have one or multiple labels (e.g., 'cat', 'animal', 'pet').
+- `object_detection`: each image is labeled with bounding boxes surrounding the objects of interest.
+- `image_caption`: each image is labeled with a few texts describing the image.
+- `image_text_matching`: each image is associated with a collection of texts describing the image, together with whether each text description matches the image or not.
+
+`multitask` type is a composition type, where one set of images has multiple sets of annotations available for different tasks; each task can be of any basic type.
+
## Dataset Contracts
@@ -18,19 +28,17 @@ This repo
`ManifestDataset` is able to load the data from all three kinds of paths. Both 1. and 2. are good for training, as they access data from local disk while the 3rd one is good for data exploration, if you have the data in azure storage.
-Currently, four basic types of data are supported: `classification_multilabel`, `classification_multiclass`, `object_detection`, `image_caption`. `multitask` type is a composition type, where one set of images has multiple sets of annotations available for different tasks, where each task can be of any basic type.
-
For `multitask` dataset, the labels stored in the `ImageDataManifest` is a `dict` mapping from task name to that task's labels. The labelmap stored in `DatasetManifest` is also a `dict` mapping from task name to that task's labels.
### Creating DatasetManifest
In addition to loading a serialized `DatasetManifest` for instantiation, this repo currently supports two formats of data that can instantiates `DatasetManifest`,
-using `DatasetManifest.create_dataset_manifest(dataset_info, usage, container_sas_or_root_dir)`: `IRIS` and `COCO`.
+using `DatasetManifest.create_dataset_manifest(dataset_info, usage, container_sas_or_root_dir)`: `COCO` and `IRIS` (legacy).
`DatasetInfo` as the first arg in the arg list wraps the metainfo about the dataset like the name of the dataset, locations of the images, annotation files, etc. See examples in the sections below
for different data formats.
-Once a `DatasetManifest` is created, you can create a `ManifestDataset` for accessing the dataset:
+Once a `DatasetManifest` is created, you can create a `ManifestDataset` for accessing the data in the dataset, especially the image data, for training, visualization, etc:
```{python}
dataset = ManifestDataset(dataset_info, dataset_manifest, coordinates='relative')
@@ -51,224 +59,30 @@ Here is an example with explanation of what a `DatasetInfo` looks like for coco
"train": {
"index_path": "train.json", // coco json file for training, see next section for example
"files_for_local_usage": [ // associated files including data such as images
- "train_images.zip"
+ "images/train_images.zip"
]
},
"val": {
"index_path": "val.json",
"files_for_local_usage": [
- "test_images.zip"
+ "images/val_images.zip"
]
},
"test": {
"index_path": "test.json",
"files_for_local_usage": [
- "test_images.zip"
+ "images/test_images.zip"
]
}
}
```
-##### Coco JSON - Image classification
-
-Here is one example of the train.json, val.json, or test.json in the `DatasetInfo` above. Note that the `"id"` for `images`, `annotations` and `categories` should be consecutive integers, **starting from 1**. Note that our lib might work with id starting from 0, but many tools like [CVAT](https://github.com/openvinotoolkit/cvat/issues/2085) and official [COCOAPI](https://github.com/cocodataset/cocoapi/issues/507) will fail.
-
-``` {json}
-{
- "images": [{"id": 1, "width": 224.0, "height": 224.0, "file_name": "[email protected]"},
- {"id": 2, "width": 224.0, "height": 224.0, "file_name": "train_images.zip@kitten 3.jpg"}],
- // file_name is the image path, which supports three formats as described in previous section.
- "annotations": [
- {"id": 1, "category_id": 1, "image_id": 1},
- {"id": 2, "category_id": 1, "image_id": 2},
- {"id": 3, "category_id": 2, "image_id": 2}
- ],
- "categories": [{"id": 1, "name": "cat"}, {"id": 2, "name": "dog"}]
-}
-```
-
-##### Coco JSON - Object detection
-
-``` {json}
-{
- "images": [{"id": 1, "width": 224.0, "height": 224.0, "file_name": "[email protected]"},
- {"id": 2, "width": 224.0, "height": 224.0, "file_name": "train_images.zip@kitten 3.jpg"}],
- "annotations": [
- {"id": 1, "category_id": 1, "image_id": 1, "bbox": [10, 10, 100, 100]},
- {"id": 2, "category_id": 1, "image_id": 2, "bbox": [100, 100, 200, 200]},
- {"id": 3, "category_id": 2, "image_id": 2, "bbox": [20, 20, 200, 200]}
- ],
- "categories": [{"id": 1, "name": "cat"}, {"id": 2, "name": "dog"}]
-}
-```
-
-bbox format should be **absolute** pixel position following either `ltwh: [left, top, width, height]` or `ltrb: [left, top, right, bottom]`. `ltwh` is the default format. To work with `ltrb`, please specify `bbox_format` to be `ltrb` in coco json file.
-
-Note that
-
-- Note that we used to use `ltrb` as default. If your coco annotations were prepared to work with this repo before version 0.1.2. Please add `"bbox_format": "ltrb"` to your coco file.
-- Regardless of what format bboxes are stored in Coco file, when annotations are transformed into `ImageDataManifest`, the bbox will be unified into `ltrb: [left, top, right, bottom]`.
-
-##### Coco JSON - Image caption
-
-Here is one example of the json file for image caption task.
-
-``` {json}
-{
- "images": [{"id": 1, "file_name": "[email protected]"},
- {"id": 2, "file_name": "[email protected]"}],
- "annotations": [
- {"id": 1, "image_id": 1, "caption": "A black Honda motorcycle parked in front of a garage."},
- {"id": 2, "image_id": 1, "caption": "A Honda motorcycle parked in a grass driveway."},
- {"id": 3, "image_id": 1, "caption": "A black Honda motorcycle with a dark burgundy seat."},
- {"id": 4, "image_id": 1, "caption": "Ma motorcycle parked on the gravel in front of a garage."},
- {"id": 5, "image_id": 1, "caption": "A motorcycle with its brake extended standing outside."},
- {"id": 6, "image_id": 2, "caption": "A picture of a modern looking kitchen area.\n"},
- {"id": 7, "image_id": 2, "caption": "A narrow kitchen ending with a chrome refrigerator."},
- {"id": 8, "image_id": 2, "caption": "A narrow kitchen is decorated in shades of white, gray, and black."},
- {"id": 9, "image_id": 2, "caption": "a room that has a stove and a icebox in it"},
- {"id": 10, "image_id": 2, "caption": "A long empty, minimal modern skylit home kitchen."}
- ],
-}
-```
+Coco annotation format details for `multiclass/label_classification`, `object_detection`, `image_caption`, `image_text_matching` and `multitask` can be found in `COCO_DATA_FORMAT.md`.
#### Iris format
-Here is an example with explanation of what a `DatasetInfo` looks like for `iris` format:
-
-```{json}
- {
- "name": "sampled-ms-coco",
- "version": 1,
- "description": "A sampled ms-coco dataset.",
- "type": "object_detection",
- "root_folder": "detection/coco2017_20200401",
- "format": "iris", // indicating the annotation data are stored in iris format
- "train": {
- "index_path": "train_images.txt", // index file for images and labels for training, example can be found in next section
- "files_for_local_usage": [
- "train_images.zip",
- "train_labels.zip"
- ],
- },
- "val": {
- "index_path": "val_images.txt",
- "files_for_local_usage": [
- "val_images.zip",
- "val_labels.zip"
- ],
- },
- "test": {
- "index_path": "test_images.txt",
- "files_for_local_usage": [
- "test_images.zip",
- "test_labels.zip"
- ],
- },
- "labelmap": "labels.txt", // includes tag names
- "image_metadata_path": "image_meta_info.txt", // includes info about image width and height
- },
-```
-
-##### Iris image classification format
-
-Each rows in the index file (`index_path`) is:
-
-``` {txt}
-<image_filepath> <comma-separated-label-indices>
-```
-
-Note that the class/label index should start from zero.
-
-Example:
-
-``` {txt}
-train_images.zip@1.jpg 0,1,2
-train_images.zip@2.jpg 2,3
-...
-```
-
-##### Iris object detection format
-
-The index file for OD is slightly different from IC. Each rows in the index file is:
-
-``` {txt}
-<image_filepath> <label_filepath>
-```
-
-Same with classification, the class/label index should start from 0.
-
-Example for `train_images.txt`:
-
-``` {txt}
-train_images.zip@1.jpg train_labels.zip@1.txt
-train_images.zip@2.jpg train_labels.zip@2.txt
-...
-```
-
-Formats and example for a label file like `train_labels.zip@1.txt`:
-
-``` {txt}
-class_index left top right bottom
-```
-
-``` {txt}
-3 200 300 600 1200 // class_id, left, top, right, bottom
-4 100 100 200 200
-...
-```
-
-#### Multitask DatasetInfo
-
-The `DatasetInfo` for multitask is not very different from single task. A `'tasks'` section will be found in the json and the `'type'` of the dataset is `'multitask'`. Within each task, it wraps the
-info specific to that task.
-
-Below is an example for `'iris'` format, but the general idea applies to `'coco'` format as well.
-
-```{json}
-{
- "name": "coco-vehicle-multitask",
- "version": 1,
- "type": "multitask",
- "root_folder": "classification/coco_vehicle_multitask_20210202",
- "format": "iris",
- "tasks": {
- "vehicle_color": {
- "type": "classification_multiclass",
- "train": {
- "index_path": "train_images_VehicleColor.txt",
- "files_for_local_usage": [
- "train_images.zip"
- ]
- },
- "test": {
- "index_path": "test_images_VehicleColor.txt",
- "files_for_local_usage": [
- "test_images.zip"
- ]
- },
- "labelmap": "labels_VehicleColor.txt"
- },
- "vehicle_type": {
- "type": "classification_multiclass",
- "train": {
- "index_path": "train_images_VehicleType.txt",
- "files_for_local_usage": [
- "train_images.zip"
- ]
- },
- "test": {
- "index_path": "test_images_VehicleType.txt",
- "files_for_local_usage": [
- "test_images.zip"
- ]
- },
- "labelmap": "labels_VehicleType.txt"
- }
- }
-}
-```
+Iris format is a legacy format; details can be found in `IRIS_DATA_FORMAT.md`. Only `multiclass/label_classification`, `object_detection` and `multitask` are supported.
## Dataset management and access
@@ -314,7 +128,7 @@ If you are using `DatasetHub` to manage datasets in cloud storage, we recommend
If you upload folders of images directly to cloud storage:
-- you will have to list all images in `"files_for_local_usage"`, which can be millions of entries
+- you will need to list all images in `"files_for_local_usage"`, which can be millions of entries
- downloading images one by one (even with multithreading) is much slower than downloading a few zip files
-One more thing is that sometimes when you create a zip file `train.zip`, you might find out that there is only one `train` folder in the zip. This will fail the file loading if the path is `train.zip@1.jpg`, as the image is actually at `train.zip@train\1.jpg`. It is usually a good idea to avoid this extra layer of folder when zipping and double-confirm this does not happen by mistake.
+One more thing is that sometimes when you create a zip file `train.zip`, you might find out that there is only one `train` folder in the zip. This will fail the file loading if the path is `train.zip@1.jpg`, as the image is actually at `train.zip@train\1.jpg`. It is ok to have this extra layer but please make sure the path is correct.
diff --git a/setup.py b/setup.py
index d08877b..7333211 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
import setuptools
from os import path
-VERSION = '0.2.6'
+VERSION = '0.2.7'
# Get the long description from the README file
here = path.abspath(path.dirname(__file__))
diff --git a/vision_datasets/common/constants.py b/vision_datasets/common/constants.py
index a3d7601..b0e41a7 100644
--- a/vision_datasets/common/constants.py
+++ b/vision_datasets/common/constants.py
@@ -4,8 +4,9 @@ class DatasetTypes:
OD = 'object_detection'
MULTITASK = 'multitask'
IMCAP = 'image_caption'
+ IMAGE_TEXT_MATCHING = 'image_text_matching'
- VALID_TYPES = [IC_MULTILABEL, IC_MULTICLASS, OD, MULTITASK, IMCAP]
+ VALID_TYPES = [IC_MULTILABEL, IC_MULTICLASS, OD, MULTITASK, IMCAP, IMAGE_TEXT_MATCHING]
@staticmethod
def is_classification(dataset_type):
diff --git a/vision_datasets/common/data_manifest.py b/vision_datasets/common/data_manifest.py
index c1c5fa0..3668f3a 100644
--- a/vision_datasets/common/data_manifest.py
+++ b/vision_datasets/common/data_manifest.py
@@ -96,6 +96,7 @@ def __init__(self, id, img_path, width, height, labels):
classification: [c_id] for multiclass, [c_id1, c_id2, ...] for multilabel;
detection: [[c_id, left, top, right, bottom], ...];
image caption: [caption1, caption2, ...];
+            image_text_matching: [(text1, match1), (text2, match2), ...], where each match is 0 or 1;
multitask: dict[task, labels]
"""
self.id = id
@@ -606,8 +607,8 @@ def create_dataset_manifest(dataset_info, usage: str, container_sas_or_root_dir:
assert dataset_info
assert usage
- if dataset_info.type == DatasetTypes.IMCAP:
- raise ValueError('Iris format is not supported for image caption task, please use COCO format!')
+ if dataset_info.type in [DatasetTypes.IMCAP, DatasetTypes.IMAGE_TEXT_MATCHING]:
+ raise ValueError(f'Iris format is not supported for {dataset_info.type} task, please use COCO format!')
if isinstance(dataset_info, MultiTaskDatasetInfo):
dataset_manifest_by_task = {k: IrisManifestAdaptor.create_dataset_manifest(task_info, usage, container_sas_or_root_dir) for k, task_info in dataset_info.sub_task_infos.items()}
return _generate_multitask_dataset_manifest(dataset_manifest_by_task)
@@ -728,14 +729,18 @@ def create_dataset_manifest(coco_file_path_or_url, data_type, container_sas_or_r
file_reader.close()
+ images_by_id = {img['id']: ImageDataManifest(img['id'], get_full_sas_or_path(img['file_name']), img.get('width'), img.get('height'), []) for img in coco_manifest['images']}
if data_type == DatasetTypes.IMCAP:
- images_by_id = {img['id']: ImageDataManifest(img['id'], get_full_sas_or_path(img['file_name']), img.get('width'), img.get('height'), []) for img in coco_manifest['images']}
for annotation in coco_manifest['annotations']:
images_by_id[annotation['image_id']].labels.append(annotation['caption'])
images = [x for x in images_by_id.values()]
return DatasetManifest(images, None, data_type)
- images_by_id = {img['id']: ImageDataManifest(img['id'], get_full_sas_or_path(img['file_name']), img.get('width'), img.get('height'), []) for img in coco_manifest['images']}
+ if data_type == DatasetTypes.IMAGE_TEXT_MATCHING:
+ for annotation in coco_manifest['annotations']:
+ images_by_id[annotation['image_id']].labels.append((annotation['text'], annotation['match']))
+ images = [x for x in images_by_id.values()]
+ return DatasetManifest(images, None, data_type)
cate_id_name = [(cate['id'], cate['name']) for cate in coco_manifest['categories']]
cate_id_name.sort(key=lambda x: x[0])
| first commit
first commit to open source the dataset access and management repo
| 2022-02-17T17:33:05 | 0.0 | [] | [] |
|||
microsoft/vision-datasets | microsoft__vision-datasets-22 | 30b5180cc27a05fc1dfbc6a241b487f1154e058d | diff --git a/setup.py b/setup.py
index ffc2d29..404de6d 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
import setuptools
from os import path
-VERSION = '0.2.3'
+VERSION = '0.2.4'
# Get the long description from the README file
here = path.abspath(path.dirname(__file__))
diff --git a/vision_datasets/common/data_manifest.py b/vision_datasets/common/data_manifest.py
index 726a0d4..2ad4fc3 100644
--- a/vision_datasets/common/data_manifest.py
+++ b/vision_datasets/common/data_manifest.py
@@ -460,17 +460,113 @@ def sample_few_shots_subset_greedy(self, num_min_samples_per_class, random_seed=
return DatasetManifest(sampled_images, self.labelmap, self.data_type)
@staticmethod
- def merge(manifest_a, manifest_b):
+ def merge(*args, flavor: int = 0):
"""
- merge two data manifests of the same data type and labelmap
+ merge multiple data manifests into one.
+
+ Args:
+ args: manifests to be merged
+            flavor: flavor of dataset merge (makes no difference for captioning)
+                0: merge manifests of the same type and the same labelmap (for multitask, it should be the same set of tasks and the same labelmap for each task)
+                1: concat manifests of the same type; the new labelmap is the concatenation of the labelmaps of all manifests (for multitask, duplicate task names are not allowed)
+ """
+
+        assert len(args) >= 1, 'at least one manifest must be provided for merging.'
+ assert all(args), '"None" manifest found'
+
+ if len(args) == 1:
+ logger.warning('Only one manifest provided. Nothing to be merged.')
+ return args[0]
+
+ if any([isinstance(x.data_type, dict) for x in args]):
+ assert all([isinstance(x.data_type, dict) for x in args]), 'Cannot merge multitask manifest and single task manifest'
+ else:
+ assert len(set([x.data_type for x in args])) == 1, 'All manifests must be of the same data type'
+
+ if flavor == 0:
+ return DatasetManifest._merge_with_same_labelmap(*args)
+ elif flavor == 1:
+ return DatasetManifest._merge_with_concat(*args)
+ else:
+ raise ValueError(f'Unknown flavor {flavor}.')
+
+ @staticmethod
+ def _merge_with_same_labelmap(*args):
+ for i in range(len(args)):
+ if i > 0 and args[i].labelmap != args[i - 1].labelmap:
+ raise ValueError('labelmap must be the same for all manifests.')
+ if i > 0 and args[i].data_type != args[i - 1].data_type:
+ raise ValueError('Data type must be the same for all manifests.')
+
+ images = [y for x in args for y in x.images]
+
+ return DatasetManifest(images, args[0].labelmap, args[0].data_type)
+
+ @staticmethod
+ def _merge_with_concat(*args):
+ data_type = args[0].data_type
+
+ if data_type == DatasetTypes.IMCAP:
+            return DatasetManifest._merge_with_same_labelmap(*args)  # unpack: the helper takes manifests as separate args
+
+ if isinstance(data_type, dict): # multitask
+ labelmap = {}
+ data_types = {}
+ for manifest in args:
+ for k, v in manifest.labelmap.items():
+ if k in labelmap:
+                        raise ValueError(f'Failed to merge dataset manifests: task with name {k} exists in more than one manifest.')
+
+ labelmap[k] = v
+
+ for k, v in manifest.data_type.items():
+ data_types[k] = v
+
+ return DatasetManifest([y for x in args for y in x.images], labelmap, data_types)
+
+ labelmap = []
+ images = []
+
+ for manifest in args:
+ label_offset = len(labelmap)
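+            # class ids from this manifest are shifted so they land after the classes concatenated so far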
+ for img_manifest in manifest.images:
+ new_img_manifest = copy.deepcopy(img_manifest)
+ if DatasetTypes.is_classification(data_type):
+ new_img_manifest.labels = [x + label_offset for x in new_img_manifest.labels]
+ elif data_type == DatasetTypes.OD:
+ for label in new_img_manifest.labels:
+ label[0] += label_offset
+ else:
+ raise ValueError(f'Unsupported type in merging {data_type}')
+
+ images.append(new_img_manifest)
+ labelmap.extend(manifest.labelmap)
+
+ return DatasetManifest(images, labelmap, data_type)
+
+ @staticmethod
+ def create_multitask_manifest(manifest_by_task: dict):
"""
- assert manifest_a
- assert manifest_b
+ Merge several manifests into a multitask dataset in a naive way, assuming images from different manifests are independent different images.
+ Args:
+ manifest_by_task (dict): manifest by task name
+
+ Returns:
+ a merged multitask manifest
+ """
+
+ task_names = sorted(list(manifest_by_task.keys()))
+ images = []
+ for task_name in task_names:
+ for img in manifest_by_task[task_name].images:
+ new_img = copy.deepcopy(img)
+ new_img.labels = {task_name: new_img.labels}
+ images.append(new_img)
- assert manifest_a.data_type == manifest_b.data_type, f'data type must be the same, {manifest_a.data_type} vs {manifest_b.data_type}.'
- assert manifest_a.labelmap == manifest_b.labelmap, f'labelmap must be the same, {manifest_a.labelmap}, {manifest_b.labelmap}.'
+ labelmap = {task_name: manifest_by_task[task_name].labelmap for task_name in task_names}
+ data_types = {task_name: manifest_by_task[task_name].data_type for task_name in task_names}
- return DatasetManifest(manifest_a.images + manifest_b.images, manifest_a.labelmap, manifest_a.data_type)
+ return DatasetManifest(images, labelmap, data_types)
def _generate_multitask_dataset_manifest(manifest_by_task: Dict[str, DatasetManifest]):
diff --git a/vision_datasets/common/dataset_downloader.py b/vision_datasets/common/dataset_downloader.py
index 7655082..495f50e 100644
--- a/vision_datasets/common/dataset_downloader.py
+++ b/vision_datasets/common/dataset_downloader.py
@@ -32,6 +32,13 @@ def __exit__(self, exc_type, exc_value, traceback):
logger.info(f'Removing folder: {base_dir}.')
shutil.rmtree(base_dir)
+ @staticmethod
+ def merge(r1, r2):
+ assert r1
+ assert r2
+
+ return DownloadedDatasetsResources(r1.base_dirs + r2.base_dirs)
+
class DatasetDownloader:
def __init__(self, dataset_sas_url: str, dataset_registry: DatasetRegistry):
diff --git a/vision_datasets/resources/dataset_hub.py b/vision_datasets/resources/dataset_hub.py
index b0bffa3..078c47c 100644
--- a/vision_datasets/resources/dataset_hub.py
+++ b/vision_datasets/resources/dataset_hub.py
@@ -1,5 +1,5 @@
import logging
-from typing import Union
+from typing import Union, List
from ..common import DatasetRegistry, Usages
from ..common.data_manifest import DatasetManifest
@@ -39,7 +39,7 @@ def create_manifest_dataset(self, container_sas: str, local_dir: str, name: str,
local_dir: local directory where datasets can be found/downloaded to
name: dataset name
version: dataset version, if not specified, latest version will be returned
- usage: usage of the dataset, 'train', 'val' or 'test'
+ usage: usage(s) of the dataset, 'train', 'val' or 'test' or a list of usages
coordinates: format of the bounding boxes, can be 'relative' or 'absolute'
few_shot_samples_per_class (int): get a sampled dataset with N images at most for each class (for detection and multilabel case, not guaranteed.)
rnd_seed (int): random seed for few shot sampling
@@ -56,7 +56,7 @@ def create_manifest_dataset(self, container_sas: str, local_dir: str, name: str,
from vision_datasets import ManifestDataset
return ManifestDataset(dataset_info, manifest, coordinates, downloader_resources)
- def create_dataset_manifest(self, container_sas: str, local_dir: str, name: str, version: int = None, usage: str = Usages.TRAIN_PURPOSE, few_shot_samples_per_class=None, rnd_seed=0):
+ def create_dataset_manifest(self, container_sas: str, local_dir: str, name: str, version: int = None, usage: Union[str, List] = Usages.TRAIN_PURPOSE, few_shot_samples_per_class=None, rnd_seed=0):
"""Create dataset manifest.
If local_dir is provided, manifest_dataset consumes data from local disk. If data not present on local disk, it will be automatically downloaded.
if container_sas is provided but local_dir not provided, manifest_dataset consumes data directly from container_sas.
@@ -69,18 +69,23 @@ def create_dataset_manifest(self, container_sas: str, local_dir: str, name: str,
local_dir: local directory where datasets can be found/downloaded to
name: dataset name
version: dataset version, if not specified, latest version will be returned
- usage: usage of the dataset, 'train', 'val' or 'test'
+ usage: usage(s) of the dataset, 'train', 'val' or 'test' or a list of usages
few_shot_samples_per_class (int): get a sampled dataset with N images at most for each class (for detection and multilabel case, not guaranteed.)
rnd_seed (int): random seed for few shot sampling
Returns:
- dataset manifest dataset, dataset_info, downloaded_resources, if dataset exists, else None
+ dataset manifest, dataset_info, downloaded_resources, if dataset exists, else None
"""
assert container_sas or local_dir
assert name
+ assert usage
from ..common.dataset_downloader import DatasetDownloader
+ usages = usage if isinstance(usage, list) else [usage]
+
+ manifest = None
+ downloader_resources = None
dataset_info = self.dataset_registry.get_dataset_info(name, version)
if dataset_info is None:
logger.warning(f'Dataset with {name} and version {version} not found.')
@@ -88,19 +93,26 @@ def create_dataset_manifest(self, container_sas: str, local_dir: str, name: str,
if isinstance(dataset_info, MultiTaskDatasetInfo):
for task_info in dataset_info.sub_task_infos.values():
- task_info.index_files = {usage: task_info.index_files[usage]} if usage in task_info.index_files else {}
+ task_info.index_files = {usage: task_info.index_files[usage] for usage in usages if usage in task_info.index_files}
else:
- dataset_info.index_files = {usage: dataset_info.index_files[usage]} if usage in dataset_info.index_files else {}
+ dataset_info.index_files = {usage: dataset_info.index_files[usage] for usage in usages if usage in dataset_info.index_files}
if container_sas and local_dir:
downloader = DatasetDownloader(container_sas, self.dataset_registry)
- downloader_resources = downloader.download(name, version, local_dir, [usage])
+ downloader_resources_usage = downloader.download(name, version, local_dir, usages)
else:
- downloader_resources = None
+ downloader_resources_usage = None
+
+ for usage in usages:
+ manifest_usage = DatasetManifest.create_dataset_manifest(dataset_info, usage, local_dir or container_sas)
+ if manifest_usage:
+ manifest = DatasetManifest.merge(manifest, manifest_usage) if manifest else manifest_usage
- manifest = DatasetManifest.create_dataset_manifest(dataset_info, usage, local_dir or container_sas)
+ if downloader_resources_usage:
+ from ..common.dataset_downloader import DownloadedDatasetsResources
+ downloader_resources = DownloadedDatasetsResources.merge(downloader_resources, downloader_resources_usage) if downloader_resources else downloader_resources_usage
- if not manifest:
+ if manifest is None:
return None
if few_shot_samples_per_class:
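The two merge flavors and the naive multitask merge can be sketched as follows (all manifests are hypothetical):

```python
# flavor 0: same labelmap and data type; images are simply pooled
pooled = DatasetManifest.merge(train_manifest, val_manifest, flavor=0)

# flavor 1: labelmaps are concatenated and class ids are offset accordingly
combined = DatasetManifest.merge(cats_manifest, dogs_manifest, flavor=1)

# independent single-task manifests combined into one multitask manifest
multitask = DatasetManifest.create_multitask_manifest({'color': color_m, 'type': type_m})
```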
| first commit
first commit to open source the dataset access and management repo
| 2022-02-04T07:31:01 | 0.0 | [] | [] |