diff --git a/.gitattributes b/.gitattributes index b547e80b1ce6626abce8617e13f7194b94b758db..c30b96f056b219555832334fdebd7217437a2784 100644 --- a/.gitattributes +++ b/.gitattributes @@ -82,3 +82,5 @@ MLPY/Lib/site-packages/numpy/.libs/libopenblas.XWYDX2IKJW2NMTWSFYNGFUWKQU3LYTCZ. MLPY/Lib/site-packages/numpy/core/_multiarray_umath.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text MLPY/Lib/site-packages/numpy/core/_simd.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text MLPY/Lib/site-packages/onnx/onnx_cpp2py_export.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text +MLPY/Lib/site-packages/PIL/_imaging.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text +MLPY/Lib/site-packages/PIL/_imagingft.cp39-win_amd64.pyd filter=lfs diff=lfs merge=lfs -text diff --git a/MLPY/Lib/site-packages/PIL/BdfFontFile.py b/MLPY/Lib/site-packages/PIL/BdfFontFile.py new file mode 100644 index 0000000000000000000000000000000000000000..1d30582890fb0b4af90c520944f5bb135c1f62f3 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/BdfFontFile.py @@ -0,0 +1,133 @@ +# +# The Python Imaging Library +# $Id$ +# +# bitmap distribution font (bdf) file parser +# +# history: +# 1996-05-16 fl created (as bdf2pil) +# 1997-08-25 fl converted to FontFile driver +# 2001-05-25 fl removed bogus __init__ call +# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev) +# 2003-04-22 fl more robustification (from Graham Dumpleton) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +""" +Parse X Bitmap Distribution Format (BDF) +""" +from __future__ import annotations + +from typing import BinaryIO + +from . import FontFile, Image + +bdf_slant = { + "R": "Roman", + "I": "Italic", + "O": "Oblique", + "RI": "Reverse Italic", + "RO": "Reverse Oblique", + "OT": "Other", +} + +bdf_spacing = {"P": "Proportional", "M": "Monospaced", "C": "Cell"} + + +def bdf_char( + f: BinaryIO, +) -> ( + tuple[ + str, + int, + tuple[tuple[int, int], tuple[int, int, int, int], tuple[int, int, int, int]], + Image.Image, + ] + | None +): + # skip to STARTCHAR + while True: + s = f.readline() + if not s: + return None + if s[:9] == b"STARTCHAR": + break + id = s[9:].strip().decode("ascii") + + # load symbol properties + props = {} + while True: + s = f.readline() + if not s or s[:6] == b"BITMAP": + break + i = s.find(b" ") + props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii") + + # load bitmap + bitmap = bytearray() + while True: + s = f.readline() + if not s or s[:7] == b"ENDCHAR": + break + bitmap += s[:-1] + + # The word BBX + # followed by the width in x (BBw), height in y (BBh), + # and x and y displacement (BBxoff0, BByoff0) + # of the lower left corner from the origin of the character. + width, height, x_disp, y_disp = (int(p) for p in props["BBX"].split()) + + # The word DWIDTH + # followed by the width in x and y of the character in device pixels. 
+ dwx, dwy = (int(p) for p in props["DWIDTH"].split()) + + bbox = ( + (dwx, dwy), + (x_disp, -y_disp - height, width + x_disp, -y_disp), + (0, 0, width, height), + ) + + try: + im = Image.frombytes("1", (width, height), bitmap, "hex", "1") + except ValueError: + # deal with zero-width characters + im = Image.new("1", (width, height)) + + return id, int(props["ENCODING"]), bbox, im + + +class BdfFontFile(FontFile.FontFile): + """Font file plugin for the X11 BDF format.""" + + def __init__(self, fp: BinaryIO) -> None: + super().__init__() + + s = fp.readline() + if s[:13] != b"STARTFONT 2.1": + msg = "not a valid BDF file" + raise SyntaxError(msg) + + props = {} + comments = [] + + while True: + s = fp.readline() + if not s or s[:13] == b"ENDPROPERTIES": + break + i = s.find(b" ") + props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii") + if s[:i] in [b"COMMENT", b"COPYRIGHT"]: + if s.find(b"LogicalFontDescription") < 0: + comments.append(s[i + 1 : -1].decode("ascii")) + + while True: + c = bdf_char(fp) + if not c: + break + id, ch, (xy, dst, src), im = c + if 0 <= ch < len(self.glyph): + self.glyph[ch] = xy, dst, src, im diff --git a/MLPY/Lib/site-packages/PIL/BlpImagePlugin.py b/MLPY/Lib/site-packages/PIL/BlpImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..04e537a9ddea0e52f466d0ce737e8c5fe50b94a1 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/BlpImagePlugin.py @@ -0,0 +1,488 @@ +""" +Blizzard Mipmap Format (.blp) +Jerome Leclanche + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ + +BLP1 files, used mostly in Warcraft III, are not fully supported. +All types of BLP2 files used in World of Warcraft are supported. + +The BLP file structure consists of a header, up to 16 mipmaps of the +texture + +Texture sizes must be powers of two, though the two dimensions do +not have to be equal; 512x256 is valid, but 512x200 is not. +The first mipmap (mipmap #0) is the full size image; each subsequent +mipmap halves both dimensions. The final mipmap should be 1x1. + +BLP files come in many different flavours: +* JPEG-compressed (type == 0) - only supported for BLP1. +* RAW images (type == 1, encoding == 1). Each mipmap is stored as an + array of 8-bit values, one per pixel, left to right, top to bottom. + Each value is an index to the palette. +* DXT-compressed (type == 1, encoding == 2): +- DXT1 compression is used if alpha_encoding == 0. + - An additional alpha bit is used if alpha_depth == 1. + - DXT3 compression is used if alpha_encoding == 1. + - DXT5 compression is used if alpha_encoding == 7. +""" + +from __future__ import annotations + +import abc +import os +import struct +from enum import IntEnum +from io import BytesIO +from typing import IO + +from . import Image, ImageFile + + +class Format(IntEnum): + JPEG = 0 + + +class Encoding(IntEnum): + UNCOMPRESSED = 1 + DXT = 2 + UNCOMPRESSED_RAW_BGRA = 3 + + +class AlphaEncoding(IntEnum): + DXT1 = 0 + DXT3 = 1 + DXT5 = 7 + + +def unpack_565(i: int) -> tuple[int, int, int]: + return ((i >> 11) & 0x1F) << 3, ((i >> 5) & 0x3F) << 2, (i & 0x1F) << 3 + + +def decode_dxt1( + data: bytes, alpha: bool = False +) -> tuple[bytearray, bytearray, bytearray, bytearray]: + """ + input: one "row" of data (i.e. 
will produce 4*width pixels) + """ + + blocks = len(data) // 8 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block_index in range(blocks): + # Decode next 8-byte block. + idx = block_index * 8 + color0, color1, bits = struct.unpack_from("<HHI", data, idx) + + r0, g0, b0 = unpack_565(color0) + r1, g1, b1 = unpack_565(color1) + + # Decode this block into 4x4 pixels + # Accumulate the results onto our 4 row accumulators + for j in range(4): + for i in range(4): + # get next control op and generate a pixel + + control = bits & 3 + bits = bits >> 2 + + a = 0xFF + if control == 0: + r, g, b = r0, g0, b0 + elif control == 1: + r, g, b = r1, g1, b1 + elif control == 2: + if color0 > color1: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + else: + r = (r0 + r1) // 2 + g = (g0 + g1) // 2 + b = (b0 + b1) // 2 + elif control == 3: + if color0 > color1: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + else: + r, g, b, a = 0, 0, 0, 0 + + if alpha: + ret[j].extend([r, g, b, a]) + else: + ret[j].extend([r, g, b]) + + return ret + + +def decode_dxt3(data: bytes) -> tuple[bytearray, bytearray, bytearray, bytearray]: + """ + input: one "row" of data (i.e. will produce 4*width pixels) + """ + + blocks = len(data) // 16 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block_index in range(blocks): + idx = block_index * 16 + block = data[idx : idx + 16] + # Decode next 16-byte block. + bits = struct.unpack_from("<8B", block) + color0, color1 = struct.unpack_from("<HH", block, 8) + + (code,) = struct.unpack_from("<I", block, 12) + + r0, g0, b0 = unpack_565(color0) + r1, g1, b1 = unpack_565(color1) + + for j in range(4): + high = False # Do we want the higher bits? + for i in range(4): + alphacode_index = (4 * j + i) // 2 + a = bits[alphacode_index] + if high: + high = False + a >>= 4 + else: + high = True + a &= 0xF + a *= 17 # We get a value between 0 and 15 + + color_code = (code >> 2 * (4 * j + i)) & 0x03 + + if color_code == 0: + r, g, b = r0, g0, b0 + elif color_code == 1: + r, g, b = r1, g1, b1 + elif color_code == 2: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + elif color_code == 3: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + + ret[j].extend([r, g, b, a]) + + return ret + + +def decode_dxt5(data: bytes) -> tuple[bytearray, bytearray, bytearray, bytearray]: + """ + input: one "row" of data (i.e. will produce 4 * width pixels) + """ + + blocks = len(data) // 16 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block_index in range(blocks): + idx = block_index * 16 + block = data[idx : idx + 16] + # Decode next 16-byte block.
+ a0, a1 = struct.unpack_from("<BB", block) + + bits = struct.unpack_from("<6B", block, 2) + alphacode1 = bits[2] | (bits[3] << 8) | (bits[4] << 16) | (bits[5] << 24) + alphacode2 = bits[0] | (bits[1] << 8) + + color0, color1 = struct.unpack_from("<HH", block, 8) + + (code,) = struct.unpack_from("<I", block, 12) + + r0, g0, b0 = unpack_565(color0) + r1, g1, b1 = unpack_565(color1) + + for j in range(4): + for i in range(4): + # get next control op and generate a pixel + alphacode_index = 3 * (4 * j + i) + + if alphacode_index <= 12: + alphacode = (alphacode2 >> alphacode_index) & 0x07 + elif alphacode_index == 15: + alphacode = (alphacode2 >> 15) | ((alphacode1 << 1) & 0x06) + else: # alphacode_index >= 18 and alphacode_index <= 45 + alphacode = (alphacode1 >> (alphacode_index - 16)) & 0x07 + + if alphacode == 0: + a = a0 + elif alphacode == 1: + a = a1 + elif a0 > a1: + a = ((8 - alphacode) * a0 + (alphacode - 1) * a1) // 7 + elif alphacode == 6: + a = 0 + elif alphacode == 7: + a = 255 + else: + a = ((6 - alphacode) * a0 + (alphacode - 1) * a1) // 5 + + color_code = (code >> 2 * (4 * j + i)) & 0x03 + + if color_code == 0: + r, g, b = r0, g0, b0 + elif color_code == 1: + r, g, b = r1, g1, b1 + elif color_code == 2: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + elif color_code == 3: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + + ret[j].extend([r, g, b, a]) + + return ret + + +class BLPFormatError(NotImplementedError): + pass + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] in (b"BLP1", b"BLP2") + + +class BlpImageFile(ImageFile.ImageFile): + """ + Blizzard Mipmap Format + """ + + format = "BLP" + format_description = "Blizzard Mipmap Format" + + def _open(self) -> None: + self.magic = self.fp.read(4) + + self.fp.seek(5, os.SEEK_CUR) + (self._blp_alpha_depth,) = struct.unpack("<b", self.fp.read(1)) + + self.fp.seek(2, os.SEEK_CUR) + self._size = struct.unpack("<II", self.fp.read(8)) + + if self.magic in (b"BLP1", b"BLP2"): + decoder = self.magic.decode() + else: + msg = f"Bad BLP magic {repr(self.magic)}" + raise BLPFormatError(msg) + + self._mode = "RGBA" if self._blp_alpha_depth else "RGB" + self.tile = [(decoder, (0, 0) + self.size, 0, (self.mode, 0, 1))] + + +class _BLPBaseDecoder(ImageFile.PyDecoder): + _pulls_fd = True + + def decode(self, buffer: bytes) -> tuple[int, int]: + try: + self._read_blp_header() + self._load() + except struct.error as e: + msg = "Truncated BLP file" + raise OSError(msg) from e + return -1, 0 + + @abc.abstractmethod + def _load(self) -> None: + pass + + def _read_blp_header(self) -> None: + assert self.fd is not None + self.fd.seek(4) + (self._blp_compression,) = struct.unpack("<i", self._safe_read(4)) + + (self._blp_encoding,) = struct.unpack("<b", self._safe_read(1)) + (self._blp_alpha_depth,) = struct.unpack("<b", self._safe_read(1)) + (self._blp_alpha_encoding,) = struct.unpack("<b", self._safe_read(1)) + self.fd.seek(1, os.SEEK_CUR) # mips + + self.size = struct.unpack("<II", self._safe_read(8)) + + if isinstance(self, BLP1Decoder): + # Only present for BLP1 + (self._blp_encoding,) = struct.unpack("<i", self._safe_read(4)) + self.fd.seek(4, os.SEEK_CUR) # subtype + + self._blp_offsets = struct.unpack("<16I", self._safe_read(16 * 4)) + self._blp_lengths = struct.unpack("<16I", self._safe_read(16 * 4)) + + def _safe_read(self, length: int) -> bytes: + return ImageFile._safe_read(self.fd, length) + + def _read_palette(self) -> list[tuple[int, int, int, int]]: + ret = [] + for i in range(256): + try: + b, g, r, a = struct.unpack("<4B", self._safe_read(4)) + except struct.error: + break + ret.append((b, g, r, a)) + return ret + + def _read_bgra(self, palette: list[tuple[int, int, int, int]]) -> bytearray: + data = bytearray() + _data = BytesIO(self._safe_read(self._blp_lengths[0])) + while True: + try: + (offset,) = struct.unpack("<B", _data.read(1)) + except struct.error: + break + b, g, r, a = palette[offset] + d: tuple[int, ...] = (r, g, b) + if self._blp_alpha_depth: + d += (a,) + data.extend(d) + return data + + +class BLP1Decoder(_BLPBaseDecoder): + def _load(self) -> None: + if self._blp_compression == Format.JPEG: + self._decode_jpeg_stream() + + elif self._blp_compression == 1: + if self._blp_encoding in (4, 5): + palette = self._read_palette() + data = self._read_bgra(palette) + self.set_as_raw(data) + else: + msg = f"Unsupported BLP encoding {repr(self._blp_encoding)}" + raise BLPFormatError(msg) + else: + msg = f"Unsupported BLP compression {repr(self._blp_encoding)}" + raise BLPFormatError(msg) + + def _decode_jpeg_stream(self) -> None: + from .JpegImagePlugin import JpegImageFile + + (jpeg_header_size,) = struct.unpack("<I", self._safe_read(4)) + jpeg_header = self._safe_read(jpeg_header_size) + assert self.fd is not None + self._safe_read(self._blp_offsets[0] - self.fd.tell()) # What IS this? + data = self._safe_read(self._blp_lengths[0]) + data = jpeg_header + data + image = JpegImageFile(BytesIO(data)) + Image._decompression_bomb_check(image.size) + if image.mode == "CMYK": + decoder_name, extents, offset, args = image.tile[0] + image.tile = [(decoder_name, extents, offset, (args[0], "CMYK"))] + r, g, b = image.convert("RGB").split() + reversed_image = Image.merge("RGB", (b, g, r)) + self.set_as_raw(reversed_image.tobytes()) + + +class BLP2Decoder(_BLPBaseDecoder): + def _load(self) -> None: + palette = self._read_palette() + + assert self.fd is not None + self.fd.seek(self._blp_offsets[0]) + + if self._blp_compression == 1: + # Uncompressed or DirectX compression + + if self._blp_encoding == Encoding.UNCOMPRESSED: + data = self._read_bgra(palette) + + elif self._blp_encoding == Encoding.DXT: + data = bytearray() + if self._blp_alpha_encoding == AlphaEncoding.DXT1: + linesize = (self.size[0] + 3) // 4 * 8 + for yb in range((self.size[1] + 3) // 4): + for d in decode_dxt1( + self._safe_read(linesize), alpha=bool(self._blp_alpha_depth) + ): + data += d + + elif self._blp_alpha_encoding == AlphaEncoding.DXT3: + linesize = (self.size[0] + 3) // 4 * 16 + for yb in range((self.size[1] + 3) // 4): + for d in decode_dxt3(self._safe_read(linesize)): + data += d +
+ elif self._blp_alpha_encoding == AlphaEncoding.DXT5: + linesize = (self.size[0] + 3) // 4 * 16 + for yb in range((self.size[1] + 3) // 4): + for d in decode_dxt5(self._safe_read(linesize)): + data += d + else: + msg = f"Unsupported alpha encoding {repr(self._blp_alpha_encoding)}" + raise BLPFormatError(msg) + else: + msg = f"Unknown BLP encoding {repr(self._blp_encoding)}" + raise BLPFormatError(msg) + + else: + msg = f"Unknown BLP compression {repr(self._blp_compression)}" + raise BLPFormatError(msg) + + self.set_as_raw(data) + + +class BLPEncoder(ImageFile.PyEncoder): + _pushes_fd = True + + def _write_palette(self) -> bytes: + data = b"" + assert self.im is not None + palette = self.im.getpalette("RGBA", "RGBA") + for i in range(len(palette) // 4): + r, g, b, a = palette[i * 4 : (i + 1) * 4] + data += struct.pack("<4B", b, g, r, a) + while len(data) < 256 * 4: + data += b"\x00" * 4 + return data + + def encode(self, bufsize: int) -> tuple[int, int, bytes]: + palette_data = self._write_palette() + + offset = 20 + 16 * 4 * 2 + len(palette_data) + data = struct.pack("<16I", offset, *((0,) * 15)) + + assert self.im is not None + w, h = self.im.size + data += struct.pack("<16I", w * h, *((0,) * 15)) + + data += palette_data + + for y in range(h): + for x in range(w): + data += struct.pack(" None: + if im.mode != "P": + msg = "Unsupported BLP image mode" + raise ValueError(msg) + + magic = b"BLP1" if im.encoderinfo.get("blp_version") == "BLP1" else b"BLP2" + fp.write(magic) + + fp.write(struct.pack(" mode, rawmode + 1: ("P", "P;1"), + 4: ("P", "P;4"), + 8: ("P", "P"), + 16: ("RGB", "BGR;15"), + 24: ("RGB", "BGR"), + 32: ("RGB", "BGRX"), +} + + +def _accept(prefix: bytes) -> bool: + return prefix[:2] == b"BM" + + +def _dib_accept(prefix: bytes) -> bool: + return i32(prefix) in [12, 40, 52, 56, 64, 108, 124] + + +# ============================================================================= +# Image plugin for the Windows BMP format. 
+# ============================================================================= +class BmpImageFile(ImageFile.ImageFile): + """Image plugin for the Windows Bitmap format (BMP)""" + + # ------------------------------------------------------------- Description + format_description = "Windows Bitmap" + format = "BMP" + + # -------------------------------------------------- BMP Compression values + COMPRESSIONS = {"RAW": 0, "RLE8": 1, "RLE4": 2, "BITFIELDS": 3, "JPEG": 4, "PNG": 5} + for k, v in COMPRESSIONS.items(): + vars()[k] = v + + def _bitmap(self, header=0, offset=0): + """Read relevant info about the BMP""" + read, seek = self.fp.read, self.fp.seek + if header: + seek(header) + # read bmp header size @offset 14 (this is part of the header size) + file_info = {"header_size": i32(read(4)), "direction": -1} + + # -------------------- If requested, read header at a specific position + # read the rest of the bmp header, without its size + header_data = ImageFile._safe_read(self.fp, file_info["header_size"] - 4) + + # ------------------------------- Windows Bitmap v2, IBM OS/2 Bitmap v1 + # ----- This format has different offsets because of width/height types + # 12: BITMAPCOREHEADER/OS21XBITMAPHEADER + if file_info["header_size"] == 12: + file_info["width"] = i16(header_data, 0) + file_info["height"] = i16(header_data, 2) + file_info["planes"] = i16(header_data, 4) + file_info["bits"] = i16(header_data, 6) + file_info["compression"] = self.RAW + file_info["palette_padding"] = 3 + + # --------------------------------------------- Windows Bitmap v3 to v5 + # 40: BITMAPINFOHEADER + # 52: BITMAPV2HEADER + # 56: BITMAPV3HEADER + # 64: BITMAPCOREHEADER2/OS22XBITMAPHEADER + # 108: BITMAPV4HEADER + # 124: BITMAPV5HEADER + elif file_info["header_size"] in (40, 52, 56, 64, 108, 124): + file_info["y_flip"] = header_data[7] == 0xFF + file_info["direction"] = 1 if file_info["y_flip"] else -1 + file_info["width"] = i32(header_data, 0) + file_info["height"] = ( + i32(header_data, 4) + if not file_info["y_flip"] + else 2**32 - i32(header_data, 4) + ) + file_info["planes"] = i16(header_data, 8) + file_info["bits"] = i16(header_data, 10) + file_info["compression"] = i32(header_data, 12) + # byte size of pixel data + file_info["data_size"] = i32(header_data, 16) + file_info["pixels_per_meter"] = ( + i32(header_data, 20), + i32(header_data, 24), + ) + file_info["colors"] = i32(header_data, 28) + file_info["palette_padding"] = 4 + self.info["dpi"] = tuple(x / 39.3701 for x in file_info["pixels_per_meter"]) + if file_info["compression"] == self.BITFIELDS: + masks = ["r_mask", "g_mask", "b_mask"] + if len(header_data) >= 48: + if len(header_data) >= 52: + masks.append("a_mask") + else: + file_info["a_mask"] = 0x0 + for idx, mask in enumerate(masks): + file_info[mask] = i32(header_data, 36 + idx * 4) + else: + # 40 byte headers only have the three components in the + # bitfields masks, ref: + # https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx + # See also + # https://github.com/python-pillow/Pillow/issues/1293 + # There is a 4th component in the RGBQuad, in the alpha + # location, but it is listed as a reserved component, + # and it is not generally an alpha channel + file_info["a_mask"] = 0x0 + for mask in masks: + file_info[mask] = i32(read(4)) + file_info["rgb_mask"] = ( + file_info["r_mask"], + file_info["g_mask"], + file_info["b_mask"], + ) + file_info["rgba_mask"] = ( + file_info["r_mask"], + file_info["g_mask"], + file_info["b_mask"], + file_info["a_mask"], + ) + else: + msg 
= f"Unsupported BMP header type ({file_info['header_size']})" + raise OSError(msg) + + # ------------------ Special case : header is reported 40, which + # ---------------------- is shorter than real size for bpp >= 16 + self._size = file_info["width"], file_info["height"] + + # ------- If color count was not found in the header, compute from bits + file_info["colors"] = ( + file_info["colors"] + if file_info.get("colors", 0) + else (1 << file_info["bits"]) + ) + if offset == 14 + file_info["header_size"] and file_info["bits"] <= 8: + offset += 4 * file_info["colors"] + + # ---------------------- Check bit depth for unusual unsupported values + self._mode, raw_mode = BIT2MODE.get(file_info["bits"], (None, None)) + if self.mode is None: + msg = f"Unsupported BMP pixel depth ({file_info['bits']})" + raise OSError(msg) + + # ---------------- Process BMP with Bitfields compression (not palette) + decoder_name = "raw" + if file_info["compression"] == self.BITFIELDS: + SUPPORTED = { + 32: [ + (0xFF0000, 0xFF00, 0xFF, 0x0), + (0xFF000000, 0xFF0000, 0xFF00, 0x0), + (0xFF000000, 0xFF00, 0xFF, 0x0), + (0xFF000000, 0xFF0000, 0xFF00, 0xFF), + (0xFF, 0xFF00, 0xFF0000, 0xFF000000), + (0xFF0000, 0xFF00, 0xFF, 0xFF000000), + (0xFF000000, 0xFF00, 0xFF, 0xFF0000), + (0x0, 0x0, 0x0, 0x0), + ], + 24: [(0xFF0000, 0xFF00, 0xFF)], + 16: [(0xF800, 0x7E0, 0x1F), (0x7C00, 0x3E0, 0x1F)], + } + MASK_MODES = { + (32, (0xFF0000, 0xFF00, 0xFF, 0x0)): "BGRX", + (32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)): "XBGR", + (32, (0xFF000000, 0xFF00, 0xFF, 0x0)): "BGXR", + (32, (0xFF000000, 0xFF0000, 0xFF00, 0xFF)): "ABGR", + (32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)): "RGBA", + (32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)): "BGRA", + (32, (0xFF000000, 0xFF00, 0xFF, 0xFF0000)): "BGAR", + (32, (0x0, 0x0, 0x0, 0x0)): "BGRA", + (24, (0xFF0000, 0xFF00, 0xFF)): "BGR", + (16, (0xF800, 0x7E0, 0x1F)): "BGR;16", + (16, (0x7C00, 0x3E0, 0x1F)): "BGR;15", + } + if file_info["bits"] in SUPPORTED: + if ( + file_info["bits"] == 32 + and file_info["rgba_mask"] in SUPPORTED[file_info["bits"]] + ): + raw_mode = MASK_MODES[(file_info["bits"], file_info["rgba_mask"])] + self._mode = "RGBA" if "A" in raw_mode else self.mode + elif ( + file_info["bits"] in (24, 16) + and file_info["rgb_mask"] in SUPPORTED[file_info["bits"]] + ): + raw_mode = MASK_MODES[(file_info["bits"], file_info["rgb_mask"])] + else: + msg = "Unsupported BMP bitfields layout" + raise OSError(msg) + else: + msg = "Unsupported BMP bitfields layout" + raise OSError(msg) + elif file_info["compression"] == self.RAW: + if file_info["bits"] == 32 and header == 22: # 32-bit .cur offset + raw_mode, self._mode = "BGRA", "RGBA" + elif file_info["compression"] in (self.RLE8, self.RLE4): + decoder_name = "bmp_rle" + else: + msg = f"Unsupported BMP compression ({file_info['compression']})" + raise OSError(msg) + + # --------------- Once the header is processed, process the palette/LUT + if self.mode == "P": # Paletted for 1, 4 and 8 bit images + # ---------------------------------------------------- 1-bit images + if not (0 < file_info["colors"] <= 65536): + msg = f"Unsupported BMP Palette size ({file_info['colors']})" + raise OSError(msg) + else: + padding = file_info["palette_padding"] + palette = read(padding * file_info["colors"]) + grayscale = True + indices = ( + (0, 255) + if file_info["colors"] == 2 + else list(range(file_info["colors"])) + ) + + # ----------------- Check if grayscale and ignore palette if so + for ind, val in enumerate(indices): + rgb = palette[ind * padding : ind * padding + 
3] + if rgb != o8(val) * 3: + grayscale = False + + # ------- If all colors are gray, white or black, ditch palette + if grayscale: + self._mode = "1" if file_info["colors"] == 2 else "L" + raw_mode = self.mode + else: + self._mode = "P" + self.palette = ImagePalette.raw( + "BGRX" if padding == 4 else "BGR", palette + ) + + # ---------------------------- Finally set the tile data for the plugin + self.info["compression"] = file_info["compression"] + args = [raw_mode] + if decoder_name == "bmp_rle": + args.append(file_info["compression"] == self.RLE4) + else: + args.append(((file_info["width"] * file_info["bits"] + 31) >> 3) & (~3)) + args.append(file_info["direction"]) + self.tile = [ + ( + decoder_name, + (0, 0, file_info["width"], file_info["height"]), + offset or self.fp.tell(), + tuple(args), + ) + ] + + def _open(self) -> None: + """Open file, check magic number and read header""" + # read 14 bytes: magic number, filesize, reserved, header final offset + head_data = self.fp.read(14) + # choke if the file does not have the required magic bytes + if not _accept(head_data): + msg = "Not a BMP file" + raise SyntaxError(msg) + # read the start position of the BMP image data (u32) + offset = i32(head_data, 10) + # load bitmap information (offset=raster info) + self._bitmap(offset=offset) + + +class BmpRleDecoder(ImageFile.PyDecoder): + _pulls_fd = True + + def decode(self, buffer: bytes) -> tuple[int, int]: + assert self.fd is not None + rle4 = self.args[1] + data = bytearray() + x = 0 + dest_length = self.state.xsize * self.state.ysize + while len(data) < dest_length: + pixels = self.fd.read(1) + byte = self.fd.read(1) + if not pixels or not byte: + break + num_pixels = pixels[0] + if num_pixels: + # encoded mode + if x + num_pixels > self.state.xsize: + # Too much data for row + num_pixels = max(0, self.state.xsize - x) + if rle4: + first_pixel = o8(byte[0] >> 4) + second_pixel = o8(byte[0] & 0x0F) + for index in range(num_pixels): + if index % 2 == 0: + data += first_pixel + else: + data += second_pixel + else: + data += byte * num_pixels + x += num_pixels + else: + if byte[0] == 0: + # end of line + while len(data) % self.state.xsize != 0: + data += b"\x00" + x = 0 + elif byte[0] == 1: + # end of bitmap + break + elif byte[0] == 2: + # delta + bytes_read = self.fd.read(2) + if len(bytes_read) < 2: + break + right, up = self.fd.read(2) + data += b"\x00" * (right + up * self.state.xsize) + x = len(data) % self.state.xsize + else: + # absolute mode + if rle4: + # 2 pixels per byte + byte_count = byte[0] // 2 + bytes_read = self.fd.read(byte_count) + for byte_read in bytes_read: + data += o8(byte_read >> 4) + data += o8(byte_read & 0x0F) + else: + byte_count = byte[0] + bytes_read = self.fd.read(byte_count) + data += bytes_read + if len(bytes_read) < byte_count: + break + x += byte[0] + + # align to 16-bit word boundary + if self.fd.tell() % 2 != 0: + self.fd.seek(1, os.SEEK_CUR) + rawmode = "L" if self.mode == "L" else "P" + self.set_as_raw(bytes(data), (rawmode, 0, self.args[-1])) + return -1, 0 + + +# ============================================================================= +# Image plugin for the DIB format (BMP alias) +# ============================================================================= +class DibImageFile(BmpImageFile): + format = "DIB" + format_description = "Windows Bitmap" + + def _open(self) -> None: + self._bitmap() + + +# +# -------------------------------------------------------------------- +# Write BMP file + + +SAVE = { + "1": ("1", 1, 2), + "L": ("L", 8, 
256), + "P": ("P", 8, 256), + "RGB": ("BGR", 24, 0), + "RGBA": ("BGRA", 32, 0), +} + + +def _dib_save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + _save(im, fp, filename, False) + + +def _save( + im: Image.Image, fp: IO[bytes], filename: str | bytes, bitmap_header: bool = True +) -> None: + try: + rawmode, bits, colors = SAVE[im.mode] + except KeyError as e: + msg = f"cannot write mode {im.mode} as BMP" + raise OSError(msg) from e + + info = im.encoderinfo + + dpi = info.get("dpi", (96, 96)) + + # 1 meter == 39.3701 inches + ppm = tuple(int(x * 39.3701 + 0.5) for x in dpi) + + stride = ((im.size[0] * bits + 7) // 8 + 3) & (~3) + header = 40 # or 64 for OS/2 version 2 + image = stride * im.size[1] + + if im.mode == "1": + palette = b"".join(o8(i) * 4 for i in (0, 255)) + elif im.mode == "L": + palette = b"".join(o8(i) * 4 for i in range(256)) + elif im.mode == "P": + palette = im.im.getpalette("RGB", "BGRX") + colors = len(palette) // 4 + else: + palette = None + + # bitmap header + if bitmap_header: + offset = 14 + header + colors * 4 + file_size = offset + image + if file_size > 2**32 - 1: + msg = "File size is too large for the BMP format" + raise ValueError(msg) + fp.write( + b"BM" # file type (magic) + + o32(file_size) # file size + + o32(0) # reserved + + o32(offset) # image data offset + ) + + # bitmap info header + fp.write( + o32(header) # info header size + + o32(im.size[0]) # width + + o32(im.size[1]) # height + + o16(1) # planes + + o16(bits) # depth + + o32(0) # compression (0=uncompressed) + + o32(image) # size of bitmap + + o32(ppm[0]) # resolution + + o32(ppm[1]) # resolution + + o32(colors) # colors used + + o32(colors) # colors important + ) + + fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) + + if palette: + fp.write(palette) + + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, stride, -1))]) + + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(BmpImageFile.format, BmpImageFile, _accept) +Image.register_save(BmpImageFile.format, _save) + +Image.register_extension(BmpImageFile.format, ".bmp") + +Image.register_mime(BmpImageFile.format, "image/bmp") + +Image.register_decoder("bmp_rle", BmpRleDecoder) + +Image.register_open(DibImageFile.format, DibImageFile, _dib_accept) +Image.register_save(DibImageFile.format, _dib_save) + +Image.register_extension(DibImageFile.format, ".dib") + +Image.register_mime(DibImageFile.format, "image/bmp") diff --git a/MLPY/Lib/site-packages/PIL/BufrStubImagePlugin.py b/MLPY/Lib/site-packages/PIL/BufrStubImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..4d8b8ac8eda5d6ef8cdb97f173b63ab484d3a165 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/BufrStubImagePlugin.py @@ -0,0 +1,76 @@ +# +# The Python Imaging Library +# $Id$ +# +# BUFR stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from typing import IO + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler: ImageFile.StubHandler | None) -> None: + """ + Install application-specific BUFR image handler. + + :param handler: Handler object. 
+ """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC" + + +class BufrStubImageFile(ImageFile.StubImageFile): + format = "BUFR" + format_description = "BUFR" + + def _open(self) -> None: + offset = self.fp.tell() + + if not _accept(self.fp.read(4)): + msg = "Not a BUFR file" + raise SyntaxError(msg) + + self.fp.seek(offset) + + # make something up + self._mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self) -> ImageFile.StubHandler | None: + return _handler + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if _handler is None or not hasattr(_handler, "save"): + msg = "BUFR save handler not installed" + raise OSError(msg) + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept) +Image.register_save(BufrStubImageFile.format, _save) + +Image.register_extension(BufrStubImageFile.format, ".bufr") diff --git a/MLPY/Lib/site-packages/PIL/ContainerIO.py b/MLPY/Lib/site-packages/PIL/ContainerIO.py new file mode 100644 index 0000000000000000000000000000000000000000..b88782f2df143ebe3d6421ca24e96762d9ab0d6a --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ContainerIO.py @@ -0,0 +1,121 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a class to read from a container file +# +# History: +# 1995-06-18 fl Created +# 1995-09-07 fl Added readline(), readlines() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import io +from typing import IO, AnyStr, Generic, Literal + + +class ContainerIO(Generic[AnyStr]): + """ + A file object that provides read access to a part of an existing + file (for example a TAR file). + """ + + def __init__(self, file: IO[AnyStr], offset: int, length: int) -> None: + """ + Create file object. + + :param file: Existing file. + :param offset: Start of region, in bytes. + :param length: Size of region, in bytes. + """ + self.fh: IO[AnyStr] = file + self.pos = 0 + self.offset = offset + self.length = length + self.fh.seek(offset) + + ## + # Always false. + + def isatty(self) -> bool: + return False + + def seek(self, offset: int, mode: Literal[0, 1, 2] = io.SEEK_SET) -> None: + """ + Move file pointer. + + :param offset: Offset in bytes. + :param mode: Starting position. Use 0 for beginning of region, 1 + for current offset, and 2 for end of region. You cannot move + the pointer outside the defined region. + """ + if mode == 1: + self.pos = self.pos + offset + elif mode == 2: + self.pos = self.length + offset + else: + self.pos = offset + # clamp + self.pos = max(0, min(self.pos, self.length)) + self.fh.seek(self.offset + self.pos) + + def tell(self) -> int: + """ + Get current file pointer. + + :returns: Offset from start of region, in bytes. + """ + return self.pos + + def read(self, n: int = 0) -> AnyStr: + """ + Read data. + + :param n: Number of bytes to read. If omitted or zero, + read until end of region. + :returns: An 8-bit string. 
+ """ + if n: + n = min(n, self.length - self.pos) + else: + n = self.length - self.pos + if not n: # EOF + return b"" if "b" in self.fh.mode else "" # type: ignore[return-value] + self.pos = self.pos + n + return self.fh.read(n) + + def readline(self) -> AnyStr: + """ + Read a line of text. + + :returns: An 8-bit string. + """ + s: AnyStr = b"" if "b" in self.fh.mode else "" # type: ignore[assignment] + newline_character = b"\n" if "b" in self.fh.mode else "\n" + while True: + c = self.read(1) + if not c: + break + s = s + c + if c == newline_character: + break + return s + + def readlines(self) -> list[AnyStr]: + """ + Read multiple lines of text. + + :returns: A list of 8-bit strings. + """ + lines = [] + while True: + s = self.readline() + if not s: + break + lines.append(s) + return lines diff --git a/MLPY/Lib/site-packages/PIL/CurImagePlugin.py b/MLPY/Lib/site-packages/PIL/CurImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..d192b8b597b2ce0d4424693417af2ebfeb29d285 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/CurImagePlugin.py @@ -0,0 +1,75 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Cursor support for PIL +# +# notes: +# uses BmpImagePlugin.py to read the bitmap data. +# +# history: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from . import BmpImagePlugin, Image +from ._binary import i16le as i16 +from ._binary import i32le as i32 + +# +# -------------------------------------------------------------------- + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == b"\0\0\2\0" + + +## +# Image plugin for Windows Cursor files. + + +class CurImageFile(BmpImagePlugin.BmpImageFile): + format = "CUR" + format_description = "Windows Cursor" + + def _open(self) -> None: + offset = self.fp.tell() + + # check magic + s = self.fp.read(6) + if not _accept(s): + msg = "not a CUR file" + raise SyntaxError(msg) + + # pick the largest cursor in the file + m = b"" + for i in range(i16(s, 4)): + s = self.fp.read(16) + if not m: + m = s + elif s[0] > m[0] and s[1] > m[1]: + m = s + if not m: + msg = "No cursors were found" + raise TypeError(msg) + + # load as bitmap + self._bitmap(i32(m, 12) + offset) + + # patch up the bitmap height + self._size = self.size[0], self.size[1] // 2 + d, e, o, a = self.tile[0] + self.tile[0] = d, (0, 0) + self.size, o, a + + +# +# -------------------------------------------------------------------- + +Image.register_open(CurImageFile.format, CurImageFile, _accept) + +Image.register_extension(CurImageFile.format, ".cur") diff --git a/MLPY/Lib/site-packages/PIL/DcxImagePlugin.py b/MLPY/Lib/site-packages/PIL/DcxImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..5d4184f10bce8f79c8ec0eae7c3bb0cf69c37828 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/DcxImagePlugin.py @@ -0,0 +1,80 @@ +# +# The Python Imaging Library. +# $Id$ +# +# DCX file handling +# +# DCX is a container file format defined by Intel, commonly used +# for fax applications. Each DCX file consists of a directory +# (a list of file offsets) followed by a set of (usually 1-bit) +# PCX files. +# +# History: +# 1995-09-09 fl Created +# 1996-03-20 fl Properly derived from PcxImageFile. +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2002-07-30 fl Fixed file handling +# +# Copyright (c) 1997-98 by Secret Labs AB. 
+# Copyright (c) 1995-96 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from . import Image +from ._binary import i32le as i32 +from .PcxImagePlugin import PcxImageFile + +MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? + + +def _accept(prefix: bytes) -> bool: + return len(prefix) >= 4 and i32(prefix) == MAGIC + + +## +# Image plugin for the Intel DCX format. + + +class DcxImageFile(PcxImageFile): + format = "DCX" + format_description = "Intel DCX" + _close_exclusive_fp_after_loading = False + + def _open(self) -> None: + # Header + s = self.fp.read(4) + if not _accept(s): + msg = "not a DCX file" + raise SyntaxError(msg) + + # Component directory + self._offset = [] + for i in range(1024): + offset = i32(self.fp.read(4)) + if not offset: + break + self._offset.append(offset) + + self._fp = self.fp + self.frame = -1 + self.n_frames = len(self._offset) + self.is_animated = self.n_frames > 1 + self.seek(0) + + def seek(self, frame: int) -> None: + if not self._seek_check(frame): + return + self.frame = frame + self.fp = self._fp + self.fp.seek(self._offset[frame]) + PcxImageFile._open(self) + + def tell(self) -> int: + return self.frame + + +Image.register_open(DcxImageFile.format, DcxImageFile, _accept) + +Image.register_extension(DcxImageFile.format, ".dcx") diff --git a/MLPY/Lib/site-packages/PIL/DdsImagePlugin.py b/MLPY/Lib/site-packages/PIL/DdsImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..4808ec136a7cf2978094ed68ef65071b527f3bb8 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/DdsImagePlugin.py @@ -0,0 +1,575 @@ +""" +A Pillow loader for .dds files (S3TC-compressed aka DXTC) +Jerome Leclanche + +Documentation: +https://web.archive.org/web/20170802060935/http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_compression_s3tc.txt + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: +https://creativecommons.org/publicdomain/zero/1.0/ +""" + +from __future__ import annotations + +import io +import struct +import sys +from enum import IntEnum, IntFlag +from typing import IO + +from . 
import Image, ImageFile, ImagePalette +from ._binary import i32le as i32 +from ._binary import o8 +from ._binary import o32le as o32 + +# Magic ("DDS ") +DDS_MAGIC = 0x20534444 + + +# DDS flags +class DDSD(IntFlag): + CAPS = 0x1 + HEIGHT = 0x2 + WIDTH = 0x4 + PITCH = 0x8 + PIXELFORMAT = 0x1000 + MIPMAPCOUNT = 0x20000 + LINEARSIZE = 0x80000 + DEPTH = 0x800000 + + +# DDS caps +class DDSCAPS(IntFlag): + COMPLEX = 0x8 + TEXTURE = 0x1000 + MIPMAP = 0x400000 + + +class DDSCAPS2(IntFlag): + CUBEMAP = 0x200 + CUBEMAP_POSITIVEX = 0x400 + CUBEMAP_NEGATIVEX = 0x800 + CUBEMAP_POSITIVEY = 0x1000 + CUBEMAP_NEGATIVEY = 0x2000 + CUBEMAP_POSITIVEZ = 0x4000 + CUBEMAP_NEGATIVEZ = 0x8000 + VOLUME = 0x200000 + + +# Pixel Format +class DDPF(IntFlag): + ALPHAPIXELS = 0x1 + ALPHA = 0x2 + FOURCC = 0x4 + PALETTEINDEXED8 = 0x20 + RGB = 0x40 + LUMINANCE = 0x20000 + + +# dxgiformat.h +class DXGI_FORMAT(IntEnum): + UNKNOWN = 0 + R32G32B32A32_TYPELESS = 1 + R32G32B32A32_FLOAT = 2 + R32G32B32A32_UINT = 3 + R32G32B32A32_SINT = 4 + R32G32B32_TYPELESS = 5 + R32G32B32_FLOAT = 6 + R32G32B32_UINT = 7 + R32G32B32_SINT = 8 + R16G16B16A16_TYPELESS = 9 + R16G16B16A16_FLOAT = 10 + R16G16B16A16_UNORM = 11 + R16G16B16A16_UINT = 12 + R16G16B16A16_SNORM = 13 + R16G16B16A16_SINT = 14 + R32G32_TYPELESS = 15 + R32G32_FLOAT = 16 + R32G32_UINT = 17 + R32G32_SINT = 18 + R32G8X24_TYPELESS = 19 + D32_FLOAT_S8X24_UINT = 20 + R32_FLOAT_X8X24_TYPELESS = 21 + X32_TYPELESS_G8X24_UINT = 22 + R10G10B10A2_TYPELESS = 23 + R10G10B10A2_UNORM = 24 + R10G10B10A2_UINT = 25 + R11G11B10_FLOAT = 26 + R8G8B8A8_TYPELESS = 27 + R8G8B8A8_UNORM = 28 + R8G8B8A8_UNORM_SRGB = 29 + R8G8B8A8_UINT = 30 + R8G8B8A8_SNORM = 31 + R8G8B8A8_SINT = 32 + R16G16_TYPELESS = 33 + R16G16_FLOAT = 34 + R16G16_UNORM = 35 + R16G16_UINT = 36 + R16G16_SNORM = 37 + R16G16_SINT = 38 + R32_TYPELESS = 39 + D32_FLOAT = 40 + R32_FLOAT = 41 + R32_UINT = 42 + R32_SINT = 43 + R24G8_TYPELESS = 44 + D24_UNORM_S8_UINT = 45 + R24_UNORM_X8_TYPELESS = 46 + X24_TYPELESS_G8_UINT = 47 + R8G8_TYPELESS = 48 + R8G8_UNORM = 49 + R8G8_UINT = 50 + R8G8_SNORM = 51 + R8G8_SINT = 52 + R16_TYPELESS = 53 + R16_FLOAT = 54 + D16_UNORM = 55 + R16_UNORM = 56 + R16_UINT = 57 + R16_SNORM = 58 + R16_SINT = 59 + R8_TYPELESS = 60 + R8_UNORM = 61 + R8_UINT = 62 + R8_SNORM = 63 + R8_SINT = 64 + A8_UNORM = 65 + R1_UNORM = 66 + R9G9B9E5_SHAREDEXP = 67 + R8G8_B8G8_UNORM = 68 + G8R8_G8B8_UNORM = 69 + BC1_TYPELESS = 70 + BC1_UNORM = 71 + BC1_UNORM_SRGB = 72 + BC2_TYPELESS = 73 + BC2_UNORM = 74 + BC2_UNORM_SRGB = 75 + BC3_TYPELESS = 76 + BC3_UNORM = 77 + BC3_UNORM_SRGB = 78 + BC4_TYPELESS = 79 + BC4_UNORM = 80 + BC4_SNORM = 81 + BC5_TYPELESS = 82 + BC5_UNORM = 83 + BC5_SNORM = 84 + B5G6R5_UNORM = 85 + B5G5R5A1_UNORM = 86 + B8G8R8A8_UNORM = 87 + B8G8R8X8_UNORM = 88 + R10G10B10_XR_BIAS_A2_UNORM = 89 + B8G8R8A8_TYPELESS = 90 + B8G8R8A8_UNORM_SRGB = 91 + B8G8R8X8_TYPELESS = 92 + B8G8R8X8_UNORM_SRGB = 93 + BC6H_TYPELESS = 94 + BC6H_UF16 = 95 + BC6H_SF16 = 96 + BC7_TYPELESS = 97 + BC7_UNORM = 98 + BC7_UNORM_SRGB = 99 + AYUV = 100 + Y410 = 101 + Y416 = 102 + NV12 = 103 + P010 = 104 + P016 = 105 + OPAQUE_420 = 106 + YUY2 = 107 + Y210 = 108 + Y216 = 109 + NV11 = 110 + AI44 = 111 + IA44 = 112 + P8 = 113 + A8P8 = 114 + B4G4R4A4_UNORM = 115 + P208 = 130 + V208 = 131 + V408 = 132 + SAMPLER_FEEDBACK_MIN_MIP_OPAQUE = 189 + SAMPLER_FEEDBACK_MIP_REGION_USED_OPAQUE = 190 + + +class D3DFMT(IntEnum): + UNKNOWN = 0 + R8G8B8 = 20 + A8R8G8B8 = 21 + X8R8G8B8 = 22 + R5G6B5 = 23 + X1R5G5B5 = 24 + A1R5G5B5 = 25 + A4R4G4B4 = 26 + R3G3B2 = 27 + A8 = 28 + 
A8R3G3B2 = 29 + X4R4G4B4 = 30 + A2B10G10R10 = 31 + A8B8G8R8 = 32 + X8B8G8R8 = 33 + G16R16 = 34 + A2R10G10B10 = 35 + A16B16G16R16 = 36 + A8P8 = 40 + P8 = 41 + L8 = 50 + A8L8 = 51 + A4L4 = 52 + V8U8 = 60 + L6V5U5 = 61 + X8L8V8U8 = 62 + Q8W8V8U8 = 63 + V16U16 = 64 + A2W10V10U10 = 67 + D16_LOCKABLE = 70 + D32 = 71 + D15S1 = 73 + D24S8 = 75 + D24X8 = 77 + D24X4S4 = 79 + D16 = 80 + D32F_LOCKABLE = 82 + D24FS8 = 83 + D32_LOCKABLE = 84 + S8_LOCKABLE = 85 + L16 = 81 + VERTEXDATA = 100 + INDEX16 = 101 + INDEX32 = 102 + Q16W16V16U16 = 110 + R16F = 111 + G16R16F = 112 + A16B16G16R16F = 113 + R32F = 114 + G32R32F = 115 + A32B32G32R32F = 116 + CxV8U8 = 117 + A1 = 118 + A2B10G10R10_XR_BIAS = 119 + BINARYBUFFER = 199 + + UYVY = i32(b"UYVY") + R8G8_B8G8 = i32(b"RGBG") + YUY2 = i32(b"YUY2") + G8R8_G8B8 = i32(b"GRGB") + DXT1 = i32(b"DXT1") + DXT2 = i32(b"DXT2") + DXT3 = i32(b"DXT3") + DXT4 = i32(b"DXT4") + DXT5 = i32(b"DXT5") + DX10 = i32(b"DX10") + BC4S = i32(b"BC4S") + BC4U = i32(b"BC4U") + BC5S = i32(b"BC5S") + BC5U = i32(b"BC5U") + ATI1 = i32(b"ATI1") + ATI2 = i32(b"ATI2") + MULTI2_ARGB8 = i32(b"MET1") + + +# Backward compatibility layer +module = sys.modules[__name__] +for item in DDSD: + assert item.name is not None + setattr(module, f"DDSD_{item.name}", item.value) +for item1 in DDSCAPS: + assert item1.name is not None + setattr(module, f"DDSCAPS_{item1.name}", item1.value) +for item2 in DDSCAPS2: + assert item2.name is not None + setattr(module, f"DDSCAPS2_{item2.name}", item2.value) +for item3 in DDPF: + assert item3.name is not None + setattr(module, f"DDPF_{item3.name}", item3.value) + +DDS_FOURCC = DDPF.FOURCC +DDS_RGB = DDPF.RGB +DDS_RGBA = DDPF.RGB | DDPF.ALPHAPIXELS +DDS_LUMINANCE = DDPF.LUMINANCE +DDS_LUMINANCEA = DDPF.LUMINANCE | DDPF.ALPHAPIXELS +DDS_ALPHA = DDPF.ALPHA +DDS_PAL8 = DDPF.PALETTEINDEXED8 + +DDS_HEADER_FLAGS_TEXTURE = DDSD.CAPS | DDSD.HEIGHT | DDSD.WIDTH | DDSD.PIXELFORMAT +DDS_HEADER_FLAGS_MIPMAP = DDSD.MIPMAPCOUNT +DDS_HEADER_FLAGS_VOLUME = DDSD.DEPTH +DDS_HEADER_FLAGS_PITCH = DDSD.PITCH +DDS_HEADER_FLAGS_LINEARSIZE = DDSD.LINEARSIZE + +DDS_HEIGHT = DDSD.HEIGHT +DDS_WIDTH = DDSD.WIDTH + +DDS_SURFACE_FLAGS_TEXTURE = DDSCAPS.TEXTURE +DDS_SURFACE_FLAGS_MIPMAP = DDSCAPS.COMPLEX | DDSCAPS.MIPMAP +DDS_SURFACE_FLAGS_CUBEMAP = DDSCAPS.COMPLEX + +DDS_CUBEMAP_POSITIVEX = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEX +DDS_CUBEMAP_NEGATIVEX = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEX +DDS_CUBEMAP_POSITIVEY = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEY +DDS_CUBEMAP_NEGATIVEY = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEY +DDS_CUBEMAP_POSITIVEZ = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEZ +DDS_CUBEMAP_NEGATIVEZ = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEZ + +DXT1_FOURCC = D3DFMT.DXT1 +DXT3_FOURCC = D3DFMT.DXT3 +DXT5_FOURCC = D3DFMT.DXT5 + +DXGI_FORMAT_R8G8B8A8_TYPELESS = DXGI_FORMAT.R8G8B8A8_TYPELESS +DXGI_FORMAT_R8G8B8A8_UNORM = DXGI_FORMAT.R8G8B8A8_UNORM +DXGI_FORMAT_R8G8B8A8_UNORM_SRGB = DXGI_FORMAT.R8G8B8A8_UNORM_SRGB +DXGI_FORMAT_BC5_TYPELESS = DXGI_FORMAT.BC5_TYPELESS +DXGI_FORMAT_BC5_UNORM = DXGI_FORMAT.BC5_UNORM +DXGI_FORMAT_BC5_SNORM = DXGI_FORMAT.BC5_SNORM +DXGI_FORMAT_BC6H_UF16 = DXGI_FORMAT.BC6H_UF16 +DXGI_FORMAT_BC6H_SF16 = DXGI_FORMAT.BC6H_SF16 +DXGI_FORMAT_BC7_TYPELESS = DXGI_FORMAT.BC7_TYPELESS +DXGI_FORMAT_BC7_UNORM = DXGI_FORMAT.BC7_UNORM +DXGI_FORMAT_BC7_UNORM_SRGB = DXGI_FORMAT.BC7_UNORM_SRGB + + +class DdsImageFile(ImageFile.ImageFile): + format = "DDS" + format_description = "DirectDraw Surface" + + def _open(self) -> None: + if not 
_accept(self.fp.read(4)): + msg = "not a DDS file" + raise SyntaxError(msg) + (header_size,) = struct.unpack(" None: + pass + + +class DdsRgbDecoder(ImageFile.PyDecoder): + _pulls_fd = True + + def decode(self, buffer: bytes) -> tuple[int, int]: + assert self.fd is not None + bitcount, masks = self.args + + # Some masks will be padded with zeros, e.g. R 0b11 G 0b1100 + # Calculate how many zeros each mask is padded with + mask_offsets = [] + # And the maximum value of each channel without the padding + mask_totals = [] + for mask in masks: + offset = 0 + if mask != 0: + while mask >> (offset + 1) << (offset + 1) == mask: + offset += 1 + mask_offsets.append(offset) + mask_totals.append(mask >> offset) + + data = bytearray() + bytecount = bitcount // 8 + dest_length = self.state.xsize * self.state.ysize * len(masks) + while len(data) < dest_length: + value = int.from_bytes(self.fd.read(bytecount), "little") + for i, mask in enumerate(masks): + masked_value = value & mask + # Remove the zero padding, and scale it to 8 bits + data += o8( + int(((masked_value >> mask_offsets[i]) / mask_totals[i]) * 255) + ) + self.set_as_raw(data) + return -1, 0 + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if im.mode not in ("RGB", "RGBA", "L", "LA"): + msg = f"cannot write mode {im.mode} as DDS" + raise OSError(msg) + + alpha = im.mode[-1] == "A" + if im.mode[0] == "L": + pixel_flags = DDPF.LUMINANCE + rawmode = im.mode + if alpha: + rgba_mask = [0x000000FF, 0x000000FF, 0x000000FF] + else: + rgba_mask = [0xFF000000, 0xFF000000, 0xFF000000] + else: + pixel_flags = DDPF.RGB + rawmode = im.mode[::-1] + rgba_mask = [0x00FF0000, 0x0000FF00, 0x000000FF] + + if alpha: + r, g, b, a = im.split() + im = Image.merge("RGBA", (a, r, g, b)) + if alpha: + pixel_flags |= DDPF.ALPHAPIXELS + rgba_mask.append(0xFF000000 if alpha else 0) + + flags = DDSD.CAPS | DDSD.HEIGHT | DDSD.WIDTH | DDSD.PITCH | DDSD.PIXELFORMAT + bitcount = len(im.getbands()) * 8 + pitch = (im.width * bitcount + 7) // 8 + + fp.write( + o32(DDS_MAGIC) + + struct.pack( + "<7I", + 124, # header size + flags, # flags + im.height, + im.width, + pitch, + 0, # depth + 0, # mipmaps + ) + + struct.pack("11I", *((0,) * 11)) # reserved + # pfsize, pfflags, fourcc, bitcount + + struct.pack("<4I", 32, pixel_flags, 0, bitcount) + + struct.pack("<4I", *rgba_mask) # dwRGBABitMask + + struct.pack("<5I", DDSCAPS.TEXTURE, 0, 0, 0, 0) + ) + ImageFile._save( + im, fp, [ImageFile._Tile("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))] + ) + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == b"DDS " + + +Image.register_open(DdsImageFile.format, DdsImageFile, _accept) +Image.register_decoder("dds_rgb", DdsRgbDecoder) +Image.register_save(DdsImageFile.format, _save) +Image.register_extension(DdsImageFile.format, ".dds") diff --git a/MLPY/Lib/site-packages/PIL/EpsImagePlugin.py b/MLPY/Lib/site-packages/PIL/EpsImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..06028b8106b07d44be000f817394e697f5b48c4c --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/EpsImagePlugin.py @@ -0,0 +1,478 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# EPS file handling +# +# History: +# 1995-09-01 fl Created (0.1) +# 1996-05-18 fl Don't choke on "atend" fields, Ghostscript interface (0.2) +# 1996-08-22 fl Don't choke on floating point BoundingBox values +# 1996-08-23 fl Handle files from Macintosh (0.3) +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) +# 2003-09-07 fl Check gs.close status (from Federico Di Gregorio) (0.5) +# 2014-05-07 e Handling of EPS with binary preview and fixed resolution +# resizing +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import io +import os +import re +import subprocess +import sys +import tempfile +from typing import IO + +from . import Image, ImageFile +from ._binary import i32le as i32 +from ._deprecate import deprecate + +# -------------------------------------------------------------------- + + +split = re.compile(r"^%%([^:]*):[ \t]*(.*)[ \t]*$") +field = re.compile(r"^%[%!\w]([^:]*)[ \t]*$") + +gs_binary: str | bool | None = None +gs_windows_binary = None + + +def has_ghostscript() -> bool: + global gs_binary, gs_windows_binary + if gs_binary is None: + if sys.platform.startswith("win"): + if gs_windows_binary is None: + import shutil + + for binary in ("gswin32c", "gswin64c", "gs"): + if shutil.which(binary) is not None: + gs_windows_binary = binary + break + else: + gs_windows_binary = False + gs_binary = gs_windows_binary + else: + try: + subprocess.check_call(["gs", "--version"], stdout=subprocess.DEVNULL) + gs_binary = "gs" + except OSError: + gs_binary = False + return gs_binary is not False + + +def Ghostscript(tile, size, fp, scale=1, transparency=False): + """Render an image using Ghostscript""" + global gs_binary + if not has_ghostscript(): + msg = "Unable to locate Ghostscript on paths" + raise OSError(msg) + + # Unpack decoder tile + decoder, tile, offset, data = tile[0] + length, bbox = data + + # Hack to support hi-res rendering + scale = int(scale) or 1 + width = size[0] * scale + height = size[1] * scale + # resolution is dependent on bbox and size + res_x = 72.0 * width / (bbox[2] - bbox[0]) + res_y = 72.0 * height / (bbox[3] - bbox[1]) + + out_fd, outfile = tempfile.mkstemp() + os.close(out_fd) + + infile_temp = None + if hasattr(fp, "name") and os.path.exists(fp.name): + infile = fp.name + else: + in_fd, infile_temp = tempfile.mkstemp() + os.close(in_fd) + infile = infile_temp + + # Ignore length and offset! 
+ # Ghostscript can read it + # Copy whole file to read in Ghostscript + with open(infile_temp, "wb") as f: + # fetch length of fp + fp.seek(0, io.SEEK_END) + fsize = fp.tell() + # ensure start position + # go back + fp.seek(0) + lengthfile = fsize + while lengthfile > 0: + s = fp.read(min(lengthfile, 100 * 1024)) + if not s: + break + lengthfile -= len(s) + f.write(s) + + device = "pngalpha" if transparency else "ppmraw" + + # Build Ghostscript command + command = [ + gs_binary, + "-q", # quiet mode + f"-g{width:d}x{height:d}", # set output geometry (pixels) + f"-r{res_x:f}x{res_y:f}", # set input DPI (dots per inch) + "-dBATCH", # exit after processing + "-dNOPAUSE", # don't pause between pages + "-dSAFER", # safe mode + f"-sDEVICE={device}", + f"-sOutputFile={outfile}", # output file + # adjust for image origin + "-c", + f"{-bbox[0]} {-bbox[1]} translate", + "-f", + infile, # input file + # showpage (see https://bugs.ghostscript.com/show_bug.cgi?id=698272) + "-c", + "showpage", + ] + + # push data through Ghostscript + try: + startupinfo = None + if sys.platform.startswith("win"): + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + subprocess.check_call(command, startupinfo=startupinfo) + out_im = Image.open(outfile) + out_im.load() + finally: + try: + os.unlink(outfile) + if infile_temp: + os.unlink(infile_temp) + except OSError: + pass + + im = out_im.im.copy() + out_im.close() + return im + + +class PSFile: + """ + Wrapper for bytesio object that treats either CR or LF as end of line. + This class is no longer used internally, but kept for backwards compatibility. + """ + + def __init__(self, fp): + deprecate( + "PSFile", + 11, + action="If you need the functionality of this class " + "you will need to implement it yourself.", + ) + self.fp = fp + self.char = None + + def seek(self, offset, whence=io.SEEK_SET): + self.char = None + self.fp.seek(offset, whence) + + def readline(self) -> str: + s = [self.char or b""] + self.char = None + + c = self.fp.read(1) + while (c not in b"\r\n") and len(c): + s.append(c) + c = self.fp.read(1) + + self.char = self.fp.read(1) + # line endings can be 1 or 2 of \r \n, in either order + if self.char in b"\r\n": + self.char = None + + return b"".join(s).decode("latin-1") + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == b"%!PS" or (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5) + + +## +# Image plugin for Encapsulated PostScript. This plugin supports only +# a few variants of this format. + + +class EpsImageFile(ImageFile.ImageFile): + """EPS File Parser for the Python Imaging Library""" + + format = "EPS" + format_description = "Encapsulated Postscript" + + mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"} + + def _open(self) -> None: + (length, offset) = self._find_offset(self.fp) + + # go to offset - start of "%!PS" + self.fp.seek(offset) + + self._mode = "RGB" + self._size = None + + byte_arr = bytearray(255) + bytes_mv = memoryview(byte_arr) + bytes_read = 0 + reading_header_comments = True + reading_trailer_comments = False + trailer_reached = False + + def check_required_header_comments() -> None: + """ + The EPS specification requires that some headers exist. + This should be checked when the header comments formally end, + when image data starts, or when the file ends, whichever comes first. 
+ """ + if "PS-Adobe" not in self.info: + msg = 'EPS header missing "%!PS-Adobe" comment' + raise SyntaxError(msg) + if "BoundingBox" not in self.info: + msg = 'EPS header missing "%%BoundingBox" comment' + raise SyntaxError(msg) + + def _read_comment(s: str) -> bool: + nonlocal reading_trailer_comments + try: + m = split.match(s) + except re.error as e: + msg = "not an EPS file" + raise SyntaxError(msg) from e + + if not m: + return False + + k, v = m.group(1, 2) + self.info[k] = v + if k == "BoundingBox": + if v == "(atend)": + reading_trailer_comments = True + elif not self._size or (trailer_reached and reading_trailer_comments): + try: + # Note: The DSC spec says that BoundingBox + # fields should be integers, but some drivers + # put floating point values there anyway. + box = [int(float(i)) for i in v.split()] + self._size = box[2] - box[0], box[3] - box[1] + self.tile = [("eps", (0, 0) + self.size, offset, (length, box))] + except Exception: + pass + return True + + while True: + byte = self.fp.read(1) + if byte == b"": + # if we didn't read a byte we must be at the end of the file + if bytes_read == 0: + if reading_header_comments: + check_required_header_comments() + break + elif byte in b"\r\n": + # if we read a line ending character, ignore it and parse what + # we have already read. if we haven't read any other characters, + # continue reading + if bytes_read == 0: + continue + else: + # ASCII/hexadecimal lines in an EPS file must not exceed + # 255 characters, not including line ending characters + if bytes_read >= 255: + # only enforce this for lines starting with a "%", + # otherwise assume it's binary data + if byte_arr[0] == ord("%"): + msg = "not an EPS file" + raise SyntaxError(msg) + else: + if reading_header_comments: + check_required_header_comments() + reading_header_comments = False + # reset bytes_read so we can keep reading + # data until the end of the line + bytes_read = 0 + byte_arr[bytes_read] = byte[0] + bytes_read += 1 + continue + + if reading_header_comments: + # Load EPS header + + # if this line doesn't start with a "%", + # or does start with "%%EndComments", + # then we've reached the end of the header/comments + if byte_arr[0] != ord("%") or bytes_mv[:13] == b"%%EndComments": + check_required_header_comments() + reading_header_comments = False + continue + + s = str(bytes_mv[:bytes_read], "latin-1") + if not _read_comment(s): + m = field.match(s) + if m: + k = m.group(1) + if k[:8] == "PS-Adobe": + self.info["PS-Adobe"] = k[9:] + else: + self.info[k] = "" + elif s[0] == "%": + # handle non-DSC PostScript comments that some + # tools mistakenly put in the Comments section + pass + else: + msg = "bad EPS header" + raise OSError(msg) + elif bytes_mv[:11] == b"%ImageData:": + # Check for an "ImageData" descriptor + # https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577413_pgfId-1035096 + + # Values: + # columns + # rows + # bit depth (1 or 8) + # mode (1: L, 2: LAB, 3: RGB, 4: CMYK) + # number of padding channels + # block size (number of bytes per row per channel) + # binary/ascii (1: binary, 2: ascii) + # data start identifier (the image data follows after a single line + # consisting only of this quoted value) + image_data_values = byte_arr[11:bytes_read].split(None, 7) + columns, rows, bit_depth, mode_id = ( + int(value) for value in image_data_values[:4] + ) + + if bit_depth == 1: + self._mode = "1" + elif bit_depth == 8: + try: + self._mode = self.mode_map[mode_id] + except ValueError: + break + else: + break + + self._size = columns, 
rows + return + elif bytes_mv[:5] == b"%%EOF": + break + elif trailer_reached and reading_trailer_comments: + # Load EPS trailer + s = str(bytes_mv[:bytes_read], "latin-1") + _read_comment(s) + elif bytes_mv[:9] == b"%%Trailer": + trailer_reached = True + bytes_read = 0 + + if not self._size: + msg = "cannot determine EPS bounding box" + raise OSError(msg) + + def _find_offset(self, fp): + s = fp.read(4) + + if s == b"%!PS": + # for HEAD without binary preview + fp.seek(0, io.SEEK_END) + length = fp.tell() + offset = 0 + elif i32(s) == 0xC6D3D0C5: + # FIX for: Some EPS file not handled correctly / issue #302 + # EPS can contain binary data + # or start directly with latin coding + # more info see: + # https://web.archive.org/web/20160528181353/http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf + s = fp.read(8) + offset = i32(s) + length = i32(s, 4) + else: + msg = "not an EPS file" + raise SyntaxError(msg) + + return length, offset + + def load(self, scale=1, transparency=False): + # Load EPS via Ghostscript + if self.tile: + self.im = Ghostscript(self.tile, self.size, self.fp, scale, transparency) + self._mode = self.im.mode + self._size = self.im.size + self.tile = [] + return Image.Image.load(self) + + def load_seek(self, pos: int) -> None: + # we can't incrementally load, so force ImageFile.parser to + # use our custom load method by defining this method. + pass + + +# -------------------------------------------------------------------- + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes, eps: int = 1) -> None: + """EPS Writer for the Python Imaging Library.""" + + # make sure image data is available + im.load() + + # determine PostScript image mode + if im.mode == "L": + operator = (8, 1, b"image") + elif im.mode == "RGB": + operator = (8, 3, b"false 3 colorimage") + elif im.mode == "CMYK": + operator = (8, 4, b"false 4 colorimage") + else: + msg = "image mode is not supported" + raise ValueError(msg) + + if eps: + # write EPS header + fp.write(b"%!PS-Adobe-3.0 EPSF-3.0\n") + fp.write(b"%%Creator: PIL 0.1 EpsEncode\n") + # fp.write("%%CreationDate: %s"...) 
+ fp.write(b"%%%%BoundingBox: 0 0 %d %d\n" % im.size) + fp.write(b"%%Pages: 1\n") + fp.write(b"%%EndComments\n") + fp.write(b"%%Page: 1 1\n") + fp.write(b"%%ImageData: %d %d " % im.size) + fp.write(b'%d %d 0 1 1 "%s"\n' % operator) + + # image header + fp.write(b"gsave\n") + fp.write(b"10 dict begin\n") + fp.write(b"/buf %d string def\n" % (im.size[0] * operator[1])) + fp.write(b"%d %d scale\n" % im.size) + fp.write(b"%d %d 8\n" % im.size) # <= bits + fp.write(b"[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1])) + fp.write(b"{ currentfile buf readhexstring pop } bind\n") + fp.write(operator[2] + b"\n") + if hasattr(fp, "flush"): + fp.flush() + + ImageFile._save(im, fp, [("eps", (0, 0) + im.size, 0, None)]) + + fp.write(b"\n%%%%EndBinary\n") + fp.write(b"grestore end\n") + if hasattr(fp, "flush"): + fp.flush() + + +# -------------------------------------------------------------------- + + +Image.register_open(EpsImageFile.format, EpsImageFile, _accept) + +Image.register_save(EpsImageFile.format, _save) + +Image.register_extensions(EpsImageFile.format, [".ps", ".eps"]) + +Image.register_mime(EpsImageFile.format, "application/postscript") diff --git a/MLPY/Lib/site-packages/PIL/ExifTags.py b/MLPY/Lib/site-packages/PIL/ExifTags.py new file mode 100644 index 0000000000000000000000000000000000000000..a4cbe3c48e9916e531dac7186947fd11c63fc846 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ExifTags.py @@ -0,0 +1,381 @@ +# +# The Python Imaging Library. +# $Id$ +# +# EXIF tags +# +# Copyright (c) 2003 by Secret Labs AB +# +# See the README file for information on usage and redistribution. +# + +""" +This module provides constants and clear-text names for various +well-known EXIF tags. +""" +from __future__ import annotations + +from enum import IntEnum + + +class Base(IntEnum): + # possibly incomplete + InteropIndex = 0x0001 + ProcessingSoftware = 0x000B + NewSubfileType = 0x00FE + SubfileType = 0x00FF + ImageWidth = 0x0100 + ImageLength = 0x0101 + BitsPerSample = 0x0102 + Compression = 0x0103 + PhotometricInterpretation = 0x0106 + Thresholding = 0x0107 + CellWidth = 0x0108 + CellLength = 0x0109 + FillOrder = 0x010A + DocumentName = 0x010D + ImageDescription = 0x010E + Make = 0x010F + Model = 0x0110 + StripOffsets = 0x0111 + Orientation = 0x0112 + SamplesPerPixel = 0x0115 + RowsPerStrip = 0x0116 + StripByteCounts = 0x0117 + MinSampleValue = 0x0118 + MaxSampleValue = 0x0119 + XResolution = 0x011A + YResolution = 0x011B + PlanarConfiguration = 0x011C + PageName = 0x011D + FreeOffsets = 0x0120 + FreeByteCounts = 0x0121 + GrayResponseUnit = 0x0122 + GrayResponseCurve = 0x0123 + T4Options = 0x0124 + T6Options = 0x0125 + ResolutionUnit = 0x0128 + PageNumber = 0x0129 + TransferFunction = 0x012D + Software = 0x0131 + DateTime = 0x0132 + Artist = 0x013B + HostComputer = 0x013C + Predictor = 0x013D + WhitePoint = 0x013E + PrimaryChromaticities = 0x013F + ColorMap = 0x0140 + HalftoneHints = 0x0141 + TileWidth = 0x0142 + TileLength = 0x0143 + TileOffsets = 0x0144 + TileByteCounts = 0x0145 + SubIFDs = 0x014A + InkSet = 0x014C + InkNames = 0x014D + NumberOfInks = 0x014E + DotRange = 0x0150 + TargetPrinter = 0x0151 + ExtraSamples = 0x0152 + SampleFormat = 0x0153 + SMinSampleValue = 0x0154 + SMaxSampleValue = 0x0155 + TransferRange = 0x0156 + ClipPath = 0x0157 + XClipPathUnits = 0x0158 + YClipPathUnits = 0x0159 + Indexed = 0x015A + JPEGTables = 0x015B + OPIProxy = 0x015F + JPEGProc = 0x0200 + JpegIFOffset = 0x0201 + JpegIFByteCount = 0x0202 + JpegRestartInterval = 0x0203 + JpegLosslessPredictors = 0x0205 
+ JpegPointTransforms = 0x0206 + JpegQTables = 0x0207 + JpegDCTables = 0x0208 + JpegACTables = 0x0209 + YCbCrCoefficients = 0x0211 + YCbCrSubSampling = 0x0212 + YCbCrPositioning = 0x0213 + ReferenceBlackWhite = 0x0214 + XMLPacket = 0x02BC + RelatedImageFileFormat = 0x1000 + RelatedImageWidth = 0x1001 + RelatedImageLength = 0x1002 + Rating = 0x4746 + RatingPercent = 0x4749 + ImageID = 0x800D + CFARepeatPatternDim = 0x828D + BatteryLevel = 0x828F + Copyright = 0x8298 + ExposureTime = 0x829A + FNumber = 0x829D + IPTCNAA = 0x83BB + ImageResources = 0x8649 + ExifOffset = 0x8769 + InterColorProfile = 0x8773 + ExposureProgram = 0x8822 + SpectralSensitivity = 0x8824 + GPSInfo = 0x8825 + ISOSpeedRatings = 0x8827 + OECF = 0x8828 + Interlace = 0x8829 + TimeZoneOffset = 0x882A + SelfTimerMode = 0x882B + SensitivityType = 0x8830 + StandardOutputSensitivity = 0x8831 + RecommendedExposureIndex = 0x8832 + ISOSpeed = 0x8833 + ISOSpeedLatitudeyyy = 0x8834 + ISOSpeedLatitudezzz = 0x8835 + ExifVersion = 0x9000 + DateTimeOriginal = 0x9003 + DateTimeDigitized = 0x9004 + OffsetTime = 0x9010 + OffsetTimeOriginal = 0x9011 + OffsetTimeDigitized = 0x9012 + ComponentsConfiguration = 0x9101 + CompressedBitsPerPixel = 0x9102 + ShutterSpeedValue = 0x9201 + ApertureValue = 0x9202 + BrightnessValue = 0x9203 + ExposureBiasValue = 0x9204 + MaxApertureValue = 0x9205 + SubjectDistance = 0x9206 + MeteringMode = 0x9207 + LightSource = 0x9208 + Flash = 0x9209 + FocalLength = 0x920A + Noise = 0x920D + ImageNumber = 0x9211 + SecurityClassification = 0x9212 + ImageHistory = 0x9213 + TIFFEPStandardID = 0x9216 + MakerNote = 0x927C + UserComment = 0x9286 + SubsecTime = 0x9290 + SubsecTimeOriginal = 0x9291 + SubsecTimeDigitized = 0x9292 + AmbientTemperature = 0x9400 + Humidity = 0x9401 + Pressure = 0x9402 + WaterDepth = 0x9403 + Acceleration = 0x9404 + CameraElevationAngle = 0x9405 + XPTitle = 0x9C9B + XPComment = 0x9C9C + XPAuthor = 0x9C9D + XPKeywords = 0x9C9E + XPSubject = 0x9C9F + FlashPixVersion = 0xA000 + ColorSpace = 0xA001 + ExifImageWidth = 0xA002 + ExifImageHeight = 0xA003 + RelatedSoundFile = 0xA004 + ExifInteroperabilityOffset = 0xA005 + FlashEnergy = 0xA20B + SpatialFrequencyResponse = 0xA20C + FocalPlaneXResolution = 0xA20E + FocalPlaneYResolution = 0xA20F + FocalPlaneResolutionUnit = 0xA210 + SubjectLocation = 0xA214 + ExposureIndex = 0xA215 + SensingMethod = 0xA217 + FileSource = 0xA300 + SceneType = 0xA301 + CFAPattern = 0xA302 + CustomRendered = 0xA401 + ExposureMode = 0xA402 + WhiteBalance = 0xA403 + DigitalZoomRatio = 0xA404 + FocalLengthIn35mmFilm = 0xA405 + SceneCaptureType = 0xA406 + GainControl = 0xA407 + Contrast = 0xA408 + Saturation = 0xA409 + Sharpness = 0xA40A + DeviceSettingDescription = 0xA40B + SubjectDistanceRange = 0xA40C + ImageUniqueID = 0xA420 + CameraOwnerName = 0xA430 + BodySerialNumber = 0xA431 + LensSpecification = 0xA432 + LensMake = 0xA433 + LensModel = 0xA434 + LensSerialNumber = 0xA435 + CompositeImage = 0xA460 + CompositeImageCount = 0xA461 + CompositeImageExposureTimes = 0xA462 + Gamma = 0xA500 + PrintImageMatching = 0xC4A5 + DNGVersion = 0xC612 + DNGBackwardVersion = 0xC613 + UniqueCameraModel = 0xC614 + LocalizedCameraModel = 0xC615 + CFAPlaneColor = 0xC616 + CFALayout = 0xC617 + LinearizationTable = 0xC618 + BlackLevelRepeatDim = 0xC619 + BlackLevel = 0xC61A + BlackLevelDeltaH = 0xC61B + BlackLevelDeltaV = 0xC61C + WhiteLevel = 0xC61D + DefaultScale = 0xC61E + DefaultCropOrigin = 0xC61F + DefaultCropSize = 0xC620 + ColorMatrix1 = 0xC621 + ColorMatrix2 = 0xC622 + CameraCalibration1 = 
0xC623 + CameraCalibration2 = 0xC624 + ReductionMatrix1 = 0xC625 + ReductionMatrix2 = 0xC626 + AnalogBalance = 0xC627 + AsShotNeutral = 0xC628 + AsShotWhiteXY = 0xC629 + BaselineExposure = 0xC62A + BaselineNoise = 0xC62B + BaselineSharpness = 0xC62C + BayerGreenSplit = 0xC62D + LinearResponseLimit = 0xC62E + CameraSerialNumber = 0xC62F + LensInfo = 0xC630 + ChromaBlurRadius = 0xC631 + AntiAliasStrength = 0xC632 + ShadowScale = 0xC633 + DNGPrivateData = 0xC634 + MakerNoteSafety = 0xC635 + CalibrationIlluminant1 = 0xC65A + CalibrationIlluminant2 = 0xC65B + BestQualityScale = 0xC65C + RawDataUniqueID = 0xC65D + OriginalRawFileName = 0xC68B + OriginalRawFileData = 0xC68C + ActiveArea = 0xC68D + MaskedAreas = 0xC68E + AsShotICCProfile = 0xC68F + AsShotPreProfileMatrix = 0xC690 + CurrentICCProfile = 0xC691 + CurrentPreProfileMatrix = 0xC692 + ColorimetricReference = 0xC6BF + CameraCalibrationSignature = 0xC6F3 + ProfileCalibrationSignature = 0xC6F4 + AsShotProfileName = 0xC6F6 + NoiseReductionApplied = 0xC6F7 + ProfileName = 0xC6F8 + ProfileHueSatMapDims = 0xC6F9 + ProfileHueSatMapData1 = 0xC6FA + ProfileHueSatMapData2 = 0xC6FB + ProfileToneCurve = 0xC6FC + ProfileEmbedPolicy = 0xC6FD + ProfileCopyright = 0xC6FE + ForwardMatrix1 = 0xC714 + ForwardMatrix2 = 0xC715 + PreviewApplicationName = 0xC716 + PreviewApplicationVersion = 0xC717 + PreviewSettingsName = 0xC718 + PreviewSettingsDigest = 0xC719 + PreviewColorSpace = 0xC71A + PreviewDateTime = 0xC71B + RawImageDigest = 0xC71C + OriginalRawFileDigest = 0xC71D + SubTileBlockSize = 0xC71E + RowInterleaveFactor = 0xC71F + ProfileLookTableDims = 0xC725 + ProfileLookTableData = 0xC726 + OpcodeList1 = 0xC740 + OpcodeList2 = 0xC741 + OpcodeList3 = 0xC74E + NoiseProfile = 0xC761 + + +"""Maps EXIF tags to tag names.""" +TAGS = { + **{i.value: i.name for i in Base}, + 0x920C: "SpatialFrequencyResponse", + 0x9214: "SubjectLocation", + 0x9215: "ExposureIndex", + 0x828E: "CFAPattern", + 0x920B: "FlashEnergy", + 0x9216: "TIFF/EPStandardID", +} + + +class GPS(IntEnum): + GPSVersionID = 0 + GPSLatitudeRef = 1 + GPSLatitude = 2 + GPSLongitudeRef = 3 + GPSLongitude = 4 + GPSAltitudeRef = 5 + GPSAltitude = 6 + GPSTimeStamp = 7 + GPSSatellites = 8 + GPSStatus = 9 + GPSMeasureMode = 10 + GPSDOP = 11 + GPSSpeedRef = 12 + GPSSpeed = 13 + GPSTrackRef = 14 + GPSTrack = 15 + GPSImgDirectionRef = 16 + GPSImgDirection = 17 + GPSMapDatum = 18 + GPSDestLatitudeRef = 19 + GPSDestLatitude = 20 + GPSDestLongitudeRef = 21 + GPSDestLongitude = 22 + GPSDestBearingRef = 23 + GPSDestBearing = 24 + GPSDestDistanceRef = 25 + GPSDestDistance = 26 + GPSProcessingMethod = 27 + GPSAreaInformation = 28 + GPSDateStamp = 29 + GPSDifferential = 30 + GPSHPositioningError = 31 + + +"""Maps EXIF GPS tags to tag names.""" +GPSTAGS = {i.value: i.name for i in GPS} + + +class Interop(IntEnum): + InteropIndex = 1 + InteropVersion = 2 + RelatedImageFileFormat = 4096 + RelatedImageWidth = 4097 + RelatedImageHeight = 4098 + + +class IFD(IntEnum): + Exif = 34665 + GPSInfo = 34853 + Makernote = 37500 + Interop = 40965 + IFD1 = -1 + + +class LightSource(IntEnum): + Unknown = 0 + Daylight = 1 + Fluorescent = 2 + Tungsten = 3 + Flash = 4 + Fine = 9 + Cloudy = 10 + Shade = 11 + DaylightFluorescent = 12 + DayWhiteFluorescent = 13 + CoolWhiteFluorescent = 14 + WhiteFluorescent = 15 + StandardLightA = 17 + StandardLightB = 18 + StandardLightC = 19 + D55 = 20 + D65 = 21 + D75 = 22 + D50 = 23 + ISO = 24 + Other = 255 diff --git a/MLPY/Lib/site-packages/PIL/FitsImagePlugin.py 
b/MLPY/Lib/site-packages/PIL/FitsImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..45634660456d72cc37259506405176811b9e1341 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/FitsImagePlugin.py @@ -0,0 +1,152 @@ +# +# The Python Imaging Library +# $Id$ +# +# FITS file handling +# +# Copyright (c) 1998-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import gzip +import math + +from . import Image, ImageFile + + +def _accept(prefix: bytes) -> bool: + return prefix[:6] == b"SIMPLE" + + +class FitsImageFile(ImageFile.ImageFile): + format = "FITS" + format_description = "FITS" + + def _open(self) -> None: + assert self.fp is not None + + headers: dict[bytes, bytes] = {} + header_in_progress = False + decoder_name = "" + while True: + header = self.fp.read(80) + if not header: + msg = "Truncated FITS file" + raise OSError(msg) + keyword = header[:8].strip() + if keyword in (b"SIMPLE", b"XTENSION"): + header_in_progress = True + elif headers and not header_in_progress: + # This is now a data unit + break + elif keyword == b"END": + # Seek to the end of the header unit + self.fp.seek(math.ceil(self.fp.tell() / 2880) * 2880) + if not decoder_name: + decoder_name, offset, args = self._parse_headers(headers) + + header_in_progress = False + continue + + if decoder_name: + # Keep going to read past the headers + continue + + value = header[8:].split(b"/")[0].strip() + if value.startswith(b"="): + value = value[1:].strip() + if not headers and (not _accept(keyword) or value != b"T"): + msg = "Not a FITS file" + raise SyntaxError(msg) + headers[keyword] = value + + if not decoder_name: + msg = "No image data" + raise ValueError(msg) + + offset += self.fp.tell() - 80 + self.tile = [(decoder_name, (0, 0) + self.size, offset, args)] + + def _get_size( + self, headers: dict[bytes, bytes], prefix: bytes + ) -> tuple[int, int] | None: + naxis = int(headers[prefix + b"NAXIS"]) + if naxis == 0: + return None + + if naxis == 1: + return 1, int(headers[prefix + b"NAXIS1"]) + else: + return int(headers[prefix + b"NAXIS1"]), int(headers[prefix + b"NAXIS2"]) + + def _parse_headers( + self, headers: dict[bytes, bytes] + ) -> tuple[str, int, tuple[str | int, ...]]: + prefix = b"" + decoder_name = "raw" + offset = 0 + if ( + headers.get(b"XTENSION") == b"'BINTABLE'" + and headers.get(b"ZIMAGE") == b"T" + and headers[b"ZCMPTYPE"] == b"'GZIP_1 '" + ): + no_prefix_size = self._get_size(headers, prefix) or (0, 0) + number_of_bits = int(headers[b"BITPIX"]) + offset = no_prefix_size[0] * no_prefix_size[1] * (number_of_bits // 8) + + prefix = b"Z" + decoder_name = "fits_gzip" + + size = self._get_size(headers, prefix) + if not size: + return "", 0, () + + self._size = size + + number_of_bits = int(headers[prefix + b"BITPIX"]) + if number_of_bits == 8: + self._mode = "L" + elif number_of_bits == 16: + self._mode = "I;16" + elif number_of_bits == 32: + self._mode = "I" + elif number_of_bits in (-32, -64): + self._mode = "F" + + args: tuple[str | int, ...] 
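+        # Decoder arguments: the built-in "raw" decoder takes a
+        # (rawmode, stride, orientation) tuple, while the fits_gzip decoder
+        # defined below only needs the BITPIX value.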
+ if decoder_name == "raw": + args = (self.mode, 0, -1) + else: + args = (number_of_bits,) + return decoder_name, offset, args + + +class FitsGzipDecoder(ImageFile.PyDecoder): + _pulls_fd = True + + def decode(self, buffer: bytes) -> tuple[int, int]: + assert self.fd is not None + value = gzip.decompress(self.fd.read()) + + rows = [] + offset = 0 + number_of_bits = min(self.args[0] // 8, 4) + for y in range(self.state.ysize): + row = bytearray() + for x in range(self.state.xsize): + row += value[offset + (4 - number_of_bits) : offset + 4] + offset += 4 + rows.append(row) + self.set_as_raw(bytes([pixel for row in rows[::-1] for pixel in row])) + return -1, 0 + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(FitsImageFile.format, FitsImageFile, _accept) +Image.register_decoder("fits_gzip", FitsGzipDecoder) + +Image.register_extensions(FitsImageFile.format, [".fit", ".fits"]) diff --git a/MLPY/Lib/site-packages/PIL/FliImagePlugin.py b/MLPY/Lib/site-packages/PIL/FliImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..5ad7a0bc54f3e3c8217467cb31f03422d326a767 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/FliImagePlugin.py @@ -0,0 +1,174 @@ +# +# The Python Imaging Library. +# $Id$ +# +# FLI/FLC file handling. +# +# History: +# 95-09-01 fl Created +# 97-01-03 fl Fixed parser, setup decoder tile +# 98-07-15 fl Renamed offset attribute to avoid name clash +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import os + +from . import Image, ImageFile, ImagePalette +from ._binary import i16le as i16 +from ._binary import i32le as i32 +from ._binary import o8 + +# +# decoder + + +def _accept(prefix: bytes) -> bool: + return ( + len(prefix) >= 6 + and i16(prefix, 4) in [0xAF11, 0xAF12] + and i16(prefix, 14) in [0, 3] # flags + ) + + +## +# Image plugin for the FLI/FLC animation format. Use the seek +# method to load individual frames. 
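+#
+# Illustrative usage sketch (for reference only; the file name
+# "animation.flc" is hypothetical). Frames can be walked with seek()/tell()
+# or, equivalently, with ImageSequence.Iterator:
+#
+#     from PIL import Image, ImageSequence
+#
+#     with Image.open("animation.flc") as im:
+#         for index, frame in enumerate(ImageSequence.Iterator(im)):
+#             frame.convert("RGB").save(f"frame_{index:03d}.png")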
+ + +class FliImageFile(ImageFile.ImageFile): + format = "FLI" + format_description = "Autodesk FLI/FLC Animation" + _close_exclusive_fp_after_loading = False + + def _open(self): + # HEAD + s = self.fp.read(128) + if not (_accept(s) and s[20:22] == b"\x00\x00"): + msg = "not an FLI/FLC file" + raise SyntaxError(msg) + + # frames + self.n_frames = i16(s, 6) + self.is_animated = self.n_frames > 1 + + # image characteristics + self._mode = "P" + self._size = i16(s, 8), i16(s, 10) + + # animation speed + duration = i32(s, 16) + magic = i16(s, 4) + if magic == 0xAF11: + duration = (duration * 1000) // 70 + self.info["duration"] = duration + + # look for palette + palette = [(a, a, a) for a in range(256)] + + s = self.fp.read(16) + + self.__offset = 128 + + if i16(s, 4) == 0xF100: + # prefix chunk; ignore it + self.__offset = self.__offset + i32(s) + self.fp.seek(self.__offset) + s = self.fp.read(16) + + if i16(s, 4) == 0xF1FA: + # look for palette chunk + number_of_subchunks = i16(s, 6) + chunk_size = None + for _ in range(number_of_subchunks): + if chunk_size is not None: + self.fp.seek(chunk_size - 6, os.SEEK_CUR) + s = self.fp.read(6) + chunk_type = i16(s, 4) + if chunk_type in (4, 11): + self._palette(palette, 2 if chunk_type == 11 else 0) + break + chunk_size = i32(s) + if not chunk_size: + break + + palette = [o8(r) + o8(g) + o8(b) for (r, g, b) in palette] + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + # set things up to decode first frame + self.__frame = -1 + self._fp = self.fp + self.__rewind = self.fp.tell() + self.seek(0) + + def _palette(self, palette, shift): + # load palette + + i = 0 + for e in range(i16(self.fp.read(2))): + s = self.fp.read(2) + i = i + s[0] + n = s[1] + if n == 0: + n = 256 + s = self.fp.read(n * 3) + for n in range(0, len(s), 3): + r = s[n] << shift + g = s[n + 1] << shift + b = s[n + 2] << shift + palette[i] = (r, g, b) + i += 1 + + def seek(self, frame: int) -> None: + if not self._seek_check(frame): + return + if frame < self.__frame: + self._seek(0) + + for f in range(self.__frame + 1, frame + 1): + self._seek(f) + + def _seek(self, frame: int) -> None: + if frame == 0: + self.__frame = -1 + self._fp.seek(self.__rewind) + self.__offset = 128 + else: + # ensure that the previous frame was loaded + self.load() + + if frame != self.__frame + 1: + msg = f"cannot seek to frame {frame}" + raise ValueError(msg) + self.__frame = frame + + # move to next frame + self.fp = self._fp + self.fp.seek(self.__offset) + + s = self.fp.read(4) + if not s: + msg = "missing frame size" + raise EOFError(msg) + + framesize = i32(s) + + self.decodermaxblock = framesize + self.tile = [("fli", (0, 0) + self.size, self.__offset, None)] + + self.__offset += framesize + + def tell(self) -> int: + return self.__frame + + +# +# registry + +Image.register_open(FliImageFile.format, FliImageFile, _accept) + +Image.register_extensions(FliImageFile.format, [".fli", ".flc"]) diff --git a/MLPY/Lib/site-packages/PIL/FontFile.py b/MLPY/Lib/site-packages/PIL/FontFile.py new file mode 100644 index 0000000000000000000000000000000000000000..5a51ea2845a501f72d7102e829f1e408b765266c --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/FontFile.py @@ -0,0 +1,134 @@ +# +# The Python Imaging Library +# $Id$ +# +# base class for raster font file parsers +# +# history: +# 1997-06-05 fl created +# 1997-08-19 fl restrict image width +# +# Copyright (c) 1997-1998 by Secret Labs AB +# Copyright (c) 1997-1998 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
+# +from __future__ import annotations + +import os +from typing import BinaryIO + +from . import Image, _binary + +WIDTH = 800 + + +def puti16( + fp: BinaryIO, values: tuple[int, int, int, int, int, int, int, int, int, int] +) -> None: + """Write network order (big-endian) 16-bit sequence""" + for v in values: + if v < 0: + v += 65536 + fp.write(_binary.o16be(v)) + + +class FontFile: + """Base class for raster font file handlers.""" + + bitmap: Image.Image | None = None + + def __init__(self) -> None: + self.info: dict[bytes, bytes | int] = {} + self.glyph: list[ + tuple[ + tuple[int, int], + tuple[int, int, int, int], + tuple[int, int, int, int], + Image.Image, + ] + | None + ] = [None] * 256 + + def __getitem__(self, ix: int) -> ( + tuple[ + tuple[int, int], + tuple[int, int, int, int], + tuple[int, int, int, int], + Image.Image, + ] + | None + ): + return self.glyph[ix] + + def compile(self) -> None: + """Create metrics and bitmap""" + + if self.bitmap: + return + + # create bitmap large enough to hold all data + h = w = maxwidth = 0 + lines = 1 + for glyph in self.glyph: + if glyph: + d, dst, src, im = glyph + h = max(h, src[3] - src[1]) + w = w + (src[2] - src[0]) + if w > WIDTH: + lines += 1 + w = src[2] - src[0] + maxwidth = max(maxwidth, w) + + xsize = maxwidth + ysize = lines * h + + if xsize == 0 and ysize == 0: + return + + self.ysize = h + + # paste glyphs into bitmap + self.bitmap = Image.new("1", (xsize, ysize)) + self.metrics: list[ + tuple[tuple[int, int], tuple[int, int, int, int], tuple[int, int, int, int]] + | None + ] = [None] * 256 + x = y = 0 + for i in range(256): + glyph = self[i] + if glyph: + d, dst, src, im = glyph + xx = src[2] - src[0] + x0, y0 = x, y + x = x + xx + if x > WIDTH: + x, y = 0, y + h + x0, y0 = x, y + x = xx + s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0 + self.bitmap.paste(im.crop(src), s) + self.metrics[i] = d, dst, s + + def save(self, filename: str) -> None: + """Save font""" + + self.compile() + + # font data + if not self.bitmap: + msg = "No bitmap created" + raise ValueError(msg) + self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG") + + # font metrics + with open(os.path.splitext(filename)[0] + ".pil", "wb") as fp: + fp.write(b"PILfont\n") + fp.write(f";;;;;;{self.ysize};\n".encode("ascii")) # HACK!!! + fp.write(b"DATA\n") + for id in range(256): + m = self.metrics[id] + if not m: + puti16(fp, (0,) * 10) + else: + puti16(fp, m[0] + m[1] + m[2]) diff --git a/MLPY/Lib/site-packages/PIL/FpxImagePlugin.py b/MLPY/Lib/site-packages/PIL/FpxImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..08790154d75222512e750b811cdb310a59795b4e --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/FpxImagePlugin.py @@ -0,0 +1,255 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library. +# $Id$ +# +# FlashPix support for PIL +# +# History: +# 97-01-25 fl Created (reads uncompressed RGB images only) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import olefile + +from . 
import Image, ImageFile +from ._binary import i32le as i32 + +# we map from colour field tuples to (mode, rawmode) descriptors +MODES = { + # opacity + (0x00007FFE,): ("A", "L"), + # monochrome + (0x00010000,): ("L", "L"), + (0x00018000, 0x00017FFE): ("RGBA", "LA"), + # photo YCC + (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"), + (0x00028000, 0x00028001, 0x00028002, 0x00027FFE): ("RGBA", "YCCA;P"), + # standard RGB (NIFRGB) + (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"), + (0x00038000, 0x00038001, 0x00038002, 0x00037FFE): ("RGBA", "RGBA"), +} + + +# +# -------------------------------------------------------------------- + + +def _accept(prefix: bytes) -> bool: + return prefix[:8] == olefile.MAGIC + + +## +# Image plugin for the FlashPix images. + + +class FpxImageFile(ImageFile.ImageFile): + format = "FPX" + format_description = "FlashPix" + + def _open(self): + # + # read the OLE directory and see if this is a likely + # to be a FlashPix file + + try: + self.ole = olefile.OleFileIO(self.fp) + except OSError as e: + msg = "not an FPX file; invalid OLE file" + raise SyntaxError(msg) from e + + if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B": + msg = "not an FPX file; bad root CLSID" + raise SyntaxError(msg) + + self._open_index(1) + + def _open_index(self, index: int = 1) -> None: + # + # get the Image Contents Property Set + + prop = self.ole.getproperties( + [f"Data Object Store {index:06d}", "\005Image Contents"] + ) + + # size (highest resolution) + + self._size = prop[0x1000002], prop[0x1000003] + + size = max(self.size) + i = 1 + while size > 64: + size = size // 2 + i += 1 + self.maxid = i - 1 + + # mode. instead of using a single field for this, flashpix + # requires you to specify the mode for each channel in each + # resolution subimage, and leaves it to the decoder to make + # sure that they all match. for now, we'll cheat and assume + # that this is always the case. 
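+        # The resolution index is packed into the upper 16 bits of each
+        # property id, so prop[0x2000002 | (self.maxid << 16)] below reads the
+        # channel/colour description of the smallest subimage computed above.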
+ + id = self.maxid << 16 + + s = prop[0x2000002 | id] + + bands = i32(s, 4) + if bands > 4: + msg = "Invalid number of bands" + raise OSError(msg) + + # note: for now, we ignore the "uncalibrated" flag + colors = tuple(i32(s, 8 + i * 4) & 0x7FFFFFFF for i in range(bands)) + + self._mode, self.rawmode = MODES[colors] + + # load JPEG tables, if any + self.jpeg = {} + for i in range(256): + id = 0x3000001 | (i << 16) + if id in prop: + self.jpeg[i] = prop[id] + + self._open_subimage(1, self.maxid) + + def _open_subimage(self, index: int = 1, subimage: int = 0) -> None: + # + # setup tile descriptors for a given subimage + + stream = [ + f"Data Object Store {index:06d}", + f"Resolution {subimage:04d}", + "Subimage 0000 Header", + ] + + fp = self.ole.openstream(stream) + + # skip prefix + fp.read(28) + + # header stream + s = fp.read(36) + + size = i32(s, 4), i32(s, 8) + # tilecount = i32(s, 12) + tilesize = i32(s, 16), i32(s, 20) + # channels = i32(s, 24) + offset = i32(s, 28) + length = i32(s, 32) + + if size != self.size: + msg = "subimage mismatch" + raise OSError(msg) + + # get tile descriptors + fp.seek(28 + offset) + s = fp.read(i32(s, 12) * length) + + x = y = 0 + xsize, ysize = size + xtile, ytile = tilesize + self.tile = [] + + for i in range(0, len(s), length): + x1 = min(xsize, x + xtile) + y1 = min(ysize, y + ytile) + + compression = i32(s, i + 8) + + if compression == 0: + self.tile.append( + ( + "raw", + (x, y, x1, y1), + i32(s, i) + 28, + (self.rawmode,), + ) + ) + + elif compression == 1: + # FIXME: the fill decoder is not implemented + self.tile.append( + ( + "fill", + (x, y, x1, y1), + i32(s, i) + 28, + (self.rawmode, s[12:16]), + ) + ) + + elif compression == 2: + internal_color_conversion = s[14] + jpeg_tables = s[15] + rawmode = self.rawmode + + if internal_color_conversion: + # The image is stored as usual (usually YCbCr). + if rawmode == "RGBA": + # For "RGBA", data is stored as YCbCrA based on + # negative RGB. The following trick works around + # this problem : + jpegmode, rawmode = "YCbCrK", "CMYK" + else: + jpegmode = None # let the decoder decide + + else: + # The image is stored as defined by rawmode + jpegmode = rawmode + + self.tile.append( + ( + "jpeg", + (x, y, x1, y1), + i32(s, i) + 28, + (rawmode, jpegmode), + ) + ) + + # FIXME: jpeg tables are tile dependent; the prefix + # data must be placed in the tile descriptor itself! 
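+                # jpeg_tables indexes the table streams collected into
+                # self.jpeg by _open_index; a non-zero value selects the data
+                # prepended to each JPEG tile via tile_prefix.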
+ + if jpeg_tables: + self.tile_prefix = self.jpeg[jpeg_tables] + + else: + msg = "unknown/invalid compression" + raise OSError(msg) + + x = x + xtile + if x >= xsize: + x, y = 0, y + ytile + if y >= ysize: + break # isn't really required + + self.stream = stream + self._fp = self.fp + self.fp = None + + def load(self): + if not self.fp: + self.fp = self.ole.openstream(self.stream[:2] + ["Subimage 0000 Data"]) + + return ImageFile.ImageFile.load(self) + + def close(self) -> None: + self.ole.close() + super().close() + + def __exit__(self, *args: object) -> None: + self.ole.close() + super().__exit__() + + +# +# -------------------------------------------------------------------- + + +Image.register_open(FpxImageFile.format, FpxImageFile, _accept) + +Image.register_extension(FpxImageFile.format, ".fpx") diff --git a/MLPY/Lib/site-packages/PIL/FtexImagePlugin.py b/MLPY/Lib/site-packages/PIL/FtexImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..80700739faf085ac039ec7c793f5a6fe7d843b3d --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/FtexImagePlugin.py @@ -0,0 +1,115 @@ +""" +A Pillow loader for .ftc and .ftu files (FTEX) +Jerome Leclanche + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ + +Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001 + +The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a +packed custom format called FTEX. This file format uses file extensions FTC +and FTU. +* FTC files are compressed textures (using standard texture compression). +* FTU files are not compressed. +Texture File Format +The FTC and FTU texture files both use the same format. This +has the following structure: +{header} +{format_directory} +{data} +Where: +{header} = { + u32:magic, + u32:version, + u32:width, + u32:height, + u32:mipmap_count, + u32:format_count +} + +* The "magic" number is "FTEX". +* "width" and "height" are the dimensions of the texture. +* "mipmap_count" is the number of mipmaps in the texture. +* "format_count" is the number of texture formats (different versions of the +same texture) in this file. + +{format_directory} = format_count * { u32:format, u32:where } + +The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB +uncompressed textures. +The texture data for a format starts at the position "where" in the file. + +Each set of texture data in the file has the following structure: +{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } } +* "mipmap_size" is the number of bytes in that mip level. For compressed +textures this is the size of the texture data compressed with DXT1. For 24 bit +uncompressed textures, this is 3 * width * height. Following this are the image +bytes for that mipmap level. + +Note: All data is stored in little-Endian (Intel) byte order. +""" + +from __future__ import annotations + +import struct +from enum import IntEnum +from io import BytesIO + +from . 
import Image, ImageFile + +MAGIC = b"FTEX" + + +class Format(IntEnum): + DXT1 = 0 + UNCOMPRESSED = 1 + + +class FtexImageFile(ImageFile.ImageFile): + format = "FTEX" + format_description = "Texture File Format (IW2:EOC)" + + def _open(self) -> None: + if not _accept(self.fp.read(4)): + msg = "not an FTEX file" + raise SyntaxError(msg) + struct.unpack(" None: + pass + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == MAGIC + + +Image.register_open(FtexImageFile.format, FtexImageFile, _accept) +Image.register_extensions(FtexImageFile.format, [".ftc", ".ftu"]) diff --git a/MLPY/Lib/site-packages/PIL/GbrImagePlugin.py b/MLPY/Lib/site-packages/PIL/GbrImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..7de53d79bac6441db39d94666b50f325cc8a6889 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/GbrImagePlugin.py @@ -0,0 +1,103 @@ +# +# The Python Imaging Library +# +# load a GIMP brush file +# +# History: +# 96-03-14 fl Created +# 16-01-08 es Version 2 +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# Copyright (c) Eric Soroos 2016. +# +# See the README file for information on usage and redistribution. +# +# +# See https://github.com/GNOME/gimp/blob/mainline/devel-docs/gbr.txt for +# format documentation. +# +# This code Interprets version 1 and 2 .gbr files. +# Version 1 files are obsolete, and should not be used for new +# brushes. +# Version 2 files are saved by GIMP v2.8 (at least) +# Version 3 files have a format specifier of 18 for 16bit floats in +# the color depth field. This is currently unsupported by Pillow. +from __future__ import annotations + +from . import Image, ImageFile +from ._binary import i32be as i32 + + +def _accept(prefix: bytes) -> bool: + return len(prefix) >= 8 and i32(prefix, 0) >= 20 and i32(prefix, 4) in (1, 2) + + +## +# Image plugin for the GIMP brush format. 
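+#
+# Illustrative usage sketch (for reference only; "brush.gbr" is a
+# hypothetical file name). The brush spacing and comment end up in info:
+#
+#     from PIL import Image
+#
+#     with Image.open("brush.gbr") as brush:
+#         print(brush.size, brush.mode)  # e.g. (64, 64) "RGBA"
+#         print(brush.info["spacing"], brush.info["comment"])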
+ + +class GbrImageFile(ImageFile.ImageFile): + format = "GBR" + format_description = "GIMP brush file" + + def _open(self) -> None: + header_size = i32(self.fp.read(4)) + if header_size < 20: + msg = "not a GIMP brush" + raise SyntaxError(msg) + version = i32(self.fp.read(4)) + if version not in (1, 2): + msg = f"Unsupported GIMP brush version: {version}" + raise SyntaxError(msg) + + width = i32(self.fp.read(4)) + height = i32(self.fp.read(4)) + color_depth = i32(self.fp.read(4)) + if width <= 0 or height <= 0: + msg = "not a GIMP brush" + raise SyntaxError(msg) + if color_depth not in (1, 4): + msg = f"Unsupported GIMP brush color depth: {color_depth}" + raise SyntaxError(msg) + + if version == 1: + comment_length = header_size - 20 + else: + comment_length = header_size - 28 + magic_number = self.fp.read(4) + if magic_number != b"GIMP": + msg = "not a GIMP brush, bad magic number" + raise SyntaxError(msg) + self.info["spacing"] = i32(self.fp.read(4)) + + comment = self.fp.read(comment_length)[:-1] + + if color_depth == 1: + self._mode = "L" + else: + self._mode = "RGBA" + + self._size = width, height + + self.info["comment"] = comment + + # Image might not be small + Image._decompression_bomb_check(self.size) + + # Data is an uncompressed block of w * h * bytes/pixel + self._data_size = width * height * color_depth + + def load(self): + if not self.im: + self.im = Image.core.new(self.mode, self.size) + self.frombytes(self.fp.read(self._data_size)) + return Image.Image.load(self) + + +# +# registry + + +Image.register_open(GbrImageFile.format, GbrImageFile, _accept) +Image.register_extension(GbrImageFile.format, ".gbr") diff --git a/MLPY/Lib/site-packages/PIL/GdImageFile.py b/MLPY/Lib/site-packages/PIL/GdImageFile.py new file mode 100644 index 0000000000000000000000000000000000000000..207713f7c477c55aea8150509e1f14e0e8e1787b --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/GdImageFile.py @@ -0,0 +1,102 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GD file handling +# +# History: +# 1996-04-12 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +""" +.. note:: + This format cannot be automatically recognized, so the + class is not registered for use with :py:func:`PIL.Image.open()`. To open a + gd file, use the :py:func:`PIL.GdImageFile.open()` function instead. + +.. warning:: + THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This + implementation is provided for convenience and demonstrational + purposes only. +""" +from __future__ import annotations + +from typing import IO + +from . import ImageFile, ImagePalette, UnidentifiedImageError +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._typing import StrOrBytesPath + + +class GdImageFile(ImageFile.ImageFile): + """ + Image plugin for the GD uncompressed format. Note that this format + is not supported by the standard :py:func:`PIL.Image.open()` function. To use + this plugin, you have to import the :py:mod:`PIL.GdImageFile` module and + use the :py:func:`PIL.GdImageFile.open()` function. 
+ """ + + format = "GD" + format_description = "GD uncompressed images" + + def _open(self) -> None: + # Header + assert self.fp is not None + + s = self.fp.read(1037) + + if i16(s) not in [65534, 65535]: + msg = "Not a valid GD 2.x .gd file" + raise SyntaxError(msg) + + self._mode = "L" # FIXME: "P" + self._size = i16(s, 2), i16(s, 4) + + true_color = s[6] + true_color_offset = 2 if true_color else 0 + + # transparency index + tindex = i32(s, 7 + true_color_offset) + if tindex < 256: + self.info["transparency"] = tindex + + self.palette = ImagePalette.raw( + "XBGR", s[7 + true_color_offset + 4 : 7 + true_color_offset + 4 + 256 * 4] + ) + + self.tile = [ + ( + "raw", + (0, 0) + self.size, + 7 + true_color_offset + 4 + 256 * 4, + ("L", 0, 1), + ) + ] + + +def open(fp: StrOrBytesPath | IO[bytes], mode: str = "r") -> GdImageFile: + """ + Load texture from a GD image file. + + :param fp: GD file name, or an opened file handle. + :param mode: Optional mode. In this version, if the mode argument + is given, it must be "r". + :returns: An image instance. + :raises OSError: If the image could not be read. + """ + if mode != "r": + msg = "bad mode" + raise ValueError(msg) + + try: + return GdImageFile(fp) + except SyntaxError as e: + msg = "cannot identify this image file" + raise UnidentifiedImageError(msg) from e diff --git a/MLPY/Lib/site-packages/PIL/GifImagePlugin.py b/MLPY/Lib/site-packages/PIL/GifImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..714b2092bc3fb89d10d88b20537b295b3379b950 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/GifImagePlugin.py @@ -0,0 +1,1159 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GIF file handling +# +# History: +# 1995-09-01 fl Created +# 1996-12-14 fl Added interlace support +# 1996-12-30 fl Added animation support +# 1997-01-05 fl Added write support, fixed local colour map bug +# 1997-02-23 fl Make sure to load raster data in getdata() +# 1997-07-05 fl Support external decoder (0.4) +# 1998-07-09 fl Handle all modes when saving (0.5) +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6) +# 2001-04-17 fl Added palette optimization (0.7) +# 2002-06-06 fl Added transparency support for save (0.8) +# 2004-02-24 fl Disable interlacing for small images +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import itertools +import math +import os +import subprocess +import sys +from enum import IntEnum +from functools import cached_property +from typing import IO, TYPE_CHECKING, Any, List, Literal, NamedTuple, Union + +from . import ( + Image, + ImageChops, + ImageFile, + ImageMath, + ImageOps, + ImagePalette, + ImageSequence, +) +from ._binary import i16le as i16 +from ._binary import o8 +from ._binary import o16le as o16 + +if TYPE_CHECKING: + from . import _imaging + + +class LoadingStrategy(IntEnum): + """.. versionadded:: 9.1.0""" + + RGB_AFTER_FIRST = 0 + RGB_AFTER_DIFFERENT_PALETTE_ONLY = 1 + RGB_ALWAYS = 2 + + +#: .. versionadded:: 9.1.0 +LOADING_STRATEGY = LoadingStrategy.RGB_AFTER_FIRST + +# -------------------------------------------------------------------- +# Identify/read GIF files + + +def _accept(prefix: bytes) -> bool: + return prefix[:6] in [b"GIF87a", b"GIF89a"] + + +## +# Image plugin for GIF images. This plugin supports both GIF87 and +# GIF89 images. 
+ + +class GifImageFile(ImageFile.ImageFile): + format = "GIF" + format_description = "Compuserve GIF" + _close_exclusive_fp_after_loading = False + + global_palette = None + + def data(self) -> bytes | None: + s = self.fp.read(1) + if s and s[0]: + return self.fp.read(s[0]) + return None + + def _is_palette_needed(self, p: bytes) -> bool: + for i in range(0, len(p), 3): + if not (i // 3 == p[i] == p[i + 1] == p[i + 2]): + return True + return False + + def _open(self) -> None: + # Screen + s = self.fp.read(13) + if not _accept(s): + msg = "not a GIF file" + raise SyntaxError(msg) + + self.info["version"] = s[:6] + self._size = i16(s, 6), i16(s, 8) + self.tile = [] + flags = s[10] + bits = (flags & 7) + 1 + + if flags & 128: + # get global palette + self.info["background"] = s[11] + # check if palette contains colour indices + p = self.fp.read(3 << bits) + if self._is_palette_needed(p): + p = ImagePalette.raw("RGB", p) + self.global_palette = self.palette = p + + self._fp = self.fp # FIXME: hack + self.__rewind = self.fp.tell() + self._n_frames: int | None = None + self._seek(0) # get ready to read first frame + + @property + def n_frames(self) -> int: + if self._n_frames is None: + current = self.tell() + try: + while True: + self._seek(self.tell() + 1, False) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @cached_property + def is_animated(self) -> bool: + if self._n_frames is not None: + return self._n_frames != 1 + + current = self.tell() + if current: + return True + + try: + self._seek(1, False) + is_animated = True + except EOFError: + is_animated = False + + self.seek(current) + return is_animated + + def seek(self, frame: int) -> None: + if not self._seek_check(frame): + return + if frame < self.__frame: + self.im = None + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError as e: + self.seek(last_frame) + msg = "no more images in GIF file" + raise EOFError(msg) from e + + def _seek(self, frame: int, update_image: bool = True) -> None: + if frame == 0: + # rewind + self.__offset = 0 + self.dispose: _imaging.ImagingCore | None = None + self.__frame = -1 + self._fp.seek(self.__rewind) + self.disposal_method = 0 + if "comment" in self.info: + del self.info["comment"] + else: + # ensure that the previous frame was loaded + if self.tile and update_image: + self.load() + + if frame != self.__frame + 1: + msg = f"cannot seek to frame {frame}" + raise ValueError(msg) + + self.fp = self._fp + if self.__offset: + # backup to last frame + self.fp.seek(self.__offset) + while self.data(): + pass + self.__offset = 0 + + s = self.fp.read(1) + if not s or s == b";": + msg = "no more images in GIF file" + raise EOFError(msg) + + palette: ImagePalette.ImagePalette | Literal[False] | None = None + + info: dict[str, Any] = {} + frame_transparency = None + interlace = None + frame_dispose_extent = None + while True: + if not s: + s = self.fp.read(1) + if not s or s == b";": + break + + elif s == b"!": + # + # extensions + # + s = self.fp.read(1) + block = self.data() + if s[0] == 249 and block is not None: + # + # graphic control extension + # + flags = block[0] + if flags & 1: + frame_transparency = block[3] + info["duration"] = i16(block, 1) * 10 + + # disposal method - find the value of bits 4 - 6 + dispose_bits = 0b00011100 & flags + dispose_bits = dispose_bits >> 2 + if dispose_bits: + # only set the dispose if it is not + # unspecified. 
I'm not sure if this is + # correct, but it seems to prevent the last + # frame from looking odd for some animations + self.disposal_method = dispose_bits + elif s[0] == 254: + # + # comment extension + # + comment = b"" + + # Read this comment block + while block: + comment += block + block = self.data() + + if "comment" in info: + # If multiple comment blocks in frame, separate with \n + info["comment"] += b"\n" + comment + else: + info["comment"] = comment + s = None + continue + elif s[0] == 255 and frame == 0 and block is not None: + # + # application extension + # + info["extension"] = block, self.fp.tell() + if block[:11] == b"NETSCAPE2.0": + block = self.data() + if block and len(block) >= 3 and block[0] == 1: + self.info["loop"] = i16(block, 1) + while self.data(): + pass + + elif s == b",": + # + # local image + # + s = self.fp.read(9) + + # extent + x0, y0 = i16(s, 0), i16(s, 2) + x1, y1 = x0 + i16(s, 4), y0 + i16(s, 6) + if (x1 > self.size[0] or y1 > self.size[1]) and update_image: + self._size = max(x1, self.size[0]), max(y1, self.size[1]) + Image._decompression_bomb_check(self._size) + frame_dispose_extent = x0, y0, x1, y1 + flags = s[8] + + interlace = (flags & 64) != 0 + + if flags & 128: + bits = (flags & 7) + 1 + p = self.fp.read(3 << bits) + if self._is_palette_needed(p): + palette = ImagePalette.raw("RGB", p) + else: + palette = False + + # image data + bits = self.fp.read(1)[0] + self.__offset = self.fp.tell() + break + s = None + + if interlace is None: + msg = "image not found in GIF frame" + raise EOFError(msg) + + self.__frame = frame + if not update_image: + return + + self.tile = [] + + if self.dispose: + self.im.paste(self.dispose, self.dispose_extent) + + self._frame_palette = palette if palette is not None else self.global_palette + self._frame_transparency = frame_transparency + if frame == 0: + if self._frame_palette: + if LOADING_STRATEGY == LoadingStrategy.RGB_ALWAYS: + self._mode = "RGBA" if frame_transparency is not None else "RGB" + else: + self._mode = "P" + else: + self._mode = "L" + + if not palette and self.global_palette: + from copy import copy + + palette = copy(self.global_palette) + self.palette = palette + else: + if self.mode == "P": + if ( + LOADING_STRATEGY != LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY + or palette + ): + self.pyaccess = None + if "transparency" in self.info: + self.im.putpalettealpha(self.info["transparency"], 0) + self.im = self.im.convert("RGBA", Image.Dither.FLOYDSTEINBERG) + self._mode = "RGBA" + del self.info["transparency"] + else: + self._mode = "RGB" + self.im = self.im.convert("RGB", Image.Dither.FLOYDSTEINBERG) + + def _rgb(color: int) -> tuple[int, int, int]: + if self._frame_palette: + if color * 3 + 3 > len(self._frame_palette.palette): + color = 0 + return tuple(self._frame_palette.palette[color * 3 : color * 3 + 3]) + else: + return (color, color, color) + + self.dispose = None + self.dispose_extent = frame_dispose_extent + if self.dispose_extent and self.disposal_method >= 2: + try: + if self.disposal_method == 2: + # replace with background colour + + # only dispose the extent in this frame + x0, y0, x1, y1 = self.dispose_extent + dispose_size = (x1 - x0, y1 - y0) + + Image._decompression_bomb_check(dispose_size) + + # by convention, attempt to use transparency first + dispose_mode = "P" + color = self.info.get("transparency", frame_transparency) + if color is not None: + if self.mode in ("RGB", "RGBA"): + dispose_mode = "RGBA" + color = _rgb(color) + (0,) + else: + color = 
self.info.get("background", 0) + if self.mode in ("RGB", "RGBA"): + dispose_mode = "RGB" + color = _rgb(color) + self.dispose = Image.core.fill(dispose_mode, dispose_size, color) + else: + # replace with previous contents + if self.im is not None: + # only dispose the extent in this frame + self.dispose = self._crop(self.im, self.dispose_extent) + elif frame_transparency is not None: + x0, y0, x1, y1 = self.dispose_extent + dispose_size = (x1 - x0, y1 - y0) + + Image._decompression_bomb_check(dispose_size) + dispose_mode = "P" + color = frame_transparency + if self.mode in ("RGB", "RGBA"): + dispose_mode = "RGBA" + color = _rgb(frame_transparency) + (0,) + self.dispose = Image.core.fill( + dispose_mode, dispose_size, color + ) + except AttributeError: + pass + + if interlace is not None: + transparency = -1 + if frame_transparency is not None: + if frame == 0: + if LOADING_STRATEGY != LoadingStrategy.RGB_ALWAYS: + self.info["transparency"] = frame_transparency + elif self.mode not in ("RGB", "RGBA"): + transparency = frame_transparency + self.tile = [ + ( + "gif", + (x0, y0, x1, y1), + self.__offset, + (bits, interlace, transparency), + ) + ] + + if info.get("comment"): + self.info["comment"] = info["comment"] + for k in ["duration", "extension"]: + if k in info: + self.info[k] = info[k] + elif k in self.info: + del self.info[k] + + def load_prepare(self) -> None: + temp_mode = "P" if self._frame_palette else "L" + self._prev_im = None + if self.__frame == 0: + if self._frame_transparency is not None: + self.im = Image.core.fill( + temp_mode, self.size, self._frame_transparency + ) + elif self.mode in ("RGB", "RGBA"): + self._prev_im = self.im + if self._frame_palette: + self.im = Image.core.fill("P", self.size, self._frame_transparency or 0) + self.im.putpalette("RGB", *self._frame_palette.getdata()) + else: + self.im = None + self._mode = temp_mode + self._frame_palette = None + + super().load_prepare() + + def load_end(self) -> None: + if self.__frame == 0: + if self.mode == "P" and LOADING_STRATEGY == LoadingStrategy.RGB_ALWAYS: + if self._frame_transparency is not None: + self.im.putpalettealpha(self._frame_transparency, 0) + self._mode = "RGBA" + else: + self._mode = "RGB" + self.im = self.im.convert(self.mode, Image.Dither.FLOYDSTEINBERG) + return + if not self._prev_im: + return + if self._frame_transparency is not None: + self.im.putpalettealpha(self._frame_transparency, 0) + frame_im = self.im.convert("RGBA") + else: + frame_im = self.im.convert("RGB") + + assert self.dispose_extent is not None + frame_im = self._crop(frame_im, self.dispose_extent) + + self.im = self._prev_im + self._mode = self.im.mode + if frame_im.mode == "RGBA": + self.im.paste(frame_im, self.dispose_extent, frame_im) + else: + self.im.paste(frame_im, self.dispose_extent) + + def tell(self) -> int: + return self.__frame + + +# -------------------------------------------------------------------- +# Write GIF files + + +RAWMODE = {"1": "L", "L": "L", "P": "P"} + + +def _normalize_mode(im: Image.Image) -> Image.Image: + """ + Takes an image (or frame), returns an image in a mode that is appropriate + for saving in a Gif. + + It may return the original image, or it may return an image converted to + palette or 'L' mode. 
+ + :param im: Image object + :returns: Image object + """ + if im.mode in RAWMODE: + im.load() + return im + if Image.getmodebase(im.mode) == "RGB": + im = im.convert("P", palette=Image.Palette.ADAPTIVE) + if im.palette.mode == "RGBA": + for rgba in im.palette.colors: + if rgba[3] == 0: + im.info["transparency"] = im.palette.colors[rgba] + break + return im + return im.convert("L") + + +_Palette = Union[bytes, bytearray, List[int], ImagePalette.ImagePalette] + + +def _normalize_palette( + im: Image.Image, palette: _Palette | None, info: dict[str, Any] +) -> Image.Image: + """ + Normalizes the palette for image. + - Sets the palette to the incoming palette, if provided. + - Ensures that there's a palette for L mode images + - Optimizes the palette if necessary/desired. + + :param im: Image object + :param palette: bytes object containing the source palette, or .... + :param info: encoderinfo + :returns: Image object + """ + source_palette = None + if palette: + # a bytes palette + if isinstance(palette, (bytes, bytearray, list)): + source_palette = bytearray(palette[:768]) + if isinstance(palette, ImagePalette.ImagePalette): + source_palette = bytearray(palette.palette) + + if im.mode == "P": + if not source_palette: + source_palette = im.im.getpalette("RGB")[:768] + else: # L-mode + if not source_palette: + source_palette = bytearray(i // 3 for i in range(768)) + im.palette = ImagePalette.ImagePalette("RGB", palette=source_palette) + + used_palette_colors: list[int] | None + if palette: + used_palette_colors = [] + assert source_palette is not None + for i in range(0, len(source_palette), 3): + source_color = tuple(source_palette[i : i + 3]) + index = im.palette.colors.get(source_color) + if index in used_palette_colors: + index = None + used_palette_colors.append(index) + for i, index in enumerate(used_palette_colors): + if index is None: + for j in range(len(used_palette_colors)): + if j not in used_palette_colors: + used_palette_colors[i] = j + break + im = im.remap_palette(used_palette_colors) + else: + used_palette_colors = _get_optimize(im, info) + if used_palette_colors is not None: + im = im.remap_palette(used_palette_colors, source_palette) + if "transparency" in info: + try: + info["transparency"] = used_palette_colors.index( + info["transparency"] + ) + except ValueError: + del info["transparency"] + return im + + im.palette.palette = source_palette + return im + + +def _write_single_frame( + im: Image.Image, + fp: IO[bytes], + palette: _Palette | None, +) -> None: + im_out = _normalize_mode(im) + for k, v in im_out.info.items(): + im.encoderinfo.setdefault(k, v) + im_out = _normalize_palette(im_out, palette, im.encoderinfo) + + for s in _get_global_header(im_out, im.encoderinfo): + fp.write(s) + + # local image header + flags = 0 + if get_interlace(im): + flags = flags | 64 + _write_local_header(fp, im, (0, 0), flags) + + im_out.encoderconfig = (8, get_interlace(im)) + ImageFile._save(im_out, fp, [("gif", (0, 0) + im.size, 0, RAWMODE[im_out.mode])]) + + fp.write(b"\0") # end of image data + + +def _getbbox( + base_im: Image.Image, im_frame: Image.Image +) -> tuple[Image.Image, tuple[int, int, int, int] | None]: + if _get_palette_bytes(im_frame) != _get_palette_bytes(base_im): + im_frame = im_frame.convert("RGBA") + base_im = base_im.convert("RGBA") + delta = ImageChops.subtract_modulo(im_frame, base_im) + return delta, delta.getbbox(alpha_only=False) + + +class _Frame(NamedTuple): + im: Image.Image + bbox: tuple[int, int, int, int] | None + encoderinfo: dict[str, Any] + + +def 
_write_multiple_frames( + im: Image.Image, fp: IO[bytes], palette: _Palette | None +) -> bool: + duration = im.encoderinfo.get("duration") + disposal = im.encoderinfo.get("disposal", im.info.get("disposal")) + + im_frames: list[_Frame] = [] + previous_im: Image.Image | None = None + frame_count = 0 + background_im = None + for imSequence in itertools.chain([im], im.encoderinfo.get("append_images", [])): + for im_frame in ImageSequence.Iterator(imSequence): + # a copy is required here since seek can still mutate the image + im_frame = _normalize_mode(im_frame.copy()) + if frame_count == 0: + for k, v in im_frame.info.items(): + if k == "transparency": + continue + im.encoderinfo.setdefault(k, v) + + encoderinfo = im.encoderinfo.copy() + if "transparency" in im_frame.info: + encoderinfo.setdefault("transparency", im_frame.info["transparency"]) + im_frame = _normalize_palette(im_frame, palette, encoderinfo) + if isinstance(duration, (list, tuple)): + encoderinfo["duration"] = duration[frame_count] + elif duration is None and "duration" in im_frame.info: + encoderinfo["duration"] = im_frame.info["duration"] + if isinstance(disposal, (list, tuple)): + encoderinfo["disposal"] = disposal[frame_count] + frame_count += 1 + + diff_frame = None + if im_frames and previous_im: + # delta frame + delta, bbox = _getbbox(previous_im, im_frame) + if not bbox: + # This frame is identical to the previous frame + if encoderinfo.get("duration"): + im_frames[-1].encoderinfo["duration"] += encoderinfo["duration"] + continue + if im_frames[-1].encoderinfo.get("disposal") == 2: + if background_im is None: + color = im.encoderinfo.get( + "transparency", im.info.get("transparency", (0, 0, 0)) + ) + background = _get_background(im_frame, color) + background_im = Image.new("P", im_frame.size, background) + background_im.putpalette(im_frames[0].im.palette) + bbox = _getbbox(background_im, im_frame)[1] + elif encoderinfo.get("optimize") and im_frame.mode != "1": + if "transparency" not in encoderinfo: + try: + encoderinfo["transparency"] = ( + im_frame.palette._new_color_index(im_frame) + ) + except ValueError: + pass + if "transparency" in encoderinfo: + # When the delta is zero, fill the image with transparency + diff_frame = im_frame.copy() + fill = Image.new("P", delta.size, encoderinfo["transparency"]) + if delta.mode == "RGBA": + r, g, b, a = delta.split() + mask = ImageMath.lambda_eval( + lambda args: args["convert"]( + args["max"]( + args["max"]( + args["max"](args["r"], args["g"]), args["b"] + ), + args["a"], + ) + * 255, + "1", + ), + r=r, + g=g, + b=b, + a=a, + ) + else: + if delta.mode == "P": + # Convert to L without considering palette + delta_l = Image.new("L", delta.size) + delta_l.putdata(delta.getdata()) + delta = delta_l + mask = ImageMath.lambda_eval( + lambda args: args["convert"](args["im"] * 255, "1"), + im=delta, + ) + diff_frame.paste(fill, mask=ImageOps.invert(mask)) + else: + bbox = None + previous_im = im_frame + im_frames.append(_Frame(diff_frame or im_frame, bbox, encoderinfo)) + + if len(im_frames) == 1: + if "duration" in im.encoderinfo: + # Since multiple frames will not be written, use the combined duration + im.encoderinfo["duration"] = im_frames[0].encoderinfo["duration"] + return False + + for frame_data in im_frames: + im_frame = frame_data.im + if not frame_data.bbox: + # global header + for s in _get_global_header(im_frame, frame_data.encoderinfo): + fp.write(s) + offset = (0, 0) + else: + # compress difference + if not palette: + frame_data.encoderinfo["include_color_table"] = 
True + + im_frame = im_frame.crop(frame_data.bbox) + offset = frame_data.bbox[:2] + _write_frame_data(fp, im_frame, offset, frame_data.encoderinfo) + return True + + +def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + _save(im, fp, filename, save_all=True) + + +def _save( + im: Image.Image, fp: IO[bytes], filename: str | bytes, save_all: bool = False +) -> None: + # header + if "palette" in im.encoderinfo or "palette" in im.info: + palette = im.encoderinfo.get("palette", im.info.get("palette")) + else: + palette = None + im.encoderinfo.setdefault("optimize", True) + + if not save_all or not _write_multiple_frames(im, fp, palette): + _write_single_frame(im, fp, palette) + + fp.write(b";") # end of file + + if hasattr(fp, "flush"): + fp.flush() + + +def get_interlace(im: Image.Image) -> int: + interlace = im.encoderinfo.get("interlace", 1) + + # workaround for @PIL153 + if min(im.size) < 16: + interlace = 0 + + return interlace + + +def _write_local_header( + fp: IO[bytes], im: Image.Image, offset: tuple[int, int], flags: int +) -> None: + try: + transparency = im.encoderinfo["transparency"] + except KeyError: + transparency = None + + if "duration" in im.encoderinfo: + duration = int(im.encoderinfo["duration"] / 10) + else: + duration = 0 + + disposal = int(im.encoderinfo.get("disposal", 0)) + + if transparency is not None or duration != 0 or disposal: + packed_flag = 1 if transparency is not None else 0 + packed_flag |= disposal << 2 + + fp.write( + b"!" + + o8(249) # extension intro + + o8(4) # length + + o8(packed_flag) # packed fields + + o16(duration) # duration + + o8(transparency or 0) # transparency index + + o8(0) + ) + + include_color_table = im.encoderinfo.get("include_color_table") + if include_color_table: + palette_bytes = _get_palette_bytes(im) + color_table_size = _get_color_table_size(palette_bytes) + if color_table_size: + flags = flags | 128 # local color table flag + flags = flags | color_table_size + + fp.write( + b"," + + o16(offset[0]) # offset + + o16(offset[1]) + + o16(im.size[0]) # size + + o16(im.size[1]) + + o8(flags) # flags + ) + if include_color_table and color_table_size: + fp.write(_get_header_palette(palette_bytes)) + fp.write(o8(8)) # bits + + +def _save_netpbm(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + # Unused by default. + # To use, uncomment the register_save call at the end of the file. + # + # If you need real GIF compression and/or RGB quantization, you + # can use the external NETPBM/PBMPLUS utilities. See comments + # below for information on how to enable this. 
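+    # im._dump() below writes the pixel data to a temporary PPM file that the
+    # NETPBM tools read; the temporary file is removed in the finally block.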
+ tempfile = im._dump() + + try: + with open(filename, "wb") as f: + if im.mode != "RGB": + subprocess.check_call( + ["ppmtogif", tempfile], stdout=f, stderr=subprocess.DEVNULL + ) + else: + # Pipe ppmquant output into ppmtogif + # "ppmquant 256 %s | ppmtogif > %s" % (tempfile, filename) + quant_cmd = ["ppmquant", "256", tempfile] + togif_cmd = ["ppmtogif"] + quant_proc = subprocess.Popen( + quant_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL + ) + togif_proc = subprocess.Popen( + togif_cmd, + stdin=quant_proc.stdout, + stdout=f, + stderr=subprocess.DEVNULL, + ) + + # Allow ppmquant to receive SIGPIPE if ppmtogif exits + assert quant_proc.stdout is not None + quant_proc.stdout.close() + + retcode = quant_proc.wait() + if retcode: + raise subprocess.CalledProcessError(retcode, quant_cmd) + + retcode = togif_proc.wait() + if retcode: + raise subprocess.CalledProcessError(retcode, togif_cmd) + finally: + try: + os.unlink(tempfile) + except OSError: + pass + + +# Force optimization so that we can test performance against +# cases where it took lots of memory and time previously. +_FORCE_OPTIMIZE = False + + +def _get_optimize(im: Image.Image, info: dict[str, Any]) -> list[int] | None: + """ + Palette optimization is a potentially expensive operation. + + This function determines if the palette should be optimized using + some heuristics, then returns the list of palette entries in use. + + :param im: Image object + :param info: encoderinfo + :returns: list of indexes of palette entries in use, or None + """ + if im.mode in ("P", "L") and info and info.get("optimize"): + # Potentially expensive operation. + + # The palette saves 3 bytes per color not used, but palette + # lengths are restricted to 3*(2**N) bytes. Max saving would + # be 768 -> 6 bytes if we went all the way down to 2 colors. + # * If we're over 128 colors, we can't save any space. + # * If there aren't any holes, it's not worth collapsing. + # * If we have a 'large' image, the palette is in the noise. 
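+    # Worked example of the saving, derived from _get_color_table_size and
+    # _get_header_palette below: a "P" frame that uses only 10 of its 256
+    # palette slots can be written with a 10-colour palette (30 bytes),
+    # padded up to the next power of two (16 colours, 48 bytes) instead of
+    # the full 768-byte table.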
+ + # create the new palette if not every color is used + optimise = _FORCE_OPTIMIZE or im.mode == "L" + if optimise or im.width * im.height < 512 * 512: + # check which colors are used + used_palette_colors = [] + for i, count in enumerate(im.histogram()): + if count: + used_palette_colors.append(i) + + if optimise or max(used_palette_colors) >= len(used_palette_colors): + return used_palette_colors + + num_palette_colors = len(im.palette.palette) // Image.getmodebands( + im.palette.mode + ) + current_palette_size = 1 << (num_palette_colors - 1).bit_length() + if ( + # check that the palette would become smaller when saved + len(used_palette_colors) <= current_palette_size // 2 + # check that the palette is not already the smallest possible size + and current_palette_size > 2 + ): + return used_palette_colors + return None + + +def _get_color_table_size(palette_bytes: bytes) -> int: + # calculate the palette size for the header + if not palette_bytes: + return 0 + elif len(palette_bytes) < 9: + return 1 + else: + return math.ceil(math.log(len(palette_bytes) // 3, 2)) - 1 + + +def _get_header_palette(palette_bytes: bytes) -> bytes: + """ + Returns the palette, null padded to the next power of 2 (*3) bytes + suitable for direct inclusion in the GIF header + + :param palette_bytes: Unpadded palette bytes, in RGBRGB form + :returns: Null padded palette + """ + color_table_size = _get_color_table_size(palette_bytes) + + # add the missing amount of bytes + # the palette has to be 2< 0: + palette_bytes += o8(0) * 3 * actual_target_size_diff + return palette_bytes + + +def _get_palette_bytes(im: Image.Image) -> bytes: + """ + Gets the palette for inclusion in the gif header + + :param im: Image object + :returns: Bytes, len<=768 suitable for inclusion in gif header + """ + return im.palette.palette if im.palette else b"" + + +def _get_background( + im: Image.Image, + info_background: int | tuple[int, int, int] | tuple[int, int, int, int] | None, +) -> int: + background = 0 + if info_background: + if isinstance(info_background, tuple): + # WebPImagePlugin stores an RGBA value in info["background"] + # So it must be converted to the same format as GifImagePlugin's + # info["background"] - a global color table index + try: + background = im.palette.getcolor(info_background, im) + except ValueError as e: + if str(e) not in ( + # If all 256 colors are in use, + # then there is no need for the background color + "cannot allocate more than 256 colors", + # Ignore non-opaque WebP background + "cannot add non-opaque RGBA color to RGB palette", + ): + raise + else: + background = info_background + return background + + +def _get_global_header(im: Image.Image, info: dict[str, Any]) -> list[bytes]: + """Return a list of strings representing a GIF header""" + + # Header Block + # https://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp + + version = b"87a" + if im.info.get("version") == b"89a" or ( + info + and ( + "transparency" in info + or info.get("loop") is not None + or info.get("duration") + or info.get("comment") + ) + ): + version = b"89a" + + background = _get_background(im, info.get("background")) + + palette_bytes = _get_palette_bytes(im) + color_table_size = _get_color_table_size(palette_bytes) + + header = [ + b"GIF" # signature + + version # version + + o16(im.size[0]) # canvas width + + o16(im.size[1]), # canvas height + # Logical Screen Descriptor + # size of global color table + global color table flag + o8(color_table_size + 128), # packed fields + # background + reserved/aspect 
+ o8(background) + o8(0), + # Global Color Table + _get_header_palette(palette_bytes), + ] + if info.get("loop") is not None: + header.append( + b"!" + + o8(255) # extension intro + + o8(11) + + b"NETSCAPE2.0" + + o8(3) + + o8(1) + + o16(info["loop"]) # number of loops + + o8(0) + ) + if info.get("comment"): + comment_block = b"!" + o8(254) # extension intro + + comment = info["comment"] + if isinstance(comment, str): + comment = comment.encode() + for i in range(0, len(comment), 255): + subblock = comment[i : i + 255] + comment_block += o8(len(subblock)) + subblock + + comment_block += o8(0) + header.append(comment_block) + return header + + +def _write_frame_data( + fp: IO[bytes], + im_frame: Image.Image, + offset: tuple[int, int], + params: dict[str, Any], +) -> None: + try: + im_frame.encoderinfo = params + + # local image header + _write_local_header(fp, im_frame, offset, 0) + + ImageFile._save( + im_frame, fp, [("gif", (0, 0) + im_frame.size, 0, RAWMODE[im_frame.mode])] + ) + + fp.write(b"\0") # end of image data + finally: + del im_frame.encoderinfo + + +# -------------------------------------------------------------------- +# Legacy GIF utilities + + +def getheader( + im: Image.Image, palette: _Palette | None = None, info: dict[str, Any] | None = None +) -> tuple[list[bytes], list[int] | None]: + """ + Legacy Method to get Gif data from image. + + Warning:: May modify image data. + + :param im: Image object + :param palette: bytes object containing the source palette, or .... + :param info: encoderinfo + :returns: tuple of(list of header items, optimized palette) + + """ + if info is None: + info = {} + + used_palette_colors = _get_optimize(im, info) + + if "background" not in info and "background" in im.info: + info["background"] = im.info["background"] + + im_mod = _normalize_palette(im, palette, info) + im.palette = im_mod.palette + im.im = im_mod.im + header = _get_global_header(im, info) + + return header, used_palette_colors + + +def getdata( + im: Image.Image, offset: tuple[int, int] = (0, 0), **params: Any +) -> list[bytes]: + """ + Legacy Method + + Return a list of strings representing this image. + The first string is a local image header, the rest contains + encoded image data. + + To specify duration, add the time in milliseconds, + e.g. ``getdata(im_frame, duration=1000)`` + + :param im: Image object + :param offset: Tuple of (x, y) pixels. Defaults to (0, 0) + :param \\**params: e.g. 
duration or other encoder info parameters + :returns: List of bytes containing GIF encoded frame data + + """ + from io import BytesIO + + class Collector(BytesIO): + data = [] + + if sys.version_info >= (3, 12): + from collections.abc import Buffer + + def write(self, data: Buffer) -> int: + self.data.append(data) + return len(data) + + else: + + def write(self, data: Any) -> int: + self.data.append(data) + return len(data) + + im.load() # make sure raster data is available + + fp = Collector() + + _write_frame_data(fp, im, offset, params) + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GifImageFile.format, GifImageFile, _accept) +Image.register_save(GifImageFile.format, _save) +Image.register_save_all(GifImageFile.format, _save_all) +Image.register_extension(GifImageFile.format, ".gif") +Image.register_mime(GifImageFile.format, "image/gif") + +# +# Uncomment the following line if you wish to use NETPBM/PBMPLUS +# instead of the built-in "uncompressed" GIF encoder + +# Image.register_save(GifImageFile.format, _save_netpbm) diff --git a/MLPY/Lib/site-packages/PIL/GimpGradientFile.py b/MLPY/Lib/site-packages/PIL/GimpGradientFile.py new file mode 100644 index 0000000000000000000000000000000000000000..59e50a9f3218d192d59669bcbb638bcf6c0ad92f --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/GimpGradientFile.py @@ -0,0 +1,149 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read (and render) GIMP gradient files +# +# History: +# 97-08-23 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +""" +Stuff to translate curve segments to palette values (derived from +the corresponding code in GIMP, written by Federico Mena Quintero. +See the GIMP distribution for more information.) 
+""" +from __future__ import annotations + +from math import log, pi, sin, sqrt +from typing import IO, Callable + +from ._binary import o8 + +EPSILON = 1e-10 +"""""" # Enable auto-doc for data member + + +def linear(middle: float, pos: float) -> float: + if pos <= middle: + if middle < EPSILON: + return 0.0 + else: + return 0.5 * pos / middle + else: + pos = pos - middle + middle = 1.0 - middle + if middle < EPSILON: + return 1.0 + else: + return 0.5 + 0.5 * pos / middle + + +def curved(middle: float, pos: float) -> float: + return pos ** (log(0.5) / log(max(middle, EPSILON))) + + +def sine(middle: float, pos: float) -> float: + return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0 + + +def sphere_increasing(middle: float, pos: float) -> float: + return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2) + + +def sphere_decreasing(middle: float, pos: float) -> float: + return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2) + + +SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing] +"""""" # Enable auto-doc for data member + + +class GradientFile: + gradient: ( + list[ + tuple[ + float, + float, + float, + list[float], + list[float], + Callable[[float, float], float], + ] + ] + | None + ) = None + + def getpalette(self, entries: int = 256) -> tuple[bytes, str]: + assert self.gradient is not None + palette = [] + + ix = 0 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + for i in range(entries): + x = i / (entries - 1) + + while x1 < x: + ix += 1 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + w = x1 - x0 + + if w < EPSILON: + scale = segment(0.5, 0.5) + else: + scale = segment((xm - x0) / w, (x - x0) / w) + + # expand to RGBA + r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5)) + g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5)) + b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5)) + a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5)) + + # add to palette + palette.append(r + g + b + a) + + return b"".join(palette), "RGBA" + + +class GimpGradientFile(GradientFile): + """File handler for GIMP's gradient format.""" + + def __init__(self, fp: IO[bytes]) -> None: + if fp.readline()[:13] != b"GIMP Gradient": + msg = "not a GIMP gradient file" + raise SyntaxError(msg) + + line = fp.readline() + + # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do + if line.startswith(b"Name: "): + line = fp.readline().strip() + + count = int(line) + + self.gradient = [] + + for i in range(count): + s = fp.readline().split() + w = [float(x) for x in s[:11]] + + x0, x1 = w[0], w[2] + xm = w[1] + rgb0 = w[3:7] + rgb1 = w[7:11] + + segment = SEGMENTS[int(s[11])] + cspace = int(s[12]) + + if cspace != 0: + msg = "cannot handle HSV colour space" + raise OSError(msg) + + self.gradient.append((x0, x1, xm, rgb0, rgb1, segment)) diff --git a/MLPY/Lib/site-packages/PIL/GimpPaletteFile.py b/MLPY/Lib/site-packages/PIL/GimpPaletteFile.py new file mode 100644 index 0000000000000000000000000000000000000000..3cc054460eebfc4572b3a6824cdb25de8dddd09d --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/GimpPaletteFile.py @@ -0,0 +1,58 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read GIMP palette files +# +# History: +# 1997-08-23 fl Created +# 2004-09-07 fl Support GIMP 2.0 palette files. +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1997-2004. +# +# See the README file for information on usage and redistribution. 
+# +from __future__ import annotations + +import re +from typing import IO + +from ._binary import o8 + + +class GimpPaletteFile: + """File handler for GIMP's palette format.""" + + rawmode = "RGB" + + def __init__(self, fp: IO[bytes]) -> None: + palette = [o8(i) * 3 for i in range(256)] + + if fp.readline()[:12] != b"GIMP Palette": + msg = "not a GIMP palette file" + raise SyntaxError(msg) + + for i in range(256): + s = fp.readline() + if not s: + break + + # skip fields and comment lines + if re.match(rb"\w+:|#", s): + continue + if len(s) > 100: + msg = "bad palette file" + raise SyntaxError(msg) + + v = tuple(map(int, s.split()[:3])) + if len(v) != 3: + msg = "bad palette entry" + raise ValueError(msg) + + palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2]) + + self.palette = b"".join(palette) + + def getpalette(self) -> tuple[bytes, str]: + return self.palette, self.rawmode diff --git a/MLPY/Lib/site-packages/PIL/GribStubImagePlugin.py b/MLPY/Lib/site-packages/PIL/GribStubImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..9cef6786a24e7779e15563549cd2c926d967ee5a --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/GribStubImagePlugin.py @@ -0,0 +1,76 @@ +# +# The Python Imaging Library +# $Id$ +# +# GRIB stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from typing import IO + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler: ImageFile.StubHandler | None) -> None: + """ + Install application-specific GRIB image handler. + + :param handler: Handler object. + """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == b"GRIB" and prefix[7] == 1 + + +class GribStubImageFile(ImageFile.StubImageFile): + format = "GRIB" + format_description = "GRIB" + + def _open(self) -> None: + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + msg = "Not a GRIB file" + raise SyntaxError(msg) + + self.fp.seek(offset) + + # make something up + self._mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self) -> ImageFile.StubHandler | None: + return _handler + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if _handler is None or not hasattr(_handler, "save"): + msg = "GRIB save handler not installed" + raise OSError(msg) + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept) +Image.register_save(GribStubImageFile.format, _save) + +Image.register_extension(GribStubImageFile.format, ".grib") diff --git a/MLPY/Lib/site-packages/PIL/Hdf5StubImagePlugin.py b/MLPY/Lib/site-packages/PIL/Hdf5StubImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..b1a89e9316e696f589296850a16bf440579521c1 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/Hdf5StubImagePlugin.py @@ -0,0 +1,76 @@ +# +# The Python Imaging Library +# $Id$ +# +# HDF5 stub adapter +# +# Copyright (c) 2000-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from typing import IO + +from . 
import Image, ImageFile + +_handler = None + + +def register_handler(handler: ImageFile.StubHandler | None) -> None: + """ + Install application-specific HDF5 image handler. + + :param handler: Handler object. + """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix: bytes) -> bool: + return prefix[:8] == b"\x89HDF\r\n\x1a\n" + + +class HDF5StubImageFile(ImageFile.StubImageFile): + format = "HDF5" + format_description = "HDF5" + + def _open(self) -> None: + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + msg = "Not an HDF file" + raise SyntaxError(msg) + + self.fp.seek(offset) + + # make something up + self._mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self) -> ImageFile.StubHandler | None: + return _handler + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if _handler is None or not hasattr(_handler, "save"): + msg = "HDF5 save handler not installed" + raise OSError(msg) + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept) +Image.register_save(HDF5StubImageFile.format, _save) + +Image.register_extensions(HDF5StubImageFile.format, [".h5", ".hdf"]) diff --git a/MLPY/Lib/site-packages/PIL/IcnsImagePlugin.py b/MLPY/Lib/site-packages/PIL/IcnsImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..2e7db70405dcdf7eaa7028e71f857a21a115822f --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/IcnsImagePlugin.py @@ -0,0 +1,399 @@ +# +# The Python Imaging Library. +# $Id$ +# +# macOS icns file decoder, based on icns.py by Bob Ippolito. +# +# history: +# 2004-10-09 fl Turned into a PIL plugin; removed 2.3 dependencies. +# 2020-04-04 Allow saving on all operating systems. +# +# Copyright (c) 2004 by Bob Ippolito. +# Copyright (c) 2004 by Secret Labs. +# Copyright (c) 2004 by Fredrik Lundh. +# Copyright (c) 2014 by Alastair Houghton. +# Copyright (c) 2020 by Pan Jing. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import io +import os +import struct +import sys +from typing import IO + +from . import Image, ImageFile, PngImagePlugin, features + +enable_jpeg2k = features.check_codec("jpg_2000") +if enable_jpeg2k: + from . import Jpeg2KImagePlugin + +MAGIC = b"icns" +HEADERSIZE = 8 + + +def nextheader(fobj): + return struct.unpack(">4sI", fobj.read(HEADERSIZE)) + + +def read_32t(fobj, start_length, size): + # The 128x128 icon seems to have an extra header for some reason. + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(4) + if sig != b"\x00\x00\x00\x00": + msg = "Unknown signature, expecting 0x00000000" + raise SyntaxError(msg) + return read_32(fobj, (start + 4, length - 4), size) + + +def read_32(fobj, start_length, size): + """ + Read a 32bit RGB icon resource. Seems to be either uncompressed or + an RLE packbits-like scheme. 
+ """ + (start, length) = start_length + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + if length == sizesq * 3: + # uncompressed ("RGBRGBGB") + indata = fobj.read(length) + im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1) + else: + # decode image + im = Image.new("RGB", pixel_size, None) + for band_ix in range(3): + data = [] + bytesleft = sizesq + while bytesleft > 0: + byte = fobj.read(1) + if not byte: + break + byte = byte[0] + if byte & 0x80: + blocksize = byte - 125 + byte = fobj.read(1) + for i in range(blocksize): + data.append(byte) + else: + blocksize = byte + 1 + data.append(fobj.read(blocksize)) + bytesleft -= blocksize + if bytesleft <= 0: + break + if bytesleft != 0: + msg = f"Error reading channel [{repr(bytesleft)} left]" + raise SyntaxError(msg) + band = Image.frombuffer("L", pixel_size, b"".join(data), "raw", "L", 0, 1) + im.im.putband(band.im, band_ix) + return {"RGB": im} + + +def read_mk(fobj, start_length, size): + # Alpha masks seem to be uncompressed + start = start_length[0] + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + band = Image.frombuffer("L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1) + return {"A": band} + + +def read_png_or_jpeg2000(fobj, start_length, size): + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(12) + if sig[:8] == b"\x89PNG\x0d\x0a\x1a\x0a": + fobj.seek(start) + im = PngImagePlugin.PngImageFile(fobj) + Image._decompression_bomb_check(im.size) + return {"RGBA": im} + elif ( + sig[:4] == b"\xff\x4f\xff\x51" + or sig[:4] == b"\x0d\x0a\x87\x0a" + or sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a" + ): + if not enable_jpeg2k: + msg = ( + "Unsupported icon subimage format (rebuild PIL " + "with JPEG 2000 support to fix this)" + ) + raise ValueError(msg) + # j2k, jpc or j2c + fobj.seek(start) + jp2kstream = fobj.read(length) + f = io.BytesIO(jp2kstream) + im = Jpeg2KImagePlugin.Jpeg2KImageFile(f) + Image._decompression_bomb_check(im.size) + if im.mode != "RGBA": + im = im.convert("RGBA") + return {"RGBA": im} + else: + msg = "Unsupported icon subimage format" + raise ValueError(msg) + + +class IcnsFile: + SIZES = { + (512, 512, 2): [(b"ic10", read_png_or_jpeg2000)], + (512, 512, 1): [(b"ic09", read_png_or_jpeg2000)], + (256, 256, 2): [(b"ic14", read_png_or_jpeg2000)], + (256, 256, 1): [(b"ic08", read_png_or_jpeg2000)], + (128, 128, 2): [(b"ic13", read_png_or_jpeg2000)], + (128, 128, 1): [ + (b"ic07", read_png_or_jpeg2000), + (b"it32", read_32t), + (b"t8mk", read_mk), + ], + (64, 64, 1): [(b"icp6", read_png_or_jpeg2000)], + (32, 32, 2): [(b"ic12", read_png_or_jpeg2000)], + (48, 48, 1): [(b"ih32", read_32), (b"h8mk", read_mk)], + (32, 32, 1): [ + (b"icp5", read_png_or_jpeg2000), + (b"il32", read_32), + (b"l8mk", read_mk), + ], + (16, 16, 2): [(b"ic11", read_png_or_jpeg2000)], + (16, 16, 1): [ + (b"icp4", read_png_or_jpeg2000), + (b"is32", read_32), + (b"s8mk", read_mk), + ], + } + + def __init__(self, fobj): + """ + fobj is a file-like object as an icns resource + """ + # signature : (start, length) + self.dct = dct = {} + self.fobj = fobj + sig, filesize = nextheader(fobj) + if not _accept(sig): + msg = "not an icns file" + raise SyntaxError(msg) + i = HEADERSIZE + while i < filesize: + sig, blocksize = nextheader(fobj) + if blocksize <= 0: + msg = "invalid block header" + raise SyntaxError(msg) + i += HEADERSIZE + blocksize -= HEADERSIZE + dct[sig] = (i, blocksize) + 
fobj.seek(blocksize, io.SEEK_CUR) + i += blocksize + + def itersizes(self): + sizes = [] + for size, fmts in self.SIZES.items(): + for fmt, reader in fmts: + if fmt in self.dct: + sizes.append(size) + break + return sizes + + def bestsize(self): + sizes = self.itersizes() + if not sizes: + msg = "No 32bit icon resources found" + raise SyntaxError(msg) + return max(sizes) + + def dataforsize(self, size): + """ + Get an icon resource as {channel: array}. Note that + the arrays are bottom-up like windows bitmaps and will likely + need to be flipped or transposed in some way. + """ + dct = {} + for code, reader in self.SIZES[size]: + desc = self.dct.get(code) + if desc is not None: + dct.update(reader(self.fobj, desc, size)) + return dct + + def getimage(self, size=None): + if size is None: + size = self.bestsize() + if len(size) == 2: + size = (size[0], size[1], 1) + channels = self.dataforsize(size) + + im = channels.get("RGBA", None) + if im: + return im + + im = channels.get("RGB").copy() + try: + im.putalpha(channels["A"]) + except KeyError: + pass + return im + + +## +# Image plugin for Mac OS icons. + + +class IcnsImageFile(ImageFile.ImageFile): + """ + PIL image support for Mac OS .icns files. + Chooses the best resolution, but will possibly load + a different size image if you mutate the size attribute + before calling 'load'. + + The info dictionary has a key 'sizes' that is a list + of sizes that the icns file has. + """ + + format = "ICNS" + format_description = "Mac OS icns resource" + + def _open(self) -> None: + self.icns = IcnsFile(self.fp) + self._mode = "RGBA" + self.info["sizes"] = self.icns.itersizes() + self.best_size = self.icns.bestsize() + self.size = ( + self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2], + ) + + @property + def size(self): + return self._size + + @size.setter + def size(self, value): + info_size = value + if info_size not in self.info["sizes"] and len(info_size) == 2: + info_size = (info_size[0], info_size[1], 1) + if ( + info_size not in self.info["sizes"] + and len(info_size) == 3 + and info_size[2] == 1 + ): + simple_sizes = [ + (size[0] * size[2], size[1] * size[2]) for size in self.info["sizes"] + ] + if value in simple_sizes: + info_size = self.info["sizes"][simple_sizes.index(value)] + if info_size not in self.info["sizes"]: + msg = "This is not one of the allowed sizes of this image" + raise ValueError(msg) + self._size = value + + def load(self): + if len(self.size) == 3: + self.best_size = self.size + self.size = ( + self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2], + ) + + px = Image.Image.load(self) + if self.im is not None and self.im.size == self.size: + # Already loaded + return px + self.load_prepare() + # This is likely NOT the best way to do it, but whatever. + im = self.icns.getimage(self.best_size) + + # If this is a PNG or JPEG 2000, it won't be loaded yet + px = im.load() + + self.im = im.im + self._mode = im.mode + self.size = im.size + + return px + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + """ + Saves the image as a series of PNG files, + that are then combined into a .icns file. 
+ """ + if hasattr(fp, "flush"): + fp.flush() + + sizes = { + b"ic07": 128, + b"ic08": 256, + b"ic09": 512, + b"ic10": 1024, + b"ic11": 32, + b"ic12": 64, + b"ic13": 256, + b"ic14": 512, + } + provided_images = {im.width: im for im in im.encoderinfo.get("append_images", [])} + size_streams = {} + for size in set(sizes.values()): + image = ( + provided_images[size] + if size in provided_images + else im.resize((size, size)) + ) + + temp = io.BytesIO() + image.save(temp, "png") + size_streams[size] = temp.getvalue() + + entries = [] + for type, size in sizes.items(): + stream = size_streams[size] + entries.append((type, HEADERSIZE + len(stream), stream)) + + # Header + fp.write(MAGIC) + file_length = HEADERSIZE # Header + file_length += HEADERSIZE + 8 * len(entries) # TOC + file_length += sum(entry[1] for entry in entries) + fp.write(struct.pack(">i", file_length)) + + # TOC + fp.write(b"TOC ") + fp.write(struct.pack(">i", HEADERSIZE + len(entries) * HEADERSIZE)) + for entry in entries: + fp.write(entry[0]) + fp.write(struct.pack(">i", entry[1])) + + # Data + for entry in entries: + fp.write(entry[0]) + fp.write(struct.pack(">i", entry[1])) + fp.write(entry[2]) + + if hasattr(fp, "flush"): + fp.flush() + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == MAGIC + + +Image.register_open(IcnsImageFile.format, IcnsImageFile, _accept) +Image.register_extension(IcnsImageFile.format, ".icns") + +Image.register_save(IcnsImageFile.format, _save) +Image.register_mime(IcnsImageFile.format, "image/icns") + +if __name__ == "__main__": + if len(sys.argv) < 2: + print("Syntax: python3 IcnsImagePlugin.py [file]") + sys.exit() + + with open(sys.argv[1], "rb") as fp: + imf = IcnsImageFile(fp) + for size in imf.info["sizes"]: + width, height, scale = imf.size = size + imf.save(f"out-{width}-{height}-{scale}.png") + with Image.open(sys.argv[1]) as im: + im.save("out.png") + if sys.platform == "windows": + os.startfile("out.png") diff --git a/MLPY/Lib/site-packages/PIL/IcoImagePlugin.py b/MLPY/Lib/site-packages/PIL/IcoImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..a284907ba7c02a6e644af19fbf3155892a7bd503 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/IcoImagePlugin.py @@ -0,0 +1,360 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Icon support for PIL +# +# History: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis +# . +# https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki +# +# Icon format references: +# * https://en.wikipedia.org/wiki/ICO_(file_format) +# * https://msdn.microsoft.com/en-us/library/ms997538.aspx +from __future__ import annotations + +import warnings +from io import BytesIO +from math import ceil, log +from typing import IO + +from . 
import BmpImagePlugin, Image, ImageFile, PngImagePlugin +from ._binary import i16le as i16 +from ._binary import i32le as i32 +from ._binary import o8 +from ._binary import o16le as o16 +from ._binary import o32le as o32 + +# +# -------------------------------------------------------------------- + +_MAGIC = b"\0\0\1\0" + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + fp.write(_MAGIC) # (2+2) + bmp = im.encoderinfo.get("bitmap_format") == "bmp" + sizes = im.encoderinfo.get( + "sizes", + [(16, 16), (24, 24), (32, 32), (48, 48), (64, 64), (128, 128), (256, 256)], + ) + frames = [] + provided_ims = [im] + im.encoderinfo.get("append_images", []) + width, height = im.size + for size in sorted(set(sizes)): + if size[0] > width or size[1] > height or size[0] > 256 or size[1] > 256: + continue + + for provided_im in provided_ims: + if provided_im.size != size: + continue + frames.append(provided_im) + if bmp: + bits = BmpImagePlugin.SAVE[provided_im.mode][1] + bits_used = [bits] + for other_im in provided_ims: + if other_im.size != size: + continue + bits = BmpImagePlugin.SAVE[other_im.mode][1] + if bits not in bits_used: + # Another image has been supplied for this size + # with a different bit depth + frames.append(other_im) + bits_used.append(bits) + break + else: + # TODO: invent a more convenient method for proportional scalings + frame = provided_im.copy() + frame.thumbnail(size, Image.Resampling.LANCZOS, reducing_gap=None) + frames.append(frame) + fp.write(o16(len(frames))) # idCount(2) + offset = fp.tell() + len(frames) * 16 + for frame in frames: + width, height = frame.size + # 0 means 256 + fp.write(o8(width if width < 256 else 0)) # bWidth(1) + fp.write(o8(height if height < 256 else 0)) # bHeight(1) + + bits, colors = BmpImagePlugin.SAVE[frame.mode][1:] if bmp else (32, 0) + fp.write(o8(colors)) # bColorCount(1) + fp.write(b"\0") # bReserved(1) + fp.write(b"\0\0") # wPlanes(2) + fp.write(o16(bits)) # wBitCount(2) + + image_io = BytesIO() + if bmp: + frame.save(image_io, "dib") + + if bits != 32: + and_mask = Image.new("1", size) + ImageFile._save( + and_mask, image_io, [("raw", (0, 0) + size, 0, ("1", 0, -1))] + ) + else: + frame.save(image_io, "png") + image_io.seek(0) + image_bytes = image_io.read() + if bmp: + image_bytes = image_bytes[:8] + o32(height * 2) + image_bytes[12:] + bytes_len = len(image_bytes) + fp.write(o32(bytes_len)) # dwBytesInRes(4) + fp.write(o32(offset)) # dwImageOffset(4) + current = fp.tell() + fp.seek(offset) + fp.write(image_bytes) + offset = offset + bytes_len + fp.seek(current) + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == _MAGIC + + +class IcoFile: + def __init__(self, buf): + """ + Parse image from file-like object containing ico file data + """ + + # check magic + s = buf.read(6) + if not _accept(s): + msg = "not an ICO file" + raise SyntaxError(msg) + + self.buf = buf + self.entry = [] + + # Number of items in file + self.nb_items = i16(s, 4) + + # Get headers for each item + for i in range(self.nb_items): + s = buf.read(16) + + icon_header = { + "width": s[0], + "height": s[1], + "nb_color": s[2], # No. of colors in image (0 if >=8bpp) + "reserved": s[3], + "planes": i16(s, 4), + "bpp": i16(s, 6), + "size": i32(s, 8), + "offset": i32(s, 12), + } + + # See Wikipedia + for j in ("width", "height"): + if not icon_header[j]: + icon_header[j] = 256 + + # See Wikipedia notes about color depth. 
+ # We need this just to differ images with equal sizes + icon_header["color_depth"] = ( + icon_header["bpp"] + or ( + icon_header["nb_color"] != 0 + and ceil(log(icon_header["nb_color"], 2)) + ) + or 256 + ) + + icon_header["dim"] = (icon_header["width"], icon_header["height"]) + icon_header["square"] = icon_header["width"] * icon_header["height"] + + self.entry.append(icon_header) + + self.entry = sorted(self.entry, key=lambda x: x["color_depth"]) + # ICO images are usually squares + self.entry = sorted(self.entry, key=lambda x: x["square"], reverse=True) + + def sizes(self): + """ + Get a list of all available icon sizes and color depths. + """ + return {(h["width"], h["height"]) for h in self.entry} + + def getentryindex(self, size, bpp=False): + for i, h in enumerate(self.entry): + if size == h["dim"] and (bpp is False or bpp == h["color_depth"]): + return i + return 0 + + def getimage(self, size, bpp=False): + """ + Get an image from the icon + """ + return self.frame(self.getentryindex(size, bpp)) + + def frame(self, idx: int) -> Image.Image: + """ + Get an image from frame idx + """ + + header = self.entry[idx] + + self.buf.seek(header["offset"]) + data = self.buf.read(8) + self.buf.seek(header["offset"]) + + im: Image.Image + if data[:8] == PngImagePlugin._MAGIC: + # png frame + im = PngImagePlugin.PngImageFile(self.buf) + Image._decompression_bomb_check(im.size) + else: + # XOR + AND mask bmp frame + im = BmpImagePlugin.DibImageFile(self.buf) + Image._decompression_bomb_check(im.size) + + # change tile dimension to only encompass XOR image + im._size = (im.size[0], int(im.size[1] / 2)) + d, e, o, a = im.tile[0] + im.tile[0] = d, (0, 0) + im.size, o, a + + # figure out where AND mask image starts + bpp = header["bpp"] + if 32 == bpp: + # 32-bit color depth icon image allows semitransparent areas + # PIL's DIB format ignores transparency bits, recover them. + # The DIB is packed in BGRX byte order where X is the alpha + # channel. + + # Back up to start of bmp data + self.buf.seek(o) + # extract every 4th byte (eg. 3,7,11,15,...) + alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4] + + # convert to an 8bpp grayscale image + mask = Image.frombuffer( + "L", # 8bpp + im.size, # (w, h) + alpha_bytes, # source chars + "raw", # raw decoder + ("L", 0, -1), # 8bpp inverted, unpadded, reversed + ) + else: + # get AND image from end of bitmap + w = im.size[0] + if (w % 32) > 0: + # bitmap row data is aligned to word boundaries + w += 32 - (im.size[0] % 32) + + # the total mask data is + # padded row size * height / bits per char + + total_bytes = int((w * im.size[1]) / 8) + and_mask_offset = header["offset"] + header["size"] - total_bytes + + self.buf.seek(and_mask_offset) + mask_data = self.buf.read(total_bytes) + + # convert raw data to image + mask = Image.frombuffer( + "1", # 1 bpp + im.size, # (w, h) + mask_data, # source chars + "raw", # raw decoder + ("1;I", int(w / 8), -1), # 1bpp inverted, padded, reversed + ) + + # now we have two images, im is XOR image and mask is AND image + + # apply mask image as alpha channel + im = im.convert("RGBA") + im.putalpha(mask) + + return im + + +## +# Image plugin for Windows Icon files. + + +class IcoImageFile(ImageFile.ImageFile): + """ + PIL read-only image support for Microsoft Windows .ico files. + + By default the largest resolution image in the file will be loaded. This + can be changed by altering the 'size' attribute before calling 'load'. 
+ + The info dictionary has a key 'sizes' that is a list of the sizes available + in the icon file. + + Handles classic, XP and Vista icon formats. + + When saving, PNG compression is used. Support for this was only added in + Windows Vista. If you are unable to view the icon in Windows, convert the + image to "RGBA" mode before saving. + + This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis + . + https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki + """ + + format = "ICO" + format_description = "Windows Icon" + + def _open(self) -> None: + self.ico = IcoFile(self.fp) + self.info["sizes"] = self.ico.sizes() + self.size = self.ico.entry[0]["dim"] + self.load() + + @property + def size(self): + return self._size + + @size.setter + def size(self, value): + if value not in self.info["sizes"]: + msg = "This is not one of the allowed sizes of this image" + raise ValueError(msg) + self._size = value + + def load(self): + if self.im is not None and self.im.size == self.size: + # Already loaded + return Image.Image.load(self) + im = self.ico.getimage(self.size) + # if tile is PNG, it won't really be loaded yet + im.load() + self.im = im.im + self.pyaccess = None + self._mode = im.mode + if im.palette: + self.palette = im.palette + if im.size != self.size: + warnings.warn("Image was not the expected size") + + index = self.ico.getentryindex(self.size) + sizes = list(self.info["sizes"]) + sizes[index] = im.size + self.info["sizes"] = set(sizes) + + self.size = im.size + + def load_seek(self, pos: int) -> None: + # Flag the ImageFile.Parser so that it + # just does all the decode at the end. + pass + + +# +# -------------------------------------------------------------------- + + +Image.register_open(IcoImageFile.format, IcoImageFile, _accept) +Image.register_save(IcoImageFile.format, _save) +Image.register_extension(IcoImageFile.format, ".ico") + +Image.register_mime(IcoImageFile.format, "image/x-icon") diff --git a/MLPY/Lib/site-packages/PIL/ImImagePlugin.py b/MLPY/Lib/site-packages/PIL/ImImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..1b133f23a1e2a1d1c2044eaf59447f96cca39fcc --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImImagePlugin.py @@ -0,0 +1,374 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IFUNC IM file handling for PIL +# +# history: +# 1995-09-01 fl Created. +# 1997-01-03 fl Save palette images +# 1997-01-08 fl Added sequence support +# 1997-01-23 fl Added P and RGB save support +# 1997-05-31 fl Read floating point images +# 1997-06-22 fl Save floating point images +# 1997-08-27 fl Read and save 1-bit images +# 1998-06-25 fl Added support for RGB+LUT images +# 1998-07-02 fl Added support for YCC images +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 1998-12-29 fl Added I;16 support +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# 2003-09-26 fl Added LA/PA support +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import os +import re +from typing import IO, Any + +from . 
import Image, ImageFile, ImagePalette + +# -------------------------------------------------------------------- +# Standard tags + +COMMENT = "Comment" +DATE = "Date" +EQUIPMENT = "Digitalization equipment" +FRAMES = "File size (no of images)" +LUT = "Lut" +NAME = "Name" +SCALE = "Scale (x,y)" +SIZE = "Image size (x*y)" +MODE = "Image type" + +TAGS = { + COMMENT: 0, + DATE: 0, + EQUIPMENT: 0, + FRAMES: 0, + LUT: 0, + NAME: 0, + SCALE: 0, + SIZE: 0, + MODE: 0, +} + +OPEN = { + # ifunc93/p3cfunc formats + "0 1 image": ("1", "1"), + "L 1 image": ("1", "1"), + "Greyscale image": ("L", "L"), + "Grayscale image": ("L", "L"), + "RGB image": ("RGB", "RGB;L"), + "RLB image": ("RGB", "RLB"), + "RYB image": ("RGB", "RLB"), + "B1 image": ("1", "1"), + "B2 image": ("P", "P;2"), + "B4 image": ("P", "P;4"), + "X 24 image": ("RGB", "RGB"), + "L 32 S image": ("I", "I;32"), + "L 32 F image": ("F", "F;32"), + # old p3cfunc formats + "RGB3 image": ("RGB", "RGB;T"), + "RYB3 image": ("RGB", "RYB;T"), + # extensions + "LA image": ("LA", "LA;L"), + "PA image": ("LA", "PA;L"), + "RGBA image": ("RGBA", "RGBA;L"), + "RGBX image": ("RGB", "RGBX;L"), + "CMYK image": ("CMYK", "CMYK;L"), + "YCC image": ("YCbCr", "YCbCr;L"), +} + +# ifunc95 extensions +for i in ["8", "8S", "16", "16S", "32", "32F"]: + OPEN[f"L {i} image"] = ("F", f"F;{i}") + OPEN[f"L*{i} image"] = ("F", f"F;{i}") +for i in ["16", "16L", "16B"]: + OPEN[f"L {i} image"] = (f"I;{i}", f"I;{i}") + OPEN[f"L*{i} image"] = (f"I;{i}", f"I;{i}") +for i in ["32S"]: + OPEN[f"L {i} image"] = ("I", f"I;{i}") + OPEN[f"L*{i} image"] = ("I", f"I;{i}") +for j in range(2, 33): + OPEN[f"L*{j} image"] = ("F", f"F;{j}") + + +# -------------------------------------------------------------------- +# Read IM directory + +split = re.compile(rb"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$") + + +def number(s: Any) -> float: + try: + return int(s) + except ValueError: + return float(s) + + +## +# Image plugin for the IFUNC IM file format. + + +class ImImageFile(ImageFile.ImageFile): + format = "IM" + format_description = "IFUNC Image Memory" + _close_exclusive_fp_after_loading = False + + def _open(self) -> None: + # Quick rejection: if there's not an LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + msg = "not an IM file" + raise SyntaxError(msg) + self.fp.seek(0) + + n = 0 + + # Default values + self.info[MODE] = "L" + self.info[SIZE] = (512, 512) + self.info[FRAMES] = 1 + + self.rawmode = "L" + + while True: + s = self.fp.read(1) + + # Some versions of IFUNC uses \n\r instead of \r\n... + if s == b"\r": + continue + + if not s or s == b"\0" or s == b"\x1A": + break + + # FIXME: this may read whole file if not a text file + s = s + self.fp.readline() + + if len(s) > 100: + msg = "not an IM file" + raise SyntaxError(msg) + + if s[-2:] == b"\r\n": + s = s[:-2] + elif s[-1:] == b"\n": + s = s[:-1] + + try: + m = split.match(s) + except re.error as e: + msg = "not an IM file" + raise SyntaxError(msg) from e + + if m: + k, v = m.group(1, 2) + + # Don't know if this is the correct encoding, + # but a decent guess (I guess) + k = k.decode("latin-1", "replace") + v = v.decode("latin-1", "replace") + + # Convert value as appropriate + if k in [FRAMES, SCALE, SIZE]: + v = v.replace("*", ",") + v = tuple(map(number, v.split(","))) + if len(v) == 1: + v = v[0] + elif k == MODE and v in OPEN: + v, self.rawmode = OPEN[v] + + # Add to dictionary. Note that COMMENT tags are + # combined into a list of strings. 
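+                # e.g. two "Comment: ..." header lines become
+                #     self.info["Comment"] == ["first line", "second line"]
+                # whereas any other repeated tag keeps only its last value.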
+ if k == COMMENT: + if k in self.info: + self.info[k].append(v) + else: + self.info[k] = [v] + else: + self.info[k] = v + + if k in TAGS: + n += 1 + + else: + msg = f"Syntax error in IM header: {s.decode('ascii', 'replace')}" + raise SyntaxError(msg) + + if not n: + msg = "Not an IM file" + raise SyntaxError(msg) + + # Basic attributes + self._size = self.info[SIZE] + self._mode = self.info[MODE] + + # Skip forward to start of image data + while s and s[:1] != b"\x1A": + s = self.fp.read(1) + if not s: + msg = "File truncated" + raise SyntaxError(msg) + + if LUT in self.info: + # convert lookup table to palette or lut attribute + palette = self.fp.read(768) + greyscale = 1 # greyscale palette + linear = 1 # linear greyscale palette + for i in range(256): + if palette[i] == palette[i + 256] == palette[i + 512]: + if palette[i] != i: + linear = 0 + else: + greyscale = 0 + if self.mode in ["L", "LA", "P", "PA"]: + if greyscale: + if not linear: + self.lut = list(palette[:256]) + else: + if self.mode in ["L", "P"]: + self._mode = self.rawmode = "P" + elif self.mode in ["LA", "PA"]: + self._mode = "PA" + self.rawmode = "PA;L" + self.palette = ImagePalette.raw("RGB;L", palette) + elif self.mode == "RGB": + if not greyscale or not linear: + self.lut = list(palette) + + self.frame = 0 + + self.__offset = offs = self.fp.tell() + + self._fp = self.fp # FIXME: hack + + if self.rawmode[:2] == "F;": + # ifunc95 formats + try: + # use bit decoder (if necessary) + bits = int(self.rawmode[2:]) + if bits not in [8, 16, 32]: + self.tile = [("bit", (0, 0) + self.size, offs, (bits, 8, 3, 0, -1))] + return + except ValueError: + pass + + if self.rawmode in ["RGB;T", "RYB;T"]: + # Old LabEye/3PC files. Would be very surprised if anyone + # ever stumbled upon such a file ;-) + size = self.size[0] * self.size[1] + self.tile = [ + ("raw", (0, 0) + self.size, offs, ("G", 0, -1)), + ("raw", (0, 0) + self.size, offs + size, ("R", 0, -1)), + ("raw", (0, 0) + self.size, offs + 2 * size, ("B", 0, -1)), + ] + else: + # LabEye/IFUNC files + self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))] + + @property + def n_frames(self) -> int: + return self.info[FRAMES] + + @property + def is_animated(self) -> bool: + return self.info[FRAMES] > 1 + + def seek(self, frame: int) -> None: + if not self._seek_check(frame): + return + + self.frame = frame + + if self.mode == "1": + bits = 1 + else: + bits = 8 * len(self.mode) + + size = ((self.size[0] * bits + 7) // 8) * self.size[1] + offs = self.__offset + frame * size + + self.fp = self._fp + + self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))] + + def tell(self) -> int: + return self.frame + + +# +# -------------------------------------------------------------------- +# Save IM files + + +SAVE = { + # mode: (im type, raw mode) + "1": ("0 1", "1"), + "L": ("Greyscale", "L"), + "LA": ("LA", "LA;L"), + "P": ("Greyscale", "P"), + "PA": ("LA", "PA;L"), + "I": ("L 32S", "I;32S"), + "I;16": ("L 16", "I;16"), + "I;16L": ("L 16L", "I;16L"), + "I;16B": ("L 16B", "I;16B"), + "F": ("L 32F", "F;32F"), + "RGB": ("RGB", "RGB;L"), + "RGBA": ("RGBA", "RGBA;L"), + "RGBX": ("RGBX", "RGBX;L"), + "CMYK": ("CMYK", "CMYK;L"), + "YCbCr": ("YCC", "YCbCr;L"), +} + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + try: + image_type, rawmode = SAVE[im.mode] + except KeyError as e: + msg = f"Cannot save {im.mode} images as IM" + raise ValueError(msg) from e + + frames = im.encoderinfo.get("frames", 1) + + fp.write(f"Image type: {image_type} 
image\r\n".encode("ascii")) + if filename: + # Each line must be 100 characters or less, + # or: SyntaxError("not an IM file") + # 8 characters are used for "Name: " and "\r\n" + # Keep just the filename, ditch the potentially overlong path + if isinstance(filename, bytes): + filename = filename.decode("ascii") + name, ext = os.path.splitext(os.path.basename(filename)) + name = "".join([name[: 92 - len(ext)], ext]) + + fp.write(f"Name: {name}\r\n".encode("ascii")) + fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode("ascii")) + fp.write(f"File size (no of images): {frames}\r\n".encode("ascii")) + if im.mode in ["P", "PA"]: + fp.write(b"Lut: 1\r\n") + fp.write(b"\000" * (511 - fp.tell()) + b"\032") + if im.mode in ["P", "PA"]: + im_palette = im.im.getpalette("RGB", "RGB;L") + colors = len(im_palette) // 3 + palette = b"" + for i in range(3): + palette += im_palette[colors * i : colors * (i + 1)] + palette += b"\x00" * (256 - colors) + fp.write(palette) # 768 bytes + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, -1))]) + + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(ImImageFile.format, ImImageFile) +Image.register_save(ImImageFile.format, _save) + +Image.register_extension(ImImageFile.format, ".im") diff --git a/MLPY/Lib/site-packages/PIL/Image.py b/MLPY/Lib/site-packages/PIL/Image.py new file mode 100644 index 0000000000000000000000000000000000000000..70ff54fcdb536f24aa39edab289bdf13169ee669 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/Image.py @@ -0,0 +1,4147 @@ +# +# The Python Imaging Library. +# $Id$ +# +# the Image class wrapper +# +# partial release history: +# 1995-09-09 fl Created +# 1996-03-11 fl PIL release 0.0 (proof of concept) +# 1996-04-30 fl PIL release 0.1b1 +# 1999-07-28 fl PIL release 1.0 final +# 2000-06-07 fl PIL release 1.1 +# 2000-10-20 fl PIL release 1.1.1 +# 2001-05-07 fl PIL release 1.1.2 +# 2002-03-15 fl PIL release 1.1.3 +# 2003-05-10 fl PIL release 1.1.4 +# 2005-03-28 fl PIL release 1.1.5 +# 2006-12-02 fl PIL release 1.1.6 +# 2009-11-15 fl PIL release 1.1.7 +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import annotations + +import abc +import atexit +import builtins +import io +import logging +import math +import os +import re +import struct +import sys +import tempfile +import warnings +from collections.abc import Callable, MutableMapping +from enum import IntEnum +from types import ModuleType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Literal, + Protocol, + Sequence, + Tuple, + cast, +) + +# VERSION was removed in Pillow 6.0.0. +# PILLOW_VERSION was removed in Pillow 9.0.0. +# Use __version__ instead. +from . 
import ( + ExifTags, + ImageMode, + TiffTags, + UnidentifiedImageError, + __version__, + _plugins, +) +from ._binary import i32le, o32be, o32le +from ._deprecate import deprecate +from ._typing import StrOrBytesPath, TypeGuard +from ._util import DeferredError, is_path + +ElementTree: ModuleType | None +try: + from defusedxml import ElementTree +except ImportError: + ElementTree = None + +logger = logging.getLogger(__name__) + + +class DecompressionBombWarning(RuntimeWarning): + pass + + +class DecompressionBombError(Exception): + pass + + +WARN_POSSIBLE_FORMATS: bool = False + +# Limit to around a quarter gigabyte for a 24-bit (3 bpp) image +MAX_IMAGE_PIXELS: int | None = int(1024 * 1024 * 1024 // 4 // 3) + + +try: + # If the _imaging C module is not present, Pillow will not load. + # Note that other modules should not refer to _imaging directly; + # import Image and use the Image.core variable instead. + # Also note that Image.core is not a publicly documented interface, + # and should be considered private and subject to change. + from . import _imaging as core + + if __version__ != getattr(core, "PILLOW_VERSION", None): + msg = ( + "The _imaging extension was built for another version of Pillow or PIL:\n" + f"Core version: {getattr(core, 'PILLOW_VERSION', None)}\n" + f"Pillow version: {__version__}" + ) + raise ImportError(msg) + +except ImportError as v: + core = DeferredError.new(ImportError("The _imaging C module is not installed.")) + # Explanations for ways that we know we might have an import error + if str(v).startswith("Module use of python"): + # The _imaging C module is present, but not compiled for + # the right version (windows only). Print a warning, if + # possible. + warnings.warn( + "The _imaging extension was built for another version of Python.", + RuntimeWarning, + ) + elif str(v).startswith("The _imaging extension"): + warnings.warn(str(v), RuntimeWarning) + # Fail here anyway. Don't let people run with a mostly broken Pillow. + # see docs/porting.rst + raise + + +USE_CFFI_ACCESS = False +cffi: ModuleType | None +try: + import cffi +except ImportError: + cffi = None + + +def isImageType(t: Any) -> TypeGuard[Image]: + """ + Checks if an object is an image object. + + .. warning:: + + This function is for internal use only. 
+ + :param t: object to check if it's an image + :returns: True if the object is an image + """ + return hasattr(t, "im") + + +# +# Constants + + +# transpose +class Transpose(IntEnum): + FLIP_LEFT_RIGHT = 0 + FLIP_TOP_BOTTOM = 1 + ROTATE_90 = 2 + ROTATE_180 = 3 + ROTATE_270 = 4 + TRANSPOSE = 5 + TRANSVERSE = 6 + + +# transforms (also defined in Imaging.h) +class Transform(IntEnum): + AFFINE = 0 + EXTENT = 1 + PERSPECTIVE = 2 + QUAD = 3 + MESH = 4 + + +# resampling filters (also defined in Imaging.h) +class Resampling(IntEnum): + NEAREST = 0 + BOX = 4 + BILINEAR = 2 + HAMMING = 5 + BICUBIC = 3 + LANCZOS = 1 + + +_filters_support = { + Resampling.BOX: 0.5, + Resampling.BILINEAR: 1.0, + Resampling.HAMMING: 1.0, + Resampling.BICUBIC: 2.0, + Resampling.LANCZOS: 3.0, +} + + +# dithers +class Dither(IntEnum): + NONE = 0 + ORDERED = 1 # Not yet implemented + RASTERIZE = 2 # Not yet implemented + FLOYDSTEINBERG = 3 # default + + +# palettes/quantizers +class Palette(IntEnum): + WEB = 0 + ADAPTIVE = 1 + + +class Quantize(IntEnum): + MEDIANCUT = 0 + MAXCOVERAGE = 1 + FASTOCTREE = 2 + LIBIMAGEQUANT = 3 + + +module = sys.modules[__name__] +for enum in (Transpose, Transform, Resampling, Dither, Palette, Quantize): + for item in enum: + setattr(module, item.name, item.value) + + +if hasattr(core, "DEFAULT_STRATEGY"): + DEFAULT_STRATEGY = core.DEFAULT_STRATEGY + FILTERED = core.FILTERED + HUFFMAN_ONLY = core.HUFFMAN_ONLY + RLE = core.RLE + FIXED = core.FIXED + + +# -------------------------------------------------------------------- +# Registries + +if TYPE_CHECKING: + from . import ImageFile, PyAccess +ID: list[str] = [] +OPEN: dict[ + str, + tuple[ + Callable[[IO[bytes], str | bytes], ImageFile.ImageFile], + Callable[[bytes], bool | str] | None, + ], +] = {} +MIME: dict[str, str] = {} +SAVE: dict[str, Callable[[Image, IO[bytes], str | bytes], None]] = {} +SAVE_ALL: dict[str, Callable[[Image, IO[bytes], str | bytes], None]] = {} +EXTENSION: dict[str, str] = {} +DECODERS: dict[str, type[ImageFile.PyDecoder]] = {} +ENCODERS: dict[str, type[ImageFile.PyEncoder]] = {} + +# -------------------------------------------------------------------- +# Modes + +_ENDIAN = "<" if sys.byteorder == "little" else ">" + + +def _conv_type_shape(im): + m = ImageMode.getmode(im.mode) + shape = (im.height, im.width) + extra = len(m.bands) + if extra != 1: + shape += (extra,) + return shape, m.typestr + + +MODES = [ + "1", + "CMYK", + "F", + "HSV", + "I", + "I;16", + "I;16B", + "I;16L", + "I;16N", + "L", + "LA", + "La", + "LAB", + "P", + "PA", + "RGB", + "RGBA", + "RGBa", + "RGBX", + "YCbCr", +] + +# raw modes that may be memory mapped. NOTE: if you change this, you +# may have to modify the stride calculation in map.c too! +_MAPMODES = ("L", "P", "RGBX", "RGBA", "CMYK", "I;16", "I;16L", "I;16B") + + +def getmodebase(mode: str) -> str: + """ + Gets the "base" mode for given mode. This function returns "L" for + images that contain grayscale data, and "RGB" for images that + contain color data. + + :param mode: Input mode. + :returns: "L" or "RGB". + :exception KeyError: If the input mode was not a standard mode. + """ + return ImageMode.getmode(mode).basemode + + +def getmodetype(mode: str) -> str: + """ + Gets the storage type mode. Given a mode, this function returns a + single-layer mode suitable for storing individual bands. + + :param mode: Input mode. + :returns: "L", "I", or "F". + :exception KeyError: If the input mode was not a standard mode. 
+ """ + return ImageMode.getmode(mode).basetype + + +def getmodebandnames(mode: str) -> tuple[str, ...]: + """ + Gets a list of individual band names. Given a mode, this function returns + a tuple containing the names of individual bands (use + :py:method:`~PIL.Image.getmodetype` to get the mode used to store each + individual band. + + :param mode: Input mode. + :returns: A tuple containing band names. The length of the tuple + gives the number of bands in an image of the given mode. + :exception KeyError: If the input mode was not a standard mode. + """ + return ImageMode.getmode(mode).bands + + +def getmodebands(mode: str) -> int: + """ + Gets the number of individual bands for this mode. + + :param mode: Input mode. + :returns: The number of bands in this mode. + :exception KeyError: If the input mode was not a standard mode. + """ + return len(ImageMode.getmode(mode).bands) + + +# -------------------------------------------------------------------- +# Helpers + +_initialized = 0 + + +def preinit() -> None: + """ + Explicitly loads BMP, GIF, JPEG, PPM and PPM file format drivers. + + It is called when opening or saving images. + """ + + global _initialized + if _initialized >= 1: + return + + try: + from . import BmpImagePlugin + + assert BmpImagePlugin + except ImportError: + pass + try: + from . import GifImagePlugin + + assert GifImagePlugin + except ImportError: + pass + try: + from . import JpegImagePlugin + + assert JpegImagePlugin + except ImportError: + pass + try: + from . import PpmImagePlugin + + assert PpmImagePlugin + except ImportError: + pass + try: + from . import PngImagePlugin + + assert PngImagePlugin + except ImportError: + pass + + _initialized = 1 + + +def init() -> bool: + """ + Explicitly initializes the Python Imaging Library. This function + loads all available file format drivers. + + It is called when opening or saving images if :py:meth:`~preinit()` is + insufficient, and by :py:meth:`~PIL.features.pilinfo`. + """ + + global _initialized + if _initialized >= 2: + return False + + parent_name = __name__.rpartition(".")[0] + for plugin in _plugins: + try: + logger.debug("Importing %s", plugin) + __import__(f"{parent_name}.{plugin}", globals(), locals(), []) + except ImportError as e: + logger.debug("Image: failed to import %s: %s", plugin, e) + + if OPEN or SAVE: + _initialized = 2 + return True + return False + + +# -------------------------------------------------------------------- +# Codec factories (used by tobytes/frombytes and ImageFile.load) + + +def _getdecoder( + mode: str, decoder_name: str, args: Any, extra: tuple[Any, ...] = () +) -> core.ImagingDecoder | ImageFile.PyDecoder: + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + decoder = DECODERS[decoder_name] + except KeyError: + pass + else: + return decoder(mode, *args + extra) + + try: + # get decoder + decoder = getattr(core, f"{decoder_name}_decoder") + except AttributeError as e: + msg = f"decoder {decoder_name} not available" + raise OSError(msg) from e + return decoder(mode, *args + extra) + + +def _getencoder( + mode: str, encoder_name: str, args: Any, extra: tuple[Any, ...] 
= () +) -> core.ImagingEncoder | ImageFile.PyEncoder: + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + encoder = ENCODERS[encoder_name] + except KeyError: + pass + else: + return encoder(mode, *args + extra) + + try: + # get encoder + encoder = getattr(core, f"{encoder_name}_encoder") + except AttributeError as e: + msg = f"encoder {encoder_name} not available" + raise OSError(msg) from e + return encoder(mode, *args + extra) + + +# -------------------------------------------------------------------- +# Simple expression analyzer + + +class _E: + def __init__(self, scale, offset) -> None: + self.scale = scale + self.offset = offset + + def __neg__(self): + return _E(-self.scale, -self.offset) + + def __add__(self, other): + if isinstance(other, _E): + return _E(self.scale + other.scale, self.offset + other.offset) + return _E(self.scale, self.offset + other) + + __radd__ = __add__ + + def __sub__(self, other): + return self + -other + + def __rsub__(self, other): + return other + -self + + def __mul__(self, other): + if isinstance(other, _E): + return NotImplemented + return _E(self.scale * other, self.offset * other) + + __rmul__ = __mul__ + + def __truediv__(self, other): + if isinstance(other, _E): + return NotImplemented + return _E(self.scale / other, self.offset / other) + + +def _getscaleoffset(expr): + a = expr(_E(1, 0)) + return (a.scale, a.offset) if isinstance(a, _E) else (0, a) + + +# -------------------------------------------------------------------- +# Implementation wrapper + + +class SupportsGetData(Protocol): + def getdata( + self, + ) -> tuple[Transform, Sequence[int]]: ... + + +class Image: + """ + This class represents an image object. To create + :py:class:`~PIL.Image.Image` objects, use the appropriate factory + functions. There's hardly ever any reason to call the Image constructor + directly. + + * :py:func:`~PIL.Image.open` + * :py:func:`~PIL.Image.new` + * :py:func:`~PIL.Image.frombytes` + """ + + format: str | None = None + format_description: str | None = None + _close_exclusive_fp_after_loading = True + + def __init__(self): + # FIXME: take "new" parameters / other image? + # FIXME: turn mode and size into delegating properties? + self.im = None + self._mode = "" + self._size = (0, 0) + self.palette = None + self.info = {} + self.readonly = 0 + self.pyaccess = None + self._exif = None + + @property + def width(self) -> int: + return self.size[0] + + @property + def height(self) -> int: + return self.size[1] + + @property + def size(self) -> tuple[int, int]: + return self._size + + @property + def mode(self) -> str: + return self._mode + + def _new(self, im: core.ImagingCore) -> Image: + new = Image() + new.im = im + new._mode = im.mode + new._size = im.size + if im.mode in ("P", "PA"): + if self.palette: + new.palette = self.palette.copy() + else: + from . import ImagePalette + + new.palette = ImagePalette.ImagePalette() + new.info = self.info.copy() + return new + + # Context manager support + def __enter__(self): + return self + + def _close_fp(self): + if getattr(self, "_fp", False): + if self._fp != self.fp: + self._fp.close() + self._fp = DeferredError(ValueError("Operation on closed image")) + if self.fp: + self.fp.close() + + def __exit__(self, *args): + if hasattr(self, "fp"): + if getattr(self, "_exclusive_fp", False): + self._close_fp() + self.fp = None + + def close(self) -> None: + """ + Closes the file pointer, if possible. 
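+
+        As an alternative to calling this method by hand, the image can be used as a
+        context manager; a hedged sketch (``"hopper.png"`` is only a placeholder path)::
+
+            from PIL import Image
+
+            with Image.open("hopper.png") as im:
+                im.load()
+            # leaving the ``with`` block closes the underlying file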
+ + This operation will destroy the image core and release its memory. + The image data will be unusable afterward. + + This function is required to close images that have multiple frames or + have not had their file read and closed by the + :py:meth:`~PIL.Image.Image.load` method. See :ref:`file-handling` for + more information. + """ + if hasattr(self, "fp"): + try: + self._close_fp() + self.fp = None + except Exception as msg: + logger.debug("Error closing: %s", msg) + + if getattr(self, "map", None): + self.map = None + + # Instead of simply setting to None, we're setting up a + # deferred error that will better explain that the core image + # object is gone. + self.im = DeferredError(ValueError("Operation on closed image")) + + def _copy(self) -> None: + self.load() + self.im = self.im.copy() + self.pyaccess = None + self.readonly = 0 + + def _ensure_mutable(self) -> None: + if self.readonly: + self._copy() + else: + self.load() + + def _dump( + self, file: str | None = None, format: str | None = None, **options: Any + ) -> str: + suffix = "" + if format: + suffix = f".{format}" + + if not file: + f, filename = tempfile.mkstemp(suffix) + os.close(f) + else: + filename = file + if not filename.endswith(suffix): + filename = filename + suffix + + self.load() + + if not format or format == "PPM": + self.im.save_ppm(filename) + else: + self.save(filename, format, **options) + + return filename + + def __eq__(self, other: object) -> bool: + if self.__class__ is not other.__class__: + return False + assert isinstance(other, Image) + return ( + self.mode == other.mode + and self.size == other.size + and self.info == other.info + and self.getpalette() == other.getpalette() + and self.tobytes() == other.tobytes() + ) + + def __repr__(self) -> str: + return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % ( + self.__class__.__module__, + self.__class__.__name__, + self.mode, + self.size[0], + self.size[1], + id(self), + ) + + def _repr_pretty_(self, p, cycle) -> None: + """IPython plain text display support""" + + # Same as __repr__ but without unpredictable id(self), + # to keep Jupyter notebook `text/plain` output stable. + p.text( + "<%s.%s image mode=%s size=%dx%d>" + % ( + self.__class__.__module__, + self.__class__.__name__, + self.mode, + self.size[0], + self.size[1], + ) + ) + + def _repr_image(self, image_format: str, **kwargs: Any) -> bytes | None: + """Helper function for iPython display hook. + + :param image_format: Image format. + :returns: image as bytes, saved into the given format. + """ + b = io.BytesIO() + try: + self.save(b, image_format, **kwargs) + except Exception: + return None + return b.getvalue() + + def _repr_png_(self) -> bytes | None: + """iPython display hook support for PNG format. + + :returns: PNG version of the image as bytes + """ + return self._repr_image("PNG", compress_level=1) + + def _repr_jpeg_(self) -> bytes | None: + """iPython display hook support for JPEG format. 
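+
+        Roughly equivalent to the following sketch (error handling omitted)::
+
+            import io
+
+            buf = io.BytesIO()
+            self.save(buf, "JPEG")
+            data = buf.getvalue()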
+ + :returns: JPEG version of the image as bytes + """ + return self._repr_image("JPEG") + + @property + def __array_interface__(self): + # numpy array interface support + new = {"version": 3} + try: + if self.mode == "1": + # Binary images need to be extended from bits to bytes + # See: https://github.com/python-pillow/Pillow/issues/350 + new["data"] = self.tobytes("raw", "L") + else: + new["data"] = self.tobytes() + except Exception as e: + if not isinstance(e, (MemoryError, RecursionError)): + try: + import numpy + from packaging.version import parse as parse_version + except ImportError: + pass + else: + if parse_version(numpy.__version__) < parse_version("1.23"): + warnings.warn(str(e)) + raise + new["shape"], new["typestr"] = _conv_type_shape(self) + return new + + def __getstate__(self): + im_data = self.tobytes() # load image first + return [self.info, self.mode, self.size, self.getpalette(), im_data] + + def __setstate__(self, state) -> None: + Image.__init__(self) + info, mode, size, palette, data = state + self.info = info + self._mode = mode + self._size = size + self.im = core.new(mode, size) + if mode in ("L", "LA", "P", "PA") and palette: + self.putpalette(palette) + self.frombytes(data) + + def tobytes(self, encoder_name: str = "raw", *args: Any) -> bytes: + """ + Return image as a bytes object. + + .. warning:: + + This method returns the raw image data from the internal + storage. For compressed image data (e.g. PNG, JPEG) use + :meth:`~.save`, with a BytesIO parameter for in-memory + data. + + :param encoder_name: What encoder to use. The default is to + use the standard "raw" encoder. + + A list of C encoders can be seen under + codecs section of the function array in + :file:`_imaging.c`. Python encoders are + registered within the relevant plugins. + :param args: Extra arguments to the encoder. + :returns: A :py:class:`bytes` object. + """ + + encoder_args: Any = args + if len(encoder_args) == 1 and isinstance(encoder_args[0], tuple): + # may pass tuple instead of argument list + encoder_args = encoder_args[0] + + if encoder_name == "raw" and encoder_args == (): + encoder_args = self.mode + + self.load() + + if self.width == 0 or self.height == 0: + return b"" + + # unpack data + e = _getencoder(self.mode, encoder_name, encoder_args) + e.setimage(self.im) + + bufsize = max(65536, self.size[0] * 4) # see RawEncode.c + + output = [] + while True: + bytes_consumed, errcode, data = e.encode(bufsize) + output.append(data) + if errcode: + break + if errcode < 0: + msg = f"encoder error {errcode} in tobytes" + raise RuntimeError(msg) + + return b"".join(output) + + def tobitmap(self, name: str = "image") -> bytes: + """ + Returns the image converted to an X11 bitmap. + + .. note:: This method only works for mode "1" images. + + :param name: The name prefix to use for the bitmap variables. + :returns: A string containing an X11 bitmap. + :raises ValueError: If the mode is not "1" + """ + + self.load() + if self.mode != "1": + msg = "not a bitmap" + raise ValueError(msg) + data = self.tobytes("xbm") + return b"".join( + [ + f"#define {name}_width {self.size[0]}\n".encode("ascii"), + f"#define {name}_height {self.size[1]}\n".encode("ascii"), + f"static char {name}_bits[] = {{\n".encode("ascii"), + data, + b"};", + ] + ) + + def frombytes( + self, data: bytes | bytearray, decoder_name: str = "raw", *args: Any + ) -> None: + """ + Loads this image with pixel data from a bytes object. 
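+
+        A minimal round-trip sketch (the 8x8 size and "RGB" mode are arbitrary)::
+
+            from PIL import Image
+
+            src = Image.new("RGB", (8, 8), "red")
+            raw = src.tobytes()           # 8 * 8 * 3 == 192 bytes of raw pixel data
+            dst = Image.new("RGB", (8, 8))
+            dst.frombytes(raw)            # dst now holds the same pixels as src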
+ + This method is similar to the :py:func:`~PIL.Image.frombytes` function, + but loads data into this image instead of creating a new image object. + """ + + if self.width == 0 or self.height == 0: + return + + decoder_args: Any = args + if len(decoder_args) == 1 and isinstance(decoder_args[0], tuple): + # may pass tuple instead of argument list + decoder_args = decoder_args[0] + + # default format + if decoder_name == "raw" and decoder_args == (): + decoder_args = self.mode + + # unpack data + d = _getdecoder(self.mode, decoder_name, decoder_args) + d.setimage(self.im) + s = d.decode(data) + + if s[0] >= 0: + msg = "not enough image data" + raise ValueError(msg) + if s[1] != 0: + msg = "cannot decode image data" + raise ValueError(msg) + + def load(self) -> core.PixelAccess | PyAccess.PyAccess | None: + """ + Allocates storage for the image and loads the pixel data. In + normal cases, you don't need to call this method, since the + Image class automatically loads an opened image when it is + accessed for the first time. + + If the file associated with the image was opened by Pillow, then this + method will close it. The exception to this is if the image has + multiple frames, in which case the file will be left open for seek + operations. See :ref:`file-handling` for more information. + + :returns: An image access object. + :rtype: :py:class:`.PixelAccess` or :py:class:`.PyAccess` + """ + if self.im is not None and self.palette and self.palette.dirty: + # realize palette + mode, arr = self.palette.getdata() + self.im.putpalette(self.palette.mode, mode, arr) + self.palette.dirty = 0 + self.palette.rawmode = None + if "transparency" in self.info and mode in ("LA", "PA"): + if isinstance(self.info["transparency"], int): + self.im.putpalettealpha(self.info["transparency"], 0) + else: + self.im.putpalettealphas(self.info["transparency"]) + self.palette.mode = "RGBA" + else: + self.palette.palette = self.im.getpalette( + self.palette.mode, self.palette.mode + ) + + if self.im is not None: + if cffi and USE_CFFI_ACCESS: + if self.pyaccess: + return self.pyaccess + from . import PyAccess + + self.pyaccess = PyAccess.new(self, self.readonly) + if self.pyaccess: + return self.pyaccess + return self.im.pixel_access(self.readonly) + return None + + def verify(self) -> None: + """ + Verifies the contents of a file. For data read from a file, this + method attempts to determine if the file is broken, without + actually decoding the image data. If this method finds any + problems, it raises suitable exceptions. If you need to load + the image after using this method, you must reopen the image + file. + """ + pass + + def convert( + self, + mode: str | None = None, + matrix: tuple[float, ...] | None = None, + dither: Dither | None = None, + palette: Palette = Palette.WEB, + colors: int = 256, + ) -> Image: + """ + Returns a converted copy of this image. For the "P" mode, this + method translates pixels through the palette. If mode is + omitted, a mode is chosen so that all information in the image + and the palette can be represented without a palette. + + This supports all possible conversions between "L", "RGB" and "CMYK". The + ``matrix`` argument only supports "L" and "RGB". 
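+
+        A short sketch (``im`` is assumed to be an already opened "RGB" image)::
+
+            gray = im.convert("L")   # ITU-R 601-2 grayscale, see below
+            pal = im.convert("P", palette=Image.Palette.ADAPTIVE, colors=64)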
+ + When translating a color image to grayscale (mode "L"), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 + + The default method of converting a grayscale ("L") or "RGB" + image into a bilevel (mode "1") image uses Floyd-Steinberg + dither to approximate the original image luminosity levels. If + dither is ``None``, all values larger than 127 are set to 255 (white), + all other values to 0 (black). To use other thresholds, use the + :py:meth:`~PIL.Image.Image.point` method. + + When converting from "RGBA" to "P" without a ``matrix`` argument, + this passes the operation to :py:meth:`~PIL.Image.Image.quantize`, + and ``dither`` and ``palette`` are ignored. + + When converting from "PA", if an "RGBA" palette is present, the alpha + channel from the image will be used instead of the values from the palette. + + :param mode: The requested mode. See: :ref:`concept-modes`. + :param matrix: An optional conversion matrix. If given, this + should be 4- or 12-tuple containing floating point values. + :param dither: Dithering method, used when converting from + mode "RGB" to "P" or from "RGB" or "L" to "1". + Available methods are :data:`Dither.NONE` or :data:`Dither.FLOYDSTEINBERG` + (default). Note that this is not used when ``matrix`` is supplied. + :param palette: Palette to use when converting from mode "RGB" + to "P". Available palettes are :data:`Palette.WEB` or + :data:`Palette.ADAPTIVE`. + :param colors: Number of colors to use for the :data:`Palette.ADAPTIVE` + palette. Defaults to 256. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if mode in ("BGR;15", "BGR;16", "BGR;24"): + deprecate(mode, 12) + + self.load() + + has_transparency = "transparency" in self.info + if not mode and self.mode == "P": + # determine default mode + if self.palette: + mode = self.palette.mode + else: + mode = "RGB" + if mode == "RGB" and has_transparency: + mode = "RGBA" + if not mode or (mode == self.mode and not matrix): + return self.copy() + + if matrix: + # matrix conversion + if mode not in ("L", "RGB"): + msg = "illegal conversion" + raise ValueError(msg) + im = self.im.convert_matrix(mode, matrix) + new_im = self._new(im) + if has_transparency and self.im.bands == 3: + transparency = new_im.info["transparency"] + + def convert_transparency( + m: tuple[float, ...], v: tuple[int, int, int] + ) -> int: + value = m[0] * v[0] + m[1] * v[1] + m[2] * v[2] + m[3] * 0.5 + return max(0, min(255, int(value))) + + if mode == "L": + transparency = convert_transparency(matrix, transparency) + elif len(mode) == 3: + transparency = tuple( + convert_transparency(matrix[i * 4 : i * 4 + 4], transparency) + for i in range(0, len(transparency)) + ) + new_im.info["transparency"] = transparency + return new_im + + if mode == "P" and self.mode == "RGBA": + return self.quantize(colors) + + trns = None + delete_trns = False + # transparency handling + if has_transparency: + if (self.mode in ("1", "L", "I", "I;16") and mode in ("LA", "RGBA")) or ( + self.mode == "RGB" and mode in ("La", "LA", "RGBa", "RGBA") + ): + # Use transparent conversion to promote from transparent + # color to an alpha channel. + new_im = self._new( + self.im.convert_transparent(mode, self.info["transparency"]) + ) + del new_im.info["transparency"] + return new_im + elif self.mode in ("L", "RGB", "P") and mode in ("L", "RGB", "P"): + t = self.info["transparency"] + if isinstance(t, bytes): + # Dragons. 
This can't be represented by a single color + warnings.warn( + "Palette images with Transparency expressed in bytes should be " + "converted to RGBA images" + ) + delete_trns = True + else: + # get the new transparency color. + # use existing conversions + trns_im = new(self.mode, (1, 1)) + if self.mode == "P": + trns_im.putpalette(self.palette) + if isinstance(t, tuple): + err = "Couldn't allocate a palette color for transparency" + try: + t = trns_im.palette.getcolor(t, self) + except ValueError as e: + if str(e) == "cannot allocate more than 256 colors": + # If all 256 colors are in use, + # then there is no need for transparency + t = None + else: + raise ValueError(err) from e + if t is None: + trns = None + else: + trns_im.putpixel((0, 0), t) + + if mode in ("L", "RGB"): + trns_im = trns_im.convert(mode) + else: + # can't just retrieve the palette number, got to do it + # after quantization. + trns_im = trns_im.convert("RGB") + trns = trns_im.getpixel((0, 0)) + + elif self.mode == "P" and mode in ("LA", "PA", "RGBA"): + t = self.info["transparency"] + delete_trns = True + + if isinstance(t, bytes): + self.im.putpalettealphas(t) + elif isinstance(t, int): + self.im.putpalettealpha(t, 0) + else: + msg = "Transparency for P mode should be bytes or int" + raise ValueError(msg) + + if mode == "P" and palette == Palette.ADAPTIVE: + im = self.im.quantize(colors) + new_im = self._new(im) + from . import ImagePalette + + new_im.palette = ImagePalette.ImagePalette( + "RGB", new_im.im.getpalette("RGB") + ) + if delete_trns: + # This could possibly happen if we requantize to fewer colors. + # The transparency would be totally off in that case. + del new_im.info["transparency"] + if trns is not None: + try: + new_im.info["transparency"] = new_im.palette.getcolor( + cast(Tuple[int, ...], trns), # trns was converted to RGB + new_im, + ) + except Exception: + # if we can't make a transparent color, don't leave the old + # transparency hanging around to mess us up. + del new_im.info["transparency"] + warnings.warn("Couldn't allocate palette entry for transparency") + return new_im + + if "LAB" in (self.mode, mode): + other_mode = mode if self.mode == "LAB" else self.mode + if other_mode in ("RGB", "RGBA", "RGBX"): + from . import ImageCms + + srgb = ImageCms.createProfile("sRGB") + lab = ImageCms.createProfile("LAB") + profiles = [lab, srgb] if self.mode == "LAB" else [srgb, lab] + transform = ImageCms.buildTransform( + profiles[0], profiles[1], self.mode, mode + ) + return transform.apply(self) + + # colorspace conversion + if dither is None: + dither = Dither.FLOYDSTEINBERG + + try: + im = self.im.convert(mode, dither) + except ValueError: + try: + # normalize source image and try again + modebase = getmodebase(self.mode) + if modebase == self.mode: + raise + im = self.im.convert(modebase) + im = im.convert(mode, dither) + except KeyError as e: + msg = "illegal conversion" + raise ValueError(msg) from e + + new_im = self._new(im) + if mode == "P" and palette != Palette.ADAPTIVE: + from . import ImagePalette + + new_im.palette = ImagePalette.ImagePalette("RGB", im.getpalette("RGB")) + if delete_trns: + # crash fail if we leave a bytes transparency in an rgb/l mode. 
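+            # (the source image's "transparency" value does not carry over to the new
+            # mode - it is either meaningless there or already folded into the alpha
+            # channel - so the key is dropped rather than left to cause errors later)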
+ del new_im.info["transparency"] + if trns is not None: + if new_im.mode == "P" and new_im.palette: + try: + new_im.info["transparency"] = new_im.palette.getcolor(trns, new_im) + except ValueError as e: + del new_im.info["transparency"] + if str(e) != "cannot allocate more than 256 colors": + # If all 256 colors are in use, + # then there is no need for transparency + warnings.warn( + "Couldn't allocate palette entry for transparency" + ) + else: + new_im.info["transparency"] = trns + return new_im + + def quantize( + self, + colors: int = 256, + method: int | None = None, + kmeans: int = 0, + palette=None, + dither: Dither = Dither.FLOYDSTEINBERG, + ) -> Image: + """ + Convert the image to 'P' mode with the specified number + of colors. + + :param colors: The desired number of colors, <= 256 + :param method: :data:`Quantize.MEDIANCUT` (median cut), + :data:`Quantize.MAXCOVERAGE` (maximum coverage), + :data:`Quantize.FASTOCTREE` (fast octree), + :data:`Quantize.LIBIMAGEQUANT` (libimagequant; check support + using :py:func:`PIL.features.check_feature` with + ``feature="libimagequant"``). + + By default, :data:`Quantize.MEDIANCUT` will be used. + + The exception to this is RGBA images. :data:`Quantize.MEDIANCUT` + and :data:`Quantize.MAXCOVERAGE` do not support RGBA images, so + :data:`Quantize.FASTOCTREE` is used by default instead. + :param kmeans: Integer greater than or equal to zero. + :param palette: Quantize to the palette of given + :py:class:`PIL.Image.Image`. + :param dither: Dithering method, used when converting from + mode "RGB" to "P" or from "RGB" or "L" to "1". + Available methods are :data:`Dither.NONE` or :data:`Dither.FLOYDSTEINBERG` + (default). + :returns: A new image + """ + + self.load() + + if method is None: + # defaults: + method = Quantize.MEDIANCUT + if self.mode == "RGBA": + method = Quantize.FASTOCTREE + + if self.mode == "RGBA" and method not in ( + Quantize.FASTOCTREE, + Quantize.LIBIMAGEQUANT, + ): + # Caller specified an invalid mode. + msg = ( + "Fast Octree (method == 2) and libimagequant (method == 3) " + "are the only valid methods for quantizing RGBA images" + ) + raise ValueError(msg) + + if palette: + # use palette from reference image + palette.load() + if palette.mode != "P": + msg = "bad mode for palette image" + raise ValueError(msg) + if self.mode not in {"RGB", "L"}: + msg = "only RGB or L mode images can be quantized to a palette" + raise ValueError(msg) + im = self.im.convert("P", dither, palette.im) + new_im = self._new(im) + new_im.palette = palette.palette.copy() + return new_im + + if kmeans < 0: + msg = "kmeans must not be negative" + raise ValueError(msg) + + im = self._new(self.im.quantize(colors, method, kmeans)) + + from . import ImagePalette + + mode = im.im.getpalettemode() + palette = im.im.getpalette(mode, mode)[: colors * len(mode)] + im.palette = ImagePalette.ImagePalette(mode, palette) + + return im + + def copy(self) -> Image: + """ + Copies this image. Use this method if you wish to paste things + into an image, but still retain the original. + + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + self.load() + return self._new(self.im.copy()) + + __copy__ = copy + + def crop(self, box: tuple[float, float, float, float] | None = None) -> Image: + """ + Returns a rectangular region from this image. The box is a + 4-tuple defining the left, upper, right, and lower pixel + coordinate. See :ref:`coordinate-system`. + + Note: Prior to Pillow 3.4.0, this was a lazy operation. 
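+
+        A small sketch (``im`` is assumed to be at least 60x60 pixels)::
+
+            box = (10, 10, 60, 60)   # left, upper, right, lower
+            region = im.crop(box)    # region.size == (50, 50)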
+ + :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if box is None: + return self.copy() + + if box[2] < box[0]: + msg = "Coordinate 'right' is less than 'left'" + raise ValueError(msg) + elif box[3] < box[1]: + msg = "Coordinate 'lower' is less than 'upper'" + raise ValueError(msg) + + self.load() + return self._new(self._crop(self.im, box)) + + def _crop( + self, im: core.ImagingCore, box: tuple[float, float, float, float] + ) -> core.ImagingCore: + """ + Returns a rectangular region from the core image object im. + + This is equivalent to calling im.crop((x0, y0, x1, y1)), but + includes additional sanity checks. + + :param im: a core image object + :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. + :returns: A core image object. + """ + + x0, y0, x1, y1 = map(int, map(round, box)) + + absolute_values = (abs(x1 - x0), abs(y1 - y0)) + + _decompression_bomb_check(absolute_values) + + return im.crop((x0, y0, x1, y1)) + + def draft( + self, mode: str | None, size: tuple[int, int] | None + ) -> tuple[str, tuple[int, int, float, float]] | None: + """ + Configures the image file loader so it returns a version of the + image that as closely as possible matches the given mode and + size. For example, you can use this method to convert a color + JPEG to grayscale while loading it. + + If any changes are made, returns a tuple with the chosen ``mode`` and + ``box`` with coordinates of the original image within the altered one. + + Note that this method modifies the :py:class:`~PIL.Image.Image` object + in place. If the image has already been loaded, this method has no + effect. + + Note: This method is not implemented for most images. It is + currently implemented only for JPEG and MPO images. + + :param mode: The requested mode. + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + """ + pass + + def _expand(self, xmargin: int, ymargin: int | None = None) -> Image: + if ymargin is None: + ymargin = xmargin + self.load() + return self._new(self.im.expand(xmargin, ymargin)) + + if TYPE_CHECKING: + from . import ImageFilter + + def filter(self, filter: ImageFilter.Filter | type[ImageFilter.Filter]) -> Image: + """ + Filters this image using the given filter. For a list of + available filters, see the :py:mod:`~PIL.ImageFilter` module. + + :param filter: Filter kernel. + :returns: An :py:class:`~PIL.Image.Image` object.""" + + from . import ImageFilter + + self.load() + + if callable(filter): + filter = filter() + if not hasattr(filter, "filter"): + msg = "filter argument should be ImageFilter.Filter instance or class" + raise TypeError(msg) + + multiband = isinstance(filter, ImageFilter.MultibandFilter) + if self.im.bands == 1 or multiband: + return self._new(filter.filter(self.im)) + + ims = [ + self._new(filter.filter(self.im.getband(c))) for c in range(self.im.bands) + ] + return merge(self.mode, ims) + + def getbands(self) -> tuple[str, ...]: + """ + Returns a tuple containing the name of each band in this image. + For example, ``getbands`` on an RGB image returns ("R", "G", "B"). + + :returns: A tuple containing band names. + :rtype: tuple + """ + return ImageMode.getmode(self.mode).bands + + def getbbox(self, *, alpha_only: bool = True) -> tuple[int, int, int, int] | None: + """ + Calculates the bounding box of the non-zero regions in the + image. + + :param alpha_only: Optional flag, defaulting to ``True``. 
+ If ``True`` and the image has an alpha channel, trim transparent pixels. + Otherwise, trim pixels when all channels are zero. + Keyword-only argument. + :returns: The bounding box is returned as a 4-tuple defining the + left, upper, right, and lower pixel coordinate. See + :ref:`coordinate-system`. If the image is completely empty, this + method returns None. + + """ + + self.load() + return self.im.getbbox(alpha_only) + + def getcolors(self, maxcolors: int = 256): + """ + Returns a list of colors used in this image. + + The colors will be in the image's mode. For example, an RGB image will + return a tuple of (red, green, blue) color values, and a P image will + return the index of the color in the palette. + + :param maxcolors: Maximum number of colors. If this number is + exceeded, this method returns None. The default limit is + 256 colors. + :returns: An unsorted list of (count, pixel) values. + """ + + self.load() + if self.mode in ("1", "L", "P"): + h = self.im.histogram() + out = [(h[i], i) for i in range(256) if h[i]] + if len(out) > maxcolors: + return None + return out + return self.im.getcolors(maxcolors) + + def getdata(self, band: int | None = None): + """ + Returns the contents of this image as a sequence object + containing pixel values. The sequence object is flattened, so + that values for line one follow directly after the values of + line zero, and so on. + + Note that the sequence object returned by this method is an + internal PIL data type, which only supports certain sequence + operations. To convert it to an ordinary sequence (e.g. for + printing), use ``list(im.getdata())``. + + :param band: What band to return. The default is to return + all bands. To return a single band, pass in the index + value (e.g. 0 to get the "R" band from an "RGB" image). + :returns: A sequence-like object. + """ + + self.load() + if band is not None: + return self.im.getband(band) + return self.im # could be abused + + def getextrema(self) -> tuple[float, float] | tuple[tuple[int, int], ...]: + """ + Gets the minimum and maximum pixel values for each band in + the image. + + :returns: For a single-band image, a 2-tuple containing the + minimum and maximum pixel value. For a multi-band image, + a tuple containing one 2-tuple for each band. + """ + + self.load() + if self.im.bands > 1: + return tuple(self.im.getband(i).getextrema() for i in range(self.im.bands)) + return self.im.getextrema() + + def getxmp(self): + """ + Returns a dictionary containing the XMP tags. + Requires defusedxml to be installed. + + :returns: XMP tags in a dictionary. + """ + + def get_name(tag: str) -> str: + return re.sub("^{[^}]+}", "", tag) + + def get_value(element): + value = {get_name(k): v for k, v in element.attrib.items()} + children = list(element) + if children: + for child in children: + name = get_name(child.tag) + child_value = get_value(child) + if name in value: + if not isinstance(value[name], list): + value[name] = [value[name]] + value[name].append(child_value) + else: + value[name] = child_value + elif value: + if element.text: + value["text"] = element.text + else: + return element.text + return value + + if ElementTree is None: + warnings.warn("XMP data cannot be read without defusedxml dependency") + return {} + if "xmp" not in self.info: + return {} + root = ElementTree.fromstring(self.info["xmp"].rstrip(b"\x00")) + return {get_name(root.tag): get_value(root)} + + def getexif(self) -> Exif: + """ + Gets EXIF data from the image. + + :returns: an :py:class:`~PIL.Image.Exif` object. 
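+
+        A hedged sketch (``im`` is assumed to come from a file carrying EXIF data)::
+
+            from PIL import ExifTags
+
+            exif = im.getexif()
+            orientation = exif.get(ExifTags.Base.Orientation)  # None when absent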
+ """ + if self._exif is None: + self._exif = Exif() + elif self._exif._loaded: + return self._exif + self._exif._loaded = True + + exif_info = self.info.get("exif") + if exif_info is None: + if "Raw profile type exif" in self.info: + exif_info = bytes.fromhex( + "".join(self.info["Raw profile type exif"].split("\n")[3:]) + ) + elif hasattr(self, "tag_v2"): + self._exif.bigtiff = self.tag_v2._bigtiff + self._exif.endian = self.tag_v2._endian + self._exif.load_from_fp(self.fp, self.tag_v2._offset) + if exif_info is not None: + self._exif.load(exif_info) + + # XMP tags + if ExifTags.Base.Orientation not in self._exif: + xmp_tags = self.info.get("XML:com.adobe.xmp") + if xmp_tags: + match = re.search(r'tiff:Orientation(="|>)([0-9])', xmp_tags) + if match: + self._exif[ExifTags.Base.Orientation] = int(match[2]) + + return self._exif + + def _reload_exif(self) -> None: + if self._exif is None or not self._exif._loaded: + return + self._exif._loaded = False + self.getexif() + + def get_child_images(self) -> list[ImageFile.ImageFile]: + child_images = [] + exif = self.getexif() + ifds = [] + if ExifTags.Base.SubIFDs in exif: + subifd_offsets = exif[ExifTags.Base.SubIFDs] + if subifd_offsets: + if not isinstance(subifd_offsets, tuple): + subifd_offsets = (subifd_offsets,) + for subifd_offset in subifd_offsets: + ifds.append((exif._get_ifd_dict(subifd_offset), subifd_offset)) + ifd1 = exif.get_ifd(ExifTags.IFD.IFD1) + if ifd1 and ifd1.get(513): + ifds.append((ifd1, exif._info.next)) + + offset = None + for ifd, ifd_offset in ifds: + current_offset = self.fp.tell() + if offset is None: + offset = current_offset + + fp = self.fp + thumbnail_offset = ifd.get(513) + if thumbnail_offset is not None: + thumbnail_offset += getattr(self, "_exif_offset", 0) + self.fp.seek(thumbnail_offset) + data = self.fp.read(ifd.get(514)) + fp = io.BytesIO(data) + + with open(fp) as im: + from . import TiffImagePlugin + + if thumbnail_offset is None and isinstance( + im, TiffImagePlugin.TiffImageFile + ): + im._frame_pos = [ifd_offset] + im._seek(0) + im.load() + child_images.append(im) + + if offset is not None: + self.fp.seek(offset) + return child_images + + def getim(self): + """ + Returns a capsule that points to the internal image memory. + + :returns: A capsule object. + """ + + self.load() + return self.im.ptr + + def getpalette(self, rawmode: str | None = "RGB") -> list[int] | None: + """ + Returns the image palette as a list. + + :param rawmode: The mode in which to return the palette. ``None`` will + return the palette in its current mode. + + .. versionadded:: 9.1.0 + + :returns: A list of color values [r, g, b, ...], or None if the + image has no palette. + """ + + self.load() + try: + mode = self.im.getpalettemode() + except ValueError: + return None # no palette + if rawmode is None: + rawmode = mode + return list(self.im.getpalette(mode, rawmode)) + + @property + def has_transparency_data(self) -> bool: + """ + Determine if an image has transparency data, whether in the form of an + alpha channel, a palette with an alpha channel, or a "transparency" key + in the info dictionary. + + Note the image might still appear solid, if all of the values shown + within are opaque. + + :returns: A boolean. 
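+
+        A tiny sketch using synthetic images::
+
+            from PIL import Image
+
+            Image.new("RGB", (1, 1)).has_transparency_data   # False
+            Image.new("RGBA", (1, 1)).has_transparency_data  # True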
+ """ + return ( + self.mode in ("LA", "La", "PA", "RGBA", "RGBa") + or (self.mode == "P" and self.palette.mode.endswith("A")) + or "transparency" in self.info + ) + + def apply_transparency(self) -> None: + """ + If a P mode image has a "transparency" key in the info dictionary, + remove the key and instead apply the transparency to the palette. + Otherwise, the image is unchanged. + """ + if self.mode != "P" or "transparency" not in self.info: + return + + from . import ImagePalette + + palette = self.getpalette("RGBA") + assert palette is not None + transparency = self.info["transparency"] + if isinstance(transparency, bytes): + for i, alpha in enumerate(transparency): + palette[i * 4 + 3] = alpha + else: + palette[transparency * 4 + 3] = 0 + self.palette = ImagePalette.ImagePalette("RGBA", bytes(palette)) + self.palette.dirty = 1 + + del self.info["transparency"] + + def getpixel( + self, xy: tuple[int, int] | list[int] + ) -> float | tuple[int, ...] | None: + """ + Returns the pixel value at a given position. + + :param xy: The coordinate, given as (x, y). See + :ref:`coordinate-system`. + :returns: The pixel value. If the image is a multi-layer image, + this method returns a tuple. + """ + + self.load() + if self.pyaccess: + return self.pyaccess.getpixel(xy) + return self.im.getpixel(tuple(xy)) + + def getprojection(self) -> tuple[list[int], list[int]]: + """ + Get projection to x and y axes + + :returns: Two sequences, indicating where there are non-zero + pixels along the X-axis and the Y-axis, respectively. + """ + + self.load() + x, y = self.im.getprojection() + return list(x), list(y) + + def histogram(self, mask: Image | None = None, extrema=None) -> list[int]: + """ + Returns a histogram for the image. The histogram is returned as a + list of pixel counts, one for each pixel value in the source + image. Counts are grouped into 256 bins for each band, even if + the image has more than 8 bits per band. If the image has more + than one band, the histograms for all bands are concatenated (for + example, the histogram for an "RGB" image contains 768 values). + + A bilevel image (mode "1") is treated as a grayscale ("L") image + by this method. + + If a mask is provided, the method returns a histogram for those + parts of the image where the mask image is non-zero. The mask + image must have the same size as the image, and be either a + bi-level image (mode "1") or a grayscale image ("L"). + + :param mask: An optional mask. + :param extrema: An optional tuple of manually-specified extrema. + :returns: A list containing pixel counts. + """ + self.load() + if mask: + mask.load() + return self.im.histogram((0, 0), mask.im) + if self.mode in ("I", "F"): + if extrema is None: + extrema = self.getextrema() + return self.im.histogram(extrema) + return self.im.histogram() + + def entropy(self, mask=None, extrema=None): + """ + Calculates and returns the entropy for the image. + + A bilevel image (mode "1") is treated as a grayscale ("L") + image by this method. + + If a mask is provided, the method employs the histogram for + those parts of the image where the mask image is non-zero. + The mask image must have the same size as the image, and be + either a bi-level image (mode "1") or a grayscale image ("L"). + + :param mask: An optional mask. + :param extrema: An optional tuple of manually-specified extrema. 
+ :returns: A float value representing the image entropy + """ + self.load() + if mask: + mask.load() + return self.im.entropy((0, 0), mask.im) + if self.mode in ("I", "F"): + if extrema is None: + extrema = self.getextrema() + return self.im.entropy(extrema) + return self.im.entropy() + + def paste( + self, + im: Image | str | float | tuple[float, ...], + box: Image | tuple[int, int, int, int] | tuple[int, int] | None = None, + mask: Image | None = None, + ) -> None: + """ + Pastes another image into this image. The box argument is either + a 2-tuple giving the upper left corner, a 4-tuple defining the + left, upper, right, and lower pixel coordinate, or None (same as + (0, 0)). See :ref:`coordinate-system`. If a 4-tuple is given, the size + of the pasted image must match the size of the region. + + If the modes don't match, the pasted image is converted to the mode of + this image (see the :py:meth:`~PIL.Image.Image.convert` method for + details). + + Instead of an image, the source can be a integer or tuple + containing pixel values. The method then fills the region + with the given color. When creating RGB images, you can + also use color strings as supported by the ImageColor module. + + If a mask is given, this method updates only the regions + indicated by the mask. You can use either "1", "L", "LA", "RGBA" + or "RGBa" images (if present, the alpha band is used as mask). + Where the mask is 255, the given image is copied as is. Where + the mask is 0, the current value is preserved. Intermediate + values will mix the two images together, including their alpha + channels if they have them. + + See :py:meth:`~PIL.Image.Image.alpha_composite` if you want to + combine images with respect to their alpha channels. + + :param im: Source image or pixel value (integer, float or tuple). + :param box: An optional 4-tuple giving the region to paste into. + If a 2-tuple is used instead, it's treated as the upper left + corner. If omitted or None, the source is pasted into the + upper left corner. + + If an image is given as the second argument and there is no + third, the box defaults to (0, 0), and the second argument + is interpreted as a mask image. + :param mask: An optional mask image. + """ + + if isImageType(box): + if mask is not None: + msg = "If using second argument as mask, third argument must be None" + raise ValueError(msg) + # abbreviated paste(im, mask) syntax + mask = box + box = None + assert not isinstance(box, Image) + + if box is None: + box = (0, 0) + + if len(box) == 2: + # upper left corner given; get size from image or mask + if isImageType(im): + size = im.size + elif isImageType(mask): + size = mask.size + else: + # FIXME: use self.size here? + msg = "cannot determine region size; use 4-item box" + raise ValueError(msg) + box += (box[0] + size[0], box[1] + size[1]) + + if isinstance(im, str): + from . import ImageColor + + im = ImageColor.getcolor(im, self.mode) + + elif isImageType(im): + im.load() + if self.mode != im.mode: + if self.mode != "RGB" or im.mode not in ("LA", "RGBA", "RGBa"): + # should use an adapter for this! + im = im.convert(self.mode) + im = im.im + + self._ensure_mutable() + + if mask: + mask.load() + self.im.paste(im, box, mask.im) + else: + self.im.paste(im, box) + + def alpha_composite( + self, im: Image, dest: Sequence[int] = (0, 0), source: Sequence[int] = (0, 0) + ) -> None: + """'In-place' analog of Image.alpha_composite. Composites an image + onto this image. 
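+
+        A brief sketch (``base`` and ``overlay`` are assumed to be "RGBA" images, with
+        the overlay fitting inside ``base`` at the given offset)::
+
+            base.alpha_composite(overlay, dest=(10, 10))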
+ + :param im: image to composite over this one + :param dest: Optional 2 tuple (left, top) specifying the upper + left corner in this (destination) image. + :param source: Optional 2 (left, top) tuple for the upper left + corner in the overlay source image, or 4 tuple (left, top, right, + bottom) for the bounds of the source rectangle + + Performance Note: Not currently implemented in-place in the core layer. + """ + + if not isinstance(source, (list, tuple)): + msg = "Source must be a list or tuple" + raise ValueError(msg) + if not isinstance(dest, (list, tuple)): + msg = "Destination must be a list or tuple" + raise ValueError(msg) + + if len(source) == 4: + overlay_crop_box = tuple(source) + elif len(source) == 2: + overlay_crop_box = tuple(source) + im.size + else: + msg = "Source must be a sequence of length 2 or 4" + raise ValueError(msg) + + if not len(dest) == 2: + msg = "Destination must be a sequence of length 2" + raise ValueError(msg) + if min(source) < 0: + msg = "Source must be non-negative" + raise ValueError(msg) + + # over image, crop if it's not the whole image. + if overlay_crop_box == (0, 0) + im.size: + overlay = im + else: + overlay = im.crop(overlay_crop_box) + + # target for the paste + box = tuple(dest) + (dest[0] + overlay.width, dest[1] + overlay.height) + + # destination image. don't copy if we're using the whole image. + if box == (0, 0) + self.size: + background = self + else: + background = self.crop(box) + + result = alpha_composite(background, overlay) + self.paste(result, box) + + def point( + self, + lut: Sequence[float] | Callable[[int], float] | ImagePointHandler, + mode: str | None = None, + ) -> Image: + """ + Maps this image through a lookup table or function. + + :param lut: A lookup table, containing 256 (or 65536 if + self.mode=="I" and mode == "L") values per band in the + image. A function can be used instead, it should take a + single argument. The function is called once for each + possible pixel value, and the resulting table is applied to + all bands of the image. + + It may also be an :py:class:`~PIL.Image.ImagePointHandler` + object:: + + class Example(Image.ImagePointHandler): + def point(self, data): + # Return result + :param mode: Output mode (default is same as input). This can only be used if + the source image has mode "L" or "P", and the output has mode "1" or the + source image mode is "I" and the output mode is "L". + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + self.load() + + if isinstance(lut, ImagePointHandler): + return lut.point(self) + + if callable(lut): + # if it isn't a list, it should be a function + if self.mode in ("I", "I;16", "F"): + # check if the function can be used with point_transform + # UNDONE wiredfool -- I think this prevents us from ever doing + # a gamma function point transform on > 8bit images. + scale, offset = _getscaleoffset(lut) + return self._new(self.im.point_transform(scale, offset)) + # for other modes, convert the function to a table + flatLut = [lut(i) for i in range(256)] * self.im.bands + else: + flatLut = lut + + if self.mode == "F": + # FIXME: _imaging returns a confusing error message for this case + msg = "point operation not supported for this mode" + raise ValueError(msg) + + if mode != "F": + flatLut = [round(i) for i in flatLut] + return self._new(self.im.point(flatLut, mode)) + + def putalpha(self, alpha: Image | int) -> None: + """ + Adds or replaces the alpha layer in this image. If the image + does not have an alpha layer, it's converted to "LA" or "RGBA". 
+ The new layer must be either "L" or "1". + + :param alpha: The new alpha layer. This can either be an "L" or "1" + image having the same size as this image, or an integer. + """ + + self._ensure_mutable() + + if self.mode not in ("LA", "PA", "RGBA"): + # attempt to promote self to a matching alpha mode + try: + mode = getmodebase(self.mode) + "A" + try: + self.im.setmode(mode) + except (AttributeError, ValueError) as e: + # do things the hard way + im = self.im.convert(mode) + if im.mode not in ("LA", "PA", "RGBA"): + msg = "alpha channel could not be added" + raise ValueError(msg) from e # sanity check + self.im = im + self.pyaccess = None + self._mode = self.im.mode + except KeyError as e: + msg = "illegal image mode" + raise ValueError(msg) from e + + if self.mode in ("LA", "PA"): + band = 1 + else: + band = 3 + + if isImageType(alpha): + # alpha layer + if alpha.mode not in ("1", "L"): + msg = "illegal image mode" + raise ValueError(msg) + alpha.load() + if alpha.mode == "1": + alpha = alpha.convert("L") + else: + # constant alpha + alpha = cast(int, alpha) # see python/typing#1013 + try: + self.im.fillband(band, alpha) + except (AttributeError, ValueError): + # do things the hard way + alpha = new("L", self.size, alpha) + else: + return + + self.im.putband(alpha.im, band) + + def putdata( + self, + data: Sequence[float] | Sequence[Sequence[int]], + scale: float = 1.0, + offset: float = 0.0, + ) -> None: + """ + Copies pixel data from a flattened sequence object into the image. The + values should start at the upper left corner (0, 0), continue to the + end of the line, followed directly by the first value of the second + line, and so on. Data will be read until either the image or the + sequence ends. The scale and offset values are used to adjust the + sequence values: **pixel = value*scale + offset**. + + :param data: A flattened sequence object. + :param scale: An optional scale value. The default is 1.0. + :param offset: An optional offset value. The default is 0.0. + """ + + self._ensure_mutable() + + self.im.putdata(data, scale, offset) + + def putpalette(self, data, rawmode="RGB") -> None: + """ + Attaches a palette to this image. The image must be a "P", "PA", "L" + or "LA" image. + + The palette sequence must contain at most 256 colors, made up of one + integer value for each channel in the raw mode. + For example, if the raw mode is "RGB", then it can contain at most 768 + values, made up of red, green and blue values for the corresponding pixel + index in the 256 colors. + If the raw mode is "RGBA", then it can contain at most 1024 values, + containing red, green, blue and alpha values. + + Alternatively, an 8-bit string may be used instead of an integer sequence. + + :param data: A palette sequence (either a list or a string). + :param rawmode: The raw mode of the palette. Either "RGB", "RGBA", or a mode + that can be transformed to "RGB" or "RGBA" (e.g. "R", "BGR;15", "RGBA;L"). + """ + from . import ImagePalette + + if self.mode not in ("L", "LA", "P", "PA"): + msg = "illegal image mode" + raise ValueError(msg) + if isinstance(data, ImagePalette.ImagePalette): + palette = ImagePalette.raw(data.rawmode, data.palette) + else: + if not isinstance(data, bytes): + data = bytes(data) + palette = ImagePalette.raw(rawmode, data) + self._mode = "PA" if "A" in self.mode else "P" + self.palette = palette + self.palette.mode = "RGBA" if "A" in rawmode else "RGB" + self.load() # install new palette + + def putpixel( + self, xy: tuple[int, int], value: float | tuple[int, ...] 
| list[int] + ) -> None: + """ + Modifies the pixel at the given position. The color is given as + a single numerical value for single-band images, and a tuple for + multi-band images. In addition to this, RGB and RGBA tuples are + accepted for P and PA images. + + Note that this method is relatively slow. For more extensive changes, + use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw` + module instead. + + See: + + * :py:meth:`~PIL.Image.Image.paste` + * :py:meth:`~PIL.Image.Image.putdata` + * :py:mod:`~PIL.ImageDraw` + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :param value: The pixel value. + """ + + if self.readonly: + self._copy() + self.load() + + if self.pyaccess: + return self.pyaccess.putpixel(xy, value) + + if ( + self.mode in ("P", "PA") + and isinstance(value, (list, tuple)) + and len(value) in [3, 4] + ): + # RGB or RGBA value for a P or PA image + if self.mode == "PA": + alpha = value[3] if len(value) == 4 else 255 + value = value[:3] + palette_index = self.palette.getcolor(value, self) + value = (palette_index, alpha) if self.mode == "PA" else palette_index + return self.im.putpixel(xy, value) + + def remap_palette(self, dest_map, source_palette=None): + """ + Rewrites the image to reorder the palette. + + :param dest_map: A list of indexes into the original palette. + e.g. ``[1,0]`` would swap a two item palette, and ``list(range(256))`` + is the identity transform. + :param source_palette: Bytes or None. + :returns: An :py:class:`~PIL.Image.Image` object. + + """ + from . import ImagePalette + + if self.mode not in ("L", "P"): + msg = "illegal image mode" + raise ValueError(msg) + + bands = 3 + palette_mode = "RGB" + if source_palette is None: + if self.mode == "P": + self.load() + palette_mode = self.im.getpalettemode() + if palette_mode == "RGBA": + bands = 4 + source_palette = self.im.getpalette(palette_mode, palette_mode) + else: # L-mode + source_palette = bytearray(i // 3 for i in range(768)) + + palette_bytes = b"" + new_positions = [0] * 256 + + # pick only the used colors from the palette + for i, oldPosition in enumerate(dest_map): + palette_bytes += source_palette[ + oldPosition * bands : oldPosition * bands + bands + ] + new_positions[oldPosition] = i + + # replace the palette color id of all pixel with the new id + + # Palette images are [0..255], mapped through a 1 or 3 + # byte/color map. We need to remap the whole image + # from palette 1 to palette 2. New_positions is + # an array of indexes into palette 1. Palette 2 is + # palette 1 with any holes removed. + + # We're going to leverage the convert mechanism to use the + # C code to remap the image from palette 1 to palette 2, + # by forcing the source image into 'L' mode and adding a + # mapping 'L' mode palette, then converting back to 'L' + # sans palette thus converting the image bytes, then + # assigning the optimized RGB palette. + + # perf reference, 9500x4000 gif, w/~135 colors + # 14 sec prepatch, 1 sec postpatch with optimization forced. + + mapping_palette = bytearray(new_positions) + + m_im = self.copy() + m_im._mode = "P" + + m_im.palette = ImagePalette.ImagePalette( + palette_mode, palette=mapping_palette * bands + ) + # possibly set palette dirty, then + # m_im.putpalette(mapping_palette, 'L') # converts to 'P' + # or just force it. 
+ # UNDONE -- this is part of the general issue with palettes + m_im.im.putpalette(palette_mode, palette_mode + ";L", m_im.palette.tobytes()) + + m_im = m_im.convert("L") + + m_im.putpalette(palette_bytes, palette_mode) + m_im.palette = ImagePalette.ImagePalette(palette_mode, palette=palette_bytes) + + if "transparency" in self.info: + try: + m_im.info["transparency"] = dest_map.index(self.info["transparency"]) + except ValueError: + if "transparency" in m_im.info: + del m_im.info["transparency"] + + return m_im + + def _get_safe_box(self, size, resample, box): + """Expands the box so it includes adjacent pixels + that may be used by resampling with the given resampling filter. + """ + filter_support = _filters_support[resample] - 0.5 + scale_x = (box[2] - box[0]) / size[0] + scale_y = (box[3] - box[1]) / size[1] + support_x = filter_support * scale_x + support_y = filter_support * scale_y + + return ( + max(0, int(box[0] - support_x)), + max(0, int(box[1] - support_y)), + min(self.size[0], math.ceil(box[2] + support_x)), + min(self.size[1], math.ceil(box[3] + support_y)), + ) + + def resize( + self, + size: tuple[int, int], + resample: int | None = None, + box: tuple[float, float, float, float] | None = None, + reducing_gap: float | None = None, + ) -> Image: + """ + Returns a resized copy of this image. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param resample: An optional resampling filter. This can be + one of :py:data:`Resampling.NEAREST`, :py:data:`Resampling.BOX`, + :py:data:`Resampling.BILINEAR`, :py:data:`Resampling.HAMMING`, + :py:data:`Resampling.BICUBIC` or :py:data:`Resampling.LANCZOS`. + If the image has mode "1" or "P", it is always set to + :py:data:`Resampling.NEAREST`. If the image mode specifies a number + of bits, such as "I;16", then the default filter is + :py:data:`Resampling.NEAREST`. Otherwise, the default filter is + :py:data:`Resampling.BICUBIC`. See: :ref:`concept-filters`. + :param box: An optional 4-tuple of floats providing + the source image region to be scaled. + The values must be within (0, 0, width, height) rectangle. + If omitted or None, the entire source is used. + :param reducing_gap: Apply optimization by resizing the image + in two steps. First, reducing the image by integer times + using :py:meth:`~PIL.Image.Image.reduce`. + Second, resizing using regular resampling. The last step + changes size no less than by ``reducing_gap`` times. + ``reducing_gap`` may be None (no first step is performed) + or should be greater than 1.0. The bigger ``reducing_gap``, + the closer the result to the fair resampling. + The smaller ``reducing_gap``, the faster resizing. + With ``reducing_gap`` greater or equal to 3.0, the result is + indistinguishable from fair resampling in most cases. + The default value is None (no optimization). + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if resample is None: + type_special = ";" in self.mode + resample = Resampling.NEAREST if type_special else Resampling.BICUBIC + elif resample not in ( + Resampling.NEAREST, + Resampling.BILINEAR, + Resampling.BICUBIC, + Resampling.LANCZOS, + Resampling.BOX, + Resampling.HAMMING, + ): + msg = f"Unknown resampling filter ({resample})." 
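+            # the message is extended below with the list of accepted filters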
+ + filters = [ + f"{filter[1]} ({filter[0]})" + for filter in ( + (Resampling.NEAREST, "Image.Resampling.NEAREST"), + (Resampling.LANCZOS, "Image.Resampling.LANCZOS"), + (Resampling.BILINEAR, "Image.Resampling.BILINEAR"), + (Resampling.BICUBIC, "Image.Resampling.BICUBIC"), + (Resampling.BOX, "Image.Resampling.BOX"), + (Resampling.HAMMING, "Image.Resampling.HAMMING"), + ) + ] + msg += f" Use {', '.join(filters[:-1])} or {filters[-1]}" + raise ValueError(msg) + + if reducing_gap is not None and reducing_gap < 1.0: + msg = "reducing_gap must be 1.0 or greater" + raise ValueError(msg) + + self.load() + if box is None: + box = (0, 0) + self.size + + if self.size == size and box == (0, 0) + self.size: + return self.copy() + + if self.mode in ("1", "P"): + resample = Resampling.NEAREST + + if self.mode in ["LA", "RGBA"] and resample != Resampling.NEAREST: + im = self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode]) + im = im.resize(size, resample, box) + return im.convert(self.mode) + + self.load() + + if reducing_gap is not None and resample != Resampling.NEAREST: + factor_x = int((box[2] - box[0]) / size[0] / reducing_gap) or 1 + factor_y = int((box[3] - box[1]) / size[1] / reducing_gap) or 1 + if factor_x > 1 or factor_y > 1: + reduce_box = self._get_safe_box(size, resample, box) + factor = (factor_x, factor_y) + self = ( + self.reduce(factor, box=reduce_box) + if callable(self.reduce) + else Image.reduce(self, factor, box=reduce_box) + ) + box = ( + (box[0] - reduce_box[0]) / factor_x, + (box[1] - reduce_box[1]) / factor_y, + (box[2] - reduce_box[0]) / factor_x, + (box[3] - reduce_box[1]) / factor_y, + ) + + return self._new(self.im.resize(size, resample, box)) + + def reduce( + self, + factor: int | tuple[int, int], + box: tuple[int, int, int, int] | None = None, + ) -> Image: + """ + Returns a copy of the image reduced ``factor`` times. + If the size of the image is not dividable by ``factor``, + the resulting size will be rounded up. + + :param factor: A greater than 0 integer or tuple of two integers + for width and height separately. + :param box: An optional 4-tuple of ints providing + the source image region to be reduced. + The values must be within ``(0, 0, width, height)`` rectangle. + If omitted or ``None``, the entire source is used. + """ + if not isinstance(factor, (list, tuple)): + factor = (factor, factor) + + if box is None: + box = (0, 0) + self.size + + if factor == (1, 1) and box == (0, 0) + self.size: + return self.copy() + + if self.mode in ["LA", "RGBA"]: + im = self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode]) + im = im.reduce(factor, box) + return im.convert(self.mode) + + self.load() + + return self._new(self.im.reduce(factor, box)) + + def rotate( + self, + angle: float, + resample: Resampling = Resampling.NEAREST, + expand: int | bool = False, + center: tuple[float, float] | None = None, + translate: tuple[int, int] | None = None, + fillcolor: float | tuple[float, ...] | str | None = None, + ) -> Image: + """ + Returns a rotated copy of this image. This method returns a + copy of this image, rotated the given number of degrees counter + clockwise around its centre. + + :param angle: In degrees counter clockwise. + :param resample: An optional resampling filter. This can be + one of :py:data:`Resampling.NEAREST` (use nearest neighbour), + :py:data:`Resampling.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:data:`Resampling.BICUBIC` (cubic spline + interpolation in a 4x4 environment). 
If omitted, or if the image has + mode "1" or "P", it is set to :py:data:`Resampling.NEAREST`. + See :ref:`concept-filters`. + :param expand: Optional expansion flag. If true, expands the output + image to make it large enough to hold the entire rotated image. + If false or omitted, make the output image the same size as the + input image. Note that the expand flag assumes rotation around + the center and no translation. + :param center: Optional center of rotation (a 2-tuple). Origin is + the upper left corner. Default is the center of the image. + :param translate: An optional post-rotate translation (a 2-tuple). + :param fillcolor: An optional color for area outside the rotated image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + angle = angle % 360.0 + + # Fast paths regardless of filter, as long as we're not + # translating or changing the center. + if not (center or translate): + if angle == 0: + return self.copy() + if angle == 180: + return self.transpose(Transpose.ROTATE_180) + if angle in (90, 270) and (expand or self.width == self.height): + return self.transpose( + Transpose.ROTATE_90 if angle == 90 else Transpose.ROTATE_270 + ) + + # Calculate the affine matrix. Note that this is the reverse + # transformation (from destination image to source) because we + # want to interpolate the (discrete) destination pixel from + # the local area around the (floating) source pixel. + + # The matrix we actually want (note that it operates from the right): + # (1, 0, tx) (1, 0, cx) ( cos a, sin a, 0) (1, 0, -cx) + # (0, 1, ty) * (0, 1, cy) * (-sin a, cos a, 0) * (0, 1, -cy) + # (0, 0, 1) (0, 0, 1) ( 0, 0, 1) (0, 0, 1) + + # The reverse matrix is thus: + # (1, 0, cx) ( cos -a, sin -a, 0) (1, 0, -cx) (1, 0, -tx) + # (0, 1, cy) * (-sin -a, cos -a, 0) * (0, 1, -cy) * (0, 1, -ty) + # (0, 0, 1) ( 0, 0, 1) (0, 0, 1) (0, 0, 1) + + # In any case, the final translation may be updated at the end to + # compensate for the expand flag. + + w, h = self.size + + if translate is None: + post_trans = (0, 0) + else: + post_trans = translate + if center is None: + center = (w / 2, h / 2) + + angle = -math.radians(angle) + matrix = [ + round(math.cos(angle), 15), + round(math.sin(angle), 15), + 0.0, + round(-math.sin(angle), 15), + round(math.cos(angle), 15), + 0.0, + ] + + def transform(x, y, matrix): + (a, b, c, d, e, f) = matrix + return a * x + b * y + c, d * x + e * y + f + + matrix[2], matrix[5] = transform( + -center[0] - post_trans[0], -center[1] - post_trans[1], matrix + ) + matrix[2] += center[0] + matrix[5] += center[1] + + if expand: + # calculate output size + xx = [] + yy = [] + for x, y in ((0, 0), (w, 0), (w, h), (0, h)): + x, y = transform(x, y, matrix) + xx.append(x) + yy.append(y) + nw = math.ceil(max(xx)) - math.floor(min(xx)) + nh = math.ceil(max(yy)) - math.floor(min(yy)) + + # We multiply a translation matrix from the right. Because of its + # special form, this is the same as taking the image of the + # translation vector as new translation vector. + matrix[2], matrix[5] = transform(-(nw - w) / 2.0, -(nh - h) / 2.0, matrix) + w, h = nw, nh + + return self.transform( + (w, h), Transform.AFFINE, matrix, resample, fillcolor=fillcolor + ) + + def save( + self, fp: StrOrBytesPath | IO[bytes], format: str | None = None, **params: Any + ) -> None: + """ + Saves this image under the given filename. If no format is + specified, the format to use is determined from the filename + extension, if possible. 
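+
+ A minimal usage sketch; the file names here are only placeholders::
+
+ from PIL import Image
+
+ im = Image.open("hopper.jpg")
+ im.save("hopper.png") # format inferred from the ".png" extension
+ im.save("hopper_copy.jpg", format="JPEG", quality=90) # explicit format plus a writer option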
+ + Keyword options can be used to provide additional instructions + to the writer. If a writer doesn't recognise an option, it is + silently ignored. The available options are described in the + :doc:`image format documentation + <../handbook/image-file-formats>` for each writer. + + You can use a file object instead of a filename. In this case, + you must always specify the format. The file object must + implement the ``seek``, ``tell``, and ``write`` + methods, and be opened in binary mode. + + :param fp: A filename (string), os.PathLike object or file object. + :param format: Optional format override. If omitted, the + format to use is determined from the filename extension. + If a file object was used instead of a filename, this + parameter should always be used. + :param params: Extra parameters to the image writer. + :returns: None + :exception ValueError: If the output format could not be determined + from the file name. Use the format option to solve this. + :exception OSError: If the file could not be written. The file + may have been created, and may contain partial data. + """ + + filename: str | bytes = "" + open_fp = False + if is_path(fp): + filename = os.path.realpath(os.fspath(fp)) + open_fp = True + elif fp == sys.stdout: + try: + fp = sys.stdout.buffer + except AttributeError: + pass + if not filename and hasattr(fp, "name") and is_path(fp.name): + # only set the name for metadata purposes + filename = os.path.realpath(os.fspath(fp.name)) + + # may mutate self! + self._ensure_mutable() + + save_all = params.pop("save_all", False) + self.encoderinfo = params + self.encoderconfig: tuple[Any, ...] = () + + preinit() + + filename_ext = os.path.splitext(filename)[1].lower() + ext = filename_ext.decode() if isinstance(filename_ext, bytes) else filename_ext + + if not format: + if ext not in EXTENSION: + init() + try: + format = EXTENSION[ext] + except KeyError as e: + msg = f"unknown file extension: {ext}" + raise ValueError(msg) from e + + if format.upper() not in SAVE: + init() + if save_all: + save_handler = SAVE_ALL[format.upper()] + else: + save_handler = SAVE[format.upper()] + + created = False + if open_fp: + created = not os.path.exists(filename) + if params.get("append", False): + # Open also for reading ("+"), because TIFF save_all + # writer needs to go back and edit the written data. + fp = builtins.open(filename, "r+b") + else: + fp = builtins.open(filename, "w+b") + else: + fp = cast(IO[bytes], fp) + + try: + save_handler(self, fp, filename) + except Exception: + if open_fp: + fp.close() + if created: + try: + os.remove(filename) + except PermissionError: + pass + raise + if open_fp: + fp.close() + + def seek(self, frame: int) -> None: + """ + Seeks to the given frame in this sequence file. If you seek + beyond the end of the sequence, the method raises an + ``EOFError`` exception. When a sequence file is opened, the + library automatically seeks to frame 0. + + See :py:meth:`~PIL.Image.Image.tell`. + + If defined, :attr:`~PIL.Image.Image.n_frames` refers to the + number of available frames. + + :param frame: Frame number, starting at 0. + :exception EOFError: If the call attempts to seek beyond the end + of the sequence. + """ + + # overridden by file handlers + if frame != 0: + msg = "no more images in file" + raise EOFError(msg) + + def show(self, title: str | None = None) -> None: + """ + Displays this image. This method is mainly intended for debugging purposes. + + This method calls :py:func:`PIL.ImageShow.show` internally. 
You can use + :py:func:`PIL.ImageShow.register` to override its default behaviour. + + The image is first saved to a temporary file. By default, it will be in + PNG format. + + On Unix, the image is then opened using the **xdg-open**, **display**, + **gm**, **eog** or **xv** utility, depending on which one can be found. + + On macOS, the image is opened with the native Preview application. + + On Windows, the image is opened with the standard PNG display utility. + + :param title: Optional title to use for the image window, where possible. + """ + + _show(self, title=title) + + def split(self) -> tuple[Image, ...]: + """ + Split this image into individual bands. This method returns a + tuple of individual image bands from an image. For example, + splitting an "RGB" image creates three new images each + containing a copy of one of the original bands (red, green, + blue). + + If you need only one band, :py:meth:`~PIL.Image.Image.getchannel` + method can be more convenient and faster. + + :returns: A tuple containing bands. + """ + + self.load() + if self.im.bands == 1: + return (self.copy(),) + return tuple(map(self._new, self.im.split())) + + def getchannel(self, channel: int | str) -> Image: + """ + Returns an image containing a single channel of the source image. + + :param channel: What channel to return. Could be index + (0 for "R" channel of "RGB") or channel name + ("A" for alpha channel of "RGBA"). + :returns: An image in "L" mode. + + .. versionadded:: 4.3.0 + """ + self.load() + + if isinstance(channel, str): + try: + channel = self.getbands().index(channel) + except ValueError as e: + msg = f'The image has no channel "{channel}"' + raise ValueError(msg) from e + + return self._new(self.im.getband(channel)) + + def tell(self) -> int: + """ + Returns the current frame number. See :py:meth:`~PIL.Image.Image.seek`. + + If defined, :attr:`~PIL.Image.Image.n_frames` refers to the + number of available frames. + + :returns: Frame number, starting with 0. + """ + return 0 + + def thumbnail( + self, + size: tuple[float, float], + resample: Resampling = Resampling.BICUBIC, + reducing_gap: float | None = 2.0, + ) -> None: + """ + Make this image into a thumbnail. This method modifies the + image to contain a thumbnail version of itself, no larger than + the given size. This method calculates an appropriate thumbnail + size to preserve the aspect of the image, calls the + :py:meth:`~PIL.Image.Image.draft` method to configure the file reader + (where applicable), and finally resizes the image. + + Note that this function modifies the :py:class:`~PIL.Image.Image` + object in place. If you need to use the full resolution image as well, + apply this method to a :py:meth:`~PIL.Image.Image.copy` of the original + image. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param resample: Optional resampling filter. This can be one + of :py:data:`Resampling.NEAREST`, :py:data:`Resampling.BOX`, + :py:data:`Resampling.BILINEAR`, :py:data:`Resampling.HAMMING`, + :py:data:`Resampling.BICUBIC` or :py:data:`Resampling.LANCZOS`. + If omitted, it defaults to :py:data:`Resampling.BICUBIC`. + (was :py:data:`Resampling.NEAREST` prior to version 2.5.0). + See: :ref:`concept-filters`. + :param reducing_gap: Apply optimization by resizing the image + in two steps. First, reducing the image by integer times + using :py:meth:`~PIL.Image.Image.reduce` or + :py:meth:`~PIL.Image.Image.draft` for JPEG images. + Second, resizing using regular resampling. 
The last step + changes size no less than by ``reducing_gap`` times. + ``reducing_gap`` may be None (no first step is performed) + or should be greater than 1.0. The bigger ``reducing_gap``, + the closer the result to the fair resampling. + The smaller ``reducing_gap``, the faster resizing. + With ``reducing_gap`` greater or equal to 3.0, the result is + indistinguishable from fair resampling in most cases. + The default value is 2.0 (very close to fair resampling + while still being faster in many cases). + :returns: None + """ + + provided_size = tuple(map(math.floor, size)) + + def preserve_aspect_ratio() -> tuple[int, int] | None: + def round_aspect(number, key): + return max(min(math.floor(number), math.ceil(number), key=key), 1) + + x, y = provided_size + if x >= self.width and y >= self.height: + return None + + aspect = self.width / self.height + if x / y >= aspect: + x = round_aspect(y * aspect, key=lambda n: abs(aspect - n / y)) + else: + y = round_aspect( + x / aspect, key=lambda n: 0 if n == 0 else abs(aspect - x / n) + ) + return x, y + + box = None + final_size: tuple[int, int] + if reducing_gap is not None: + preserved_size = preserve_aspect_ratio() + if preserved_size is None: + return + final_size = preserved_size + + res = self.draft( + None, (int(size[0] * reducing_gap), int(size[1] * reducing_gap)) + ) + if res is not None: + box = res[1] + if box is None: + self.load() + + # load() may have changed the size of the image + preserved_size = preserve_aspect_ratio() + if preserved_size is None: + return + final_size = preserved_size + + if self.size != final_size: + im = self.resize(final_size, resample, box=box, reducing_gap=reducing_gap) + + self.im = im.im + self._size = final_size + self._mode = self.im.mode + + self.readonly = 0 + self.pyaccess = None + + # FIXME: the different transform methods need further explanation + # instead of bloating the method docs, add a separate chapter. + def transform( + self, + size: tuple[int, int], + method: Transform | ImageTransformHandler | SupportsGetData, + data: Sequence[Any] | None = None, + resample: int = Resampling.NEAREST, + fill: int = 1, + fillcolor: float | tuple[float, ...] | str | None = None, + ) -> Image: + """ + Transforms this image. This method creates a new image with the + given size, and the same mode as the original, and copies data + to the new image using the given transform. + + :param size: The output size in pixels, as a 2-tuple: + (width, height). + :param method: The transformation method. This is one of + :py:data:`Transform.EXTENT` (cut out a rectangular subregion), + :py:data:`Transform.AFFINE` (affine transform), + :py:data:`Transform.PERSPECTIVE` (perspective transform), + :py:data:`Transform.QUAD` (map a quadrilateral to a rectangle), or + :py:data:`Transform.MESH` (map a number of source quadrilaterals + in one operation). + + It may also be an :py:class:`~PIL.Image.ImageTransformHandler` + object:: + + class Example(Image.ImageTransformHandler): + def transform(self, size, data, resample, fill=1): + # Return result + + Implementations of :py:class:`~PIL.Image.ImageTransformHandler` + for some of the :py:class:`Transform` methods are provided + in :py:mod:`~PIL.ImageTransform`. + + It may also be an object with a ``method.getdata`` method + that returns a tuple supplying new ``method`` and ``data`` values:: + + class Example: + def getdata(self): + method = Image.Transform.EXTENT + data = (0, 0, 100, 100) + return method, data + :param data: Extra data to the transformation method. 
+ :param resample: Optional resampling filter. It can be one of + :py:data:`Resampling.NEAREST` (use nearest neighbour), + :py:data:`Resampling.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:data:`Resampling.BICUBIC` (cubic spline + interpolation in a 4x4 environment). If omitted, or if the image + has mode "1" or "P", it is set to :py:data:`Resampling.NEAREST`. + See: :ref:`concept-filters`. + :param fill: If ``method`` is an + :py:class:`~PIL.Image.ImageTransformHandler` object, this is one of + the arguments passed to it. Otherwise, it is unused. + :param fillcolor: Optional fill color for the area outside the + transform in the output image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if self.mode in ("LA", "RGBA") and resample != Resampling.NEAREST: + return ( + self.convert({"LA": "La", "RGBA": "RGBa"}[self.mode]) + .transform(size, method, data, resample, fill, fillcolor) + .convert(self.mode) + ) + + if isinstance(method, ImageTransformHandler): + return method.transform(size, self, resample=resample, fill=fill) + + if hasattr(method, "getdata"): + # compatibility w. old-style transform objects + method, data = method.getdata() + + if data is None: + msg = "missing method data" + raise ValueError(msg) + + im = new(self.mode, size, fillcolor) + if self.mode == "P" and self.palette: + im.palette = self.palette.copy() + im.info = self.info.copy() + if method == Transform.MESH: + # list of quads + for box, quad in data: + im.__transformer( + box, self, Transform.QUAD, quad, resample, fillcolor is None + ) + else: + im.__transformer( + (0, 0) + size, self, method, data, resample, fillcolor is None + ) + + return im + + def __transformer( + self, box, image, method, data, resample=Resampling.NEAREST, fill=1 + ): + w = box[2] - box[0] + h = box[3] - box[1] + + if method == Transform.AFFINE: + data = data[:6] + + elif method == Transform.EXTENT: + # convert extent to an affine transform + x0, y0, x1, y1 = data + xs = (x1 - x0) / w + ys = (y1 - y0) / h + method = Transform.AFFINE + data = (xs, 0, x0, 0, ys, y0) + + elif method == Transform.PERSPECTIVE: + data = data[:8] + + elif method == Transform.QUAD: + # quadrilateral warp. data specifies the four corners + # given as NW, SW, SE, and NE. + nw = data[:2] + sw = data[2:4] + se = data[4:6] + ne = data[6:8] + x0, y0 = nw + As = 1.0 / w + At = 1.0 / h + data = ( + x0, + (ne[0] - x0) * As, + (sw[0] - x0) * At, + (se[0] - sw[0] - ne[0] + x0) * As * At, + y0, + (ne[1] - y0) * As, + (sw[1] - y0) * At, + (se[1] - sw[1] - ne[1] + y0) * As * At, + ) + + else: + msg = "unknown transformation method" + raise ValueError(msg) + + if resample not in ( + Resampling.NEAREST, + Resampling.BILINEAR, + Resampling.BICUBIC, + ): + if resample in (Resampling.BOX, Resampling.HAMMING, Resampling.LANCZOS): + msg = { + Resampling.BOX: "Image.Resampling.BOX", + Resampling.HAMMING: "Image.Resampling.HAMMING", + Resampling.LANCZOS: "Image.Resampling.LANCZOS", + }[resample] + f" ({resample}) cannot be used." + else: + msg = f"Unknown resampling filter ({resample})." 
+ + filters = [ + f"{filter[1]} ({filter[0]})" + for filter in ( + (Resampling.NEAREST, "Image.Resampling.NEAREST"), + (Resampling.BILINEAR, "Image.Resampling.BILINEAR"), + (Resampling.BICUBIC, "Image.Resampling.BICUBIC"), + ) + ] + msg += f" Use {', '.join(filters[:-1])} or {filters[-1]}" + raise ValueError(msg) + + image.load() + + self.load() + + if image.mode in ("1", "P"): + resample = Resampling.NEAREST + + self.im.transform(box, image.im, method, data, resample, fill) + + def transpose(self, method: Transpose) -> Image: + """ + Transpose image (flip or rotate in 90 degree steps) + + :param method: One of :py:data:`Transpose.FLIP_LEFT_RIGHT`, + :py:data:`Transpose.FLIP_TOP_BOTTOM`, :py:data:`Transpose.ROTATE_90`, + :py:data:`Transpose.ROTATE_180`, :py:data:`Transpose.ROTATE_270`, + :py:data:`Transpose.TRANSPOSE` or :py:data:`Transpose.TRANSVERSE`. + :returns: Returns a flipped or rotated copy of this image. + """ + + self.load() + return self._new(self.im.transpose(method)) + + def effect_spread(self, distance: int) -> Image: + """ + Randomly spread pixels in an image. + + :param distance: Distance to spread pixels. + """ + self.load() + return self._new(self.im.effect_spread(distance)) + + def toqimage(self): + """Returns a QImage copy of this image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + msg = "Qt bindings are not installed" + raise ImportError(msg) + return ImageQt.toqimage(self) + + def toqpixmap(self): + """Returns a QPixmap copy of this image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + msg = "Qt bindings are not installed" + raise ImportError(msg) + return ImageQt.toqpixmap(self) + + +# -------------------------------------------------------------------- +# Abstract handlers. + + +class ImagePointHandler: + """ + Used as a mixin by point transforms + (for use with :py:meth:`~PIL.Image.Image.point`) + """ + + @abc.abstractmethod + def point(self, im: Image) -> Image: + pass + + +class ImageTransformHandler: + """ + Used as a mixin by geometry transforms + (for use with :py:meth:`~PIL.Image.Image.transform`) + """ + + @abc.abstractmethod + def transform( + self, + size: tuple[int, int], + image: Image, + **options: Any, + ) -> Image: + pass + + +# -------------------------------------------------------------------- +# Factories + +# +# Debugging + + +def _wedge() -> Image: + """Create grayscale wedge (for debugging only)""" + + return Image()._new(core.wedge("L")) + + +def _check_size(size: Any) -> None: + """ + Common check to enforce type and sanity check on size tuples + + :param size: Should be a 2 tuple of (width, height) + :returns: None, or raises a ValueError + """ + + if not isinstance(size, (list, tuple)): + msg = "Size must be a list or tuple" + raise ValueError(msg) + if len(size) != 2: + msg = "Size must be a sequence of length 2" + raise ValueError(msg) + if size[0] < 0 or size[1] < 0: + msg = "Width and height must be >= 0" + raise ValueError(msg) + + +def new( + mode: str, + size: tuple[int, int] | list[int], + color: float | tuple[float, ...] | str | None = 0, +) -> Image: + """ + Creates a new image with the given mode and size. + + :param mode: The mode to use for the new image. See: + :ref:`concept-modes`. + :param size: A 2-tuple, containing (width, height) in pixels. + :param color: What color to use for the image. Default is black. + If given, this should be a single integer or floating point value + for single-band modes, and a tuple for multi-band modes (one value + per band). 
When creating RGB or HSV images, you can also use color + strings as supported by the ImageColor module. If the color is + None, the image is not initialised. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if mode in ("BGR;15", "BGR;16", "BGR;24"): + deprecate(mode, 12) + + _check_size(size) + + if color is None: + # don't initialize + return Image()._new(core.new(mode, size)) + + if isinstance(color, str): + # css3-style specifier + + from . import ImageColor + + color = ImageColor.getcolor(color, mode) + + im = Image() + if ( + mode == "P" + and isinstance(color, (list, tuple)) + and all(isinstance(i, int) for i in color) + ): + color_ints: tuple[int, ...] = cast(Tuple[int, ...], tuple(color)) + if len(color_ints) == 3 or len(color_ints) == 4: + # RGB or RGBA value for a P image + from . import ImagePalette + + im.palette = ImagePalette.ImagePalette() + color = im.palette.getcolor(color_ints) + return im._new(core.fill(mode, size, color)) + + +def frombytes( + mode: str, + size: tuple[int, int], + data: bytes | bytearray, + decoder_name: str = "raw", + *args: Any, +) -> Image: + """ + Creates a copy of an image memory from pixel data in a buffer. + + In its simplest form, this function takes three arguments + (mode, size, and unpacked pixel data). + + You can also use any pixel decoder supported by PIL. For more + information on available decoders, see the section + :ref:`Writing Your Own File Codec `. + + Note that this function decodes pixel data only, not entire images. + If you have an entire image in a string, wrap it in a + :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load + it. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A byte buffer containing raw data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + _check_size(size) + + im = new(mode, size) + if im.width != 0 and im.height != 0: + decoder_args: Any = args + if len(decoder_args) == 1 and isinstance(decoder_args[0], tuple): + # may pass tuple instead of argument list + decoder_args = decoder_args[0] + + if decoder_name == "raw" and decoder_args == (): + decoder_args = mode + + im.frombytes(data, decoder_name, decoder_args) + return im + + +def frombuffer( + mode: str, size: tuple[int, int], data, decoder_name: str = "raw", *args: Any +) -> Image: + """ + Creates an image memory referencing pixel data in a byte buffer. + + This function is similar to :py:func:`~PIL.Image.frombytes`, but uses data + in the byte buffer, where possible. This means that changes to the + original buffer object are reflected in this image). Not all modes can + share memory; supported modes include "L", "RGBX", "RGBA", and "CMYK". + + Note that this function decodes pixel data only, not entire images. + If you have an entire image file in a string, wrap it in a + :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load it. + + The default parameters used for the "raw" decoder differs from that used for + :py:func:`~PIL.Image.frombytes`. This is a bug, and will probably be fixed in a + future release. The current release issues a warning if you do this; to disable + the warning, you should provide the full set of parameters. See below for details. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. 
+ :param data: A bytes or other buffer object containing raw + data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. For the + default encoder ("raw"), it's recommended that you provide the + full set of parameters:: + + frombuffer(mode, size, data, "raw", mode, 0, 1) + + :returns: An :py:class:`~PIL.Image.Image` object. + + .. versionadded:: 1.1.4 + """ + + _check_size(size) + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw": + if args == (): + args = mode, 0, 1 + if args[0] in _MAPMODES: + im = new(mode, (0, 0)) + im = im._new(core.map_buffer(data, size, decoder_name, 0, args)) + if mode == "P": + from . import ImagePalette + + im.palette = ImagePalette.ImagePalette("RGB", im.im.getpalette("RGB")) + im.readonly = 1 + return im + + return frombytes(mode, size, data, decoder_name, args) + + +class SupportsArrayInterface(Protocol): + """ + An object that has an ``__array_interface__`` dictionary. + """ + + @property + def __array_interface__(self) -> dict[str, Any]: + raise NotImplementedError() + + +def fromarray(obj: SupportsArrayInterface, mode: str | None = None) -> Image: + """ + Creates an image memory from an object exporting the array interface + (using the buffer protocol):: + + from PIL import Image + import numpy as np + a = np.zeros((5, 5)) + im = Image.fromarray(a) + + If ``obj`` is not contiguous, then the ``tobytes`` method is called + and :py:func:`~PIL.Image.frombuffer` is used. + + In the case of NumPy, be aware that Pillow modes do not always correspond + to NumPy dtypes. Pillow modes only offer 1-bit pixels, 8-bit pixels, + 32-bit signed integer pixels, and 32-bit floating point pixels. + + Pillow images can also be converted to arrays:: + + from PIL import Image + import numpy as np + im = Image.open("hopper.jpg") + a = np.asarray(im) + + When converting Pillow images to arrays however, only pixel values are + transferred. This means that P and PA mode images will lose their palette. + + :param obj: Object with array interface + :param mode: Optional mode to use when reading ``obj``. Will be determined from + type if ``None``. + + This will not be used to convert the data after reading, but will be used to + change how the data is read:: + + from PIL import Image + import numpy as np + a = np.full((1, 1), 300) + im = Image.fromarray(a, mode="L") + im.getpixel((0, 0)) # 44 + im = Image.fromarray(a, mode="RGB") + im.getpixel((0, 0)) # (44, 1, 0) + + See: :ref:`concept-modes` for general information about modes. + :returns: An image object. + + .. versionadded:: 1.1.6 + """ + arr = obj.__array_interface__ + shape = arr["shape"] + ndim = len(shape) + strides = arr.get("strides", None) + if mode is None: + try: + typekey = (1, 1) + shape[2:], arr["typestr"] + except KeyError as e: + msg = "Cannot handle this data type" + raise TypeError(msg) from e + try: + mode, rawmode = _fromarray_typemap[typekey] + except KeyError as e: + typekey_shape, typestr = typekey + msg = f"Cannot handle this data type: {typekey_shape}, {typestr}" + raise TypeError(msg) from e + else: + rawmode = mode + if mode in ["1", "L", "I", "P", "F"]: + ndmax = 2 + elif mode == "RGB": + ndmax = 3 + else: + ndmax = 4 + if ndim > ndmax: + msg = f"Too many dimensions: {ndim} > {ndmax}." 
+ raise ValueError(msg) + + size = 1 if ndim == 1 else shape[1], shape[0] + if strides is not None: + if hasattr(obj, "tobytes"): + obj = obj.tobytes() + elif hasattr(obj, "tostring"): + obj = obj.tostring() + else: + msg = "'strides' requires either tobytes() or tostring()" + raise ValueError(msg) + + return frombuffer(mode, size, obj, "raw", rawmode, 0, 1) + + +def fromqimage(im): + """Creates an image instance from a QImage image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + msg = "Qt bindings are not installed" + raise ImportError(msg) + return ImageQt.fromqimage(im) + + +def fromqpixmap(im): + """Creates an image instance from a QPixmap image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + msg = "Qt bindings are not installed" + raise ImportError(msg) + return ImageQt.fromqpixmap(im) + + +_fromarray_typemap = { + # (shape, typestr) => mode, rawmode + # first two members of shape are set to one + ((1, 1), "|b1"): ("1", "1;8"), + ((1, 1), "|u1"): ("L", "L"), + ((1, 1), "|i1"): ("I", "I;8"), + ((1, 1), "u2"): ("I", "I;16B"), + ((1, 1), "i2"): ("I", "I;16BS"), + ((1, 1), "u4"): ("I", "I;32B"), + ((1, 1), "i4"): ("I", "I;32BS"), + ((1, 1), "f4"): ("F", "F;32BF"), + ((1, 1), "f8"): ("F", "F;64BF"), + ((1, 1, 2), "|u1"): ("LA", "LA"), + ((1, 1, 3), "|u1"): ("RGB", "RGB"), + ((1, 1, 4), "|u1"): ("RGBA", "RGBA"), + # shortcuts: + ((1, 1), f"{_ENDIAN}i4"): ("I", "I"), + ((1, 1), f"{_ENDIAN}f4"): ("F", "F"), +} + + +def _decompression_bomb_check(size: tuple[int, int]) -> None: + if MAX_IMAGE_PIXELS is None: + return + + pixels = max(1, size[0]) * max(1, size[1]) + + if pixels > 2 * MAX_IMAGE_PIXELS: + msg = ( + f"Image size ({pixels} pixels) exceeds limit of {2 * MAX_IMAGE_PIXELS} " + "pixels, could be decompression bomb DOS attack." + ) + raise DecompressionBombError(msg) + + if pixels > MAX_IMAGE_PIXELS: + warnings.warn( + f"Image size ({pixels} pixels) exceeds limit of {MAX_IMAGE_PIXELS} pixels, " + "could be decompression bomb DOS attack.", + DecompressionBombWarning, + ) + + +def open( + fp: StrOrBytesPath | IO[bytes], + mode: Literal["r"] = "r", + formats: list[str] | tuple[str, ...] | None = None, +) -> ImageFile.ImageFile: + """ + Opens and identifies the given image file. + + This is a lazy operation; this function identifies the file, but + the file remains open and the actual image data is not read from + the file until you try to process the data (or call the + :py:meth:`~PIL.Image.Image.load` method). See + :py:func:`~PIL.Image.new`. See :ref:`file-handling`. + + :param fp: A filename (string), os.PathLike object or a file object. + The file object must implement ``file.read``, + ``file.seek``, and ``file.tell`` methods, + and be opened in binary mode. The file object will also seek to zero + before reading. + :param mode: The mode. If given, this argument must be "r". + :param formats: A list or tuple of formats to attempt to load the file in. + This can be used to restrict the set of formats checked. + Pass ``None`` to try all supported formats. You can print the set of + available formats by running ``python3 -m PIL`` or using + the :py:func:`PIL.features.pilinfo` function. + :returns: An :py:class:`~PIL.Image.Image` object. + :exception FileNotFoundError: If the file cannot be found. + :exception PIL.UnidentifiedImageError: If the image cannot be opened and + identified. + :exception ValueError: If the ``mode`` is not "r", or if a ``StringIO`` + instance is used for ``fp``. 
+ :exception TypeError: If ``formats`` is not ``None``, a list or a tuple. + """ + + if mode != "r": + msg = f"bad mode {repr(mode)}" # type: ignore[unreachable] + raise ValueError(msg) + elif isinstance(fp, io.StringIO): + msg = ( # type: ignore[unreachable] + "StringIO cannot be used to open an image. " + "Binary data must be used instead." + ) + raise ValueError(msg) + + if formats is None: + formats = ID + elif not isinstance(formats, (list, tuple)): + msg = "formats must be a list or tuple" # type: ignore[unreachable] + raise TypeError(msg) + + exclusive_fp = False + filename: str | bytes = "" + if is_path(fp): + filename = os.path.realpath(os.fspath(fp)) + + if filename: + fp = builtins.open(filename, "rb") + exclusive_fp = True + else: + fp = cast(IO[bytes], fp) + + try: + fp.seek(0) + except (AttributeError, io.UnsupportedOperation): + fp = io.BytesIO(fp.read()) + exclusive_fp = True + + prefix = fp.read(16) + + preinit() + + warning_messages: list[str] = [] + + def _open_core( + fp: IO[bytes], + filename: str | bytes, + prefix: bytes, + formats: list[str] | tuple[str, ...], + ) -> ImageFile.ImageFile | None: + for i in formats: + i = i.upper() + if i not in OPEN: + init() + try: + factory, accept = OPEN[i] + result = not accept or accept(prefix) + if isinstance(result, str): + warning_messages.append(result) + elif result: + fp.seek(0) + im = factory(fp, filename) + _decompression_bomb_check(im.size) + return im + except (SyntaxError, IndexError, TypeError, struct.error) as e: + if WARN_POSSIBLE_FORMATS: + warning_messages.append(i + " opening failed. " + str(e)) + except BaseException: + if exclusive_fp: + fp.close() + raise + return None + + im = _open_core(fp, filename, prefix, formats) + + if im is None and formats is ID: + checked_formats = ID.copy() + if init(): + im = _open_core( + fp, + filename, + prefix, + tuple(format for format in formats if format not in checked_formats), + ) + + if im: + im._exclusive_fp = exclusive_fp + return im + + if exclusive_fp: + fp.close() + for message in warning_messages: + warnings.warn(message) + msg = "cannot identify image file %r" % (filename if filename else fp) + raise UnidentifiedImageError(msg) + + +# +# Image processing. + + +def alpha_composite(im1: Image, im2: Image) -> Image: + """ + Alpha composite im2 over im1. + + :param im1: The first image. Must have mode RGBA. + :param im2: The second image. Must have mode RGBA, and the same size as + the first image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.alpha_composite(im1.im, im2.im)) + + +def blend(im1: Image, im2: Image, alpha: float) -> Image: + """ + Creates a new image by interpolating between two input images, using + a constant alpha:: + + out = image1 * (1.0 - alpha) + image2 * alpha + + :param im1: The first image. + :param im2: The second image. Must have the same mode and size as + the first image. + :param alpha: The interpolation alpha factor. If alpha is 0.0, a + copy of the first image is returned. If alpha is 1.0, a copy of + the second image is returned. There are no restrictions on the + alpha value. If necessary, the result is clipped to fit into + the allowed output range. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.blend(im1.im, im2.im, alpha)) + + +def composite(image1: Image, image2: Image, mask: Image) -> Image: + """ + Create composite image by blending images using a transparency mask. + + :param image1: The first image. 
+ :param image2: The second image. Must have the same mode and + size as the first image. + :param mask: A mask image. This image can have mode + "1", "L", or "RGBA", and must have the same size as the + other two images. + """ + + image = image2.copy() + image.paste(image1, None, mask) + return image + + +def eval(image, *args): + """ + Applies the function (which should take one argument) to each pixel + in the given image. If the image has more than one band, the same + function is applied to each band. Note that the function is + evaluated once for each possible pixel value, so you cannot use + random components or other generators. + + :param image: The input image. + :param function: A function object, taking one integer argument. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + return image.point(args[0]) + + +def merge(mode: str, bands: Sequence[Image]) -> Image: + """ + Merge a set of single band images into a new multiband image. + + :param mode: The mode to use for the output image. See: + :ref:`concept-modes`. + :param bands: A sequence containing one single-band image for + each band in the output image. All bands must have the + same size. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if getmodebands(mode) != len(bands) or "*" in mode: + msg = "wrong number of bands" + raise ValueError(msg) + for band in bands[1:]: + if band.mode != getmodetype(mode): + msg = "mode mismatch" + raise ValueError(msg) + if band.size != bands[0].size: + msg = "size mismatch" + raise ValueError(msg) + for band in bands: + band.load() + return bands[0]._new(core.merge(mode, *[b.im for b in bands])) + + +# -------------------------------------------------------------------- +# Plugin registry + + +def register_open( + id: str, + factory: Callable[[IO[bytes], str | bytes], ImageFile.ImageFile], + accept: Callable[[bytes], bool | str] | None = None, +) -> None: + """ + Register an image file plugin. This function should not be used + in application code. + + :param id: An image format identifier. + :param factory: An image file factory method. + :param accept: An optional function that can be used to quickly + reject images having another format. + """ + id = id.upper() + if id not in ID: + ID.append(id) + OPEN[id] = factory, accept + + +def register_mime(id: str, mimetype: str) -> None: + """ + Registers an image MIME type by populating ``Image.MIME``. This function + should not be used in application code. + + ``Image.MIME`` provides a mapping from image format identifiers to mime + formats, but :py:meth:`~PIL.ImageFile.ImageFile.get_format_mimetype` can + provide a different result for specific images. + + :param id: An image format identifier. + :param mimetype: The image MIME type for this format. + """ + MIME[id.upper()] = mimetype + + +def register_save( + id: str, driver: Callable[[Image, IO[bytes], str | bytes], None] +) -> None: + """ + Registers an image save function. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE[id.upper()] = driver + + +def register_save_all( + id: str, driver: Callable[[Image, IO[bytes], str | bytes], None] +) -> None: + """ + Registers an image function to save all the frames + of a multiframe format. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. 
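+
+ A sketch of the driver signature a plugin would register; the format
+ name and function below are hypothetical::
+
+ from PIL import Image
+
+ def _save_all(im, fp, filename):
+ ... # write every frame of ``im`` to ``fp``
+
+ Image.register_save_all("MYFMT", _save_all)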
+ """ + SAVE_ALL[id.upper()] = driver + + +def register_extension(id: str, extension: str) -> None: + """ + Registers an image extension. This function should not be + used in application code. + + :param id: An image format identifier. + :param extension: An extension used for this format. + """ + EXTENSION[extension.lower()] = id.upper() + + +def register_extensions(id: str, extensions: list[str]) -> None: + """ + Registers image extensions. This function should not be + used in application code. + + :param id: An image format identifier. + :param extensions: A list of extensions used for this format. + """ + for extension in extensions: + register_extension(id, extension) + + +def registered_extensions() -> dict[str, str]: + """ + Returns a dictionary containing all file extensions belonging + to registered plugins + """ + init() + return EXTENSION + + +def register_decoder(name: str, decoder: type[ImageFile.PyDecoder]) -> None: + """ + Registers an image decoder. This function should not be + used in application code. + + :param name: The name of the decoder + :param decoder: An ImageFile.PyDecoder object + + .. versionadded:: 4.1.0 + """ + DECODERS[name] = decoder + + +def register_encoder(name: str, encoder: type[ImageFile.PyEncoder]) -> None: + """ + Registers an image encoder. This function should not be + used in application code. + + :param name: The name of the encoder + :param encoder: An ImageFile.PyEncoder object + + .. versionadded:: 4.1.0 + """ + ENCODERS[name] = encoder + + +# -------------------------------------------------------------------- +# Simple display support. + + +def _show(image: Image, **options: Any) -> None: + from . import ImageShow + + ImageShow.show(image, **options) + + +# -------------------------------------------------------------------- +# Effects + + +def effect_mandelbrot( + size: tuple[int, int], extent: tuple[float, float, float, float], quality: int +) -> Image: + """ + Generate a Mandelbrot set covering the given extent. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param extent: The extent to cover, as a 4-tuple: + (x0, y0, x1, y1). + :param quality: Quality. + """ + return Image()._new(core.effect_mandelbrot(size, extent, quality)) + + +def effect_noise(size: tuple[int, int], sigma: float) -> Image: + """ + Generate Gaussian noise centered around 128. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param sigma: Standard deviation of noise. + """ + return Image()._new(core.effect_noise(size, sigma)) + + +def linear_gradient(mode: str) -> Image: + """ + Generate 256x256 linear gradient from black to white, top to bottom. + + :param mode: Input mode. + """ + return Image()._new(core.linear_gradient(mode)) + + +def radial_gradient(mode: str) -> Image: + """ + Generate 256x256 radial gradient from black to white, centre to edge. + + :param mode: Input mode. 
+ """ + return Image()._new(core.radial_gradient(mode)) + + +# -------------------------------------------------------------------- +# Resources + + +def _apply_env_variables(env: dict[str, str] | None = None) -> None: + env_dict = env if env is not None else os.environ + + for var_name, setter in [ + ("PILLOW_ALIGNMENT", core.set_alignment), + ("PILLOW_BLOCK_SIZE", core.set_block_size), + ("PILLOW_BLOCKS_MAX", core.set_blocks_max), + ]: + if var_name not in env_dict: + continue + + var = env_dict[var_name].lower() + + units = 1 + for postfix, mul in [("k", 1024), ("m", 1024 * 1024)]: + if var.endswith(postfix): + units = mul + var = var[: -len(postfix)] + + try: + var_int = int(var) * units + except ValueError: + warnings.warn(f"{var_name} is not int") + continue + + try: + setter(var_int) + except ValueError as e: + warnings.warn(f"{var_name}: {e}") + + +_apply_env_variables() +atexit.register(core.clear_cache) + + +if TYPE_CHECKING: + _ExifBase = MutableMapping[int, Any] +else: + _ExifBase = MutableMapping + + +class Exif(_ExifBase): + """ + This class provides read and write access to EXIF image data:: + + from PIL import Image + im = Image.open("exif.png") + exif = im.getexif() # Returns an instance of this class + + Information can be read and written, iterated over or deleted:: + + print(exif[274]) # 1 + exif[274] = 2 + for k, v in exif.items(): + print("Tag", k, "Value", v) # Tag 274 Value 2 + del exif[274] + + To access information beyond IFD0, :py:meth:`~PIL.Image.Exif.get_ifd` + returns a dictionary:: + + from PIL import ExifTags + im = Image.open("exif_gps.jpg") + exif = im.getexif() + gps_ifd = exif.get_ifd(ExifTags.IFD.GPSInfo) + print(gps_ifd) + + Other IFDs include ``ExifTags.IFD.Exif``, ``ExifTags.IFD.Makernote``, + ``ExifTags.IFD.Interop`` and ``ExifTags.IFD.IFD1``. + + :py:mod:`~PIL.ExifTags` also has enum classes to provide names for data:: + + print(exif[ExifTags.Base.Software]) # PIL + print(gps_ifd[ExifTags.GPS.GPSDateStamp]) # 1999:99:99 99:99:99 + """ + + endian = None + bigtiff = False + _loaded = False + + def __init__(self): + self._data = {} + self._hidden_data = {} + self._ifds = {} + self._info = None + self._loaded_exif = None + + def _fixup(self, value): + try: + if len(value) == 1 and isinstance(value, tuple): + return value[0] + except Exception: + pass + return value + + def _fixup_dict(self, src_dict): + # Helper function + # returns a dict with any single item tuples/lists as individual values + return {k: self._fixup(v) for k, v in src_dict.items()} + + def _get_ifd_dict(self, offset, group=None): + try: + # an offset pointer to the location of the nested embedded IFD. + # It should be a long, but may be corrupted. + self.fp.seek(offset) + except (KeyError, TypeError): + pass + else: + from . import TiffImagePlugin + + info = TiffImagePlugin.ImageFileDirectory_v2(self.head, group=group) + info.load(self.fp) + return self._fixup_dict(info) + + def _get_head(self): + version = b"\x2B" if self.bigtiff else b"\x2A" + if self.endian == "<": + head = b"II" + version + b"\x00" + o32le(8) + else: + head = b"MM\x00" + version + o32be(8) + if self.bigtiff: + head += o32le(8) if self.endian == "<" else o32be(8) + head += b"\x00\x00\x00\x00" + return head + + def load(self, data): + # Extract EXIF information. This is highly experimental, + # and is likely to be replaced with something better in a future + # version. + + # The EXIF record consists of a TIFF file embedded in a JPEG + # application marker (!). 
+ if data == self._loaded_exif: + return + self._loaded_exif = data + self._data.clear() + self._hidden_data.clear() + self._ifds.clear() + if data and data.startswith(b"Exif\x00\x00"): + data = data[6:] + if not data: + self._info = None + return + + self.fp = io.BytesIO(data) + self.head = self.fp.read(8) + # process dictionary + from . import TiffImagePlugin + + self._info = TiffImagePlugin.ImageFileDirectory_v2(self.head) + self.endian = self._info._endian + self.fp.seek(self._info.next) + self._info.load(self.fp) + + def load_from_fp(self, fp, offset=None): + self._loaded_exif = None + self._data.clear() + self._hidden_data.clear() + self._ifds.clear() + + # process dictionary + from . import TiffImagePlugin + + self.fp = fp + if offset is not None: + self.head = self._get_head() + else: + self.head = self.fp.read(8) + self._info = TiffImagePlugin.ImageFileDirectory_v2(self.head) + if self.endian is None: + self.endian = self._info._endian + if offset is None: + offset = self._info.next + self.fp.tell() + self.fp.seek(offset) + self._info.load(self.fp) + + def _get_merged_dict(self): + merged_dict = dict(self) + + # get EXIF extension + if ExifTags.IFD.Exif in self: + ifd = self._get_ifd_dict(self[ExifTags.IFD.Exif], ExifTags.IFD.Exif) + if ifd: + merged_dict.update(ifd) + + # GPS + if ExifTags.IFD.GPSInfo in self: + merged_dict[ExifTags.IFD.GPSInfo] = self._get_ifd_dict( + self[ExifTags.IFD.GPSInfo], ExifTags.IFD.GPSInfo + ) + + return merged_dict + + def tobytes(self, offset: int = 8) -> bytes: + from . import TiffImagePlugin + + head = self._get_head() + ifd = TiffImagePlugin.ImageFileDirectory_v2(ifh=head) + for tag, value in self.items(): + if tag in [ + ExifTags.IFD.Exif, + ExifTags.IFD.GPSInfo, + ] and not isinstance(value, dict): + value = self.get_ifd(tag) + if ( + tag == ExifTags.IFD.Exif + and ExifTags.IFD.Interop in value + and not isinstance(value[ExifTags.IFD.Interop], dict) + ): + value = value.copy() + value[ExifTags.IFD.Interop] = self.get_ifd(ExifTags.IFD.Interop) + ifd[tag] = value + return b"Exif\x00\x00" + head + ifd.tobytes(offset) + + def get_ifd(self, tag): + if tag not in self._ifds: + if tag == ExifTags.IFD.IFD1: + if self._info is not None and self._info.next != 0: + self._ifds[tag] = self._get_ifd_dict(self._info.next) + elif tag in [ExifTags.IFD.Exif, ExifTags.IFD.GPSInfo]: + offset = self._hidden_data.get(tag, self.get(tag)) + if offset is not None: + self._ifds[tag] = self._get_ifd_dict(offset, tag) + elif tag in [ExifTags.IFD.Interop, ExifTags.IFD.Makernote]: + if ExifTags.IFD.Exif not in self._ifds: + self.get_ifd(ExifTags.IFD.Exif) + tag_data = self._ifds[ExifTags.IFD.Exif][tag] + if tag == ExifTags.IFD.Makernote: + from .TiffImagePlugin import ImageFileDirectory_v2 + + if tag_data[:8] == b"FUJIFILM": + ifd_offset = i32le(tag_data, 8) + ifd_data = tag_data[ifd_offset:] + + makernote = {} + for i in range(0, struct.unpack(" 4: + (offset,) = struct.unpack("H", tag_data[:2])[0]): + ifd_tag, typ, count, data = struct.unpack( + ">HHL4s", tag_data[i * 12 + 2 : (i + 1) * 12 + 2] + ) + if ifd_tag == 0x1101: + # CameraInfo + (offset,) = struct.unpack(">L", data) + self.fp.seek(offset) + + camerainfo = {"ModelID": self.fp.read(4)} + + self.fp.read(4) + # Seconds since 2000 + camerainfo["TimeStamp"] = i32le(self.fp.read(12)) + + self.fp.read(4) + camerainfo["InternalSerialNumber"] = self.fp.read(4) + + self.fp.read(12) + parallax = self.fp.read(4) + handler = ImageFileDirectory_v2._load_dispatch[ + TiffTags.FLOAT + ][1] + camerainfo["Parallax"] = handler( + 
ImageFileDirectory_v2(), parallax, False + ) + + self.fp.read(4) + camerainfo["Category"] = self.fp.read(2) + + makernote = {0x1101: dict(self._fixup_dict(camerainfo))} + self._ifds[tag] = makernote + else: + # Interop + self._ifds[tag] = self._get_ifd_dict(tag_data, tag) + ifd = self._ifds.get(tag, {}) + if tag == ExifTags.IFD.Exif and self._hidden_data: + ifd = { + k: v + for (k, v) in ifd.items() + if k not in (ExifTags.IFD.Interop, ExifTags.IFD.Makernote) + } + return ifd + + def hide_offsets(self) -> None: + for tag in (ExifTags.IFD.Exif, ExifTags.IFD.GPSInfo): + if tag in self: + self._hidden_data[tag] = self[tag] + del self[tag] + + def __str__(self) -> str: + if self._info is not None: + # Load all keys into self._data + for tag in self._info: + self[tag] + + return str(self._data) + + def __len__(self) -> int: + keys = set(self._data) + if self._info is not None: + keys.update(self._info) + return len(keys) + + def __getitem__(self, tag): + if self._info is not None and tag not in self._data and tag in self._info: + self._data[tag] = self._fixup(self._info[tag]) + del self._info[tag] + return self._data[tag] + + def __contains__(self, tag) -> bool: + return tag in self._data or (self._info is not None and tag in self._info) + + def __setitem__(self, tag, value) -> None: + if self._info is not None and tag in self._info: + del self._info[tag] + self._data[tag] = value + + def __delitem__(self, tag: int) -> None: + if self._info is not None and tag in self._info: + del self._info[tag] + else: + del self._data[tag] + + def __iter__(self): + keys = set(self._data) + if self._info is not None: + keys.update(self._info) + return iter(keys) diff --git a/MLPY/Lib/site-packages/PIL/ImageChops.py b/MLPY/Lib/site-packages/PIL/ImageChops.py new file mode 100644 index 0000000000000000000000000000000000000000..4ec5fafa8e716f3358d38606a1f775387827b10d --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageChops.py @@ -0,0 +1,311 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard channel operations +# +# History: +# 1996-03-24 fl Created +# 1996-08-13 fl Added logical operations (for "1" images) +# 2000-10-12 fl Added offset method (from Image.py) +# +# Copyright (c) 1997-2000 by Secret Labs AB +# Copyright (c) 1996-2000 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import annotations + +from . import Image + + +def constant(image: Image.Image, value: int) -> Image.Image: + """Fill a channel with a given gray level. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.new("L", image.size, value) + + +def duplicate(image: Image.Image) -> Image.Image: + """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return image.copy() + + +def invert(image: Image.Image) -> Image.Image: + """ + Invert an image (channel). :: + + out = MAX - image + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image.load() + return image._new(image.im.chop_invert()) + + +def lighter(image1: Image.Image, image2: Image.Image) -> Image.Image: + """ + Compares the two images, pixel by pixel, and returns a new image containing + the lighter values. :: + + out = max(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_lighter(image2.im)) + + +def darker(image1: Image.Image, image2: Image.Image) -> Image.Image: + """ + Compares the two images, pixel by pixel, and returns a new image containing + the darker values. 
:: + + out = min(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_darker(image2.im)) + + +def difference(image1: Image.Image, image2: Image.Image) -> Image.Image: + """ + Returns the absolute value of the pixel-by-pixel difference between the two + images. :: + + out = abs(image1 - image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_difference(image2.im)) + + +def multiply(image1: Image.Image, image2: Image.Image) -> Image.Image: + """ + Superimposes two images on top of each other. + + If you multiply an image with a solid black image, the result is black. If + you multiply with a solid white image, the image is unaffected. :: + + out = image1 * image2 / MAX + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_multiply(image2.im)) + + +def screen(image1: Image.Image, image2: Image.Image) -> Image.Image: + """ + Superimposes two inverted images on top of each other. :: + + out = MAX - ((MAX - image1) * (MAX - image2) / MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_screen(image2.im)) + + +def soft_light(image1: Image.Image, image2: Image.Image) -> Image.Image: + """ + Superimposes two images on top of each other using the Soft Light algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_soft_light(image2.im)) + + +def hard_light(image1: Image.Image, image2: Image.Image) -> Image.Image: + """ + Superimposes two images on top of each other using the Hard Light algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_hard_light(image2.im)) + + +def overlay(image1: Image.Image, image2: Image.Image) -> Image.Image: + """ + Superimposes two images on top of each other using the Overlay algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_overlay(image2.im)) + + +def add( + image1: Image.Image, image2: Image.Image, scale: float = 1.0, offset: float = 0 +) -> Image.Image: + """ + Adds two images, dividing the result by scale and adding the + offset. If omitted, scale defaults to 1.0, and offset to 0.0. :: + + out = ((image1 + image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add(image2.im, scale, offset)) + + +def subtract( + image1: Image.Image, image2: Image.Image, scale: float = 1.0, offset: float = 0 +) -> Image.Image: + """ + Subtracts two images, dividing the result by scale and adding the offset. + If omitted, scale defaults to 1.0, and offset to 0.0. :: + + out = ((image1 - image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract(image2.im, scale, offset)) + + +def add_modulo(image1: Image.Image, image2: Image.Image) -> Image.Image: + """Add two images, without clipping the result. :: + + out = ((image1 + image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add_modulo(image2.im)) + + +def subtract_modulo(image1: Image.Image, image2: Image.Image) -> Image.Image: + """Subtract two images, without clipping the result. 
:: + + out = ((image1 - image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract_modulo(image2.im)) + + +def logical_and(image1: Image.Image, image2: Image.Image) -> Image.Image: + """Logical AND between two images. + + Both of the images must have mode "1". If you would like to perform a + logical AND on an image with a mode other than "1", try + :py:meth:`~PIL.ImageChops.multiply` instead, using a black-and-white mask + as the second image. :: + + out = ((image1 and image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_and(image2.im)) + + +def logical_or(image1: Image.Image, image2: Image.Image) -> Image.Image: + """Logical OR between two images. + + Both of the images must have mode "1". :: + + out = ((image1 or image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_or(image2.im)) + + +def logical_xor(image1: Image.Image, image2: Image.Image) -> Image.Image: + """Logical XOR between two images. + + Both of the images must have mode "1". :: + + out = ((bool(image1) != bool(image2)) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_xor(image2.im)) + + +def blend(image1: Image.Image, image2: Image.Image, alpha: float) -> Image.Image: + """Blend images using constant transparency weight. Alias for + :py:func:`PIL.Image.blend`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.blend(image1, image2, alpha) + + +def composite( + image1: Image.Image, image2: Image.Image, mask: Image.Image +) -> Image.Image: + """Create composite using transparency mask. Alias for + :py:func:`PIL.Image.composite`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.composite(image1, image2, mask) + + +def offset(image: Image.Image, xoffset: int, yoffset: int | None = None) -> Image.Image: + """Returns a copy of the image where data has been offset by the given + distances. Data wraps around the edges. If ``yoffset`` is omitted, it + is assumed to be equal to ``xoffset``. + + :param image: Input image. + :param xoffset: The horizontal distance. + :param yoffset: The vertical distance. If omitted, both + distances are set to the same value. + :rtype: :py:class:`~PIL.Image.Image` + """ + + if yoffset is None: + yoffset = xoffset + image.load() + return image._new(image.im.offset(xoffset, yoffset)) diff --git a/MLPY/Lib/site-packages/PIL/ImageCms.py b/MLPY/Lib/site-packages/PIL/ImageCms.py new file mode 100644 index 0000000000000000000000000000000000000000..058801ae700de5d934515663aa1348ab91ce69fb --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageCms.py @@ -0,0 +1,1127 @@ +# The Python Imaging Library. +# $Id$ + +# Optional color management support, based on Kevin Cazabon's PyCMS +# library. + +# Originally released under LGPL. Graciously donated to PIL in +# March 2009, for distribution under the standard PIL license + +# History: + +# 2009-03-08 fl Added to PIL. + +# Copyright (C) 2002-2003 Kevin Cazabon +# Copyright (c) 2009 by Fredrik Lundh +# Copyright (c) 2013 by Eric Soroos + +# See the README file for information on usage and redistribution. See +# below for the original description. 
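+
+# Example (sketch): convert an RGB image to CMYK with littleCMS.
+# "printer.icc" is a placeholder profile path and must exist for this to run:
+#
+#     from PIL import Image, ImageCms
+#
+#     im = Image.open("hopper.jpg")
+#     srgb = ImageCms.createProfile("sRGB")
+#     cmyk = ImageCms.profileToProfile(im, srgb, "printer.icc", outputMode="CMYK")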
+from __future__ import annotations + +import operator +import sys +from enum import IntEnum, IntFlag +from functools import reduce +from typing import Any, Literal, SupportsFloat, SupportsInt, Union + +from . import Image, __version__ +from ._deprecate import deprecate +from ._typing import SupportsRead + +try: + from . import _imagingcms as core +except ImportError as ex: + # Allow error import for doc purposes, but error out when accessing + # anything in core. + from ._util import DeferredError + + core = DeferredError.new(ex) + +_DESCRIPTION = """ +pyCMS + + a Python / PIL interface to the littleCMS ICC Color Management System + Copyright (C) 2002-2003 Kevin Cazabon + kevin@cazabon.com + https://www.cazabon.com + + pyCMS home page: https://www.cazabon.com/pyCMS + littleCMS home page: https://www.littlecms.com + (littleCMS is Copyright (C) 1998-2001 Marti Maria) + + Originally released under LGPL. Graciously donated to PIL in + March 2009, for distribution under the standard PIL license + + The pyCMS.py module provides a "clean" interface between Python/PIL and + pyCMSdll, taking care of some of the more complex handling of the direct + pyCMSdll functions, as well as error-checking and making sure that all + relevant data is kept together. + + While it is possible to call pyCMSdll functions directly, it's not highly + recommended. + + Version History: + + 1.0.0 pil Oct 2013 Port to LCMS 2. + + 0.1.0 pil mod March 10, 2009 + + Renamed display profile to proof profile. The proof + profile is the profile of the device that is being + simulated, not the profile of the device which is + actually used to display/print the final simulation + (that'd be the output profile) - also see LCMSAPI.txt + input colorspace -> using 'renderingIntent' -> proof + colorspace -> using 'proofRenderingIntent' -> output + colorspace + + Added LCMS FLAGS support. + Added FLAGS["SOFTPROOFING"] as default flag for + buildProofTransform (otherwise the proof profile/intent + would be ignored). + + 0.1.0 pil March 2009 - added to PIL, as PIL.ImageCms + + 0.0.2 alpha Jan 6, 2002 + + Added try/except statements around type() checks of + potential CObjects... Python won't let you use type() + on them, and raises a TypeError (stupid, if you ask + me!) + + Added buildProofTransformFromOpenProfiles() function. + Additional fixes in DLL, see DLL code for details. + + 0.0.1 alpha first public release, Dec. 26, 2002 + + Known to-do list with current version (of Python interface, not pyCMSdll): + + none + +""" + +_VERSION = "1.0.0 pil" + + +def __getattr__(name: str) -> Any: + if name == "DESCRIPTION": + deprecate("PIL.ImageCms.DESCRIPTION", 12) + return _DESCRIPTION + elif name == "VERSION": + deprecate("PIL.ImageCms.VERSION", 12) + return _VERSION + elif name == "FLAGS": + deprecate("PIL.ImageCms.FLAGS", 12, "PIL.ImageCms.Flags") + return _FLAGS + msg = f"module '{__name__}' has no attribute '{name}'" + raise AttributeError(msg) + + +# --------------------------------------------------------------------. 
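The module-level ``__getattr__`` above is the standard PEP 562 hook for deprecating module attributes: old names keep working but warn on access. A minimal standalone sketch of the same pattern, using a made-up attribute name and the stdlib ``warnings`` module in place of PIL's internal ``deprecate()`` helper::

    # mymodule.py -- hypothetical module showing the PEP 562 deprecation hook
    from __future__ import annotations

    import warnings
    from typing import Any

    NEW_CONSTANT = 42


    def __getattr__(name: str) -> Any:
        if name == "OLD_CONSTANT":  # deprecated alias kept for backwards compatibility
            warnings.warn(
                "OLD_CONSTANT is deprecated; use NEW_CONSTANT instead",
                DeprecationWarning,
                stacklevel=2,
            )
            return NEW_CONSTANT
        msg = f"module '{__name__}' has no attribute '{name}'"
        raise AttributeError(msg)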
+ + +# +# intent/direction values + + +class Intent(IntEnum): + PERCEPTUAL = 0 + RELATIVE_COLORIMETRIC = 1 + SATURATION = 2 + ABSOLUTE_COLORIMETRIC = 3 + + +class Direction(IntEnum): + INPUT = 0 + OUTPUT = 1 + PROOF = 2 + + +# +# flags + + +class Flags(IntFlag): + """Flags and documentation are taken from ``lcms2.h``.""" + + NONE = 0 + NOCACHE = 0x0040 + """Inhibit 1-pixel cache""" + NOOPTIMIZE = 0x0100 + """Inhibit optimizations""" + NULLTRANSFORM = 0x0200 + """Don't transform anyway""" + GAMUTCHECK = 0x1000 + """Out of Gamut alarm""" + SOFTPROOFING = 0x4000 + """Do softproofing""" + BLACKPOINTCOMPENSATION = 0x2000 + NOWHITEONWHITEFIXUP = 0x0004 + """Don't fix scum dot""" + HIGHRESPRECALC = 0x0400 + """Use more memory to give better accuracy""" + LOWRESPRECALC = 0x0800 + """Use less memory to minimize resources""" + # this should be 8BITS_DEVICELINK, but that is not a valid name in Python: + USE_8BITS_DEVICELINK = 0x0008 + """Create 8 bits devicelinks""" + GUESSDEVICECLASS = 0x0020 + """Guess device class (for ``transform2devicelink``)""" + KEEP_SEQUENCE = 0x0080 + """Keep profile sequence for devicelink creation""" + FORCE_CLUT = 0x0002 + """Force CLUT optimization""" + CLUT_POST_LINEARIZATION = 0x0001 + """create postlinearization tables if possible""" + CLUT_PRE_LINEARIZATION = 0x0010 + """create prelinearization tables if possible""" + NONEGATIVES = 0x8000 + """Prevent negative numbers in floating point transforms""" + COPY_ALPHA = 0x04000000 + """Alpha channels are copied on ``cmsDoTransform()``""" + NODEFAULTRESOURCEDEF = 0x01000000 + + _GRIDPOINTS_1 = 1 << 16 + _GRIDPOINTS_2 = 2 << 16 + _GRIDPOINTS_4 = 4 << 16 + _GRIDPOINTS_8 = 8 << 16 + _GRIDPOINTS_16 = 16 << 16 + _GRIDPOINTS_32 = 32 << 16 + _GRIDPOINTS_64 = 64 << 16 + _GRIDPOINTS_128 = 128 << 16 + + @staticmethod + def GRIDPOINTS(n: int) -> Flags: + """ + Fine-tune control over number of gridpoints + + :param n: :py:class:`int` in range ``0 <= n <= 255`` + """ + return Flags.NONE | ((n & 0xFF) << 16) + + +_MAX_FLAG = reduce(operator.or_, Flags) + + +_FLAGS = { + "MATRIXINPUT": 1, + "MATRIXOUTPUT": 2, + "MATRIXONLY": (1 | 2), + "NOWHITEONWHITEFIXUP": 4, # Don't hot fix scum dot + # Don't create prelinearization tables on precalculated transforms + # (internal use): + "NOPRELINEARIZATION": 16, + "GUESSDEVICECLASS": 32, # Guess device class (for transform2devicelink) + "NOTCACHE": 64, # Inhibit 1-pixel cache + "NOTPRECALC": 256, + "NULLTRANSFORM": 512, # Don't transform anyway + "HIGHRESPRECALC": 1024, # Use more memory to give better accuracy + "LOWRESPRECALC": 2048, # Use less memory to minimize resources + "WHITEBLACKCOMPENSATION": 8192, + "BLACKPOINTCOMPENSATION": 8192, + "GAMUTCHECK": 4096, # Out of Gamut alarm + "SOFTPROOFING": 16384, # Do softproofing + "PRESERVEBLACK": 32768, # Black preservation + "NODEFAULTRESOURCEDEF": 16777216, # CRD special + "GRIDPOINTS": lambda n: (n & 0xFF) << 16, # Gridpoints +} + + +# --------------------------------------------------------------------. +# Experimental PIL-level API +# --------------------------------------------------------------------. + +## +# Profile. 
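The ``Intent`` and ``Flags`` values defined above are the arguments expected by the transform helpers further down in this module (``buildTransform()``, ``applyTransform()`` and friends). A hedged sketch of a typical RGB-to-CMYK conversion follows; ``printer_cmyk.icc`` and ``photo.jpg`` are placeholder names for a real output profile and input image::

    from PIL import Image, ImageCms

    srgb = ImageCms.createProfile("sRGB")               # built-in profile, no file needed
    cmyk = ImageCms.getOpenProfile("printer_cmyk.icc")  # placeholder printer profile

    transform = ImageCms.buildTransform(
        srgb,
        cmyk,
        "RGB",
        "CMYK",
        renderingIntent=ImageCms.Intent.RELATIVE_COLORIMETRIC,
        flags=ImageCms.Flags.BLACKPOINTCOMPENSATION,
    )

    im = Image.open("photo.jpg").convert("RGB")
    converted = ImageCms.applyTransform(im, transform)  # new "CMYK" image; im is unchanged

Building the transform once and reusing it with ``applyTransform()`` avoids recomputing the lookup tables for every image, which is the point of the two-step API.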
+ + +class ImageCmsProfile: + def __init__(self, profile: str | SupportsRead[bytes] | core.CmsProfile) -> None: + """ + :param profile: Either a string representing a filename, + a file like object containing a profile or a + low-level profile object + + """ + + if isinstance(profile, str): + if sys.platform == "win32": + profile_bytes_path = profile.encode() + try: + profile_bytes_path.decode("ascii") + except UnicodeDecodeError: + with open(profile, "rb") as f: + self._set(core.profile_frombytes(f.read())) + return + self._set(core.profile_open(profile), profile) + elif hasattr(profile, "read"): + self._set(core.profile_frombytes(profile.read())) + elif isinstance(profile, core.CmsProfile): + self._set(profile) + else: + msg = "Invalid type for Profile" # type: ignore[unreachable] + raise TypeError(msg) + + def _set(self, profile: core.CmsProfile, filename: str | None = None) -> None: + self.profile = profile + self.filename = filename + self.product_name = None # profile.product_name + self.product_info = None # profile.product_info + + def tobytes(self) -> bytes: + """ + Returns the profile in a format suitable for embedding in + saved images. + + :returns: a bytes object containing the ICC profile. + """ + + return core.profile_tobytes(self.profile) + + +class ImageCmsTransform(Image.ImagePointHandler): + """ + Transform. This can be used with the procedural API, or with the standard + :py:func:`~PIL.Image.Image.point` method. + + Will return the output profile in the ``output.info['icc_profile']``. + """ + + def __init__( + self, + input: ImageCmsProfile, + output: ImageCmsProfile, + input_mode: str, + output_mode: str, + intent: Intent = Intent.PERCEPTUAL, + proof: ImageCmsProfile | None = None, + proof_intent: Intent = Intent.ABSOLUTE_COLORIMETRIC, + flags: Flags = Flags.NONE, + ): + supported_modes = ( + "RGB", + "RGBA", + "RGBX", + "CMYK", + "I;16", + "I;16L", + "I;16B", + "YCbCr", + "LAB", + "L", + "1", + ) + for mode in (input_mode, output_mode): + if mode not in supported_modes: + deprecate( + mode, + 12, + { + "L;16": "I;16 or I;16L", + "L:16B": "I;16B", + "YCCA": "YCbCr", + "YCC": "YCbCr", + }.get(mode), + ) + if proof is None: + self.transform = core.buildTransform( + input.profile, output.profile, input_mode, output_mode, intent, flags + ) + else: + self.transform = core.buildProofTransform( + input.profile, + output.profile, + proof.profile, + input_mode, + output_mode, + intent, + proof_intent, + flags, + ) + # Note: inputMode and outputMode are for pyCMS compatibility only + self.input_mode = self.inputMode = input_mode + self.output_mode = self.outputMode = output_mode + + self.output_profile = output + + def point(self, im: Image.Image) -> Image.Image: + return self.apply(im) + + def apply(self, im: Image.Image, imOut: Image.Image | None = None) -> Image.Image: + im.load() + if imOut is None: + imOut = Image.new(self.output_mode, im.size, None) + self.transform.apply(im.im.id, imOut.im.id) + imOut.info["icc_profile"] = self.output_profile.tobytes() + return imOut + + def apply_in_place(self, im: Image.Image) -> Image.Image: + im.load() + if im.mode != self.output_mode: + msg = "mode mismatch" + raise ValueError(msg) # wrong output mode + self.transform.apply(im.im.id, im.im.id) + im.info["icc_profile"] = self.output_profile.tobytes() + return im + + +def get_display_profile(handle: SupportsInt | None = None) -> ImageCmsProfile | None: + """ + (experimental) Fetches the profile for the current display device. + + :returns: ``None`` if the profile is not known. 
+ """ + + if sys.platform != "win32": + return None + + from . import ImageWin # type: ignore[unused-ignore, unreachable] + + if isinstance(handle, ImageWin.HDC): + profile = core.get_display_profile_win32(int(handle), 1) + else: + profile = core.get_display_profile_win32(int(handle or 0)) + if profile is None: + return None + return ImageCmsProfile(profile) + + +# --------------------------------------------------------------------. +# pyCMS compatible layer +# --------------------------------------------------------------------. + +_CmsProfileCompatible = Union[ + str, SupportsRead[bytes], core.CmsProfile, ImageCmsProfile +] + + +class PyCMSError(Exception): + """(pyCMS) Exception class. + This is used for all errors in the pyCMS API.""" + + pass + + +def profileToProfile( + im: Image.Image, + inputProfile: _CmsProfileCompatible, + outputProfile: _CmsProfileCompatible, + renderingIntent: Intent = Intent.PERCEPTUAL, + outputMode: str | None = None, + inPlace: bool = False, + flags: Flags = Flags.NONE, +) -> Image.Image | None: + """ + (pyCMS) Applies an ICC transformation to a given image, mapping from + ``inputProfile`` to ``outputProfile``. + + If the input or output profiles specified are not valid filenames, a + :exc:`PyCMSError` will be raised. If ``inPlace`` is ``True`` and + ``outputMode != im.mode``, a :exc:`PyCMSError` will be raised. + If an error occurs during application of the profiles, + a :exc:`PyCMSError` will be raised. + If ``outputMode`` is not a mode supported by the ``outputProfile`` (or by pyCMS), + a :exc:`PyCMSError` will be raised. + + This function applies an ICC transformation to im from ``inputProfile``'s + color space to ``outputProfile``'s color space using the specified rendering + intent to decide how to handle out-of-gamut colors. + + ``outputMode`` can be used to specify that a color mode conversion is to + be done using these profiles, but the specified profiles must be able + to handle that mode. I.e., if converting im from RGB to CMYK using + profiles, the input profile must handle RGB data, and the output + profile must handle CMYK data. + + :param im: An open :py:class:`~PIL.Image.Image` object (i.e. Image.new(...) + or Image.open(...), etc.) + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this image, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this image, or a profile object + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT) + ImageCms.Intent.RELATIVE_COLORIMETRIC = 1 + ImageCms.Intent.SATURATION = 2 + ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param outputMode: A valid PIL mode for the output image (i.e. "RGB", + "CMYK", etc.). Note: if rendering the image "inPlace", outputMode + MUST be the same mode as the input, or omitted completely. If + omitted, the outputMode will be the same as the mode of the input + image (im.mode) + :param inPlace: Boolean. If ``True``, the original image is modified in-place, + and ``None`` is returned. If ``False`` (default), a new + :py:class:`~PIL.Image.Image` object is returned with the transform applied. + :param flags: Integer (0-...) 
specifying additional flags + :returns: Either None or a new :py:class:`~PIL.Image.Image` object, depending on + the value of ``inPlace`` + :exception PyCMSError: + """ + + if outputMode is None: + outputMode = im.mode + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + msg = "renderingIntent must be an integer between 0 and 3" + raise PyCMSError(msg) + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + msg = f"flags must be an integer between 0 and {_MAX_FLAG}" + raise PyCMSError(msg) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + transform = ImageCmsTransform( + inputProfile, + outputProfile, + im.mode, + outputMode, + renderingIntent, + flags=flags, + ) + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + return imOut + + +def getOpenProfile( + profileFilename: str | SupportsRead[bytes] | core.CmsProfile, +) -> ImageCmsProfile: + """ + (pyCMS) Opens an ICC profile file. + + The PyCMSProfile object can be passed back into pyCMS for use in creating + transforms and such (as in ImageCms.buildTransformFromOpenProfiles()). + + If ``profileFilename`` is not a valid filename for an ICC profile, + a :exc:`PyCMSError` will be raised. + + :param profileFilename: String, as a valid filename path to the ICC profile + you wish to open, or a file-like object. + :returns: A CmsProfile class object. + :exception PyCMSError: + """ + + try: + return ImageCmsProfile(profileFilename) + except (OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def buildTransform( + inputProfile: _CmsProfileCompatible, + outputProfile: _CmsProfileCompatible, + inMode: str, + outMode: str, + renderingIntent: Intent = Intent.PERCEPTUAL, + flags: Flags = Flags.NONE, +) -> ImageCmsTransform: + """ + (pyCMS) Builds an ICC transform mapping from the ``inputProfile`` to the + ``outputProfile``. Use applyTransform to apply the transform to a given + image. + + If the input or output profiles specified are not valid filenames, a + :exc:`PyCMSError` will be raised. If an error occurs during creation + of the transform, a :exc:`PyCMSError` will be raised. + + If ``inMode`` or ``outMode`` are not a mode supported by the ``outputProfile`` + (or by pyCMS), a :exc:`PyCMSError` will be raised. + + This function builds and returns an ICC transform from the ``inputProfile`` + to the ``outputProfile`` using the ``renderingIntent`` to determine what to do + with out-of-gamut colors. It will ONLY work for converting images that + are in ``inMode`` to images that are in ``outMode`` color format (PIL mode, + i.e. "RGB", "RGBA", "CMYK", etc.). + + Building the transform is a fair part of the overhead in + ImageCms.profileToProfile(), so if you're planning on converting multiple + images using the same input/output settings, this can save you time. + Once you have a transform object, it can be used with + ImageCms.applyProfile() to convert images without the need to re-compute + the lookup table for the transform. + + The reason pyCMS returns a class object rather than a handle directly + to the transform is that it needs to keep track of the PIL input/output + modes that the transform is meant for. 
These attributes are stored in + the ``inMode`` and ``outMode`` attributes of the object (which can be + manually overridden if you really want to, but I don't know of any + time that would be of use, or would even work). + + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this transform, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this transform, or a profile object + :param inMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param outMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT) + ImageCms.Intent.RELATIVE_COLORIMETRIC = 1 + ImageCms.Intent.SATURATION = 2 + ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param flags: Integer (0-...) specifying additional flags + :returns: A CmsTransform class object. + :exception PyCMSError: + """ + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + msg = "renderingIntent must be an integer between 0 and 3" + raise PyCMSError(msg) + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + msg = f"flags must be an integer between 0 and {_MAX_FLAG}" + raise PyCMSError(msg) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + return ImageCmsTransform( + inputProfile, outputProfile, inMode, outMode, renderingIntent, flags=flags + ) + except (OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def buildProofTransform( + inputProfile: _CmsProfileCompatible, + outputProfile: _CmsProfileCompatible, + proofProfile: _CmsProfileCompatible, + inMode: str, + outMode: str, + renderingIntent: Intent = Intent.PERCEPTUAL, + proofRenderingIntent: Intent = Intent.ABSOLUTE_COLORIMETRIC, + flags: Flags = Flags.SOFTPROOFING, +) -> ImageCmsTransform: + """ + (pyCMS) Builds an ICC transform mapping from the ``inputProfile`` to the + ``outputProfile``, but tries to simulate the result that would be + obtained on the ``proofProfile`` device. + + If the input, output, or proof profiles specified are not valid + filenames, a :exc:`PyCMSError` will be raised. + + If an error occurs during creation of the transform, + a :exc:`PyCMSError` will be raised. + + If ``inMode`` or ``outMode`` are not a mode supported by the ``outputProfile`` + (or by pyCMS), a :exc:`PyCMSError` will be raised. + + This function builds and returns an ICC transform from the ``inputProfile`` + to the ``outputProfile``, but tries to simulate the result that would be + obtained on the ``proofProfile`` device using ``renderingIntent`` and + ``proofRenderingIntent`` to determine what to do with out-of-gamut + colors. This is known as "soft-proofing". It will ONLY work for + converting images that are in ``inMode`` to images that are in outMode + color format (PIL mode, i.e. "RGB", "RGBA", "CMYK", etc.). + + Usage of the resulting transform object is exactly the same as with + ImageCms.buildTransform(). 
+ + Proof profiling is generally used when using an output device to get a + good idea of what the final printed/displayed image would look like on + the ``proofProfile`` device when it's quicker and easier to use the + output device for judging color. Generally, this means that the + output device is a monitor, or a dye-sub printer (etc.), and the simulated + device is something more expensive, complicated, or time consuming + (making it difficult to make a real print for color judgement purposes). + + Soft-proofing basically functions by adjusting the colors on the + output device to match the colors of the device being simulated. However, + when the simulated device has a much wider gamut than the output + device, you may obtain marginal results. + + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this transform, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + (monitor, usually) profile you wish to use for this transform, or a + profile object + :param proofProfile: String, as a valid filename path to the ICC proof + profile you wish to use for this transform, or a profile object + :param inMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param outMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the input->proof (simulated) transform + + ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT) + ImageCms.Intent.RELATIVE_COLORIMETRIC = 1 + ImageCms.Intent.SATURATION = 2 + ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param proofRenderingIntent: Integer (0-3) specifying the rendering intent + you wish to use for proof->output transform + + ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT) + ImageCms.Intent.RELATIVE_COLORIMETRIC = 1 + ImageCms.Intent.SATURATION = 2 + ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param flags: Integer (0-...) specifying additional flags + :returns: A CmsTransform class object. + :exception PyCMSError: + """ + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + msg = "renderingIntent must be an integer between 0 and 3" + raise PyCMSError(msg) + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + msg = f"flags must be an integer between 0 and {_MAX_FLAG}" + raise PyCMSError(msg) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + if not isinstance(proofProfile, ImageCmsProfile): + proofProfile = ImageCmsProfile(proofProfile) + return ImageCmsTransform( + inputProfile, + outputProfile, + inMode, + outMode, + renderingIntent, + proofProfile, + proofRenderingIntent, + flags, + ) + except (OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +buildTransformFromOpenProfiles = buildTransform +buildProofTransformFromOpenProfiles = buildProofTransform + + +def applyTransform( + im: Image.Image, transform: ImageCmsTransform, inPlace: bool = False +) -> Image.Image | None: + """ + (pyCMS) Applies a transform to a given image. 
+ + If ``im.mode != transform.input_mode``, a :exc:`PyCMSError` is raised. + + If ``inPlace`` is ``True`` and ``transform.input_mode != transform.output_mode``, a + :exc:`PyCMSError` is raised. + + If ``im.mode``, ``transform.input_mode`` or ``transform.output_mode`` is not + supported by pyCMSdll or the profiles you used for the transform, a + :exc:`PyCMSError` is raised. + + If an error occurs while the transform is being applied, + a :exc:`PyCMSError` is raised. + + This function applies a pre-calculated transform (from + ImageCms.buildTransform() or ImageCms.buildTransformFromOpenProfiles()) + to an image. The transform can be used for multiple images, saving + considerable calculation time if doing the same conversion multiple times. + + If you want to modify im in-place instead of receiving a new image as + the return value, set ``inPlace`` to ``True``. This can only be done if + ``transform.input_mode`` and ``transform.output_mode`` are the same, because we + can't change the mode in-place (the buffer sizes for some modes are + different). The default behavior is to return a new :py:class:`~PIL.Image.Image` + object of the same dimensions in mode ``transform.output_mode``. + + :param im: An :py:class:`~PIL.Image.Image` object, and ``im.mode`` must be the same + as the ``input_mode`` supported by the transform. + :param transform: A valid CmsTransform class object + :param inPlace: Bool. If ``True``, ``im`` is modified in place and ``None`` is + returned, if ``False``, a new :py:class:`~PIL.Image.Image` object with the + transform applied is returned (and ``im`` is not changed). The default is + ``False``. + :returns: Either ``None``, or a new :py:class:`~PIL.Image.Image` object, + depending on the value of ``inPlace``. The profile will be returned in + the image's ``info['icc_profile']``. + :exception PyCMSError: + """ + + try: + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (TypeError, ValueError) as v: + raise PyCMSError(v) from v + + return imOut + + +def createProfile( + colorSpace: Literal["LAB", "XYZ", "sRGB"], colorTemp: SupportsFloat = 0 +) -> core.CmsProfile: + """ + (pyCMS) Creates a profile. + + If colorSpace not in ``["LAB", "XYZ", "sRGB"]``, + a :exc:`PyCMSError` is raised. + + If using LAB and ``colorTemp`` is not a positive integer, + a :exc:`PyCMSError` is raised. + + If an error occurs while creating the profile, + a :exc:`PyCMSError` is raised. + + Use this function to create common profiles on-the-fly instead of + having to supply a profile on disk and knowing the path to it. It + returns a normal CmsProfile object that can be passed to + ImageCms.buildTransformFromOpenProfiles() to create a transform to apply + to images. + + :param colorSpace: String, the color space of the profile you wish to + create. + Currently only "LAB", "XYZ", and "sRGB" are supported. + :param colorTemp: Positive number for the white point for the profile, in + degrees Kelvin (i.e. 5000, 6500, 9600, etc.). The default is for D50 + illuminant if omitted (5000k). colorTemp is ONLY applied to LAB + profiles, and is ignored for XYZ and sRGB. 
+ :returns: A CmsProfile class object + :exception PyCMSError: + """ + + if colorSpace not in ["LAB", "XYZ", "sRGB"]: + msg = ( + f"Color space not supported for on-the-fly profile creation ({colorSpace})" + ) + raise PyCMSError(msg) + + if colorSpace == "LAB": + try: + colorTemp = float(colorTemp) + except (TypeError, ValueError) as e: + msg = f'Color temperature must be numeric, "{colorTemp}" not valid' + raise PyCMSError(msg) from e + + try: + return core.createProfile(colorSpace, colorTemp) + except (TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileName(profile: _CmsProfileCompatible) -> str: + """ + + (pyCMS) Gets the internal product name for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, + a :exc:`PyCMSError` is raised If an error occurs while trying + to obtain the name tag, a :exc:`PyCMSError` is raised. + + Use this function to obtain the INTERNAL name of the profile (stored + in an ICC tag in the profile itself), usually the one used when the + profile was originally created. Sometimes this tag also contains + additional information supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal name of the profile as stored + in an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # do it in python, not c. + # // name was "%s - %s" (model, manufacturer) || Description , + # // but if the Model and Manufacturer were the same or the model + # // was long, Just the model, in 1.x + model = profile.profile.model + manufacturer = profile.profile.manufacturer + + if not (model or manufacturer): + return (profile.profile.profile_description or "") + "\n" + if not manufacturer or (model and len(model) > 30): + return f"{model}\n" + return f"{model} - {manufacturer}\n" + + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileInfo(profile: _CmsProfileCompatible) -> str: + """ + (pyCMS) Gets the internal product information for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, + a :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the info tag, + a :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + info tag. This often contains details about the profile, and how it + was created, as supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # add an extra newline to preserve pyCMS compatibility + # Python, not C. the white point bits weren't working well, + # so skipping. 
+ # info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint + description = profile.profile.profile_description + cpright = profile.profile.copyright + elements = [element for element in (description, cpright) if element] + return "\r\n\r\n".join(elements) + "\r\n\r\n" + + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileCopyright(profile: _CmsProfileCompatible) -> str: + """ + (pyCMS) Gets the copyright for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the copyright tag, + a :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + copyright tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.copyright or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileManufacturer(profile: _CmsProfileCompatible) -> str: + """ + (pyCMS) Gets the manufacturer for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the manufacturer tag, a + :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + manufacturer tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.manufacturer or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileModel(profile: _CmsProfileCompatible) -> str: + """ + (pyCMS) Gets the model for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the model tag, + a :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + model tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.model or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileDescription(profile: _CmsProfileCompatible) -> str: + """ + (pyCMS) Gets the description for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the description tag, + a :exc:`PyCMSError` is raised. 
+ + Use this function to obtain the information stored in the profile's + description tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in an + ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.profile_description or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getDefaultIntent(profile: _CmsProfileCompatible) -> int: + """ + (pyCMS) Gets the default intent name for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the default intent, a + :exc:`PyCMSError` is raised. + + Use this function to determine the default (and usually best optimized) + rendering intent for this profile. Most profiles support multiple + rendering intents, but are intended mostly for one type of conversion. + If you wish to use a different intent than returned, use + ImageCms.isIntentSupported() to verify it will work first. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: Integer 0-3 specifying the default rendering intent for this + profile. + + ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT) + ImageCms.Intent.RELATIVE_COLORIMETRIC = 1 + ImageCms.Intent.SATURATION = 2 + ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.rendering_intent + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def isIntentSupported( + profile: _CmsProfileCompatible, intent: Intent, direction: Direction +) -> Literal[-1, 1]: + """ + (pyCMS) Checks if a given intent is supported. + + Use this function to verify that you can use your desired + ``intent`` with ``profile``, and that ``profile`` can be used for the + input/output/proof profile as you desire. + + Some profiles are created specifically for one "direction", can cannot + be used for others. Some profiles can only be used for certain + rendering intents, so it's best to either verify this before trying + to create a transform with them (using this function), or catch the + potential :exc:`PyCMSError` that will occur if they don't + support the modes you select. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :param intent: Integer (0-3) specifying the rendering intent you wish to + use with this profile + + ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT) + ImageCms.Intent.RELATIVE_COLORIMETRIC = 1 + ImageCms.Intent.SATURATION = 2 + ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param direction: Integer specifying if the profile is to be used for + input, output, or proof + + INPUT = 0 (or use ImageCms.Direction.INPUT) + OUTPUT = 1 (or use ImageCms.Direction.OUTPUT) + PROOF = 2 (or use ImageCms.Direction.PROOF) + + :returns: 1 if the intent/direction are supported, -1 if they are not. 
+ :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # FIXME: I get different results for the same data w. different + # compilers. Bug in LittleCMS or in the binding? + if profile.profile.is_intent_supported(intent, direction): + return 1 + else: + return -1 + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def versions() -> tuple[str, str | None, str, str]: + """ + (pyCMS) Fetches versions. + """ + + deprecate( + "PIL.ImageCms.versions()", + 12, + '(PIL.features.version("littlecms2"), sys.version, PIL.__version__)', + ) + return _VERSION, core.littlecms_version, sys.version.split()[0], __version__ diff --git a/MLPY/Lib/site-packages/PIL/ImageColor.py b/MLPY/Lib/site-packages/PIL/ImageColor.py new file mode 100644 index 0000000000000000000000000000000000000000..4dcc33bf19db7dc7fbe5e72a696448d9117c4a8d --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageColor.py @@ -0,0 +1,320 @@ +# +# The Python Imaging Library +# $Id$ +# +# map CSS3-style colour description strings to RGB +# +# History: +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-15 fl Added RGBA support +# 2004-03-27 fl Fixed remaining int() problems for Python 1.5.2 +# 2004-07-19 fl Fixed gray/grey spelling issues +# 2009-03-05 fl Fixed rounding error in grayscale calculation +# +# Copyright (c) 2002-2004 by Secret Labs AB +# Copyright (c) 2002-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import re +from functools import lru_cache + +from . import Image + + +@lru_cache +def getrgb(color: str) -> tuple[int, int, int] | tuple[int, int, int, int]: + """ + Convert a color string to an RGB or RGBA tuple. If the string cannot be + parsed, this function raises a :py:exc:`ValueError` exception. + + .. 
versionadded:: 1.1.4 + + :param color: A color string + :return: ``(red, green, blue[, alpha])`` + """ + if len(color) > 100: + msg = "color specifier is too long" + raise ValueError(msg) + color = color.lower() + + rgb = colormap.get(color, None) + if rgb: + if isinstance(rgb, tuple): + return rgb + rgb_tuple = getrgb(rgb) + assert len(rgb_tuple) == 3 + colormap[color] = rgb_tuple + return rgb_tuple + + # check for known string formats + if re.match("#[a-f0-9]{3}$", color): + return int(color[1] * 2, 16), int(color[2] * 2, 16), int(color[3] * 2, 16) + + if re.match("#[a-f0-9]{4}$", color): + return ( + int(color[1] * 2, 16), + int(color[2] * 2, 16), + int(color[3] * 2, 16), + int(color[4] * 2, 16), + ) + + if re.match("#[a-f0-9]{6}$", color): + return int(color[1:3], 16), int(color[3:5], 16), int(color[5:7], 16) + + if re.match("#[a-f0-9]{8}$", color): + return ( + int(color[1:3], 16), + int(color[3:5], 16), + int(color[5:7], 16), + int(color[7:9], 16), + ) + + m = re.match(r"rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) + if m: + return int(m.group(1)), int(m.group(2)), int(m.group(3)) + + m = re.match(r"rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color) + if m: + return ( + int((int(m.group(1)) * 255) / 100.0 + 0.5), + int((int(m.group(2)) * 255) / 100.0 + 0.5), + int((int(m.group(3)) * 255) / 100.0 + 0.5), + ) + + m = re.match( + r"hsl\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color + ) + if m: + from colorsys import hls_to_rgb + + rgb_floats = hls_to_rgb( + float(m.group(1)) / 360.0, + float(m.group(3)) / 100.0, + float(m.group(2)) / 100.0, + ) + return ( + int(rgb_floats[0] * 255 + 0.5), + int(rgb_floats[1] * 255 + 0.5), + int(rgb_floats[2] * 255 + 0.5), + ) + + m = re.match( + r"hs[bv]\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color + ) + if m: + from colorsys import hsv_to_rgb + + rgb_floats = hsv_to_rgb( + float(m.group(1)) / 360.0, + float(m.group(2)) / 100.0, + float(m.group(3)) / 100.0, + ) + return ( + int(rgb_floats[0] * 255 + 0.5), + int(rgb_floats[1] * 255 + 0.5), + int(rgb_floats[2] * 255 + 0.5), + ) + + m = re.match(r"rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) + if m: + return int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4)) + msg = f"unknown color specifier: {repr(color)}" + raise ValueError(msg) + + +@lru_cache +def getcolor(color: str, mode: str) -> int | tuple[int, ...]: + """ + Same as :py:func:`~PIL.ImageColor.getrgb` for most modes. However, if + ``mode`` is HSV, converts the RGB value to a HSV value, or if ``mode`` is + not color or a palette image, converts the RGB value to a grayscale value. + If the string cannot be parsed, this function raises a :py:exc:`ValueError` + exception. + + .. versionadded:: 1.1.4 + + :param color: A color string + :param mode: Convert result to this mode + :return: ``graylevel, (graylevel, alpha) or (red, green, blue[, alpha])`` + """ + # same as getrgb, but converts the result to the given mode + rgb, alpha = getrgb(color), 255 + if len(rgb) == 4: + alpha = rgb[3] + rgb = rgb[:3] + + if mode == "HSV": + from colorsys import rgb_to_hsv + + r, g, b = rgb + h, s, v = rgb_to_hsv(r / 255, g / 255, b / 255) + return int(h * 255), int(s * 255), int(v * 255) + elif Image.getmodebase(mode) == "L": + r, g, b = rgb + # ITU-R Recommendation 601-2 for nonlinear RGB + # scaled to 24 bits to match the convert's implementation. 
+ graylevel = (r * 19595 + g * 38470 + b * 7471 + 0x8000) >> 16 + if mode[-1] == "A": + return graylevel, alpha + return graylevel + elif mode[-1] == "A": + return rgb + (alpha,) + return rgb + + +colormap: dict[str, str | tuple[int, int, int]] = { + # X11 colour table from https://drafts.csswg.org/css-color-4/, with + # gray/grey spelling issues fixed. This is a superset of HTML 4.0 + # colour names used in CSS 1. + "aliceblue": "#f0f8ff", + "antiquewhite": "#faebd7", + "aqua": "#00ffff", + "aquamarine": "#7fffd4", + "azure": "#f0ffff", + "beige": "#f5f5dc", + "bisque": "#ffe4c4", + "black": "#000000", + "blanchedalmond": "#ffebcd", + "blue": "#0000ff", + "blueviolet": "#8a2be2", + "brown": "#a52a2a", + "burlywood": "#deb887", + "cadetblue": "#5f9ea0", + "chartreuse": "#7fff00", + "chocolate": "#d2691e", + "coral": "#ff7f50", + "cornflowerblue": "#6495ed", + "cornsilk": "#fff8dc", + "crimson": "#dc143c", + "cyan": "#00ffff", + "darkblue": "#00008b", + "darkcyan": "#008b8b", + "darkgoldenrod": "#b8860b", + "darkgray": "#a9a9a9", + "darkgrey": "#a9a9a9", + "darkgreen": "#006400", + "darkkhaki": "#bdb76b", + "darkmagenta": "#8b008b", + "darkolivegreen": "#556b2f", + "darkorange": "#ff8c00", + "darkorchid": "#9932cc", + "darkred": "#8b0000", + "darksalmon": "#e9967a", + "darkseagreen": "#8fbc8f", + "darkslateblue": "#483d8b", + "darkslategray": "#2f4f4f", + "darkslategrey": "#2f4f4f", + "darkturquoise": "#00ced1", + "darkviolet": "#9400d3", + "deeppink": "#ff1493", + "deepskyblue": "#00bfff", + "dimgray": "#696969", + "dimgrey": "#696969", + "dodgerblue": "#1e90ff", + "firebrick": "#b22222", + "floralwhite": "#fffaf0", + "forestgreen": "#228b22", + "fuchsia": "#ff00ff", + "gainsboro": "#dcdcdc", + "ghostwhite": "#f8f8ff", + "gold": "#ffd700", + "goldenrod": "#daa520", + "gray": "#808080", + "grey": "#808080", + "green": "#008000", + "greenyellow": "#adff2f", + "honeydew": "#f0fff0", + "hotpink": "#ff69b4", + "indianred": "#cd5c5c", + "indigo": "#4b0082", + "ivory": "#fffff0", + "khaki": "#f0e68c", + "lavender": "#e6e6fa", + "lavenderblush": "#fff0f5", + "lawngreen": "#7cfc00", + "lemonchiffon": "#fffacd", + "lightblue": "#add8e6", + "lightcoral": "#f08080", + "lightcyan": "#e0ffff", + "lightgoldenrodyellow": "#fafad2", + "lightgreen": "#90ee90", + "lightgray": "#d3d3d3", + "lightgrey": "#d3d3d3", + "lightpink": "#ffb6c1", + "lightsalmon": "#ffa07a", + "lightseagreen": "#20b2aa", + "lightskyblue": "#87cefa", + "lightslategray": "#778899", + "lightslategrey": "#778899", + "lightsteelblue": "#b0c4de", + "lightyellow": "#ffffe0", + "lime": "#00ff00", + "limegreen": "#32cd32", + "linen": "#faf0e6", + "magenta": "#ff00ff", + "maroon": "#800000", + "mediumaquamarine": "#66cdaa", + "mediumblue": "#0000cd", + "mediumorchid": "#ba55d3", + "mediumpurple": "#9370db", + "mediumseagreen": "#3cb371", + "mediumslateblue": "#7b68ee", + "mediumspringgreen": "#00fa9a", + "mediumturquoise": "#48d1cc", + "mediumvioletred": "#c71585", + "midnightblue": "#191970", + "mintcream": "#f5fffa", + "mistyrose": "#ffe4e1", + "moccasin": "#ffe4b5", + "navajowhite": "#ffdead", + "navy": "#000080", + "oldlace": "#fdf5e6", + "olive": "#808000", + "olivedrab": "#6b8e23", + "orange": "#ffa500", + "orangered": "#ff4500", + "orchid": "#da70d6", + "palegoldenrod": "#eee8aa", + "palegreen": "#98fb98", + "paleturquoise": "#afeeee", + "palevioletred": "#db7093", + "papayawhip": "#ffefd5", + "peachpuff": "#ffdab9", + "peru": "#cd853f", + "pink": "#ffc0cb", + "plum": "#dda0dd", + "powderblue": "#b0e0e6", + "purple": "#800080", + 
"rebeccapurple": "#663399", + "red": "#ff0000", + "rosybrown": "#bc8f8f", + "royalblue": "#4169e1", + "saddlebrown": "#8b4513", + "salmon": "#fa8072", + "sandybrown": "#f4a460", + "seagreen": "#2e8b57", + "seashell": "#fff5ee", + "sienna": "#a0522d", + "silver": "#c0c0c0", + "skyblue": "#87ceeb", + "slateblue": "#6a5acd", + "slategray": "#708090", + "slategrey": "#708090", + "snow": "#fffafa", + "springgreen": "#00ff7f", + "steelblue": "#4682b4", + "tan": "#d2b48c", + "teal": "#008080", + "thistle": "#d8bfd8", + "tomato": "#ff6347", + "turquoise": "#40e0d0", + "violet": "#ee82ee", + "wheat": "#f5deb3", + "white": "#ffffff", + "whitesmoke": "#f5f5f5", + "yellow": "#ffff00", + "yellowgreen": "#9acd32", +} diff --git a/MLPY/Lib/site-packages/PIL/ImageDraw.py b/MLPY/Lib/site-packages/PIL/ImageDraw.py new file mode 100644 index 0000000000000000000000000000000000000000..b5a97edb545e5428bd396d68e4896ce02a2d3955 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageDraw.py @@ -0,0 +1,1206 @@ +# +# The Python Imaging Library +# $Id$ +# +# drawing interface operations +# +# History: +# 1996-04-13 fl Created (experimental) +# 1996-08-07 fl Filled polygons, ellipses. +# 1996-08-13 fl Added text support +# 1998-06-28 fl Handle I and F images +# 1998-12-29 fl Added arc; use arc primitive to draw ellipses +# 1999-01-10 fl Added shape stuff (experimental) +# 1999-02-06 fl Added bitmap support +# 1999-02-11 fl Changed all primitives to take options +# 1999-02-20 fl Fixed backwards compatibility +# 2000-10-12 fl Copy on write, when necessary +# 2001-02-18 fl Use default ink for bitmap/text also in fill mode +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-10 fl Added experimental support for RGBA-on-RGB drawing +# 2002-12-11 fl Refactored low-level drawing API (work in progress) +# 2004-08-26 fl Made Draw() a factory function, added getdraw() support +# 2004-09-04 fl Added width support to line primitive +# 2004-09-10 fl Added font mode handling +# 2006-06-19 fl Added font bearing support (getmask2) +# +# Copyright (c) 1997-2006 by Secret Labs AB +# Copyright (c) 1996-2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import math +import numbers +import struct +from types import ModuleType +from typing import TYPE_CHECKING, AnyStr, Callable, List, Sequence, Tuple, Union, cast + +from . import Image, ImageColor +from ._deprecate import deprecate +from ._typing import Coords + +# experimental access to the outline API +Outline: Callable[[], Image.core._Outline] | None +try: + Outline = Image.core.outline +except AttributeError: + Outline = None + +if TYPE_CHECKING: + from . import ImageDraw2, ImageFont + +_Ink = Union[float, Tuple[int, ...], str] + +""" +A simple 2D drawing interface for PIL images. +
+Application code should use the Draw factory, instead of +directly. +""" + + +class ImageDraw: + font: ( + ImageFont.ImageFont | ImageFont.FreeTypeFont | ImageFont.TransposedFont | None + ) = None + + def __init__(self, im: Image.Image, mode: str | None = None) -> None: + """ + Create a drawing instance. + + :param im: The image to draw in. + :param mode: Optional mode to use for color values. For RGB + images, this argument can be RGB or RGBA (to blend the + drawing into the image). For all other modes, this argument + must be the same as the image mode. If omitted, the mode + defaults to the mode of the image. + """ + im.load() + if im.readonly: + im._copy() # make it writeable + blend = 0 + if mode is None: + mode = im.mode + if mode != im.mode: + if mode == "RGBA" and im.mode == "RGB": + blend = 1 + else: + msg = "mode mismatch" + raise ValueError(msg) + if mode == "P": + self.palette = im.palette + else: + self.palette = None + self._image = im + self.im = im.im + self.draw = Image.core.draw(self.im, blend) + self.mode = mode + if mode in ("I", "F"): + self.ink = self.draw.draw_ink(1) + else: + self.ink = self.draw.draw_ink(-1) + if mode in ("1", "P", "I", "F"): + # FIXME: fix Fill2 to properly support matte for I+F images + self.fontmode = "1" + else: + self.fontmode = "L" # aliasing is okay for other modes + self.fill = False + + def getfont( + self, + ) -> ImageFont.ImageFont | ImageFont.FreeTypeFont | ImageFont.TransposedFont: + """ + Get the current default font. + + To set the default font for this ImageDraw instance:: + + from PIL import ImageDraw, ImageFont + draw.font = ImageFont.truetype("Tests/fonts/FreeMono.ttf") + + To set the default font for all future ImageDraw instances:: + + from PIL import ImageDraw, ImageFont + ImageDraw.ImageDraw.font = ImageFont.truetype("Tests/fonts/FreeMono.ttf") + + If the current default font is ``None``, + it is initialized with ``ImageFont.load_default()``. + + :returns: An image font.""" + if not self.font: + # FIXME: should add a font repository + from . import ImageFont + + self.font = ImageFont.load_default() + return self.font + + def _getfont( + self, font_size: float | None + ) -> ImageFont.ImageFont | ImageFont.FreeTypeFont | ImageFont.TransposedFont: + if font_size is not None: + from . 
import ImageFont + + return ImageFont.load_default(font_size) + else: + return self.getfont() + + def _getink( + self, ink: _Ink | None, fill: _Ink | None = None + ) -> tuple[int | None, int | None]: + result_ink = None + result_fill = None + if ink is None and fill is None: + if self.fill: + result_fill = self.ink + else: + result_ink = self.ink + else: + if ink is not None: + if isinstance(ink, str): + ink = ImageColor.getcolor(ink, self.mode) + if self.palette and not isinstance(ink, numbers.Number): + ink = self.palette.getcolor(ink, self._image) + result_ink = self.draw.draw_ink(ink) + if fill is not None: + if isinstance(fill, str): + fill = ImageColor.getcolor(fill, self.mode) + if self.palette and not isinstance(fill, numbers.Number): + fill = self.palette.getcolor(fill, self._image) + result_fill = self.draw.draw_ink(fill) + return result_ink, result_fill + + def arc( + self, + xy: Coords, + start: float, + end: float, + fill: _Ink | None = None, + width: int = 1, + ) -> None: + """Draw an arc.""" + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_arc(xy, start, end, ink, width) + + def bitmap( + self, xy: Sequence[int], bitmap: Image.Image, fill: _Ink | None = None + ) -> None: + """Draw a bitmap.""" + bitmap.load() + ink, fill = self._getink(fill) + if ink is None: + ink = fill + if ink is not None: + self.draw.draw_bitmap(xy, bitmap.im, ink) + + def chord( + self, + xy: Coords, + start: float, + end: float, + fill: _Ink | None = None, + outline: _Ink | None = None, + width: int = 1, + ) -> None: + """Draw a chord.""" + ink, fill_ink = self._getink(outline, fill) + if fill_ink is not None: + self.draw.draw_chord(xy, start, end, fill_ink, 1) + if ink is not None and ink != fill_ink and width != 0: + self.draw.draw_chord(xy, start, end, ink, 0, width) + + def ellipse( + self, + xy: Coords, + fill: _Ink | None = None, + outline: _Ink | None = None, + width: int = 1, + ) -> None: + """Draw an ellipse.""" + ink, fill_ink = self._getink(outline, fill) + if fill_ink is not None: + self.draw.draw_ellipse(xy, fill_ink, 1) + if ink is not None and ink != fill_ink and width != 0: + self.draw.draw_ellipse(xy, ink, 0, width) + + def circle( + self, + xy: Sequence[float], + radius: float, + fill: _Ink | None = None, + outline: _Ink | None = None, + width: int = 1, + ) -> None: + """Draw a circle given center coordinates and a radius.""" + ellipse_xy = (xy[0] - radius, xy[1] - radius, xy[0] + radius, xy[1] + radius) + self.ellipse(ellipse_xy, fill, outline, width) + + def line( + self, + xy: Coords, + fill: _Ink | None = None, + width: int = 0, + joint: str | None = None, + ) -> None: + """Draw a line, or a connected sequence of line segments.""" + ink = self._getink(fill)[0] + if ink is not None: + self.draw.draw_lines(xy, ink, width) + if joint == "curve" and width > 4: + points: Sequence[Sequence[float]] + if isinstance(xy[0], (list, tuple)): + points = cast(Sequence[Sequence[float]], xy) + else: + points = [ + cast(Sequence[float], tuple(xy[i : i + 2])) + for i in range(0, len(xy), 2) + ] + for i in range(1, len(points) - 1): + point = points[i] + angles = [ + math.degrees(math.atan2(end[0] - start[0], start[1] - end[1])) + % 360 + for start, end in ( + (points[i - 1], point), + (point, points[i + 1]), + ) + ] + if angles[0] == angles[1]: + # This is a straight line, so no joint is required + continue + + def coord_at_angle( + coord: Sequence[float], angle: float + ) -> tuple[float, ...]: + x, y = coord + angle -= 90 + distance = width / 2 - 1 + return tuple( + p + 
(math.floor(p_d) if p_d > 0 else math.ceil(p_d)) + for p, p_d in ( + (x, distance * math.cos(math.radians(angle))), + (y, distance * math.sin(math.radians(angle))), + ) + ) + + flipped = ( + angles[1] > angles[0] and angles[1] - 180 > angles[0] + ) or (angles[1] < angles[0] and angles[1] + 180 > angles[0]) + coords = [ + (point[0] - width / 2 + 1, point[1] - width / 2 + 1), + (point[0] + width / 2 - 1, point[1] + width / 2 - 1), + ] + if flipped: + start, end = (angles[1] + 90, angles[0] + 90) + else: + start, end = (angles[0] - 90, angles[1] - 90) + self.pieslice(coords, start - 90, end - 90, fill) + + if width > 8: + # Cover potential gaps between the line and the joint + if flipped: + gap_coords = [ + coord_at_angle(point, angles[0] + 90), + point, + coord_at_angle(point, angles[1] + 90), + ] + else: + gap_coords = [ + coord_at_angle(point, angles[0] - 90), + point, + coord_at_angle(point, angles[1] - 90), + ] + self.line(gap_coords, fill, width=3) + + def shape( + self, + shape: Image.core._Outline, + fill: _Ink | None = None, + outline: _Ink | None = None, + ) -> None: + """(Experimental) Draw a shape.""" + shape.close() + ink, fill_ink = self._getink(outline, fill) + if fill_ink is not None: + self.draw.draw_outline(shape, fill_ink, 1) + if ink is not None and ink != fill_ink: + self.draw.draw_outline(shape, ink, 0) + + def pieslice( + self, + xy: Coords, + start: float, + end: float, + fill: _Ink | None = None, + outline: _Ink | None = None, + width: int = 1, + ) -> None: + """Draw a pieslice.""" + ink, fill_ink = self._getink(outline, fill) + if fill_ink is not None: + self.draw.draw_pieslice(xy, start, end, fill_ink, 1) + if ink is not None and ink != fill_ink and width != 0: + self.draw.draw_pieslice(xy, start, end, ink, 0, width) + + def point(self, xy: Coords, fill: _Ink | None = None) -> None: + """Draw one or more individual pixels.""" + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_points(xy, ink) + + def polygon( + self, + xy: Coords, + fill: _Ink | None = None, + outline: _Ink | None = None, + width: int = 1, + ) -> None: + """Draw a polygon.""" + ink, fill_ink = self._getink(outline, fill) + if fill_ink is not None: + self.draw.draw_polygon(xy, fill_ink, 1) + if ink is not None and ink != fill_ink and width != 0: + if width == 1: + self.draw.draw_polygon(xy, ink, 0, width) + elif self.im is not None: + # To avoid expanding the polygon outwards, + # use the fill as a mask + mask = Image.new("1", self.im.size) + mask_ink = self._getink(1)[0] + + fill_im = mask.copy() + draw = Draw(fill_im) + draw.draw.draw_polygon(xy, mask_ink, 1) + + ink_im = mask.copy() + draw = Draw(ink_im) + width = width * 2 - 1 + draw.draw.draw_polygon(xy, mask_ink, 0, width) + + mask.paste(ink_im, mask=fill_im) + + im = Image.new(self.mode, self.im.size) + draw = Draw(im) + draw.draw.draw_polygon(xy, ink, 0, width) + self.im.paste(im.im, (0, 0) + im.size, mask.im) + + def regular_polygon( + self, + bounding_circle: Sequence[Sequence[float] | float], + n_sides: int, + rotation: float = 0, + fill: _Ink | None = None, + outline: _Ink | None = None, + width: int = 1, + ) -> None: + """Draw a regular polygon.""" + xy = _compute_regular_polygon_vertices(bounding_circle, n_sides, rotation) + self.polygon(xy, fill, outline, width) + + def rectangle( + self, + xy: Coords, + fill: _Ink | None = None, + outline: _Ink | None = None, + width: int = 1, + ) -> None: + """Draw a rectangle.""" + ink, fill_ink = self._getink(outline, fill) + if fill_ink is not None: + 
self.draw.draw_rectangle(xy, fill_ink, 1) + if ink is not None and ink != fill_ink and width != 0: + self.draw.draw_rectangle(xy, ink, 0, width) + + def rounded_rectangle( + self, + xy: Coords, + radius: float = 0, + fill: _Ink | None = None, + outline: _Ink | None = None, + width: int = 1, + *, + corners: tuple[bool, bool, bool, bool] | None = None, + ) -> None: + """Draw a rounded rectangle.""" + if isinstance(xy[0], (list, tuple)): + (x0, y0), (x1, y1) = cast(Sequence[Sequence[float]], xy) + else: + x0, y0, x1, y1 = cast(Sequence[float], xy) + if x1 < x0: + msg = "x1 must be greater than or equal to x0" + raise ValueError(msg) + if y1 < y0: + msg = "y1 must be greater than or equal to y0" + raise ValueError(msg) + if corners is None: + corners = (True, True, True, True) + + d = radius * 2 + + x0 = round(x0) + y0 = round(y0) + x1 = round(x1) + y1 = round(y1) + full_x, full_y = False, False + if all(corners): + full_x = d >= x1 - x0 - 1 + if full_x: + # The two left and two right corners are joined + d = x1 - x0 + full_y = d >= y1 - y0 - 1 + if full_y: + # The two top and two bottom corners are joined + d = y1 - y0 + if full_x and full_y: + # If all corners are joined, that is a circle + return self.ellipse(xy, fill, outline, width) + + if d == 0 or not any(corners): + # If the corners have no curve, + # or there are no corners, + # that is a rectangle + return self.rectangle(xy, fill, outline, width) + + r = int(d // 2) + ink, fill_ink = self._getink(outline, fill) + + def draw_corners(pieslice: bool) -> None: + parts: tuple[tuple[tuple[float, float, float, float], int, int], ...] + if full_x: + # Draw top and bottom halves + parts = ( + ((x0, y0, x0 + d, y0 + d), 180, 360), + ((x0, y1 - d, x0 + d, y1), 0, 180), + ) + elif full_y: + # Draw left and right halves + parts = ( + ((x0, y0, x0 + d, y0 + d), 90, 270), + ((x1 - d, y0, x1, y0 + d), 270, 90), + ) + else: + # Draw four separate corners + parts = tuple( + part + for i, part in enumerate( + ( + ((x0, y0, x0 + d, y0 + d), 180, 270), + ((x1 - d, y0, x1, y0 + d), 270, 360), + ((x1 - d, y1 - d, x1, y1), 0, 90), + ((x0, y1 - d, x0 + d, y1), 90, 180), + ) + ) + if corners[i] + ) + for part in parts: + if pieslice: + self.draw.draw_pieslice(*(part + (fill_ink, 1))) + else: + self.draw.draw_arc(*(part + (ink, width))) + + if fill_ink is not None: + draw_corners(True) + + if full_x: + self.draw.draw_rectangle((x0, y0 + r + 1, x1, y1 - r - 1), fill_ink, 1) + else: + self.draw.draw_rectangle((x0 + r + 1, y0, x1 - r - 1, y1), fill_ink, 1) + if not full_x and not full_y: + left = [x0, y0, x0 + r, y1] + if corners[0]: + left[1] += r + 1 + if corners[3]: + left[3] -= r + 1 + self.draw.draw_rectangle(left, fill_ink, 1) + + right = [x1 - r, y0, x1, y1] + if corners[1]: + right[1] += r + 1 + if corners[2]: + right[3] -= r + 1 + self.draw.draw_rectangle(right, fill_ink, 1) + if ink is not None and ink != fill_ink and width != 0: + draw_corners(False) + + if not full_x: + top = [x0, y0, x1, y0 + width - 1] + if corners[0]: + top[0] += r + 1 + if corners[1]: + top[2] -= r + 1 + self.draw.draw_rectangle(top, ink, 1) + + bottom = [x0, y1 - width + 1, x1, y1] + if corners[3]: + bottom[0] += r + 1 + if corners[2]: + bottom[2] -= r + 1 + self.draw.draw_rectangle(bottom, ink, 1) + if not full_y: + left = [x0, y0, x0 + width - 1, y1] + if corners[0]: + left[1] += r + 1 + if corners[3]: + left[3] -= r + 1 + self.draw.draw_rectangle(left, ink, 1) + + right = [x1 - width + 1, y0, x1, y1] + if corners[1]: + right[1] += r + 1 + if corners[2]: + right[3] -= r + 1 + 
self.draw.draw_rectangle(right, ink, 1) + + def _multiline_check(self, text: AnyStr) -> bool: + split_character = "\n" if isinstance(text, str) else b"\n" + + return split_character in text + + def _multiline_split(self, text: AnyStr) -> list[AnyStr]: + return text.split("\n" if isinstance(text, str) else b"\n") + + def _multiline_spacing(self, font, spacing, stroke_width): + return ( + self.textbbox((0, 0), "A", font, stroke_width=stroke_width)[3] + + stroke_width + + spacing + ) + + def text( + self, + xy: tuple[float, float], + text: str, + fill=None, + font: ( + ImageFont.ImageFont + | ImageFont.FreeTypeFont + | ImageFont.TransposedFont + | None + ) = None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + stroke_fill=None, + embedded_color=False, + *args, + **kwargs, + ) -> None: + """Draw text.""" + if embedded_color and self.mode not in ("RGB", "RGBA"): + msg = "Embedded color supported only in RGB and RGBA modes" + raise ValueError(msg) + + if font is None: + font = self._getfont(kwargs.get("font_size")) + + if self._multiline_check(text): + return self.multiline_text( + xy, + text, + fill, + font, + anchor, + spacing, + align, + direction, + features, + language, + stroke_width, + stroke_fill, + embedded_color, + ) + + def getink(fill: _Ink | None) -> int: + ink, fill_ink = self._getink(fill) + if ink is None: + assert fill_ink is not None + return fill_ink + return ink + + def draw_text(ink, stroke_width=0, stroke_offset=None) -> None: + mode = self.fontmode + if stroke_width == 0 and embedded_color: + mode = "RGBA" + coord = [] + start = [] + for i in range(2): + coord.append(int(xy[i])) + start.append(math.modf(xy[i])[0]) + try: + mask, offset = font.getmask2( # type: ignore[union-attr,misc] + text, + mode, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + anchor=anchor, + ink=ink, + start=start, + *args, + **kwargs, + ) + coord = [coord[0] + offset[0], coord[1] + offset[1]] + except AttributeError: + try: + mask = font.getmask( # type: ignore[misc] + text, + mode, + direction, + features, + language, + stroke_width, + anchor, + ink, + start=start, + *args, + **kwargs, + ) + except TypeError: + mask = font.getmask(text) + if stroke_offset: + coord = [coord[0] + stroke_offset[0], coord[1] + stroke_offset[1]] + if mode == "RGBA": + # font.getmask2(mode="RGBA") returns color in RGB bands and mask in A + # extract mask and set text alpha + color, mask = mask, mask.getband(3) + ink_alpha = struct.pack("i", ink)[3] + color.fillband(3, ink_alpha) + x, y = coord + if self.im is not None: + self.im.paste( + color, (x, y, x + mask.size[0], y + mask.size[1]), mask + ) + else: + self.draw.draw_bitmap(coord, mask, ink) + + ink = getink(fill) + if ink is not None: + stroke_ink = None + if stroke_width: + stroke_ink = getink(stroke_fill) if stroke_fill is not None else ink + + if stroke_ink is not None: + # Draw stroked text + draw_text(stroke_ink, stroke_width) + + # Draw normal text + draw_text(ink, 0) + else: + # Only draw normal text + draw_text(ink) + + def multiline_text( + self, + xy: tuple[float, float], + text: str, + fill=None, + font: ( + ImageFont.ImageFont + | ImageFont.FreeTypeFont + | ImageFont.TransposedFont + | None + ) = None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + stroke_fill=None, + embedded_color=False, + *, + font_size=None, + ) -> None: + if direction == "ttb": + msg = "ttb direction 
is unsupported for multiline text" + raise ValueError(msg) + + if anchor is None: + anchor = "la" + elif len(anchor) != 2: + msg = "anchor must be a 2 character string" + raise ValueError(msg) + elif anchor[1] in "tb": + msg = "anchor not supported for multiline text" + raise ValueError(msg) + + if font is None: + font = self._getfont(font_size) + + widths = [] + max_width: float = 0 + lines = self._multiline_split(text) + line_spacing = self._multiline_spacing(font, spacing, stroke_width) + for line in lines: + line_width = self.textlength( + line, font, direction=direction, features=features, language=language + ) + widths.append(line_width) + max_width = max(max_width, line_width) + + top = xy[1] + if anchor[1] == "m": + top -= (len(lines) - 1) * line_spacing / 2.0 + elif anchor[1] == "d": + top -= (len(lines) - 1) * line_spacing + + for idx, line in enumerate(lines): + left = xy[0] + width_difference = max_width - widths[idx] + + # first align left by anchor + if anchor[0] == "m": + left -= width_difference / 2.0 + elif anchor[0] == "r": + left -= width_difference + + # then align by align parameter + if align == "left": + pass + elif align == "center": + left += width_difference / 2.0 + elif align == "right": + left += width_difference + else: + msg = 'align must be "left", "center" or "right"' + raise ValueError(msg) + + self.text( + (left, top), + line, + fill, + font, + anchor, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + stroke_fill=stroke_fill, + embedded_color=embedded_color, + ) + top += line_spacing + + def textlength( + self, + text: str, + font: ( + ImageFont.ImageFont + | ImageFont.FreeTypeFont + | ImageFont.TransposedFont + | None + ) = None, + direction=None, + features=None, + language=None, + embedded_color=False, + *, + font_size=None, + ) -> float: + """Get the length of a given string, in pixels with 1/64 precision.""" + if self._multiline_check(text): + msg = "can't measure length of multiline text" + raise ValueError(msg) + if embedded_color and self.mode not in ("RGB", "RGBA"): + msg = "Embedded color supported only in RGB and RGBA modes" + raise ValueError(msg) + + if font is None: + font = self._getfont(font_size) + mode = "RGBA" if embedded_color else self.fontmode + return font.getlength(text, mode, direction, features, language) + + def textbbox( + self, + xy, + text, + font=None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + embedded_color=False, + *, + font_size=None, + ) -> tuple[int, int, int, int]: + """Get the bounding box of a given string, in pixels.""" + if embedded_color and self.mode not in ("RGB", "RGBA"): + msg = "Embedded color supported only in RGB and RGBA modes" + raise ValueError(msg) + + if font is None: + font = self._getfont(font_size) + + if self._multiline_check(text): + return self.multiline_textbbox( + xy, + text, + font, + anchor, + spacing, + align, + direction, + features, + language, + stroke_width, + embedded_color, + ) + + mode = "RGBA" if embedded_color else self.fontmode + bbox = font.getbbox( + text, mode, direction, features, language, stroke_width, anchor + ) + return bbox[0] + xy[0], bbox[1] + xy[1], bbox[2] + xy[0], bbox[3] + xy[1] + + def multiline_textbbox( + self, + xy, + text, + font=None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + embedded_color=False, + *, + font_size=None, + ) -> tuple[int, int, int, int]: + if direction == 
"ttb": + msg = "ttb direction is unsupported for multiline text" + raise ValueError(msg) + + if anchor is None: + anchor = "la" + elif len(anchor) != 2: + msg = "anchor must be a 2 character string" + raise ValueError(msg) + elif anchor[1] in "tb": + msg = "anchor not supported for multiline text" + raise ValueError(msg) + + if font is None: + font = self._getfont(font_size) + + widths = [] + max_width: float = 0 + lines = self._multiline_split(text) + line_spacing = self._multiline_spacing(font, spacing, stroke_width) + for line in lines: + line_width = self.textlength( + line, + font, + direction=direction, + features=features, + language=language, + embedded_color=embedded_color, + ) + widths.append(line_width) + max_width = max(max_width, line_width) + + top = xy[1] + if anchor[1] == "m": + top -= (len(lines) - 1) * line_spacing / 2.0 + elif anchor[1] == "d": + top -= (len(lines) - 1) * line_spacing + + bbox: tuple[int, int, int, int] | None = None + + for idx, line in enumerate(lines): + left = xy[0] + width_difference = max_width - widths[idx] + + # first align left by anchor + if anchor[0] == "m": + left -= width_difference / 2.0 + elif anchor[0] == "r": + left -= width_difference + + # then align by align parameter + if align == "left": + pass + elif align == "center": + left += width_difference / 2.0 + elif align == "right": + left += width_difference + else: + msg = 'align must be "left", "center" or "right"' + raise ValueError(msg) + + bbox_line = self.textbbox( + (left, top), + line, + font, + anchor, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + embedded_color=embedded_color, + ) + if bbox is None: + bbox = bbox_line + else: + bbox = ( + min(bbox[0], bbox_line[0]), + min(bbox[1], bbox_line[1]), + max(bbox[2], bbox_line[2]), + max(bbox[3], bbox_line[3]), + ) + + top += line_spacing + + if bbox is None: + return xy[0], xy[1], xy[0], xy[1] + return bbox + + +def Draw(im: Image.Image, mode: str | None = None) -> ImageDraw: + """ + A simple 2D drawing interface for PIL images. + + :param im: The image to draw in. + :param mode: Optional mode to use for color values. For RGB + images, this argument can be RGB or RGBA (to blend the + drawing into the image). For all other modes, this argument + must be the same as the image mode. If omitted, the mode + defaults to the mode of the image. + """ + try: + return getattr(im, "getdraw")(mode) + except AttributeError: + return ImageDraw(im, mode) + + +def getdraw( + im: Image.Image | None = None, hints: list[str] | None = None +) -> tuple[ImageDraw2.Draw | None, ModuleType]: + """ + :param im: The image to draw in. + :param hints: An optional list of hints. Deprecated. + :returns: A (drawing context, drawing resource factory) tuple. + """ + if hints is not None: + deprecate("'hints' parameter", 12) + from . import ImageDraw2 + + draw = ImageDraw2.Draw(im) if im is not None else None + return draw, ImageDraw2 + + +def floodfill( + image: Image.Image, + xy: tuple[int, int], + value: float | tuple[int, ...], + border: float | tuple[int, ...] | None = None, + thresh: float = 0, +) -> None: + """ + .. warning:: This method is experimental. + + Fills a bounded region with a given color. + + :param image: Target image. + :param xy: Seed position (a 2-item coordinate tuple). See + :ref:`coordinate-system`. + :param value: Fill color. + :param border: Optional border value. If given, the region consists of + pixels with a color different from the border color. 
If not given, + the region consists of pixels having the same color as the seed + pixel. + :param thresh: Optional threshold value which specifies a maximum + tolerable difference of a pixel value from the 'background' in + order for it to be replaced. Useful for filling regions of + non-homogeneous, but similar, colors. + """ + # based on an implementation by Eric S. Raymond + # amended by yo1995 @20180806 + pixel = image.load() + assert pixel is not None + x, y = xy + try: + background = pixel[x, y] + if _color_diff(value, background) <= thresh: + return # seed point already has fill color + pixel[x, y] = value + except (ValueError, IndexError): + return # seed point outside image + edge = {(x, y)} + # use a set to keep record of current and previous edge pixels + # to reduce memory consumption + full_edge = set() + while edge: + new_edge = set() + for x, y in edge: # 4 adjacent method + for s, t in ((x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)): + # If already processed, or if a coordinate is negative, skip + if (s, t) in full_edge or s < 0 or t < 0: + continue + try: + p = pixel[s, t] + except (ValueError, IndexError): + pass + else: + full_edge.add((s, t)) + if border is None: + fill = _color_diff(p, background) <= thresh + else: + fill = p not in (value, border) + if fill: + pixel[s, t] = value + new_edge.add((s, t)) + full_edge = edge # discard pixels processed + edge = new_edge + + +def _compute_regular_polygon_vertices( + bounding_circle: Sequence[Sequence[float] | float], n_sides: int, rotation: float +) -> list[tuple[float, float]]: + """ + Generate a list of vertices for a 2D regular polygon. + + :param bounding_circle: The bounding circle is a sequence defined + by a point and radius. The polygon is inscribed in this circle. + (e.g. ``bounding_circle=(x, y, r)`` or ``((x, y), r)``) + :param n_sides: Number of sides + (e.g. ``n_sides=3`` for a triangle, ``6`` for a hexagon) + :param rotation: Apply an arbitrary rotation to the polygon + (e.g. ``rotation=90``, applies a 90 degree rotation) + :return: List of regular polygon vertices + (e.g. ``[(25, 50), (50, 50), (50, 25), (25, 25)]``) + + How are the vertices computed? + 1. Compute the following variables + - theta: Angle between the apothem & the nearest polygon vertex + - side_length: Length of each polygon edge + - centroid: Center of bounding circle (1st, 2nd elements of bounding_circle) + - polygon_radius: Polygon radius (last element of bounding_circle) + - angles: Location of each polygon vertex in polar grid + (e.g. A square with 0 degree rotation => [225.0, 315.0, 45.0, 135.0]) + + 2. For each angle in angles, get the polygon vertex at that angle + The vertex is computed using the equation below. + X= xcos(φ) + ysin(φ) + Y= −xsin(φ) + ycos(φ) + + Note: + φ = angle in degrees + x = 0 + y = polygon_radius + + The formula above assumes rotation around the origin. + In our case, we are rotating around the centroid. + To account for this, we use the formula below + X = xcos(φ) + ysin(φ) + centroid_x + Y = −xsin(φ) + ycos(φ) + centroid_y + """ + # 1. 
Error Handling + # 1.1 Check `n_sides` has an appropriate value + if not isinstance(n_sides, int): + msg = "n_sides should be an int" # type: ignore[unreachable] + raise TypeError(msg) + if n_sides < 3: + msg = "n_sides should be an int > 2" + raise ValueError(msg) + + # 1.2 Check `bounding_circle` has an appropriate value + if not isinstance(bounding_circle, (list, tuple)): + msg = "bounding_circle should be a sequence" + raise TypeError(msg) + + if len(bounding_circle) == 3: + if not all(isinstance(i, (int, float)) for i in bounding_circle): + msg = "bounding_circle should only contain numeric data" + raise ValueError(msg) + + *centroid, polygon_radius = cast(List[float], list(bounding_circle)) + elif len(bounding_circle) == 2 and isinstance(bounding_circle[0], (list, tuple)): + if not all( + isinstance(i, (int, float)) for i in bounding_circle[0] + ) or not isinstance(bounding_circle[1], (int, float)): + msg = "bounding_circle should only contain numeric data" + raise ValueError(msg) + + if len(bounding_circle[0]) != 2: + msg = "bounding_circle centre should contain 2D coordinates (e.g. (x, y))" + raise ValueError(msg) + + centroid = cast(List[float], list(bounding_circle[0])) + polygon_radius = cast(float, bounding_circle[1]) + else: + msg = ( + "bounding_circle should contain 2D coordinates " + "and a radius (e.g. (x, y, r) or ((x, y), r) )" + ) + raise ValueError(msg) + + if polygon_radius <= 0: + msg = "bounding_circle radius should be > 0" + raise ValueError(msg) + + # 1.3 Check `rotation` has an appropriate value + if not isinstance(rotation, (int, float)): + msg = "rotation should be an int or float" # type: ignore[unreachable] + raise ValueError(msg) + + # 2. Define Helper Functions + def _apply_rotation(point: list[float], degrees: float) -> tuple[float, float]: + return ( + round( + point[0] * math.cos(math.radians(360 - degrees)) + - point[1] * math.sin(math.radians(360 - degrees)) + + centroid[0], + 2, + ), + round( + point[1] * math.cos(math.radians(360 - degrees)) + + point[0] * math.sin(math.radians(360 - degrees)) + + centroid[1], + 2, + ), + ) + + def _compute_polygon_vertex(angle: float) -> tuple[float, float]: + start_point = [polygon_radius, 0] + return _apply_rotation(start_point, angle) + + def _get_angles(n_sides: int, rotation: float) -> list[float]: + angles = [] + degrees = 360 / n_sides + # Start with the bottom left polygon vertex + current_angle = (270 - 0.5 * degrees) + rotation + for _ in range(0, n_sides): + angles.append(current_angle) + current_angle += degrees + if current_angle > 360: + current_angle -= 360 + return angles + + # 3. Variable Declarations + angles = _get_angles(n_sides, rotation) + + # 4. Compute Vertices + return [_compute_polygon_vertex(angle) for angle in angles] + + +def _color_diff( + color1: float | tuple[int, ...], color2: float | tuple[int, ...] +) -> float: + """ + Uses 1-norm distance to calculate difference between two values. 
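+
+    For example, comparing the RGB triples ``(255, 0, 0)`` and ``(250, 10, 0)``
+    gives ``abs(255 - 250) + abs(0 - 10) + abs(0 - 0) == 15``.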
+ """ + first = color1 if isinstance(color1, tuple) else (color1,) + second = color2 if isinstance(color2, tuple) else (color2,) + + return sum(abs(first[i] - second[i]) for i in range(0, len(second))) diff --git a/MLPY/Lib/site-packages/PIL/ImageDraw2.py b/MLPY/Lib/site-packages/PIL/ImageDraw2.py new file mode 100644 index 0000000000000000000000000000000000000000..b88cc315c4ead0e33144e34105c641d0ae861258 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageDraw2.py @@ -0,0 +1,206 @@ +# +# The Python Imaging Library +# $Id$ +# +# WCK-style drawing interface operations +# +# History: +# 2003-12-07 fl created +# 2005-05-15 fl updated; added to PIL as ImageDraw2 +# 2005-05-15 fl added text support +# 2005-05-20 fl added arc/chord/pieslice support +# +# Copyright (c) 2003-2005 by Secret Labs AB +# Copyright (c) 2003-2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +""" +(Experimental) WCK-style drawing interface operations + +.. seealso:: :py:mod:`PIL.ImageDraw` +""" +from __future__ import annotations + +from typing import BinaryIO + +from . import Image, ImageColor, ImageDraw, ImageFont, ImagePath +from ._typing import StrOrBytesPath + + +class Pen: + """Stores an outline color and width.""" + + def __init__(self, color: str, width: int = 1, opacity: int = 255) -> None: + self.color = ImageColor.getrgb(color) + self.width = width + + +class Brush: + """Stores a fill color""" + + def __init__(self, color: str, opacity: int = 255) -> None: + self.color = ImageColor.getrgb(color) + + +class Font: + """Stores a TrueType font and color""" + + def __init__( + self, color: str, file: StrOrBytesPath | BinaryIO, size: float = 12 + ) -> None: + # FIXME: add support for bitmap fonts + self.color = ImageColor.getrgb(color) + self.font = ImageFont.truetype(file, size) + + +class Draw: + """ + (Experimental) WCK-style drawing interface + """ + + def __init__( + self, + image: Image.Image | str, + size: tuple[int, int] | list[int] | None = None, + color: float | tuple[float, ...] | str | None = None, + ) -> None: + if isinstance(image, str): + if size is None: + msg = "If image argument is mode string, size must be a list or tuple" + raise ValueError(msg) + image = Image.new(image, size, color) + self.draw = ImageDraw.Draw(image) + self.image = image + self.transform = None + + def flush(self) -> Image.Image: + return self.image + + def render(self, op, xy, pen, brush=None): + # handle color arguments + outline = fill = None + width = 1 + if isinstance(pen, Pen): + outline = pen.color + width = pen.width + elif isinstance(brush, Pen): + outline = brush.color + width = brush.width + if isinstance(brush, Brush): + fill = brush.color + elif isinstance(pen, Brush): + fill = pen.color + # handle transformation + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + # render the item + if op == "line": + self.draw.line(xy, fill=outline, width=width) + else: + getattr(self.draw, op)(xy, fill=fill, outline=outline) + + def settransform(self, offset): + """Sets a transformation offset.""" + (xoffset, yoffset) = offset + self.transform = (1, 0, xoffset, 0, 1, yoffset) + + def arc(self, xy, start, end, *options): + """ + Draws an arc (a portion of a circle outline) between the start and end + angles, inside the given bounding box. + + .. 
seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.arc` + """ + self.render("arc", xy, start, end, *options) + + def chord(self, xy, start, end, *options): + """ + Same as :py:meth:`~PIL.ImageDraw2.Draw.arc`, but connects the end points + with a straight line. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.chord` + """ + self.render("chord", xy, start, end, *options) + + def ellipse(self, xy, *options): + """ + Draws an ellipse inside the given bounding box. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.ellipse` + """ + self.render("ellipse", xy, *options) + + def line(self, xy, *options): + """ + Draws a line between the coordinates in the ``xy`` list. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.line` + """ + self.render("line", xy, *options) + + def pieslice(self, xy, start, end, *options): + """ + Same as arc, but also draws straight lines between the end points and the + center of the bounding box. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.pieslice` + """ + self.render("pieslice", xy, start, end, *options) + + def polygon(self, xy, *options): + """ + Draws a polygon. + + The polygon outline consists of straight lines between the given + coordinates, plus a straight line between the last and the first + coordinate. + + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.polygon` + """ + self.render("polygon", xy, *options) + + def rectangle(self, xy, *options): + """ + Draws a rectangle. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.rectangle` + """ + self.render("rectangle", xy, *options) + + def text(self, xy, text, font): + """ + Draws the string at the given position. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.text` + """ + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + self.draw.text(xy, text, font=font.font, fill=font.color) + + def textbbox(self, xy, text, font): + """ + Returns bounding box (in pixels) of given text. + + :return: ``(left, top, right, bottom)`` bounding box + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textbbox` + """ + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + return self.draw.textbbox(xy, text, font=font.font) + + def textlength(self, text, font): + """ + Returns length (in pixels) of given text. + This is the amount by which following text should be offset. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textlength` + """ + return self.draw.textlength(text, font=font.font) diff --git a/MLPY/Lib/site-packages/PIL/ImageEnhance.py b/MLPY/Lib/site-packages/PIL/ImageEnhance.py new file mode 100644 index 0000000000000000000000000000000000000000..39818dc2935e198be9e54eeb74511a1c48c0cefa --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageEnhance.py @@ -0,0 +1,107 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image enhancement classes +# +# For a background, see "Image Processing By Interpolation and +# Extrapolation", Paul Haeberli and Douglas Voorhies. Available +# at http://www.graficaobscura.com/interp/index.html +# +# History: +# 1996-03-23 fl Created +# 2009-06-16 fl Fixed mean calculation +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from . import Image, ImageFilter, ImageStat + + +class _Enhance: + image: Image.Image + degenerate: Image.Image + + def enhance(self, factor: float) -> Image.Image: + """ + Returns an enhanced image. + + :param factor: A floating point value controlling the enhancement. 
+ Factor 1.0 always returns a copy of the original image, + lower factors mean less color (brightness, contrast, + etc), and higher values more. There are no restrictions + on this value. + :rtype: :py:class:`~PIL.Image.Image` + """ + return Image.blend(self.degenerate, self.image, factor) + + +class Color(_Enhance): + """Adjust image color balance. + + This class can be used to adjust the colour balance of an image, in + a manner similar to the controls on a colour TV set. An enhancement + factor of 0.0 gives a black and white image. A factor of 1.0 gives + the original image. + """ + + def __init__(self, image: Image.Image) -> None: + self.image = image + self.intermediate_mode = "L" + if "A" in image.getbands(): + self.intermediate_mode = "LA" + + self.degenerate = image.convert(self.intermediate_mode).convert(image.mode) + + +class Contrast(_Enhance): + """Adjust image contrast. + + This class can be used to control the contrast of an image, similar + to the contrast control on a TV set. An enhancement factor of 0.0 + gives a solid gray image. A factor of 1.0 gives the original image. + """ + + def __init__(self, image: Image.Image) -> None: + self.image = image + mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5) + self.degenerate = Image.new("L", image.size, mean).convert(image.mode) + + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) + + +class Brightness(_Enhance): + """Adjust image brightness. + + This class can be used to control the brightness of an image. An + enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the + original image. + """ + + def __init__(self, image: Image.Image) -> None: + self.image = image + self.degenerate = Image.new(image.mode, image.size, 0) + + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) + + +class Sharpness(_Enhance): + """Adjust image sharpness. + + This class can be used to adjust the sharpness of an image. An + enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the + original image, and a factor of 2.0 gives a sharpened image. + """ + + def __init__(self, image: Image.Image) -> None: + self.image = image + self.degenerate = image.filter(ImageFilter.SMOOTH) + + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) diff --git a/MLPY/Lib/site-packages/PIL/ImageFile.py b/MLPY/Lib/site-packages/PIL/ImageFile.py new file mode 100644 index 0000000000000000000000000000000000000000..6857868be8b384476211f1a5236df410d3fd16c7 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageFile.py @@ -0,0 +1,810 @@ +# +# The Python Imaging Library. +# $Id$ +# +# base class for image file handlers +# +# history: +# 1995-09-09 fl Created +# 1996-03-11 fl Fixed load mechanism. +# 1996-04-15 fl Added pcx/xbm decoders. +# 1996-04-30 fl Added encoders. 
+# 1996-12-14 fl Added load helpers +# 1997-01-11 fl Use encode_to_file where possible +# 1997-08-27 fl Flush output in _save +# 1998-03-05 fl Use memory mapping for some modes +# 1999-02-04 fl Use memory mapping also for "I;16" and "I;16B" +# 1999-05-31 fl Added image parser +# 2000-10-12 fl Set readonly flag on memory-mapped images +# 2002-03-20 fl Use better messages for common decoder errors +# 2003-04-21 fl Fall back on mmap/map_buffer if map is not available +# 2003-10-30 fl Added StubImageFile class +# 2004-02-25 fl Made incremental parser more robust +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import abc +import io +import itertools +import struct +import sys +from typing import IO, Any, NamedTuple + +from . import Image +from ._deprecate import deprecate +from ._util import is_path + +MAXBLOCK = 65536 + +SAFEBLOCK = 1024 * 1024 + +LOAD_TRUNCATED_IMAGES = False +"""Whether or not to load truncated image files. User code may change this.""" + +ERRORS = { + -1: "image buffer overrun error", + -2: "decoding error", + -3: "unknown error", + -8: "bad configuration", + -9: "out of memory error", +} +""" +Dict of known error codes returned from :meth:`.PyDecoder.decode`, +:meth:`.PyEncoder.encode` :meth:`.PyEncoder.encode_to_pyfd` and +:meth:`.PyEncoder.encode_to_file`. +""" + + +# +# -------------------------------------------------------------------- +# Helpers + + +def _get_oserror(error: int, *, encoder: bool) -> OSError: + try: + msg = Image.core.getcodecstatus(error) + except AttributeError: + msg = ERRORS.get(error) + if not msg: + msg = f"{'encoder' if encoder else 'decoder'} error {error}" + msg += f" when {'writing' if encoder else 'reading'} image file" + return OSError(msg) + + +def raise_oserror(error: int) -> OSError: + deprecate( + "raise_oserror", + 12, + action="It is only useful for translating error codes returned by a codec's " + "decode() method, which ImageFile already does automatically.", + ) + raise _get_oserror(error, encoder=False) + + +def _tilesort(t): + # sort on offset + return t[2] + + +class _Tile(NamedTuple): + codec_name: str + extents: tuple[int, int, int, int] + offset: int + args: tuple[Any, ...] 
| str | None + + +# +# -------------------------------------------------------------------- +# ImageFile base class + + +class ImageFile(Image.Image): + """Base class for image file format handlers.""" + + def __init__(self, fp=None, filename=None): + super().__init__() + + self._min_frame = 0 + + self.custom_mimetype = None + + self.tile = None + """ A list of tile descriptors, or ``None`` """ + + self.readonly = 1 # until we know better + + self.decoderconfig = () + self.decodermaxblock = MAXBLOCK + + if is_path(fp): + # filename + self.fp = open(fp, "rb") + self.filename = fp + self._exclusive_fp = True + else: + # stream + self.fp = fp + self.filename = filename + # can be overridden + self._exclusive_fp = None + + try: + try: + self._open() + except ( + IndexError, # end of data + TypeError, # end of data (ord) + KeyError, # unsupported mode + EOFError, # got header but not the first frame + struct.error, + ) as v: + raise SyntaxError(v) from v + + if not self.mode or self.size[0] <= 0 or self.size[1] <= 0: + msg = "not identified by this driver" + raise SyntaxError(msg) + except BaseException: + # close the file only if we have opened it this constructor + if self._exclusive_fp: + self.fp.close() + raise + + def get_format_mimetype(self) -> str | None: + if self.custom_mimetype: + return self.custom_mimetype + if self.format is not None: + return Image.MIME.get(self.format.upper()) + return None + + def __setstate__(self, state): + self.tile = [] + super().__setstate__(state) + + def verify(self) -> None: + """Check file integrity""" + + # raise exception if something's wrong. must be called + # directly after open, and closes file when finished. + if self._exclusive_fp: + self.fp.close() + self.fp = None + + def load(self): + """Load image data based on tile list""" + + if self.tile is None: + msg = "cannot load this image" + raise OSError(msg) + + pixel = Image.Image.load(self) + if not self.tile: + return pixel + + self.map = None + use_mmap = self.filename and len(self.tile) == 1 + # As of pypy 2.1.0, memory mapping was failing here. + use_mmap = use_mmap and not hasattr(sys, "pypy_version_info") + + readonly = 0 + + # look for read/seek overrides + try: + read = self.load_read + # don't use mmap if there are custom read/seek functions + use_mmap = False + except AttributeError: + read = self.fp.read + + try: + seek = self.load_seek + use_mmap = False + except AttributeError: + seek = self.fp.seek + + if use_mmap: + # try memory mapping + decoder_name, extents, offset, args = self.tile[0] + if isinstance(args, str): + args = (args, 0, 1) + if ( + decoder_name == "raw" + and len(args) >= 3 + and args[0] == self.mode + and args[0] in Image._MAPMODES + ): + try: + # use mmap, if possible + import mmap + + with open(self.filename) as fp: + self.map = mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ) + if offset + self.size[1] * args[1] > self.map.size(): + msg = "buffer is not large enough" + raise OSError(msg) + self.im = Image.core.map_buffer( + self.map, self.size, decoder_name, offset, args + ) + readonly = 1 + # After trashing self.im, + # we might need to reload the palette data. + if self.palette: + self.palette.dirty = 1 + except (AttributeError, OSError, ImportError): + self.map = None + + self.load_prepare() + err_code = -3 # initialize to unknown error + if not self.map: + # sort tiles in file order + self.tile.sort(key=_tilesort) + + try: + # FIXME: This is a hack to handle TIFF's JpegTables tag. 
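+                # When a plugin defines tile_prefix, those bytes are prepended
+                # to the first block of file data handed to the decoder below.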
+ prefix = self.tile_prefix + except AttributeError: + prefix = b"" + + # Remove consecutive duplicates that only differ by their offset + self.tile = [ + list(tiles)[-1] + for _, tiles in itertools.groupby( + self.tile, lambda tile: (tile[0], tile[1], tile[3]) + ) + ] + for decoder_name, extents, offset, args in self.tile: + seek(offset) + decoder = Image._getdecoder( + self.mode, decoder_name, args, self.decoderconfig + ) + try: + decoder.setimage(self.im, extents) + if decoder.pulls_fd: + decoder.setfd(self.fp) + err_code = decoder.decode(b"")[1] + else: + b = prefix + while True: + try: + s = read(self.decodermaxblock) + except (IndexError, struct.error) as e: + # truncated png/gif + if LOAD_TRUNCATED_IMAGES: + break + else: + msg = "image file is truncated" + raise OSError(msg) from e + + if not s: # truncated jpeg + if LOAD_TRUNCATED_IMAGES: + break + else: + msg = ( + "image file is truncated " + f"({len(b)} bytes not processed)" + ) + raise OSError(msg) + + b = b + s + n, err_code = decoder.decode(b) + if n < 0: + break + b = b[n:] + finally: + # Need to cleanup here to prevent leaks + decoder.cleanup() + + self.tile = [] + self.readonly = readonly + + self.load_end() + + if self._exclusive_fp and self._close_exclusive_fp_after_loading: + self.fp.close() + self.fp = None + + if not self.map and not LOAD_TRUNCATED_IMAGES and err_code < 0: + # still raised if decoder fails to return anything + raise _get_oserror(err_code, encoder=False) + + return Image.Image.load(self) + + def load_prepare(self) -> None: + # create image memory if necessary + if not self.im or self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.new(self.mode, self.size) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + def load_end(self) -> None: + # may be overridden + pass + + # may be defined for contained formats + # def load_seek(self, pos: int) -> None: + # pass + + # may be defined for blocked formats (e.g. PNG) + # def load_read(self, read_bytes: int) -> bytes: + # pass + + def _seek_check(self, frame): + if ( + frame < self._min_frame + # Only check upper limit on frames if additional seek operations + # are not required to do so + or ( + not (hasattr(self, "_n_frames") and self._n_frames is None) + and frame >= self.n_frames + self._min_frame + ) + ): + msg = "attempt to seek outside sequence" + raise EOFError(msg) + + return self.tell() != frame + + +class StubHandler: + def open(self, im: StubImageFile) -> None: + pass + + @abc.abstractmethod + def load(self, im: StubImageFile) -> Image.Image: + pass + + +class StubImageFile(ImageFile): + """ + Base class for stub image loaders. + + A stub loader is an image loader that can identify files of a + certain format, but relies on external code to load the file. + """ + + def _open(self) -> None: + msg = "StubImageFile subclass must implement _open" + raise NotImplementedError(msg) + + def load(self): + loader = self._load() + if loader is None: + msg = f"cannot find loader for this {self.format} file" + raise OSError(msg) + image = loader.load(self) + assert image is not None + # become the other object (!) + self.__class__ = image.__class__ + self.__dict__ = image.__dict__ + return image.load() + + def _load(self) -> StubHandler | None: + """(Hook) Find actual image loader.""" + msg = "StubImageFile subclass must implement _load" + raise NotImplementedError(msg) + + +class Parser: + """ + Incremental image parser. This class implements the standard + feed/close consumer interface. 
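+
+    A minimal usage sketch; ``fp`` is assumed to be a file-like object opened
+    for binary reading::
+
+        parser = ImageFile.Parser()
+        while True:
+            chunk = fp.read(1024)
+            if not chunk:
+                break
+            parser.feed(chunk)
+        im = parser.close()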
+ """ + + incremental = None + image: Image.Image | None = None + data = None + decoder = None + offset = 0 + finished = 0 + + def reset(self) -> None: + """ + (Consumer) Reset the parser. Note that you can only call this + method immediately after you've created a parser; parser + instances cannot be reused. + """ + assert self.data is None, "cannot reuse parsers" + + def feed(self, data): + """ + (Consumer) Feed data to the parser. + + :param data: A string buffer. + :exception OSError: If the parser failed to parse the image file. + """ + # collect data + + if self.finished: + return + + if self.data is None: + self.data = data + else: + self.data = self.data + data + + # parse what we have + if self.decoder: + if self.offset > 0: + # skip header + skip = min(len(self.data), self.offset) + self.data = self.data[skip:] + self.offset = self.offset - skip + if self.offset > 0 or not self.data: + return + + n, e = self.decoder.decode(self.data) + + if n < 0: + # end of stream + self.data = None + self.finished = 1 + if e < 0: + # decoding error + self.image = None + raise _get_oserror(e, encoder=False) + else: + # end of image + return + self.data = self.data[n:] + + elif self.image: + # if we end up here with no decoder, this file cannot + # be incrementally parsed. wait until we've gotten all + # available data + pass + + else: + # attempt to open this file + try: + with io.BytesIO(self.data) as fp: + im = Image.open(fp) + except OSError: + pass # not enough data + else: + flag = hasattr(im, "load_seek") or hasattr(im, "load_read") + if flag or len(im.tile) != 1: + # custom load code, or multiple tiles + self.decode = None + else: + # initialize decoder + im.load_prepare() + d, e, o, a = im.tile[0] + im.tile = [] + self.decoder = Image._getdecoder(im.mode, d, a, im.decoderconfig) + self.decoder.setimage(im.im, e) + + # calculate decoder offset + self.offset = o + if self.offset <= len(self.data): + self.data = self.data[self.offset :] + self.offset = 0 + + self.image = im + + def __enter__(self): + return self + + def __exit__(self, *args: object) -> None: + self.close() + + def close(self): + """ + (Consumer) Close the stream. + + :returns: An image object. + :exception OSError: If the parser failed to parse the image file either + because it cannot be identified or cannot be + decoded. + """ + # finish decoding + if self.decoder: + # get rid of what's left in the buffers + self.feed(b"") + self.data = self.decoder = None + if not self.finished: + msg = "image was incomplete" + raise OSError(msg) + if not self.image: + msg = "cannot parse this image" + raise OSError(msg) + if self.data: + # incremental parsing not possible; reopen the file + # not that we have all data + with io.BytesIO(self.data) as fp: + try: + self.image = Image.open(fp) + finally: + self.image.load() + return self.image + + +# -------------------------------------------------------------------- + + +def _save(im, fp, tile, bufsize=0) -> None: + """Helper to save image based on tile list + + :param im: Image object. + :param fp: File object. + :param tile: Tile list. + :param bufsize: Optional buffer size + """ + + im.load() + if not hasattr(im, "encoderconfig"): + im.encoderconfig = () + tile.sort(key=_tilesort) + # FIXME: make MAXBLOCK a configuration parameter + # It would be great if we could have the encoder specify what it needs + # But, it would need at least the image size in most cases. RawEncode is + # a tricky case. 
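+    # At minimum the buffer must hold MAXBLOCK bytes or one full row of
+    # 32-bit pixels (im.size[0] * 4), whichever is larger.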
+ bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c + try: + fh = fp.fileno() + fp.flush() + _encode_tile(im, fp, tile, bufsize, fh) + except (AttributeError, io.UnsupportedOperation) as exc: + _encode_tile(im, fp, tile, bufsize, None, exc) + if hasattr(fp, "flush"): + fp.flush() + + +def _encode_tile(im, fp, tile: list[_Tile], bufsize, fh, exc=None): + for encoder_name, extents, offset, args in tile: + if offset > 0: + fp.seek(offset) + encoder = Image._getencoder(im.mode, encoder_name, args, im.encoderconfig) + try: + encoder.setimage(im.im, extents) + if encoder.pushes_fd: + encoder.setfd(fp) + errcode = encoder.encode_to_pyfd()[1] + else: + if exc: + # compress to Python file-compatible object + while True: + errcode, data = encoder.encode(bufsize)[1:] + fp.write(data) + if errcode: + break + else: + # slight speedup: compress to real file object + errcode = encoder.encode_to_file(fh, bufsize) + if errcode < 0: + raise _get_oserror(errcode, encoder=True) from exc + finally: + encoder.cleanup() + + +def _safe_read(fp, size): + """ + Reads large blocks in a safe way. Unlike fp.read(n), this function + doesn't trust the user. If the requested size is larger than + SAFEBLOCK, the file is read block by block. + + :param fp: File handle. Must implement a read method. + :param size: Number of bytes to read. + :returns: A string containing size bytes of data. + + Raises an OSError if the file is truncated and the read cannot be completed + + """ + if size <= 0: + return b"" + if size <= SAFEBLOCK: + data = fp.read(size) + if len(data) < size: + msg = "Truncated File Read" + raise OSError(msg) + return data + data = [] + remaining_size = size + while remaining_size > 0: + block = fp.read(min(remaining_size, SAFEBLOCK)) + if not block: + break + data.append(block) + remaining_size -= len(block) + if sum(len(d) for d in data) < size: + msg = "Truncated File Read" + raise OSError(msg) + return b"".join(data) + + +class PyCodecState: + def __init__(self) -> None: + self.xsize = 0 + self.ysize = 0 + self.xoff = 0 + self.yoff = 0 + + def extents(self) -> tuple[int, int, int, int]: + return self.xoff, self.yoff, self.xoff + self.xsize, self.yoff + self.ysize + + +class PyCodec: + fd: IO[bytes] | None + + def __init__(self, mode, *args): + self.im = None + self.state = PyCodecState() + self.fd = None + self.mode = mode + self.init(args) + + def init(self, args): + """ + Override to perform codec specific initialization + + :param args: Array of args items from the tile entry + :returns: None + """ + self.args = args + + def cleanup(self) -> None: + """ + Override to perform codec specific cleanup + + :returns: None + """ + pass + + def setfd(self, fd): + """ + Called from ImageFile to set the Python file-like object + + :param fd: A Python file-like object + :returns: None + """ + self.fd = fd + + def setimage(self, im, extents: tuple[int, int, int, int] | None = None) -> None: + """ + Called from ImageFile to set the core output image for the codec + + :param im: A core image object + :param extents: a 4 tuple of (x0, y0, x1, y1) defining the rectangle + for this tile + :returns: None + """ + + # following c code + self.im = im + + if extents: + (x0, y0, x1, y1) = extents + else: + (x0, y0, x1, y1) = (0, 0, 0, 0) + + if x0 == 0 and x1 == 0: + self.state.xsize, self.state.ysize = self.im.size + else: + self.state.xoff = x0 + self.state.yoff = y0 + self.state.xsize = x1 - x0 + self.state.ysize = y1 - y0 + + if self.state.xsize <= 0 or self.state.ysize <= 0: + msg = "Size cannot be 
negative" + raise ValueError(msg) + + if ( + self.state.xsize + self.state.xoff > self.im.size[0] + or self.state.ysize + self.state.yoff > self.im.size[1] + ): + msg = "Tile cannot extend outside image" + raise ValueError(msg) + + +class PyDecoder(PyCodec): + """ + Python implementation of a format decoder. Override this class and + add the decoding logic in the :meth:`decode` method. + + See :ref:`Writing Your Own File Codec in Python` + """ + + _pulls_fd = False + + @property + def pulls_fd(self) -> bool: + return self._pulls_fd + + def decode(self, buffer: bytes) -> tuple[int, int]: + """ + Override to perform the decoding process. + + :param buffer: A bytes object with the data to be decoded. + :returns: A tuple of ``(bytes consumed, errcode)``. + If finished with decoding return -1 for the bytes consumed. + Err codes are from :data:`.ImageFile.ERRORS`. + """ + msg = "unavailable in base decoder" + raise NotImplementedError(msg) + + def set_as_raw(self, data: bytes, rawmode=None) -> None: + """ + Convenience method to set the internal image from a stream of raw data + + :param data: Bytes to be set + :param rawmode: The rawmode to be used for the decoder. + If not specified, it will default to the mode of the image + :returns: None + """ + + if not rawmode: + rawmode = self.mode + d = Image._getdecoder(self.mode, "raw", rawmode) + assert self.im is not None + d.setimage(self.im, self.state.extents()) + s = d.decode(data) + + if s[0] >= 0: + msg = "not enough image data" + raise ValueError(msg) + if s[1] != 0: + msg = "cannot decode image data" + raise ValueError(msg) + + +class PyEncoder(PyCodec): + """ + Python implementation of a format encoder. Override this class and + add the decoding logic in the :meth:`encode` method. + + See :ref:`Writing Your Own File Codec in Python` + """ + + _pushes_fd = False + + @property + def pushes_fd(self) -> bool: + return self._pushes_fd + + def encode(self, bufsize: int) -> tuple[int, int, bytes]: + """ + Override to perform the encoding process. + + :param bufsize: Buffer size. + :returns: A tuple of ``(bytes encoded, errcode, bytes)``. + If finished with encoding return 1 for the error code. + Err codes are from :data:`.ImageFile.ERRORS`. + """ + msg = "unavailable in base encoder" + raise NotImplementedError(msg) + + def encode_to_pyfd(self) -> tuple[int, int]: + """ + If ``pushes_fd`` is ``True``, then this method will be used, + and ``encode()`` will only be called once. + + :returns: A tuple of ``(bytes consumed, errcode)``. + Err codes are from :data:`.ImageFile.ERRORS`. + """ + if not self.pushes_fd: + return 0, -8 # bad configuration + bytes_consumed, errcode, data = self.encode(0) + if data: + assert self.fd is not None + self.fd.write(data) + return bytes_consumed, errcode + + def encode_to_file(self, fh, bufsize): + """ + :param fh: File handle. + :param bufsize: Buffer size. + + :returns: If finished successfully, return 0. + Otherwise, return an error code. Err codes are from + :data:`.ImageFile.ERRORS`. + """ + errcode = 0 + while errcode == 0: + status, errcode, buf = self.encode(bufsize) + if status > 0: + fh.write(buf[status:]) + return errcode diff --git a/MLPY/Lib/site-packages/PIL/ImageFilter.py b/MLPY/Lib/site-packages/PIL/ImageFilter.py new file mode 100644 index 0000000000000000000000000000000000000000..88c250f61b614a36c060a4b28aa241a5279289b2 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageFilter.py @@ -0,0 +1,604 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# standard filters +# +# History: +# 1995-11-27 fl Created +# 2002-06-08 fl Added rank and mode filters +# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2002 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import abc +import functools +from types import ModuleType +from typing import TYPE_CHECKING, Any, Callable, Sequence, cast + +if TYPE_CHECKING: + from . import _imaging + from ._typing import NumpyArray + + +class Filter: + @abc.abstractmethod + def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: + pass + + +class MultibandFilter(Filter): + pass + + +class BuiltinFilter(MultibandFilter): + filterargs: tuple[Any, ...] + + def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: + if image.mode == "P": + msg = "cannot filter palette images" + raise ValueError(msg) + return image.filter(*self.filterargs) + + +class Kernel(BuiltinFilter): + """ + Create a convolution kernel. This only supports 3x3 and 5x5 integer and floating + point kernels. + + Kernels can only be applied to "L" and "RGB" images. + + :param size: Kernel size, given as (width, height). This must be (3,3) or (5,5). + :param kernel: A sequence containing kernel weights. The kernel will be flipped + vertically before being applied to the image. + :param scale: Scale factor. If given, the result for each pixel is divided by this + value. The default is the sum of the kernel weights. + :param offset: Offset. If given, this value is added to the result, after it has + been divided by the scale factor. + """ + + name = "Kernel" + + def __init__( + self, + size: tuple[int, int], + kernel: Sequence[float], + scale: float | None = None, + offset: float = 0, + ) -> None: + if scale is None: + # default scale is sum of kernel + scale = functools.reduce(lambda a, b: a + b, kernel) + if size[0] * size[1] != len(kernel): + msg = "not enough coefficients in kernel" + raise ValueError(msg) + self.filterargs = size, scale, offset, kernel + + +class RankFilter(Filter): + """ + Create a rank filter. The rank filter sorts all pixels in + a window of the given size, and returns the ``rank``'th value. + + :param size: The kernel size, in pixels. + :param rank: What pixel value to pick. Use 0 for a min filter, + ``size * size / 2`` for a median filter, ``size * size - 1`` + for a max filter, etc. + """ + + name = "Rank" + + def __init__(self, size: int, rank: int) -> None: + self.size = size + self.rank = rank + + def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: + if image.mode == "P": + msg = "cannot filter palette images" + raise ValueError(msg) + image = image.expand(self.size // 2, self.size // 2) + return image.rankfilter(self.size, self.rank) + + +class MedianFilter(RankFilter): + """ + Create a median filter. Picks the median pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + + name = "Median" + + def __init__(self, size: int = 3) -> None: + self.size = size + self.rank = size * size // 2 + + +class MinFilter(RankFilter): + """ + Create a min filter. Picks the lowest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + + name = "Min" + + def __init__(self, size: int = 3) -> None: + self.size = size + self.rank = 0 + + +class MaxFilter(RankFilter): + """ + Create a max filter. 
Picks the largest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + + name = "Max" + + def __init__(self, size: int = 3) -> None: + self.size = size + self.rank = size * size - 1 + + +class ModeFilter(Filter): + """ + Create a mode filter. Picks the most frequent pixel value in a box with the + given size. Pixel values that occur only once or twice are ignored; if no + pixel value occurs more than twice, the original pixel value is preserved. + + :param size: The kernel size, in pixels. + """ + + name = "Mode" + + def __init__(self, size: int = 3) -> None: + self.size = size + + def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: + return image.modefilter(self.size) + + +class GaussianBlur(MultibandFilter): + """Blurs the image with a sequence of extended box filters, which + approximates a Gaussian kernel. For details on accuracy see + + + :param radius: Standard deviation of the Gaussian kernel. Either a sequence of two + numbers for x and y, or a single number for both. + """ + + name = "GaussianBlur" + + def __init__(self, radius: float | Sequence[float] = 2) -> None: + self.radius = radius + + def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: + xy = self.radius + if isinstance(xy, (int, float)): + xy = (xy, xy) + if xy == (0, 0): + return image.copy() + return image.gaussian_blur(xy) + + +class BoxBlur(MultibandFilter): + """Blurs the image by setting each pixel to the average value of the pixels + in a square box extending radius pixels in each direction. + Supports float radius of arbitrary size. Uses an optimized implementation + which runs in linear time relative to the size of the image + for any radius value. + + :param radius: Size of the box in a direction. Either a sequence of two numbers for + x and y, or a single number for both. + + Radius 0 does not blur, returns an identical image. + Radius 1 takes 1 pixel in each direction, i.e. 9 pixels in total. + """ + + name = "BoxBlur" + + def __init__(self, radius: float | Sequence[float]) -> None: + xy = radius if isinstance(radius, (tuple, list)) else (radius, radius) + if xy[0] < 0 or xy[1] < 0: + msg = "radius must be >= 0" + raise ValueError(msg) + self.radius = radius + + def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: + xy = self.radius + if isinstance(xy, (int, float)): + xy = (xy, xy) + if xy == (0, 0): + return image.copy() + return image.box_blur(xy) + + +class UnsharpMask(MultibandFilter): + """Unsharp mask filter. + + See Wikipedia's entry on `digital unsharp masking`_ for an explanation of + the parameters. + + :param radius: Blur Radius + :param percent: Unsharp strength, in percent + :param threshold: Threshold controls the minimum brightness change that + will be sharpened + + .. 
_digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking + + """ + + name = "UnsharpMask" + + def __init__( + self, radius: float = 2, percent: int = 150, threshold: int = 3 + ) -> None: + self.radius = radius + self.percent = percent + self.threshold = threshold + + def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: + return image.unsharp_mask(self.radius, self.percent, self.threshold) + + +class BLUR(BuiltinFilter): + name = "Blur" + # fmt: off + filterargs = (5, 5), 16, 0, ( + 1, 1, 1, 1, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 1, 1, 1, 1, + ) + # fmt: on + + +class CONTOUR(BuiltinFilter): + name = "Contour" + # fmt: off + filterargs = (3, 3), 1, 255, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1, + ) + # fmt: on + + +class DETAIL(BuiltinFilter): + name = "Detail" + # fmt: off + filterargs = (3, 3), 6, 0, ( + 0, -1, 0, + -1, 10, -1, + 0, -1, 0, + ) + # fmt: on + + +class EDGE_ENHANCE(BuiltinFilter): + name = "Edge-enhance" + # fmt: off + filterargs = (3, 3), 2, 0, ( + -1, -1, -1, + -1, 10, -1, + -1, -1, -1, + ) + # fmt: on + + +class EDGE_ENHANCE_MORE(BuiltinFilter): + name = "Edge-enhance More" + # fmt: off + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 9, -1, + -1, -1, -1, + ) + # fmt: on + + +class EMBOSS(BuiltinFilter): + name = "Emboss" + # fmt: off + filterargs = (3, 3), 1, 128, ( + -1, 0, 0, + 0, 1, 0, + 0, 0, 0, + ) + # fmt: on + + +class FIND_EDGES(BuiltinFilter): + name = "Find Edges" + # fmt: off + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1, + ) + # fmt: on + + +class SHARPEN(BuiltinFilter): + name = "Sharpen" + # fmt: off + filterargs = (3, 3), 16, 0, ( + -2, -2, -2, + -2, 32, -2, + -2, -2, -2, + ) + # fmt: on + + +class SMOOTH(BuiltinFilter): + name = "Smooth" + # fmt: off + filterargs = (3, 3), 13, 0, ( + 1, 1, 1, + 1, 5, 1, + 1, 1, 1, + ) + # fmt: on + + +class SMOOTH_MORE(BuiltinFilter): + name = "Smooth More" + # fmt: off + filterargs = (5, 5), 100, 0, ( + 1, 1, 1, 1, 1, + 1, 5, 5, 5, 1, + 1, 5, 44, 5, 1, + 1, 5, 5, 5, 1, + 1, 1, 1, 1, 1, + ) + # fmt: on + + +class Color3DLUT(MultibandFilter): + """Three-dimensional color lookup table. + + Transforms 3-channel pixels using the values of the channels as coordinates + in the 3D lookup table and interpolating the nearest elements. + + This method allows you to apply almost any color transformation + in constant time by using pre-calculated decimated tables. + + .. versionadded:: 5.2.0 + + :param size: Size of the table. One int or tuple of (int, int, int). + Minimal size in any dimension is 2, maximum is 65. + :param table: Flat lookup table. A list of ``channels * size**3`` + float elements or a list of ``size**3`` channels-sized + tuples with floats. Channels are changed first, + then first dimension, then second, then third. + Value 0.0 corresponds lowest value of output, 1.0 highest. + :param channels: Number of channels in the table. Could be 3 or 4. + Default is 3. + :param target_mode: A mode for the result image. Should have not less + than ``channels`` channels. Default is ``None``, + which means that mode wouldn't be changed. 
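+
+    A minimal usage sketch; ``im`` is assumed to be an ``RGB`` image, and the
+    identity callback below builds a table that leaves colors unchanged::
+
+        lut = Color3DLUT.generate(17, lambda r, g, b: (r, g, b))
+        out = im.filter(lut)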
+ """ + + name = "Color 3D LUT" + + def __init__( + self, + size: int | tuple[int, int, int], + table: Sequence[float] | Sequence[Sequence[int]] | NumpyArray, + channels: int = 3, + target_mode: str | None = None, + **kwargs: bool, + ) -> None: + if channels not in (3, 4): + msg = "Only 3 or 4 output channels are supported" + raise ValueError(msg) + self.size = size = self._check_size(size) + self.channels = channels + self.mode = target_mode + + # Hidden flag `_copy_table=False` could be used to avoid extra copying + # of the table if the table is specially made for the constructor. + copy_table = kwargs.get("_copy_table", True) + items = size[0] * size[1] * size[2] + wrong_size = False + + numpy: ModuleType | None = None + if hasattr(table, "shape"): + try: + import numpy + except ImportError: + pass + + if numpy and isinstance(table, numpy.ndarray): + numpy_table: NumpyArray = table + if copy_table: + numpy_table = numpy_table.copy() + + if numpy_table.shape in [ + (items * channels,), + (items, channels), + (size[2], size[1], size[0], channels), + ]: + table = numpy_table.reshape(items * channels) + else: + wrong_size = True + + else: + if copy_table: + table = list(table) + + # Convert to a flat list + if table and isinstance(table[0], (list, tuple)): + raw_table = cast(Sequence[Sequence[int]], table) + flat_table: list[int] = [] + for pixel in raw_table: + if len(pixel) != channels: + msg = ( + "The elements of the table should " + f"have a length of {channels}." + ) + raise ValueError(msg) + flat_table.extend(pixel) + table = flat_table + + if wrong_size or len(table) != items * channels: + msg = ( + "The table should have either channels * size**3 float items " + "or size**3 items of channels-sized tuples with floats. " + f"Table should be: {channels}x{size[0]}x{size[1]}x{size[2]}. " + f"Actual length: {len(table)}" + ) + raise ValueError(msg) + self.table = table + + @staticmethod + def _check_size(size: Any) -> tuple[int, int, int]: + try: + _, _, _ = size + except ValueError as e: + msg = "Size should be either an integer or a tuple of three integers." + raise ValueError(msg) from e + except TypeError: + size = (size, size, size) + size = tuple(int(x) for x in size) + for size_1d in size: + if not 2 <= size_1d <= 65: + msg = "Size should be in [2, 65] range." + raise ValueError(msg) + return size + + @classmethod + def generate( + cls, + size: int | tuple[int, int, int], + callback: Callable[[float, float, float], tuple[float, ...]], + channels: int = 3, + target_mode: str | None = None, + ) -> Color3DLUT: + """Generates new LUT using provided callback. + + :param size: Size of the table. Passed to the constructor. + :param callback: Function with three parameters which correspond + three color channels. Will be called ``size**3`` + times with values from 0.0 to 1.0 and should return + a tuple with ``channels`` elements. + :param channels: The number of channels which should return callback. + :param target_mode: Passed to the constructor of the resulting + lookup table. 
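+
+        A minimal usage sketch (added for illustration; ``im`` is assumed to be
+        an existing ``RGB`` :py:class:`~PIL.Image.Image`)::
+
+            # Build a 17x17x17 table that swaps the red and blue channels.
+            swap_rb = Color3DLUT.generate(17, lambda r, g, b: (b, g, r))
+            swapped = im.filter(swap_rb)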
+ """ + size_1d, size_2d, size_3d = cls._check_size(size) + if channels not in (3, 4): + msg = "Only 3 or 4 output channels are supported" + raise ValueError(msg) + + table: list[float] = [0] * (size_1d * size_2d * size_3d * channels) + idx_out = 0 + for b in range(size_3d): + for g in range(size_2d): + for r in range(size_1d): + table[idx_out : idx_out + channels] = callback( + r / (size_1d - 1), g / (size_2d - 1), b / (size_3d - 1) + ) + idx_out += channels + + return cls( + (size_1d, size_2d, size_3d), + table, + channels=channels, + target_mode=target_mode, + _copy_table=False, + ) + + def transform( + self, + callback: Callable[..., tuple[float, ...]], + with_normals: bool = False, + channels: int | None = None, + target_mode: str | None = None, + ) -> Color3DLUT: + """Transforms the table values using provided callback and returns + a new LUT with altered values. + + :param callback: A function which takes old lookup table values + and returns a new set of values. The number + of arguments which function should take is + ``self.channels`` or ``3 + self.channels`` + if ``with_normals`` flag is set. + Should return a tuple of ``self.channels`` or + ``channels`` elements if it is set. + :param with_normals: If true, ``callback`` will be called with + coordinates in the color cube as the first + three arguments. Otherwise, ``callback`` + will be called only with actual color values. + :param channels: The number of channels in the resulting lookup table. + :param target_mode: Passed to the constructor of the resulting + lookup table. + """ + if channels not in (None, 3, 4): + msg = "Only 3 or 4 output channels are supported" + raise ValueError(msg) + ch_in = self.channels + ch_out = channels or ch_in + size_1d, size_2d, size_3d = self.size + + table = [0] * (size_1d * size_2d * size_3d * ch_out) + idx_in = 0 + idx_out = 0 + for b in range(size_3d): + for g in range(size_2d): + for r in range(size_1d): + values = self.table[idx_in : idx_in + ch_in] + if with_normals: + values = callback( + r / (size_1d - 1), + g / (size_2d - 1), + b / (size_3d - 1), + *values, + ) + else: + values = callback(*values) + table[idx_out : idx_out + ch_out] = values + idx_in += ch_in + idx_out += ch_out + + return type(self)( + self.size, + table, + channels=ch_out, + target_mode=target_mode or self.mode, + _copy_table=False, + ) + + def __repr__(self) -> str: + r = [ + f"{self.__class__.__name__} from {self.table.__class__.__name__}", + "size={:d}x{:d}x{:d}".format(*self.size), + f"channels={self.channels:d}", + ] + if self.mode: + r.append(f"target_mode={self.mode}") + return "<{}>".format(" ".join(r)) + + def filter(self, image: _imaging.ImagingCore) -> _imaging.ImagingCore: + from . import Image + + return image.color_lut_3d( + self.mode or image.mode, + Image.Resampling.BILINEAR, + self.channels, + self.size[0], + self.size[1], + self.size[2], + self.table, + ) diff --git a/MLPY/Lib/site-packages/PIL/ImageFont.py b/MLPY/Lib/site-packages/PIL/ImageFont.py new file mode 100644 index 0000000000000000000000000000000000000000..b315b0b01e815afb698f666387176776aed01121 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageFont.py @@ -0,0 +1,1290 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# PIL raster font management +# +# History: +# 1996-08-07 fl created (experimental) +# 1997-08-25 fl minor adjustments to handle fonts from pilfont 0.3 +# 1999-02-06 fl rewrote most font management stuff in C +# 1999-03-17 fl take pth files into account in load_path (from Richard Jones) +# 2001-02-17 fl added freetype support +# 2001-05-09 fl added TransposedFont wrapper class +# 2002-03-04 fl make sure we have a "L" or "1" font +# 2002-12-04 fl skip non-directory entries in the system path +# 2003-04-29 fl add embedded default font +# 2003-09-27 fl added support for truetype charmap encodings +# +# Todo: +# Adapt to PILFONT2 format (16-bit fonts, compressed, single file) +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import annotations + +import base64 +import os +import sys +import warnings +from enum import IntEnum +from io import BytesIO +from types import ModuleType +from typing import IO, TYPE_CHECKING, Any, BinaryIO + +from . import Image +from ._typing import StrOrBytesPath +from ._util import DeferredError, is_path + +if TYPE_CHECKING: + from . import ImageFile + from ._imaging import ImagingFont + from ._imagingft import Font + + +class Layout(IntEnum): + BASIC = 0 + RAQM = 1 + + +MAX_STRING_LENGTH = 1_000_000 + + +core: ModuleType | DeferredError +try: + from . import _imagingft as core +except ImportError as ex: + core = DeferredError.new(ex) + + +def _string_length_check(text: str | bytes | bytearray) -> None: + if MAX_STRING_LENGTH is not None and len(text) > MAX_STRING_LENGTH: + msg = "too many characters in string" + raise ValueError(msg) + + +# FIXME: add support for pilfont2 format (see FontFile.py) + +# -------------------------------------------------------------------- +# Font metrics format: +# "PILfont" LF +# fontdescriptor LF +# (optional) key=value... LF +# "DATA" LF +# binary data: 256*10*2 bytes (dx, dy, dstbox, srcbox) +# +# To place a character, cut out srcbox and paste at dstbox, +# relative to the character position. Then move the character +# position according to dx, dy. 
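+#
+# Illustrative note (added to this vendored copy): each of the 256 glyph
+# records is 10 signed 16-bit integers, so a single record i could be
+# unpacked along the lines of
+#
+#     values = struct.unpack(">10h", data[i * 20 : (i + 1) * 20])
+#     dx, dy, dstbox, srcbox = values[0], values[1], values[2:6], values[6:10]
+#
+# (big-endian byte order is an assumption here; the authoritative reader is
+# the C code behind Image.core.font, and the writer side lives in FontFile.py).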
+# -------------------------------------------------------------------- + + +class ImageFont: + """PIL font wrapper""" + + font: ImagingFont + + def _load_pilfont(self, filename: str) -> None: + with open(filename, "rb") as fp: + image: ImageFile.ImageFile | None = None + for ext in (".png", ".gif", ".pbm"): + if image: + image.close() + try: + fullname = os.path.splitext(filename)[0] + ext + image = Image.open(fullname) + except Exception: + pass + else: + if image and image.mode in ("1", "L"): + break + else: + if image: + image.close() + msg = "cannot find glyph data file" + raise OSError(msg) + + self.file = fullname + + self._load_pilfont_data(fp, image) + image.close() + + def _load_pilfont_data(self, file: IO[bytes], image: Image.Image) -> None: + # read PILfont header + if file.readline() != b"PILfont\n": + msg = "Not a PILfont file" + raise SyntaxError(msg) + file.readline().split(b";") + self.info = [] # FIXME: should be a dictionary + while True: + s = file.readline() + if not s or s == b"DATA\n": + break + self.info.append(s) + + # read PILfont metrics + data = file.read(256 * 20) + + # check image + if image.mode not in ("1", "L"): + msg = "invalid font image mode" + raise TypeError(msg) + + image.load() + + self.font = Image.core.font(image.im, data) + + def getmask(self, text, mode="", *args, **kwargs): + """ + Create a bitmap for the text. + + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. versionadded:: 1.1.5 + + :return: An internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module. + """ + _string_length_check(text) + Image._decompression_bomb_check(self.font.getsize(text)) + return self.font.getmask(text, mode) + + def getbbox( + self, text: str | bytes | bytearray, *args: Any, **kwargs: Any + ) -> tuple[int, int, int, int]: + """ + Returns bounding box (in pixels) of given text. + + .. versionadded:: 9.2.0 + + :param text: Text to render. + + :return: ``(left, top, right, bottom)`` bounding box + """ + _string_length_check(text) + width, height = self.font.getsize(text) + return 0, 0, width, height + + def getlength( + self, text: str | bytes | bytearray, *args: Any, **kwargs: Any + ) -> int: + """ + Returns length (in pixels) of given text. + This is the amount by which following text should be offset. + + .. versionadded:: 9.2.0 + """ + _string_length_check(text) + width, height = self.font.getsize(text) + return width + + +## +# Wrapper for FreeType fonts. Application code should use the +# truetype factory function to create font objects. 
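+#
+# Typical usage, shown here as an illustrative sketch (the font filename and
+# the created image are assumptions, not fixtures of this module):
+#
+#     from PIL import Image, ImageDraw, ImageFont
+#
+#     font = ImageFont.truetype("DejaVuSans.ttf", size=24)
+#     image = Image.new("RGB", (200, 50), "white")
+#     draw = ImageDraw.Draw(image)
+#     draw.text((10, 10), "Hello", font=font, fill="black")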
+ + +class FreeTypeFont: + """FreeType font wrapper (requires _imagingft service)""" + + font: Font + font_bytes: bytes + + def __init__( + self, + font: StrOrBytesPath | BinaryIO | None = None, + size: float = 10, + index: int = 0, + encoding: str = "", + layout_engine: Layout | None = None, + ) -> None: + # FIXME: use service provider instead + + if isinstance(core, DeferredError): + raise core.ex + + if size <= 0: + msg = "font size must be greater than 0" + raise ValueError(msg) + + self.path = font + self.size = size + self.index = index + self.encoding = encoding + + if layout_engine not in (Layout.BASIC, Layout.RAQM): + layout_engine = Layout.BASIC + if core.HAVE_RAQM: + layout_engine = Layout.RAQM + elif layout_engine == Layout.RAQM and not core.HAVE_RAQM: + warnings.warn( + "Raqm layout was requested, but Raqm is not available. " + "Falling back to basic layout." + ) + layout_engine = Layout.BASIC + + self.layout_engine = layout_engine + + def load_from_bytes(f): + self.font_bytes = f.read() + self.font = core.getfont( + "", size, index, encoding, self.font_bytes, layout_engine + ) + + if is_path(font): + font = os.path.realpath(os.fspath(font)) + if sys.platform == "win32": + font_bytes_path = font if isinstance(font, bytes) else font.encode() + try: + font_bytes_path.decode("ascii") + except UnicodeDecodeError: + # FreeType cannot load fonts with non-ASCII characters on Windows + # So load it into memory first + with open(font, "rb") as f: + load_from_bytes(f) + return + self.font = core.getfont( + font, size, index, encoding, layout_engine=layout_engine + ) + else: + load_from_bytes(font) + + def __getstate__(self): + return [self.path, self.size, self.index, self.encoding, self.layout_engine] + + def __setstate__(self, state): + path, size, index, encoding, layout_engine = state + self.__init__(path, size, index, encoding, layout_engine) + + def getname(self) -> tuple[str | None, str | None]: + """ + :return: A tuple of the font family (e.g. Helvetica) and the font style + (e.g. Bold) + """ + return self.font.family, self.font.style + + def getmetrics(self) -> tuple[int, int]: + """ + :return: A tuple of the font ascent (the distance from the baseline to + the highest outline point) and descent (the distance from the + baseline to the lowest outline point, a negative value) + """ + return self.font.ascent, self.font.descent + + def getlength( + self, text: str | bytes, mode="", direction=None, features=None, language=None + ) -> float: + """ + Returns length (in pixels with 1/64 precision) of given text when rendered + in font with provided direction, features, and language. + + This is the amount by which following text should be offset. + Text bounding box may extend past the length in some fonts, + e.g. when using italics or accents. + + The result is returned as a float; it is a whole number if using basic layout. + + Note that the sum of two lengths may not equal the length of a concatenated + string due to kerning. If you need to adjust for kerning, include the following + character and subtract its length. 
+ + For example, instead of :: + + hello = font.getlength("Hello") + world = font.getlength("World") + hello_world = hello + world # not adjusted for kerning + assert hello_world == font.getlength("HelloWorld") # may fail + + use :: + + hello = font.getlength("HelloW") - font.getlength("W") # adjusted for kerning + world = font.getlength("World") + hello_world = hello + world # adjusted for kerning + assert hello_world == font.getlength("HelloWorld") # True + + or disable kerning with (requires libraqm) :: + + hello = draw.textlength("Hello", font, features=["-kern"]) + world = draw.textlength("World", font, features=["-kern"]) + hello_world = hello + world # kerning is disabled, no need to adjust + assert hello_world == draw.textlength("HelloWorld", font, features=["-kern"]) + + .. versionadded:: 8.0.0 + + :param text: Text to measure. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + :return: Either width for horizontal text, or height for vertical text. + """ + _string_length_check(text) + return self.font.getlength(text, mode, direction, features, language) / 64 + + def getbbox( + self, + text: str | bytes, + mode: str = "", + direction: str | None = None, + features: list[str] | None = None, + language: str | None = None, + stroke_width: float = 0, + anchor: str | None = None, + ) -> tuple[float, float, float, float]: + """ + Returns bounding box (in pixels) of given text relative to given anchor + when rendered in font with provided direction, features, and language. + + Use :py:meth:`getlength()` to get the offset of following text with + 1/64 pixel precision. The bounding box includes extra margins for + some fonts, e.g. italics or accents. + + .. versionadded:: 8.0.0 + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. 
To get all supported + features, see + https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + :param stroke_width: The width of the text stroke. + + :param anchor: The text anchor alignment. Determines the relative location of + the anchor to the text. The default alignment is top left, + specifically ``la`` for horizontal text and ``lt`` for + vertical text. See :ref:`text-anchors` for details. + + :return: ``(left, top, right, bottom)`` bounding box + """ + _string_length_check(text) + size, offset = self.font.getsize( + text, mode, direction, features, language, anchor + ) + left, top = offset[0] - stroke_width, offset[1] - stroke_width + width, height = size[0] + 2 * stroke_width, size[1] + 2 * stroke_width + return left, top, left + width, top + height + + def getmask( + self, + text, + mode="", + direction=None, + features=None, + language=None, + stroke_width=0, + anchor=None, + ink=0, + start=None, + ): + """ + Create a bitmap for the text. + + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. If the font has embedded color data, the bitmap + should have mode ``RGBA``. Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. versionadded:: 1.1.5 + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :param anchor: The text anchor alignment. Determines the relative location of + the anchor to the text. The default alignment is top left, + specifically ``la`` for horizontal text and ``lt`` for + vertical text. See :ref:`text-anchors` for details. + + .. versionadded:: 8.0.0 + + :param ink: Foreground ink for rendering in RGBA mode. + + .. versionadded:: 8.0.0 + + :param start: Tuple of horizontal and vertical offset, as text may render + differently when starting at fractional coordinates. + + .. versionadded:: 9.4.0 + + :return: An internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module. 
+ """ + return self.getmask2( + text, + mode, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + anchor=anchor, + ink=ink, + start=start, + )[0] + + def getmask2( + self, + text: str | bytes, + mode="", + direction=None, + features=None, + language=None, + stroke_width=0, + anchor=None, + ink=0, + start=None, + *args, + **kwargs, + ): + """ + Create a bitmap for the text. + + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. If the font has embedded color data, the bitmap + should have mode ``RGBA``. Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. versionadded:: 1.1.5 + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :param anchor: The text anchor alignment. Determines the relative location of + the anchor to the text. The default alignment is top left, + specifically ``la`` for horizontal text and ``lt`` for + vertical text. See :ref:`text-anchors` for details. + + .. versionadded:: 8.0.0 + + :param ink: Foreground ink for rendering in RGBA mode. + + .. versionadded:: 8.0.0 + + :param start: Tuple of horizontal and vertical offset, as text may render + differently when starting at fractional coordinates. + + .. versionadded:: 9.4.0 + + :return: A tuple of an internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module, and the text offset, the + gap between the starting coordinate and the first marking + """ + _string_length_check(text) + if start is None: + start = (0, 0) + + def fill(width, height): + size = (width, height) + Image._decompression_bomb_check(size) + return Image.core.fill("RGBA" if mode == "RGBA" else "L", size) + + return self.font.render( + text, + fill, + mode, + direction, + features, + language, + stroke_width, + anchor, + ink, + start[0], + start[1], + ) + + def font_variant( + self, font=None, size=None, index=None, encoding=None, layout_engine=None + ): + """ + Create a copy of this FreeTypeFont object, + using any specified arguments to override the settings. + + Parameters are identical to the parameters used to initialize this + object. + + :return: A FreeTypeFont object. 
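+
+        For example (an illustrative sketch; ``font`` is assumed to be an
+        existing :py:class:`FreeTypeFont`)::
+
+            big = font.font_variant(size=36)  # same face, larger size
+            alt = font.font_variant(index=1)  # second face, if the file has one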
+ """ + if font is None: + try: + font = BytesIO(self.font_bytes) + except AttributeError: + font = self.path + return FreeTypeFont( + font=font, + size=self.size if size is None else size, + index=self.index if index is None else index, + encoding=self.encoding if encoding is None else encoding, + layout_engine=layout_engine or self.layout_engine, + ) + + def get_variation_names(self) -> list[bytes]: + """ + :returns: A list of the named styles in a variation font. + :exception OSError: If the font is not a variation font. + """ + try: + names = self.font.getvarnames() + except AttributeError as e: + msg = "FreeType 2.9.1 or greater is required" + raise NotImplementedError(msg) from e + return [name.replace(b"\x00", b"") for name in names] + + def set_variation_by_name(self, name): + """ + :param name: The name of the style. + :exception OSError: If the font is not a variation font. + """ + names = self.get_variation_names() + if not isinstance(name, bytes): + name = name.encode() + index = names.index(name) + 1 + + if index == getattr(self, "_last_variation_index", None): + # When the same name is set twice in a row, + # there is an 'unknown freetype error' + # https://savannah.nongnu.org/bugs/?56186 + return + self._last_variation_index = index + + self.font.setvarname(index) + + def get_variation_axes(self): + """ + :returns: A list of the axes in a variation font. + :exception OSError: If the font is not a variation font. + """ + try: + axes = self.font.getvaraxes() + except AttributeError as e: + msg = "FreeType 2.9.1 or greater is required" + raise NotImplementedError(msg) from e + for axis in axes: + if axis["name"]: + axis["name"] = axis["name"].replace(b"\x00", b"") + return axes + + def set_variation_by_axes(self, axes: list[float]) -> None: + """ + :param axes: A list of values for each axis. + :exception OSError: If the font is not a variation font. + """ + try: + self.font.setvaraxes(axes) + except AttributeError as e: + msg = "FreeType 2.9.1 or greater is required" + raise NotImplementedError(msg) from e + + +class TransposedFont: + """Wrapper for writing rotated or mirrored text""" + + def __init__(self, font, orientation=None): + """ + Wrapper that creates a transposed font from any existing font + object. + + :param font: A font object. + :param orientation: An optional orientation. If given, this should + be one of Image.Transpose.FLIP_LEFT_RIGHT, Image.Transpose.FLIP_TOP_BOTTOM, + Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_180, or + Image.Transpose.ROTATE_270. 
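+
+        For example (a sketch added here; ``font`` and ``draw`` are assumed to
+        exist already)::
+
+            rotated = TransposedFont(font, orientation=Image.Transpose.ROTATE_90)
+            draw.text((10, 10), "Hello", font=rotated, fill="black")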
+ """ + self.font = font + self.orientation = orientation # any 'transpose' argument, or None + + def getmask(self, text, mode="", *args, **kwargs): + im = self.font.getmask(text, mode, *args, **kwargs) + if self.orientation is not None: + return im.transpose(self.orientation) + return im + + def getbbox(self, text, *args, **kwargs): + # TransposedFont doesn't support getmask2, move top-left point to (0, 0) + # this has no effect on ImageFont and simulates anchor="lt" for FreeTypeFont + left, top, right, bottom = self.font.getbbox(text, *args, **kwargs) + width = right - left + height = bottom - top + if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270): + return 0, 0, height, width + return 0, 0, width, height + + def getlength(self, text: str | bytes, *args, **kwargs) -> float: + if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270): + msg = "text length is undefined for text rotated by 90 or 270 degrees" + raise ValueError(msg) + return self.font.getlength(text, *args, **kwargs) + + +def load(filename: str) -> ImageFont: + """ + Load a font file. This function loads a font object from the given + bitmap font file, and returns the corresponding font object. + + :param filename: Name of font file. + :return: A font object. + :exception OSError: If the file could not be read. + """ + f = ImageFont() + f._load_pilfont(filename) + return f + + +def truetype( + font: StrOrBytesPath | BinaryIO | None = None, + size: float = 10, + index: int = 0, + encoding: str = "", + layout_engine: Layout | None = None, +) -> FreeTypeFont: + """ + Load a TrueType or OpenType font from a file or file-like object, + and create a font object. + This function loads a font object from the given file or file-like + object, and creates a font object for a font of the given size. + + Pillow uses FreeType to open font files. On Windows, be aware that FreeType + will keep the file open as long as the FreeTypeFont object exists. Windows + limits the number of files that can be open in C at once to 512, so if many + fonts are opened simultaneously and that limit is approached, an + ``OSError`` may be thrown, reporting that FreeType "cannot open resource". + A workaround would be to copy the file(s) into memory, and open that instead. + + This function requires the _imagingft service. + + :param font: A filename or file-like object containing a TrueType font. + If the file is not found in this filename, the loader may also + search in other directories, such as: + + * The :file:`fonts/` directory on Windows, + * :file:`/Library/Fonts/`, :file:`/System/Library/Fonts/` + and :file:`~/Library/Fonts/` on macOS. + * :file:`~/.local/share/fonts`, :file:`/usr/local/share/fonts`, + and :file:`/usr/share/fonts` on Linux; or those specified by + the ``XDG_DATA_HOME`` and ``XDG_DATA_DIRS`` environment variables + for user-installed and system-wide fonts, respectively. + + :param size: The requested size, in pixels. + :param index: Which font face to load (default is first available face). + :param encoding: Which font encoding to use (default is Unicode). Possible + encodings include (see the FreeType documentation for more + information): + + * "unic" (Unicode) + * "symb" (Microsoft Symbol) + * "ADOB" (Adobe Standard) + * "ADBE" (Adobe Expert) + * "ADBC" (Adobe Custom) + * "armn" (Apple Roman) + * "sjis" (Shift JIS) + * "gb " (PRC) + * "big5" + * "wans" (Extended Wansung) + * "joha" (Johab) + * "lat1" (Latin-1) + + This specifies the character set to use. 
It does not alter the + encoding of any text provided in subsequent operations. + :param layout_engine: Which layout engine to use, if available: + :attr:`.ImageFont.Layout.BASIC` or :attr:`.ImageFont.Layout.RAQM`. + If it is available, Raqm layout will be used by default. + Otherwise, basic layout will be used. + + Raqm layout is recommended for all non-English text. If Raqm layout + is not required, basic layout will have better performance. + + You can check support for Raqm layout using + :py:func:`PIL.features.check_feature` with ``feature="raqm"``. + + .. versionadded:: 4.2.0 + :return: A font object. + :exception OSError: If the file could not be read. + :exception ValueError: If the font size is not greater than zero. + """ + + def freetype(font: StrOrBytesPath | BinaryIO | None) -> FreeTypeFont: + return FreeTypeFont(font, size, index, encoding, layout_engine) + + try: + return freetype(font) + except OSError: + if not is_path(font): + raise + ttf_filename = os.path.basename(font) + + dirs = [] + if sys.platform == "win32": + # check the windows font repository + # NOTE: must use uppercase WINDIR, to work around bugs in + # 1.5.2's os.environ.get() + windir = os.environ.get("WINDIR") + if windir: + dirs.append(os.path.join(windir, "fonts")) + elif sys.platform in ("linux", "linux2"): + data_home = os.environ.get("XDG_DATA_HOME") + if not data_home: + # The freedesktop spec defines the following default directory for + # when XDG_DATA_HOME is unset or empty. This user-level directory + # takes precedence over system-level directories. + data_home = os.path.expanduser("~/.local/share") + xdg_dirs = [data_home] + + data_dirs = os.environ.get("XDG_DATA_DIRS") + if not data_dirs: + # Similarly, defaults are defined for the system-level directories + data_dirs = "/usr/local/share:/usr/share" + xdg_dirs += data_dirs.split(":") + + dirs += [os.path.join(xdg_dir, "fonts") for xdg_dir in xdg_dirs] + elif sys.platform == "darwin": + dirs += [ + "/Library/Fonts", + "/System/Library/Fonts", + os.path.expanduser("~/Library/Fonts"), + ] + + ext = os.path.splitext(ttf_filename)[1] + first_font_with_a_different_extension = None + for directory in dirs: + for walkroot, walkdir, walkfilenames in os.walk(directory): + for walkfilename in walkfilenames: + if ext and walkfilename == ttf_filename: + return freetype(os.path.join(walkroot, walkfilename)) + elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + if os.path.splitext(fontpath)[1] == ".ttf": + return freetype(fontpath) + if not ext and first_font_with_a_different_extension is None: + first_font_with_a_different_extension = fontpath + if first_font_with_a_different_extension: + return freetype(first_font_with_a_different_extension) + raise + + +def load_path(filename: str | bytes) -> ImageFont: + """ + Load font file. Same as :py:func:`~PIL.ImageFont.load`, but searches for a + bitmap font along the Python path. + + :param filename: Name of font file. + :return: A font object. + :exception OSError: If the file could not be read. 
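+
+    For example (the relative path is hypothetical and must resolve against an
+    entry on ``sys.path``)::
+
+        font = ImageFont.load_path("pilfonts/courB08.pil")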
+ """ + if not isinstance(filename, str): + filename = filename.decode("utf-8") + for directory in sys.path: + try: + return load(os.path.join(directory, filename)) + except OSError: + pass + msg = "cannot find font file" + raise OSError(msg) + + +def load_default_imagefont() -> ImageFont: + f = ImageFont() + f._load_pilfont_data( + # courB08 + BytesIO( + base64.b64decode( + b""" +UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAA//8AAQAAAAAAAAABAAEA +BgAAAAH/+gADAAAAAQAAAAMABgAGAAAAAf/6AAT//QADAAAABgADAAYAAAAA//kABQABAAYAAAAL +AAgABgAAAAD/+AAFAAEACwAAABAACQAGAAAAAP/5AAUAAAAQAAAAFQAHAAYAAP////oABQAAABUA +AAAbAAYABgAAAAH/+QAE//wAGwAAAB4AAwAGAAAAAf/5AAQAAQAeAAAAIQAIAAYAAAAB//kABAAB +ACEAAAAkAAgABgAAAAD/+QAE//0AJAAAACgABAAGAAAAAP/6AAX//wAoAAAALQAFAAYAAAAB//8A +BAACAC0AAAAwAAMABgAAAAD//AAF//0AMAAAADUAAQAGAAAAAf//AAMAAAA1AAAANwABAAYAAAAB +//kABQABADcAAAA7AAgABgAAAAD/+QAFAAAAOwAAAEAABwAGAAAAAP/5AAYAAABAAAAARgAHAAYA +AAAA//kABQAAAEYAAABLAAcABgAAAAD/+QAFAAAASwAAAFAABwAGAAAAAP/5AAYAAABQAAAAVgAH +AAYAAAAA//kABQAAAFYAAABbAAcABgAAAAD/+QAFAAAAWwAAAGAABwAGAAAAAP/5AAUAAABgAAAA +ZQAHAAYAAAAA//kABQAAAGUAAABqAAcABgAAAAD/+QAFAAAAagAAAG8ABwAGAAAAAf/8AAMAAABv +AAAAcQAEAAYAAAAA//wAAwACAHEAAAB0AAYABgAAAAD/+gAE//8AdAAAAHgABQAGAAAAAP/7AAT/ +/gB4AAAAfAADAAYAAAAB//oABf//AHwAAACAAAUABgAAAAD/+gAFAAAAgAAAAIUABgAGAAAAAP/5 +AAYAAQCFAAAAiwAIAAYAAP////oABgAAAIsAAACSAAYABgAA////+gAFAAAAkgAAAJgABgAGAAAA +AP/6AAUAAACYAAAAnQAGAAYAAP////oABQAAAJ0AAACjAAYABgAA////+gAFAAAAowAAAKkABgAG +AAD////6AAUAAACpAAAArwAGAAYAAAAA//oABQAAAK8AAAC0AAYABgAA////+gAGAAAAtAAAALsA +BgAGAAAAAP/6AAQAAAC7AAAAvwAGAAYAAP////oABQAAAL8AAADFAAYABgAA////+gAGAAAAxQAA +AMwABgAGAAD////6AAUAAADMAAAA0gAGAAYAAP////oABQAAANIAAADYAAYABgAA////+gAGAAAA +2AAAAN8ABgAGAAAAAP/6AAUAAADfAAAA5AAGAAYAAP////oABQAAAOQAAADqAAYABgAAAAD/+gAF +AAEA6gAAAO8ABwAGAAD////6AAYAAADvAAAA9gAGAAYAAAAA//oABQAAAPYAAAD7AAYABgAA//// ++gAFAAAA+wAAAQEABgAGAAD////6AAYAAAEBAAABCAAGAAYAAP////oABgAAAQgAAAEPAAYABgAA +////+gAGAAABDwAAARYABgAGAAAAAP/6AAYAAAEWAAABHAAGAAYAAP////oABgAAARwAAAEjAAYA +BgAAAAD/+gAFAAABIwAAASgABgAGAAAAAf/5AAQAAQEoAAABKwAIAAYAAAAA//kABAABASsAAAEv +AAgABgAAAAH/+QAEAAEBLwAAATIACAAGAAAAAP/5AAX//AEyAAABNwADAAYAAAAAAAEABgACATcA +AAE9AAEABgAAAAH/+QAE//wBPQAAAUAAAwAGAAAAAP/7AAYAAAFAAAABRgAFAAYAAP////kABQAA +AUYAAAFMAAcABgAAAAD/+wAFAAABTAAAAVEABQAGAAAAAP/5AAYAAAFRAAABVwAHAAYAAAAA//sA +BQAAAVcAAAFcAAUABgAAAAD/+QAFAAABXAAAAWEABwAGAAAAAP/7AAYAAgFhAAABZwAHAAYAAP// +//kABQAAAWcAAAFtAAcABgAAAAD/+QAGAAABbQAAAXMABwAGAAAAAP/5AAQAAgFzAAABdwAJAAYA +AP////kABgAAAXcAAAF+AAcABgAAAAD/+QAGAAABfgAAAYQABwAGAAD////7AAUAAAGEAAABigAF +AAYAAP////sABQAAAYoAAAGQAAUABgAAAAD/+wAFAAABkAAAAZUABQAGAAD////7AAUAAgGVAAAB 
+mwAHAAYAAAAA//sABgACAZsAAAGhAAcABgAAAAD/+wAGAAABoQAAAacABQAGAAAAAP/7AAYAAAGn +AAABrQAFAAYAAAAA//kABgAAAa0AAAGzAAcABgAA////+wAGAAABswAAAboABQAGAAD////7AAUA +AAG6AAABwAAFAAYAAP////sABgAAAcAAAAHHAAUABgAAAAD/+wAGAAABxwAAAc0ABQAGAAD////7 +AAYAAgHNAAAB1AAHAAYAAAAA//sABQAAAdQAAAHZAAUABgAAAAH/+QAFAAEB2QAAAd0ACAAGAAAA +Av/6AAMAAQHdAAAB3gAHAAYAAAAA//kABAABAd4AAAHiAAgABgAAAAD/+wAF//0B4gAAAecAAgAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAB +//sAAwACAecAAAHpAAcABgAAAAD/+QAFAAEB6QAAAe4ACAAGAAAAAP/5AAYAAAHuAAAB9AAHAAYA +AAAA//oABf//AfQAAAH5AAUABgAAAAD/+QAGAAAB+QAAAf8ABwAGAAAAAv/5AAMAAgH/AAACAAAJ +AAYAAAAA//kABQABAgAAAAIFAAgABgAAAAH/+gAE//sCBQAAAggAAQAGAAAAAP/5AAYAAAIIAAAC +DgAHAAYAAAAB//kABf/+Ag4AAAISAAUABgAA////+wAGAAACEgAAAhkABQAGAAAAAP/7AAX//gIZ +AAACHgADAAYAAAAA//wABf/9Ah4AAAIjAAEABgAAAAD/+QAHAAACIwAAAioABwAGAAAAAP/6AAT/ ++wIqAAACLgABAAYAAAAA//kABP/8Ai4AAAIyAAMABgAAAAD/+gAFAAACMgAAAjcABgAGAAAAAf/5 +AAT//QI3AAACOgAEAAYAAAAB//kABP/9AjoAAAI9AAQABgAAAAL/+QAE//sCPQAAAj8AAgAGAAD/ +///7AAYAAgI/AAACRgAHAAYAAAAA//kABgABAkYAAAJMAAgABgAAAAH//AAD//0CTAAAAk4AAQAG +AAAAAf//AAQAAgJOAAACUQADAAYAAAAB//kABP/9AlEAAAJUAAQABgAAAAH/+QAF//4CVAAAAlgA +BQAGAAD////7AAYAAAJYAAACXwAFAAYAAP////kABgAAAl8AAAJmAAcABgAA////+QAGAAACZgAA +Am0ABwAGAAD////5AAYAAAJtAAACdAAHAAYAAAAA//sABQACAnQAAAJ5AAcABgAA////9wAGAAAC +eQAAAoAACQAGAAD////3AAYAAAKAAAAChwAJAAYAAP////cABgAAAocAAAKOAAkABgAA////9wAG +AAACjgAAApUACQAGAAD////4AAYAAAKVAAACnAAIAAYAAP////cABgAAApwAAAKjAAkABgAA//// ++gAGAAACowAAAqoABgAGAAAAAP/6AAUAAgKqAAACrwAIAAYAAP////cABQAAAq8AAAK1AAkABgAA +////9wAFAAACtQAAArsACQAGAAD////3AAUAAAK7AAACwQAJAAYAAP////gABQAAAsEAAALHAAgA +BgAAAAD/9wAEAAACxwAAAssACQAGAAAAAP/3AAQAAALLAAACzwAJAAYAAAAA//cABAAAAs8AAALT +AAkABgAAAAD/+AAEAAAC0wAAAtcACAAGAAD////6AAUAAALXAAAC3QAGAAYAAP////cABgAAAt0A +AALkAAkABgAAAAD/9wAFAAAC5AAAAukACQAGAAAAAP/3AAUAAALpAAAC7gAJAAYAAAAA//cABQAA +Au4AAALzAAkABgAAAAD/9wAFAAAC8wAAAvgACQAGAAAAAP/4AAUAAAL4AAAC/QAIAAYAAAAA//oA +Bf//Av0AAAMCAAUABgAA////+gAGAAADAgAAAwkABgAGAAD////3AAYAAAMJAAADEAAJAAYAAP// +//cABgAAAxAAAAMXAAkABgAA////9wAGAAADFwAAAx4ACQAGAAD////4AAYAAAAAAAoABwASAAYA +AP////cABgAAAAcACgAOABMABgAA////+gAFAAAADgAKABQAEAAGAAD////6AAYAAAAUAAoAGwAQ +AAYAAAAA//gABgAAABsACgAhABIABgAAAAD/+AAGAAAAIQAKACcAEgAGAAAAAP/4AAYAAAAnAAoA +LQASAAYAAAAA//gABgAAAC0ACgAzABIABgAAAAD/+QAGAAAAMwAKADkAEQAGAAAAAP/3AAYAAAA5 +AAoAPwATAAYAAP////sABQAAAD8ACgBFAA8ABgAAAAD/+wAFAAIARQAKAEoAEQAGAAAAAP/4AAUA +AABKAAoATwASAAYAAAAA//gABQAAAE8ACgBUABIABgAAAAD/+AAFAAAAVAAKAFkAEgAGAAAAAP/5 +AAUAAABZAAoAXgARAAYAAAAA//gABgAAAF4ACgBkABIABgAAAAD/+AAGAAAAZAAKAGoAEgAGAAAA +AP/4AAYAAABqAAoAcAASAAYAAAAA//kABgAAAHAACgB2ABEABgAAAAD/+AAFAAAAdgAKAHsAEgAG 
+AAD////4AAYAAAB7AAoAggASAAYAAAAA//gABQAAAIIACgCHABIABgAAAAD/+AAFAAAAhwAKAIwA +EgAGAAAAAP/4AAUAAACMAAoAkQASAAYAAAAA//gABQAAAJEACgCWABIABgAAAAD/+QAFAAAAlgAK +AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA +pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG +AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA//// ++QAGAAIAzgAKANUAEw== +""" + ) + ), + Image.open( + BytesIO( + base64.b64decode( + b""" +iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u +Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9 +M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g +LeNZUworuN1cjTPIzrTX6ofHWeo3v336qPzfEwRmBnHTtf95/fglZK5N0PDgfRTslpGBvz7LFc4F +IUXBWQGjQ5MGCx34EDFPwXiY4YbYxavpnhHFrk14CDAAAAD//wBlAJr/AgKqRooH2gAgPeggvUAA +Bu2WfgPoAwzRAABAAAAAAACQgLz/3Uv4Gv+gX7BJgDeeGP6AAAD1NMDzKHD7ANWr3loYbxsAD791 +NAADfcoIDyP44K/jv4Y63/Z+t98Ovt+ub4T48LAAAAD//wBlAJr/AuplMlADJAAAAGuAphWpqhMx +in0A/fRvAYBABPgBwBUgABBQ/sYAyv9g0bCHgOLoGAAAAAAAREAAwI7nr0ArYpow7aX8//9LaP/9 +SjdavWA8ePHeBIKB//81/83ndznOaXx379wAAAD//wBlAJr/AqDxW+D3AABAAbUh/QMnbQag/gAY +AYDAAACgtgD/gOqAAAB5IA/8AAAk+n9w0AAA8AAAmFRJuPo27ciC0cD5oeW4E7KA/wD3ECMAn2tt +y8PgwH8AfAxFzC0JzeAMtratAsC/ffwAAAD//wBlAJr/BGKAyCAA4AAAAvgeYTAwHd1kmQF5chkG +ABoMIHcL5xVpTfQbUqzlAAAErwAQBgAAEOClA5D9il08AEh/tUzdCBsXkbgACED+woQg8Si9VeqY +lODCn7lmF6NhnAEYgAAA/NMIAAAAAAD//2JgjLZgVGBg5Pv/Tvpc8hwGBjYGJADjHDrAwPzAjv/H +/Wf3PzCwtzcwHmBgYGcwbZz8wHaCAQMDOwMDQ8MCBgYOC3W7mp+f0w+wHOYxO3OG+e376hsMZjk3 +AAAAAP//YmCMY2A4wMAIN5e5gQETPD6AZisDAwMDgzSDAAPjByiHcQMDAwMDg1nOze1lByRu5/47 +c4859311AYNZzg0AAAAA//9iYGDBYihOIIMuwIjGL39/fwffA8b//xv/P2BPtzzHwCBjUQAAAAD/ +/yLFBrIBAAAA//9i1HhcwdhizX7u8NZNzyLbvT97bfrMf/QHI8evOwcSqGUJAAAA//9iYBB81iSw +pEE170Qrg5MIYydHqwdDQRMrAwcVrQAAAAD//2J4x7j9AAMDn8Q/BgYLBoaiAwwMjPdvMDBYM1Tv +oJodAAAAAP//Yqo/83+dxePWlxl3npsel9lvLfPcqlE9725C+acfVLMEAAAA//9i+s9gwCoaaGMR +evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA +AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v// +Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR +w7IkEbzhVQAAAABJRU5ErkJggg== +""" + ) + ) + ), + ) + return f + + +def load_default(size: float | None = None) -> FreeTypeFont | ImageFont: + """If FreeType support is available, load a version of Aileron Regular, + https://dotcolon.net/font/aileron, with a more limited character set. + + Otherwise, load a "better than nothing" font. + + .. versionadded:: 1.1.4 + + :param size: The font size of Aileron Regular. + + .. versionadded:: 10.1.0 + + :return: A font object. 
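+
+    For example (a minimal sketch)::
+
+        font = ImageFont.load_default()          # Aileron Regular or bitmap fallback
+        large = ImageFont.load_default(size=24)  # sized variant; needs FreeType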
+ """ + if isinstance(core, ModuleType) or size is not None: + return truetype( + BytesIO( + base64.b64decode( + b""" +AAEAAAAPAIAAAwBwRkZUTYwDlUAAADFoAAAAHEdERUYAqADnAAAo8AAAACRHUE9ThhmITwAAKfgAA +AduR1NVQnHxefoAACkUAAAA4k9TLzJovoHLAAABeAAAAGBjbWFw5lFQMQAAA6gAAAGqZ2FzcP//AA +MAACjoAAAACGdseWYmRXoPAAAGQAAAHfhoZWFkE18ayQAAAPwAAAA2aGhlYQboArEAAAE0AAAAJGh +tdHjjERZ8AAAB2AAAAdBsb2NhuOexrgAABVQAAADqbWF4cAC7AEYAAAFYAAAAIG5hbWUr+h5lAAAk +OAAAA6Jwb3N0D3oPTQAAJ9wAAAEKAAEAAAABGhxJDqIhXw889QALA+gAAAAA0Bqf2QAAAADhCh2h/ +2r/LgOxAyAAAAAIAAIAAAAAAAAAAQAAA8r/GgAAA7j/av9qA7EAAQAAAAAAAAAAAAAAAAAAAHQAAQ +AAAHQAQwAFAAAAAAACAAAAAQABAAAAQAAAAAAAAAADAfoBkAAFAAgCigJYAAAASwKKAlgAAAFeADI +BPgAAAAAFAAAAAAAAAAAAAAcAAAAAAAAAAAAAAABVS1dOAEAAIPsCAwL/GgDIA8oA5iAAAJMAAAAA +AhICsgAAACAAAwH0AAAAAAAAAU0AAADYAAAA8gA5AVMAVgJEAEYCRAA1AuQAKQKOAEAAsAArATsAZ +AE7AB4CMABVAkQAUADc/+EBEgAgANwAJQEv//sCRAApAkQAggJEADwCRAAtAkQAIQJEADkCRAArAk +QAMgJEACwCRAAxANwAJQDc/+ECRABnAkQAUAJEAEQB8wAjA1QANgJ/AB0CcwBkArsALwLFAGQCSwB +kAjcAZALGAC8C2gBkAQgAZAIgADcCYQBkAj8AZANiAGQCzgBkAuEALwJWAGQC3QAvAmsAZAJJADQC +ZAAiAqoAXgJuACADuAAaAnEAGQJFABMCTwAuATMAYgEv//sBJwAiAkQAUAH0ADIBLAApAhMAJAJjA +EoCEQAeAmcAHgIlAB4BIgAVAmcAHgJRAEoA7gA+AOn/8wIKAEoA9wBGA1cASgJRAEoCSgAeAmMASg +JnAB4BSgBKAcsAGAE5ABQCUABCAgIAAQMRAAEB4v/6AgEAAQHOABQBLwBAAPoAYAEvACECRABNA0Y +AJAItAHgBKgAcAkQAUAEsAHQAygAgAi0AOQD3ADYA9wAWAaEANgGhABYCbAAlAYMAeAGDADkA6/9q +AhsAFAIKABUB/QAVAAAAAwAAAAMAAAAcAAEAAAAAAKQAAwABAAAAHAAEAIgAAAAeABAAAwAOAH4Aq +QCrALEAtAC3ALsgGSAdICYgOiBEISL7Av//AAAAIACpAKsAsAC0ALcAuyAYIBwgJiA5IEQhIvsB// +//4/+5/7j/tP+y/7D/reBR4E/gR+A14CzfTwVxAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAEGAAABAAAAAAAAAAECAAAAAgAAAAAAAAAAAAAAAAAAAAEAAAMEBQYHCAkKCwwNDg8QERIT +FBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMT +U5PUFFSU1RVVldYWVpbXF1eX2BhAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGQAAA +AAAAAAYnFmAAAAAABlAAAAAAAAAAAAAAAAAAAAAAAAAAAAY2htAAAAAAAAAABrbGlqAAAAAHAAbm9 +ycwBnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmACYAJgAmAD4AUgCCAMoBCgFO +AVwBcgGIAaYBvAHKAdYB6AH2AgwCIAJKAogCpgLWAw4DIgNkA5wDugPUA+gD/AQQBEYEogS8BPoFJ +gVSBWoFgAWwBcoF1gX6BhQGJAZMBmgGiga0BuIHGgdUB2YHkAeiB8AH3AfyCAoIHAgqCDoITghcCG +oIogjSCPoJKglYCXwJwgnqCgIKKApACl4Klgq8CtwLDAs8C1YLjAuyC9oL7gwMDCYMSAxgDKAMrAz +qDQoNTA1mDYQNoA2uDcAN2g3oDfYODA4iDkoOXA5sDnoOnA7EDvwAAAAFAAAAAAH0ArwAAwAGAAkA +DAAPAAAxESERAxMhExcRASELARETAfT6qv6syKr+jgFUqsiqArz9RAGLAP/+1P8B/v3VAP8BLP4CA +P8AAgA5//IAuQKyAAMACwAANyMDMwIyFhQGIiY0oE4MZk84JCQ4JLQB/v3AJDgkJDgAAgBWAeUBPA +LfAAMABwAAEyMnMxcjJzOmRgpagkYKWgHl+vr6AAAAAAIARgAAAf4CsgAbAB8AAAEHMxUjByM3Iwc +jNyM1MzcjNTM3MwczNzMHMxUrAQczAZgdZXEvOi9bLzovWmYdZXEvOi9bLzovWp9bHlsBn4w429vb +2ziMONvb29s4jAAAAAMANf+mAg4DDAAfACYALAAAJRQGBxUjNS4BJzMeARcRLgE0Njc1MxUeARcjJ +icVHgEBFBYXNQ4BExU+ATU0Ag5xWDpgcgRcBz41Xl9oVTpVYwpcC1ttXP6cLTQuM5szOrVRZwlOTQ +ZqVzZECAEAGlukZAlOTQdrUG8O7iNlAQgxNhDlCDj+8/YGOjReAAAAAAUAKf/yArsCvAAHAAsAFQA +dACcAABIyFhQGIiY0EyMBMwQiBhUUFjI2NTQSMhYUBiImNDYiBhUUFjI2NTR5iFBQiFCVVwHAV/5c +OiMjOiPmiFBQiFCxOiMjOiMCvFaSVlaS/ZoCsjIzMC80NC8w/uNWklZWkhozMC80NC8wAAAAAgBA/ +/ICbgLAACIALgAAARUjEQYjIiY1NDY3LgE1NDYzMhcVJiMiBhUUFhcWOwE1MxUFFBYzMjc1IyIHDg +ECbmBcYYOOVkg7R4hsQjY4Q0RNRD4SLDxW/pJUXzksPCkUUk0BgUb+zBVUZ0BkDw5RO1huCkULQzp +COAMBcHDHRz0J/AIHRQAAAAEAKwHlAIUC3wADAAATIycze0YKWgHl+gAAAAABAGT/sAEXAwwACQAA +EzMGEBcjLgE0Nt06dXU6OUBAAwzG/jDGVePs4wAAAAEAHv+wANEDDAAJAAATMx4BFAYHIzYQHjo5Q +EA5OnUDDFXj7ONVxgHQAAAAAQBVAFIB2wHbAA4AAAE3FwcXBycHJzcnNxcnMwEtmxOfcTJjYzJxnx +ObCj4BKD07KYolmZkliik7PbMAAQBQAFUB9AIlAAsAAAEjFSM1IzUzNTMVMwH0tTq1tTq1AR/Kyjj 
+OzgAAAAAB/+H/iACMAGQABAAANwcjNzOMWlFOXVrS3AAAAQAgAP8A8gE3AAMAABMjNTPy0tIA/zgA +AQAl//IApQByAAcAADYyFhQGIiY0STgkJDgkciQ4JCQ4AAAAAf/7/+IBNALQAAMAABcjEzM5Pvs+H +gLuAAAAAAIAKf/yAhsCwAADAAcAABIgECA2IBAgKQHy/g5gATL+zgLA/TJEAkYAAAAAAQCCAAABlg +KyAAgAAAERIxEHNTc2MwGWVr6SIygCsv1OAldxW1sWAAEAPAAAAg4CwAAZAAA3IRUhNRM+ATU0JiM +iDwEjNz4BMzIWFRQGB7kBUv4x+kI2QTt+EAFWAQp8aGVtSl5GRjEA/0RVLzlLmAoKa3FsUkNxXQAA +AAEALf/yAhYCwAAqAAABHgEVFAYjIi8BMxceATMyNjU0KwE1MzI2NTQmIyIGDwEjNz4BMzIWFRQGA +YxBSZJo2RUBVgEHV0JBUaQREUBUQzc5TQcBVgEKfGhfcEMBbxJbQl1x0AoKRkZHPn9GSD80QUVCCg +pfbGBPOlgAAAACACEAAAIkArIACgAPAAAlIxUjNSE1ATMRMyMRBg8BAiRXVv6qAVZWV60dHLCurq4 +rAdn+QgFLMibzAAABADn/8gIZArIAHQAAATIWFRQGIyIvATMXFjMyNjU0JiMiByMTIRUhBzc2ATNv +d5Fl1RQBVgIad0VSTkVhL1IwAYj+vh8rMAHHgGdtgcUKCoFXTU5bYgGRRvAuHQAAAAACACv/8gITA +sAAFwAjAAABMhYVFAYjIhE0NjMyFh8BIycmIyIDNzYTMjY1NCYjIgYVFBYBLmp7imr0l3RZdAgBXA +IYZ5wKJzU6QVNJSz5SUAHSgWltiQFGxcNlVQoKdv7sPiz+ZF1LTmJbU0lhAAAAAQAyAAACGgKyAAY +AAAEVASMBITUCGv6oXAFL/oECsij9dgJsRgAAAAMALP/xAhgCwAAWACAALAAAAR4BFRQGIyImNTQ2 +Ny4BNTQ2MhYVFAYmIgYVFBYyNjU0AzI2NTQmIyIGFRQWAZQ5S5BmbIpPOjA7ecp5P2F8Q0J8RIVJS +0pLTEtOAW0TXTxpZ2ZqPF0SE1A3VWVlVTdQ/UU0N0RENzT9/ko+Ok1NOj1LAAIAMf/yAhkCwAAXAC +MAAAEyERQGIyImLwEzFxYzMhMHBiMiJjU0NhMyNjU0JiMiBhUUFgEl9Jd0WXQIAVwCGGecCic1SWp +7imo+UlBAQVNJAsD+usXDZVUKCnYBFD4sgWltif5kW1NJYV1LTmIAAAACACX/8gClAiAABwAPAAAS +MhYUBiImNBIyFhQGIiY0STgkJDgkJDgkJDgkAiAkOCQkOP52JDgkJDgAAAAC/+H/iAClAiAABwAMA +AASMhYUBiImNBMHIzczSTgkJDgkaFpSTl4CICQ4JCQ4/mba5gAAAQBnAB4B+AH0AAYAAAENARUlNS +UB+P6qAVb+bwGRAbCmpkbJRMkAAAIAUAC7AfQBuwADAAcAAAEhNSERITUhAfT+XAGk/lwBpAGDOP8 +AOAABAEQAHgHVAfQABgAAARUFNS0BNQHV/m8BVv6qAStEyUSmpkYAAAAAAgAj//IB1ALAABgAIAAA +ATIWFRQHDgEHIz4BNz4BNTQmIyIGByM+ARIyFhQGIiY0AQRibmktIAJWBSEqNig+NTlHBFoDezQ4J +CQ4JALAZ1BjaS03JS1DMD5LLDQ/SUVgcv2yJDgkJDgAAAAAAgA2/5gDFgKYADYAQgAAAQMGFRQzMj +Y1NCYjIg4CFRQWMzI2NxcGIyImNTQ+AjMyFhUUBiMiJwcGIyImNTQ2MzIfATcHNzYmIyIGFRQzMjY +Cej8EJjJJlnBAfGQ+oHtAhjUYg5OPx0h2k06Os3xRWQsVLjY5VHtdPBwJETcJDyUoOkZEJz8B0f74 +EQ8kZl6EkTFZjVOLlyknMVm1pmCiaTq4lX6CSCknTVRmmR8wPdYnQzxuSWVGAAIAHQAAAncCsgAHA +AoAACUjByMTMxMjATMDAcj+UVz4dO5d/sjPZPT0ArL9TgE6ATQAAAADAGQAAAJMArIAEAAbACcAAA +EeARUUBgcGKwERMzIXFhUUJRUzMjc2NTQnJiMTPgE1NCcmKwEVMzIBvkdHZkwiNt7LOSGq/oeFHBt +hahIlSTM+cB8Yj5UWAW8QT0VYYgwFArIEF5Fv1eMED2NfDAL93AU+N24PBP0AAAAAAQAv//ICjwLA +ABsAAAEyFh8BIycmIyIGFRQWMzI/ATMHDgEjIiY1NDYBdX+PCwFWAiKiaHx5ZaIiAlYBCpWBk6a0A +sCAagoKpqN/gaOmCgplhcicn8sAAAIAZAAAAp8CsgAMABkAAAEeARUUBgcGKwERMzITPgE1NCYnJi +sBETMyAY59lJp8IzXN0jUVWmdjWRs5d3I4Aq4QqJWUug8EArL9mQ+PeHGHDgX92gAAAAABAGQAAAI +vArIACwAAJRUhESEVIRUhFSEVAi/+NQHB/pUBTf6zRkYCskbwRvAAAAABAGQAAAIlArIACQAAExUh +FSERIxEhFboBQ/69VgHBAmzwRv7KArJGAAAAAAEAL//yAo8CwAAfAAABMxEjNQcGIyImNTQ2MzIWH +wEjJyYjIgYVFBYzMjY1IwGP90wfPnWTprSSf48LAVYCIqJofHllVG+hAU3+s3hARsicn8uAagoKpq +N/gaN1XAAAAAEAZAAAAowCsgALAAABESMRIREjETMRIRECjFb+hFZWAXwCsv1OAS7+0gKy/sQBPAA +AAAABAGQAAAC6ArIAAwAAMyMRM7pWVgKyAAABADf/8gHoArIAEwAAAREUBw4BIyImLwEzFxYzMjc2 +NREB6AIFcGpgbQIBVgIHfXQKAQKy/lYxIltob2EpKYyEFD0BpwAAAAABAGQAAAJ0ArIACwAACQEjA +wcVIxEzEQEzATsBJ3ntQlZWAVVlAWH+nwEnR+ACsv6RAW8AAQBkAAACLwKyAAUAACUVIREzEQIv/j +VWRkYCsv2UAAABAGQAAAMUArIAFAAAAREjETQ3BgcDIwMmJxYVESMRMxsBAxRWAiMxemx8NxsCVo7 +MywKy/U4BY7ZLco7+nAFmoFxLtP6dArL9lwJpAAAAAAEAZAAAAoACsgANAAAhIwEWFREjETMBJjUR +MwKAhP67A1aEAUUDVAJeeov+pwKy/aJ5jAFZAAAAAgAv//ICuwLAAAkAEwAAEiAWFRQGICY1NBIyN +jU0JiIGFRTbATSsrP7MrNrYenrYegLAxaKhxsahov47nIeIm5uIhwACAGQAAAJHArIADgAYAAABHg +EVFAYHBisBESMRMzITNjQnJisBETMyAZRUX2VOHzuAVtY7GlxcGDWIiDUCrgtnVlVpCgT+5gKy/rU +V1BUF/vgAAAACAC//zAK9AsAAEgAcAAAlFhcHJiMiBwYjIiY1NDYgFhUUJRQWMjY1NCYiBgI9PUMx 
+UDcfKh8omqysATSs/dR62Hp62HpICTg7NgkHxqGixcWitbWHnJyHiJubAAIAZAAAAlgCsgAXACMAA +CUWFyMmJyYnJisBESMRMzIXHgEVFAYHFiUzMjc+ATU0JyYrAQIqDCJfGQwNWhAhglbiOx9QXEY1Tv +6bhDATMj1lGSyMtYgtOXR0BwH+1wKyBApbU0BSESRAAgVAOGoQBAABADT/8gIoAsAAJQAAATIWFyM +uASMiBhUUFhceARUUBiMiJiczHgEzMjY1NCYnLgE1NDYBOmd2ClwGS0E6SUNRdW+HZnKKC1wPWkQ9 +Uk1cZGuEAsBwXUJHNjQ3OhIbZVZZbm5kREo+NT5DFRdYUFdrAAAAAAEAIgAAAmQCsgAHAAABIxEjE +SM1IQJk9lb2AkICbP2UAmxGAAEAXv/yAmQCsgAXAAABERQHDgEiJicmNREzERQXHgEyNjc2NRECZA +IIgfCBCAJWAgZYmlgGAgKy/k0qFFxzc1wUKgGz/lUrEkRQUEQSKwGrAAAAAAEAIAAAAnoCsgAGAAA +hIwMzGwEzAYJ07l3N1FwCsv2PAnEAAAEAGgAAA7ECsgAMAAABAyMLASMDMxsBMxsBA7HAcZyicrZi +kaB0nJkCsv1OAlP9rQKy/ZsCW/2kAmYAAAEAGQAAAm8CsgALAAAhCwEjEwMzGwEzAxMCCsrEY/bkY +re+Y/D6AST+3AFcAVb+5gEa/q3+oQAAAQATAAACUQKyAAgAAAERIxEDMxsBMwFdVvRjwLphARD+8A +EQAaL+sQFPAAABAC4AAAI5ArIACQAAJRUhNQEhNSEVAQI5/fUBof57Aen+YUZGQgIqRkX92QAAAAA +BAGL/sAEFAwwABwAAARUjETMVIxEBBWlpowMMOP0UOANcAAAB//v/4gE0AtAAAwAABSMDMwE0Pvs+ +HgLuAAAAAQAi/7AAxQMMAAcAABcjNTMRIzUzxaNpaaNQOALsOAABAFAA1wH0AmgABgAAJQsBIxMzE +wGwjY1GsESw1wFZ/qcBkf5vAAAAAQAy/6oBwv/iAAMAAAUhNSEBwv5wAZBWOAAAAAEAKQJEALYCsg +ADAAATIycztjhVUAJEbgAAAAACACT/8gHQAiAAHQAlAAAhJwcGIyImNTQ2OwE1NCcmIyIHIz4BMzI +XFh0BFBcnMjY9ASYVFAF6CR0wVUtgkJoiAgdgaQlaBm1Zrg4DCuQ9R+5MOSFQR1tbDiwUUXBUXowf +J8c9SjRORzYSgVwAAAAAAgBK//ICRQLfABEAHgAAATIWFRQGIyImLwEVIxEzETc2EzI2NTQmIyIGH +QEUFgFUcYCVbiNJEyNWVigySElcU01JXmECIJd4i5QTEDRJAt/+3jkq/hRuZV55ZWsdX14AAQAe// +IB9wIgABgAAAEyFhcjJiMiBhUUFjMyNjczDgEjIiY1NDYBF152DFocbEJXU0A1Rw1aE3pbaoKQAiB +oWH5qZm1tPDlaXYuLgZcAAAACAB7/8gIZAt8AEQAeAAABESM1BwYjIiY1NDYzMhYfAREDMjY9ATQm +IyIGFRQWAhlWKDJacYCVbiNJEyOnSV5hQUlcUwLf/SFVOSqXeIuUExA0ARb9VWVrHV9ebmVeeQACA +B7/8gH9AiAAFQAbAAABFAchHgEzMjY3Mw4BIyImNTQ2MzIWJyIGByEmAf0C/oAGUkA1SwlaD4FXbI +WObmt45UBVBwEqDQEYFhNjWD84W16Oh3+akU9aU60AAAEAFQAAARoC8gAWAAATBh0BMxUjESMRIzU +zNTQ3PgEzMhcVJqcDbW1WOTkDB0k8Hx5oAngVITRC/jQBzEIsJRs5PwVHEwAAAAIAHv8uAhkCIAAi +AC8AAAERFAcOASMiLwEzFx4BMzI2NzY9AQcGIyImNTQ2MzIWHwE1AzI2PQE0JiMiBhUUFgIZAQSEd +NwRAVcBBU5DTlUDASgyWnGAlW4jSRMjp0leYUFJXFMCEv5wSh1zeq8KCTI8VU0ZIQk5Kpd4i5QTED +RJ/iJlax1fXm5lXnkAAQBKAAACCgLkABcAAAEWFREjETQnLgEHDgEdASMRMxE3NjMyFgIIAlYCBDs +6RVRWViE5UVViAYUbQP7WASQxGzI7AQJyf+kC5P7TPSxUAAACAD4AAACsAsAABwALAAASMhYUBiIm +NBMjETNeLiAgLiBiVlYCwCAuICAu/WACEgAC//P/LgCnAsAABwAVAAASMhYUBiImNBcRFAcGIyInN +RY3NjURWS4gIC4gYgMLcRwNSgYCAsAgLiAgLo79wCUbZAJGBzMOHgJEAAAAAQBKAAACCALfAAsAAC +EnBxUjETMREzMHEwGTwTJWVvdu9/rgN6kC3/4oAQv6/ugAAQBG//wA3gLfAA8AABMRFBceATcVBiM +iJicmNRGcAQIcIxkkKi4CAQLf/bkhERoSBD4EJC8SNAJKAAAAAQBKAAADEAIgACQAAAEWFREjETQn +JiMiFREjETQnJiMiFREjETMVNzYzMhYXNzYzMhYDCwVWBAxedFYEDF50VlYiJko7ThAvJkpEVAGfI +jn+vAEcQyRZ1v76ARxDJFnW/voCEk08HzYtRB9HAAAAAAEASgAAAgoCIAAWAAABFhURIxE0JyYjIg +YdASMRMxU3NjMyFgIIAlYCCXBEVVZWITlRVWIBhRtA/tYBJDEbbHR/6QISWz0sVAAAAAACAB7/8gI +sAiAABwARAAASIBYUBiAmNBIyNjU0JiIGFRSlAQCHh/8Ah7ieWlqeWgIgn/Cfn/D+s3ZfYHV1YF8A +AgBK/zwCRQIgABEAHgAAATIWFRQGIyImLwERIxEzFTc2EzI2NTQmIyIGHQEUFgFUcYCVbiNJEyNWV +igySElcU01JXmECIJd4i5QTEDT+8wLWVTkq/hRuZV55ZWsdX14AAgAe/zwCGQIgABEAHgAAAREjEQ +cGIyImNTQ2MzIWHwE1AzI2PQE0JiMiBhUUFgIZVigyWnGAlW4jSRMjp0leYUFJXFMCEv0qARk5Kpd +4i5QTEDRJ/iJlax1fXm5lXnkAAQBKAAABPgIeAA0AAAEyFxUmBhURIxEzFTc2ARoWDkdXVlYwIwIe +B0EFVlf+0gISU0cYAAEAGP/yAa0CIAAjAAATMhYXIyYjIgYVFBYXHgEVFAYjIiYnMxYzMjY1NCYnL +gE1NDbkV2MJWhNdKy04PF1XbVhWbgxaE2ktOjlEUllkAiBaS2MrJCUoEBlPQkhOVFZoKCUmLhIWSE +BIUwAAAAEAFP/4ARQCiQAXAAATERQXHgE3FQYjIiYnJjURIzUzNTMVMxWxAQMmMx8qMjMEAUdHVmM +BzP7PGw4mFgY/BSwxDjQBNUJ7e0IAAAABAEL/8gICAhIAFwAAAREjNQcGIyImJyY1ETMRFBceATMy +Nj0BAgJWITlRT2EKBVYEBkA1RFECEv3uWj4qTToiOQE+/tIlJC43c4DpAAAAAAEAAQAAAfwCEgAGA 
+AABAyMDMxsBAfzJaclfop8CEv3uAhL+LQHTAAABAAEAAAMLAhIADAAAAQMjCwEjAzMbATMbAQMLqW +Z2dmapY3t0a3Z7AhL97gG+/kICEv5AAcD+QwG9AAAB//oAAAHWAhIACwAAARMjJwcjEwMzFzczARq +8ZIuKY763ZoWFYwEO/vLV1QEMAQbNzQAAAQAB/y4B+wISABEAAAEDDgEjIic1FjMyNj8BAzMbAQH7 +2iFZQB8NDRIpNhQH02GenQIS/cFVUAJGASozEwIt/i4B0gABABQAAAGxAg4ACQAAJRUhNQEhNSEVA +QGx/mMBNP7iAYL+zkREQgGIREX+ewAAAAABAED/sAEOAwwALAAAASMiBhUUFxYVFAYHHgEVFAcGFR +QWOwEVIyImNTQ3NjU0JzU2NTQnJjU0NjsBAQ4MKiMLDS4pKS4NCyMqDAtERAwLUlILDERECwLUGBk +WTlsgKzUFBTcrIFtOFhkYOC87GFVMIkUIOAhFIkxVGDsvAAAAAAEAYP84AJoDIAADAAAXIxEzmjo6 +yAPoAAEAIf+wAO8DDAAsAAATFQYVFBcWFRQGKwE1MzI2NTQnJjU0NjcuATU0NzY1NCYrATUzMhYVF +AcGFRTvUgsMREQLDCojCw0uKSkuDQsjKgwLREQMCwF6OAhFIkxVGDsvOBgZFk5bICs1BQU3KyBbTh +YZGDgvOxhVTCJFAAABAE0A3wH2AWQAEwAAATMUIyImJyYjIhUjNDMyFhcWMzIBvjhuGywtQR0xOG4 +bLC1BHTEBZIURGCNMhREYIwAAAwAk/94DIgLoAAcAEQApAAAAIBYQBiAmECQgBhUUFiA2NTQlMhYX +IyYjIgYUFjMyNjczDgEjIiY1NDYBAQFE3d3+vN0CB/7wubkBELn+xVBnD1wSWDo+QTcqOQZcEmZWX +HN2Aujg/rbg4AFKpr+Mjb6+jYxbWEldV5ZZNShLVn5na34AAgB4AFIB9AGeAAUACwAAAQcXIyc3Mw +cXIyc3AUqJiUmJifOJiUmJiQGepqampqampqYAAAIAHAHSAQ4CwAAHAA8AABIyFhQGIiY0NiIGFBY +yNjRgakREakSTNCEhNCECwEJqQkJqCiM4IyM4AAAAAAIAUAAAAfQCCwALAA8AAAEzFSMVIzUjNTM1 +MxMhNSEBP7W1OrW1OrX+XAGkAVs4tLQ4sP31OAAAAQB0AkQBAQKyAAMAABMjNzOsOD1QAkRuAAAAA +AEAIADsAKoBdgAHAAASMhYUBiImNEg6KCg6KAF2KDooKDoAAAIAOQBSAbUBngAFAAsAACUHIzcnMw +UHIzcnMwELiUmJiUkBM4lJiYlJ+KampqampqYAAAABADYB5QDhAt8ABAAAEzczByM2Xk1OXQHv8Po +AAQAWAeUAwQLfAAQAABMHIzczwV5NTl0C1fD6AAIANgHlAYsC3wAEAAkAABM3MwcjPwEzByM2Xk1O +XapeTU5dAe/w+grw+gAAAgAWAeUBawLfAAQACQAAEwcjNzMXByM3M8FeTU5dql5NTl0C1fD6CvD6A +AADACX/8gI1AHIABwAPABcAADYyFhQGIiY0NjIWFAYiJjQ2MhYUBiImNEk4JCQ4JOw4JCQ4JOw4JC +Q4JHIkOCQkOCQkOCQkOCQkOCQkOAAAAAEAeABSAUoBngAFAAABBxcjJzcBSomJSYmJAZ6mpqamAAA +AAAEAOQBSAQsBngAFAAAlByM3JzMBC4lJiYlJ+KampgAAAf9qAAABgQKyAAMAACsBATM/VwHAVwKy +AAAAAAIAFAHIAdwClAAHABQAABMVIxUjNSM1BRUjNwcjJxcjNTMXN9pKMkoByDICKzQqATJLKysCl +CmjoykBy46KiY3Lm5sAAQAVAAABvALyABgAAAERIxEjESMRIzUzNTQ3NjMyFxUmBgcGHQEBvFbCVj +k5AxHHHx5iVgcDAg798gHM/jQBzEIOJRuWBUcIJDAVIRYAAAABABX//AHkAvIAJQAAJR4BNxUGIyI +mJyY1ESYjIgcGHQEzFSMRIxEjNTM1NDc2MzIXERQBowIcIxkkKi4CAR4nXgwDbW1WLy8DEbNdOmYa +EQQ/BCQvEjQCFQZWFSEWQv40AcxCDiUblhP9uSEAAAAAAAAWAQ4AAQAAAAAAAAATACgAAQAAAAAAA +QAHAEwAAQAAAAAAAgAHAGQAAQAAAAAAAwAaAKIAAQAAAAAABAAHAM0AAQAAAAAABQA8AU8AAQAAAA +AABgAPAawAAQAAAAAACAALAdQAAQAAAAAACQALAfgAAQAAAAAACwAXAjQAAQAAAAAADAAXAnwAAwA +BBAkAAAAmAAAAAwABBAkAAQAOADwAAwABBAkAAgAOAFQAAwABBAkAAwA0AGwAAwABBAkABAAOAL0A +AwABBAkABQB4ANUAAwABBAkABgAeAYwAAwABBAkACAAWAbwAAwABBAkACQAWAeAAAwABBAkACwAuA +gQAAwABBAkADAAuAkwATgBvACAAUgBpAGcAaAB0AHMAIABSAGUAcwBlAHIAdgBlAGQALgAATm8gUm +lnaHRzIFJlc2VydmVkLgAAQQBpAGwAZQByAG8AbgAAQWlsZXJvbgAAUgBlAGcAdQBsAGEAcgAAUmV +ndWxhcgAAMQAuADEAMAAyADsAVQBLAFcATgA7AEEAaQBsAGUAcgBvAG4ALQBSAGUAZwB1AGwAYQBy +AAAxLjEwMjtVS1dOO0FpbGVyb24tUmVndWxhcgAAQQBpAGwAZQByAG8AbgAAQWlsZXJvbgAAVgBlA +HIAcwBpAG8AbgAgADEALgAxADAAMgA7AFAAUwAgADAAMAAxAC4AMQAwADIAOwBoAG8AdABjAG8Abg +B2ACAAMQAuADAALgA3ADAAOwBtAGEAawBlAG8AdABmAC4AbABpAGIAMgAuADUALgA1ADgAMwAyADk +AAFZlcnNpb24gMS4xMDI7UFMgMDAxLjEwMjtob3Rjb252IDEuMC43MDttYWtlb3RmLmxpYjIuNS41 +ODMyOQAAQQBpAGwAZQByAG8AbgAtAFIAZQBnAHUAbABhAHIAAEFpbGVyb24tUmVndWxhcgAAUwBvA +HIAYQAgAFMAYQBnAGEAbgBvAABTb3JhIFNhZ2FubwAAUwBvAHIAYQAgAFMAYQBnAGEAbgBvAABTb3 +JhIFNhZ2FubwAAaAB0AHQAcAA6AC8ALwB3AHcAdwAuAGQAbwB0AGMAbwBsAG8AbgAuAG4AZQB0AAB +odHRwOi8vd3d3LmRvdGNvbG9uLm5ldAAAaAB0AHQAcAA6AC8ALwB3AHcAdwAuAGQAbwB0AGMAbwBs +AG8AbgAuAG4AZQB0AABodHRwOi8vd3d3LmRvdGNvbG9uLm5ldAAAAAACAAAAAAAA/4MAMgAAAAAAA +AAAAAAAAAAAAAAAAAAAAHQAAAABAAIAAwAEAAUABgAHAAgACQAKAAsADAANAA4ADwAQABEAEgATAB 
+QAFQAWABcAGAAZABoAGwAcAB0AHgAfACAAIQAiACMAJAAlACYAJwAoACkAKgArACwALQAuAC8AMAA +xADIAMwA0ADUANgA3ADgAOQA6ADsAPAA9AD4APwBAAEEAQgBDAEQARQBGAEcASABJAEoASwBMAE0A +TgBPAFAAUQBSAFMAVABVAFYAVwBYAFkAWgBbAFwAXQBeAF8AYABhAIsAqQCDAJMAjQDDAKoAtgC3A +LQAtQCrAL4AvwC8AIwAwADBAAAAAAAB//8AAgABAAAADAAAABwAAAACAAIAAwBxAAEAcgBzAAIABA +AAAAIAAAABAAAACgBMAGYAAkRGTFQADmxhdG4AGgAEAAAAAP//AAEAAAAWAANDQVQgAB5NT0wgABZ +ST00gABYAAP//AAEAAAAA//8AAgAAAAEAAmxpZ2EADmxvY2wAFAAAAAEAAQAAAAEAAAACAAYAEAAG +AAAAAgASADQABAAAAAEATAADAAAAAgAQABYAAQAcAAAAAQABAE8AAQABAGcAAQABAE8AAwAAAAIAE +AAWAAEAHAAAAAEAAQAvAAEAAQBnAAEAAQAvAAEAGgABAAgAAgAGAAwAcwACAE8AcgACAEwAAQABAE +kAAAABAAAACgBGAGAAAkRGTFQADmxhdG4AHAAEAAAAAP//AAIAAAABABYAA0NBVCAAFk1PTCAAFlJ +PTSAAFgAA//8AAgAAAAEAAmNwc3AADmtlcm4AFAAAAAEAAAAAAAEAAQACAAYADgABAAAAAQASAAIA +AAACAB4ANgABAAoABQAFAAoAAgABACQAPQAAAAEAEgAEAAAAAQAMAAEAOP/nAAEAAQAkAAIGigAEA +AAFJAXKABoAGQAA//gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAD/sv+4/+z/7v/MAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAD/xAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/9T/6AAAAAD/8QAA +ABD/vQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/7gAAAAAAAAAAAAAAAAAA//MAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABIAAAAAAAAAAP/5AAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/gAAD/4AAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//L/9AAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAA/+gAAAAAAAkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/zAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/mAAAAAAAAAAAAAAAAAAD +/4gAA//AAAAAA//YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/+AAAAAAAAP/OAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/zv/qAAAAAP/0AAAACAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/ZAAD/egAA/1kAAAAA/5D/rgAAAAAAAAAAAA +AAAAAAAAAAAAAAAAD/9AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAD/8AAA/7b/8P+wAAD/8P/E/98AAAAA/8P/+P/0//oAAAAAAAAAAAAA//gA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/w//C/9MAAP/SAAD/9wAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/yAAA/+kAAAAA//QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/9wAAAAD//QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAP/2AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAP/cAAAAAAAAAAAAAAAA/7YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAP/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/6AAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAkAFAAEAAAAAQACwAAABcA +BgAAAAAAAAAIAA4AAAAAAAsAEgAAAAAAAAATABkAAwANAAAAAQAJAAAAAAAAAAAAAAAAAAAAGAAAA +AAABwAAAAAAAAAAAAAAFQAFAAAAAAAYABgAAAAUAAAACgAAAAwAAgAPABEAFgAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFAAEAEQBdAAYAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAcAAAAAAAAABwAAAAAACAAAAAAAAAAAAAcAAAAHAAAAEwAJ +ABUADgAPAAAACwAQAAAAAAAAAAAAAAAAAAUAGAACAAIAAgAAAAIAGAAXAAAAGAAAABYAFgACABYAA +gAWAAAAEQADAAoAFAAMAA0ABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASAAAAEgAGAAEAHgAkAC +YAJwApACoALQAuAC8AMgAzADcAOAA5ADoAPAA9AEUASABOAE8AUgBTAFUAVwBZAFoAWwBcAF0AcwA +AAAAAAQAAAADa3tfFAAAAANAan9kAAAAA4QodoQ== +""" + ) + ), + 10 if size is None else size, + layout_engine=Layout.BASIC, + ) + return load_default_imagefont() diff --git a/MLPY/Lib/site-packages/PIL/ImageGrab.py b/MLPY/Lib/site-packages/PIL/ImageGrab.py new file mode 100644 index 
0000000000000000000000000000000000000000..c3f769e849d299e73b94ea28f057e1d2f417b640 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageGrab.py @@ -0,0 +1,194 @@ +# +# The Python Imaging Library +# $Id$ +# +# screen grabber +# +# History: +# 2001-04-26 fl created +# 2001-09-17 fl use builtin driver, if present +# 2002-11-19 fl added grabclipboard support +# +# Copyright (c) 2001-2002 by Secret Labs AB +# Copyright (c) 2001-2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import io +import os +import shutil +import subprocess +import sys +import tempfile + +from . import Image + + +def grab( + bbox: tuple[int, int, int, int] | None = None, + include_layered_windows: bool = False, + all_screens: bool = False, + xdisplay: str | None = None, +) -> Image.Image: + im: Image.Image + if xdisplay is None: + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp(".png") + os.close(fh) + args = ["screencapture"] + if bbox: + left, top, right, bottom = bbox + args += ["-R", f"{left},{top},{right-left},{bottom-top}"] + subprocess.call(args + ["-x", filepath]) + im = Image.open(filepath) + im.load() + os.unlink(filepath) + if bbox: + im_resized = im.resize((right - left, bottom - top)) + im.close() + return im_resized + return im + elif sys.platform == "win32": + offset, size, data = Image.core.grabscreen_win32( + include_layered_windows, all_screens + ) + im = Image.frombytes( + "RGB", + size, + data, + # RGB, 32-bit line padding, origin lower left corner + "raw", + "BGR", + (size[0] * 3 + 3) & -4, + -1, + ) + if bbox: + x0, y0 = offset + left, top, right, bottom = bbox + im = im.crop((left - x0, top - y0, right - x0, bottom - y0)) + return im + # Cast to Optional[str] needed for Windows and macOS. + display_name: str | None = xdisplay + try: + if not Image.core.HAVE_XCB: + msg = "Pillow was built without XCB support" + raise OSError(msg) + size, data = Image.core.grabscreen_x11(display_name) + except OSError: + if ( + display_name is None + and sys.platform not in ("darwin", "win32") + and shutil.which("gnome-screenshot") + ): + fh, filepath = tempfile.mkstemp(".png") + os.close(fh) + subprocess.call(["gnome-screenshot", "-f", filepath]) + im = Image.open(filepath) + im.load() + os.unlink(filepath) + if bbox: + im_cropped = im.crop(bbox) + im.close() + return im_cropped + return im + else: + raise + else: + im = Image.frombytes("RGB", size, data, "raw", "BGRX", size[0] * 4, 1) + if bbox: + im = im.crop(bbox) + return im + + +def grabclipboard() -> Image.Image | list[str] | None: + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp(".png") + os.close(fh) + commands = [ + 'set theFile to (open for access POSIX file "' + + filepath + + '" with write permission)', + "try", + " write (the clipboard as «class PNGf») to theFile", + "end try", + "close access theFile", + ] + script = ["osascript"] + for command in commands: + script += ["-e", command] + subprocess.call(script) + + im = None + if os.stat(filepath).st_size != 0: + im = Image.open(filepath) + im.load() + os.unlink(filepath) + return im + elif sys.platform == "win32": + fmt, data = Image.core.grabclipboard_win32() + if fmt == "file": # CF_HDROP + import struct + + o = struct.unpack_from("I", data)[0] + if data[16] != 0: + files = data[o:].decode("utf-16le").split("\0") + else: + files = data[o:].decode("mbcs").split("\0") + return files[: files.index("")] + if isinstance(data, bytes): + data = io.BytesIO(data) + if fmt == "png": + from . 
import PngImagePlugin + + return PngImagePlugin.PngImageFile(data) + elif fmt == "DIB": + from . import BmpImagePlugin + + return BmpImagePlugin.DibImageFile(data) + return None + else: + if os.getenv("WAYLAND_DISPLAY"): + session_type = "wayland" + elif os.getenv("DISPLAY"): + session_type = "x11" + else: # Session type check failed + session_type = None + + if shutil.which("wl-paste") and session_type in ("wayland", None): + args = ["wl-paste", "-t", "image"] + elif shutil.which("xclip") and session_type in ("x11", None): + args = ["xclip", "-selection", "clipboard", "-t", "image/png", "-o"] + else: + msg = "wl-paste or xclip is required for ImageGrab.grabclipboard() on Linux" + raise NotImplementedError(msg) + + p = subprocess.run(args, capture_output=True) + if p.returncode != 0: + err = p.stderr + for silent_error in [ + # wl-paste, when the clipboard is empty + b"Nothing is copied", + # Ubuntu/Debian wl-paste, when the clipboard is empty + b"No selection", + # Ubuntu/Debian wl-paste, when an image isn't available + b"No suitable type of content copied", + # wl-paste or Ubuntu/Debian xclip, when an image isn't available + b" not available", + # xclip, when an image isn't available + b"cannot convert ", + # xclip, when the clipboard isn't initialized + b"xclip: Error: There is no owner for the ", + ]: + if silent_error in err: + return None + msg = f"{args[0]} error" + if err: + msg += f": {err.strip().decode()}" + raise ChildProcessError(msg) + + data = io.BytesIO(p.stdout) + im = Image.open(data) + im.load() + return im diff --git a/MLPY/Lib/site-packages/PIL/ImageMath.py b/MLPY/Lib/site-packages/PIL/ImageMath.py new file mode 100644 index 0000000000000000000000000000000000000000..75fce77b03161e44e7040db4cc18678262bbe781 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageMath.py @@ -0,0 +1,357 @@ +# +# The Python Imaging Library +# $Id$ +# +# a simple math add-on for the Python Imaging Library +# +# History: +# 1999-02-15 fl Original PIL Plus release +# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6 +# 2005-09-12 fl Fixed int() and float() for Python 2.4.1 +# +# Copyright (c) 1999-2005 by Secret Labs AB +# Copyright (c) 2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import builtins +from types import CodeType +from typing import Any, Callable + +from . import Image, _imagingmath +from ._deprecate import deprecate + + +class _Operand: + """Wraps an image operand, providing standard operators""" + + def __init__(self, im: Image.Image): + self.im = im + + def __fixup(self, im1: _Operand | float) -> Image.Image: + # convert image to suitable mode + if isinstance(im1, _Operand): + # argument was an image. 
+ if im1.im.mode in ("1", "L"): + return im1.im.convert("I") + elif im1.im.mode in ("I", "F"): + return im1.im + else: + msg = f"unsupported mode: {im1.im.mode}" + raise ValueError(msg) + else: + # argument was a constant + if isinstance(im1, (int, float)) and self.im.mode in ("1", "L", "I"): + return Image.new("I", self.im.size, im1) + else: + return Image.new("F", self.im.size, im1) + + def apply( + self, + op: str, + im1: _Operand | float, + im2: _Operand | float | None = None, + mode: str | None = None, + ) -> _Operand: + im_1 = self.__fixup(im1) + if im2 is None: + # unary operation + out = Image.new(mode or im_1.mode, im_1.size, None) + im_1.load() + try: + op = getattr(_imagingmath, f"{op}_{im_1.mode}") + except AttributeError as e: + msg = f"bad operand type for '{op}'" + raise TypeError(msg) from e + _imagingmath.unop(op, out.im.id, im_1.im.id) + else: + # binary operation + im_2 = self.__fixup(im2) + if im_1.mode != im_2.mode: + # convert both arguments to floating point + if im_1.mode != "F": + im_1 = im_1.convert("F") + if im_2.mode != "F": + im_2 = im_2.convert("F") + if im_1.size != im_2.size: + # crop both arguments to a common size + size = ( + min(im_1.size[0], im_2.size[0]), + min(im_1.size[1], im_2.size[1]), + ) + if im_1.size != size: + im_1 = im_1.crop((0, 0) + size) + if im_2.size != size: + im_2 = im_2.crop((0, 0) + size) + out = Image.new(mode or im_1.mode, im_1.size, None) + im_1.load() + im_2.load() + try: + op = getattr(_imagingmath, f"{op}_{im_1.mode}") + except AttributeError as e: + msg = f"bad operand type for '{op}'" + raise TypeError(msg) from e + _imagingmath.binop(op, out.im.id, im_1.im.id, im_2.im.id) + return _Operand(out) + + # unary operators + def __bool__(self) -> bool: + # an image is "true" if it contains at least one non-zero pixel + return self.im.getbbox() is not None + + def __abs__(self) -> _Operand: + return self.apply("abs", self) + + def __pos__(self) -> _Operand: + return self + + def __neg__(self) -> _Operand: + return self.apply("neg", self) + + # binary operators + def __add__(self, other: _Operand | float) -> _Operand: + return self.apply("add", self, other) + + def __radd__(self, other: _Operand | float) -> _Operand: + return self.apply("add", other, self) + + def __sub__(self, other: _Operand | float) -> _Operand: + return self.apply("sub", self, other) + + def __rsub__(self, other: _Operand | float) -> _Operand: + return self.apply("sub", other, self) + + def __mul__(self, other: _Operand | float) -> _Operand: + return self.apply("mul", self, other) + + def __rmul__(self, other: _Operand | float) -> _Operand: + return self.apply("mul", other, self) + + def __truediv__(self, other: _Operand | float) -> _Operand: + return self.apply("div", self, other) + + def __rtruediv__(self, other: _Operand | float) -> _Operand: + return self.apply("div", other, self) + + def __mod__(self, other: _Operand | float) -> _Operand: + return self.apply("mod", self, other) + + def __rmod__(self, other: _Operand | float) -> _Operand: + return self.apply("mod", other, self) + + def __pow__(self, other: _Operand | float) -> _Operand: + return self.apply("pow", self, other) + + def __rpow__(self, other: _Operand | float) -> _Operand: + return self.apply("pow", other, self) + + # bitwise + def __invert__(self) -> _Operand: + return self.apply("invert", self) + + def __and__(self, other: _Operand | float) -> _Operand: + return self.apply("and", self, other) + + def __rand__(self, other: _Operand | float) -> _Operand: + return self.apply("and", other, self) + 
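Editor's usage sketch (not part of the vendored PIL source): the operator overloads on _Operand above and below are what ImageMath expressions ultimately call; this minimal example exercises them through lambda_eval, which is defined later in this file. "a.png" and "b.png" are placeholder single-band inputs.

from PIL import Image, ImageMath

a = Image.open("a.png").convert("L")
b = Image.open("b.png").convert("L")

# Each keyword argument is wrapped in _Operand, so "+" and "/" below dispatch
# to apply("add", ...) and apply("div", ...) from this class.
out = ImageMath.lambda_eval(
    lambda args: args["convert"]((args["a"] + args["b"]) / 2, "L"),
    a=a,
    b=b,
)
out.save("mean.png")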
+ def __or__(self, other: _Operand | float) -> _Operand: + return self.apply("or", self, other) + + def __ror__(self, other: _Operand | float) -> _Operand: + return self.apply("or", other, self) + + def __xor__(self, other: _Operand | float) -> _Operand: + return self.apply("xor", self, other) + + def __rxor__(self, other: _Operand | float) -> _Operand: + return self.apply("xor", other, self) + + def __lshift__(self, other: _Operand | float) -> _Operand: + return self.apply("lshift", self, other) + + def __rshift__(self, other: _Operand | float) -> _Operand: + return self.apply("rshift", self, other) + + # logical + def __eq__(self, other): + return self.apply("eq", self, other) + + def __ne__(self, other): + return self.apply("ne", self, other) + + def __lt__(self, other: _Operand | float) -> _Operand: + return self.apply("lt", self, other) + + def __le__(self, other: _Operand | float) -> _Operand: + return self.apply("le", self, other) + + def __gt__(self, other: _Operand | float) -> _Operand: + return self.apply("gt", self, other) + + def __ge__(self, other: _Operand | float) -> _Operand: + return self.apply("ge", self, other) + + +# conversions +def imagemath_int(self: _Operand) -> _Operand: + return _Operand(self.im.convert("I")) + + +def imagemath_float(self: _Operand) -> _Operand: + return _Operand(self.im.convert("F")) + + +# logical +def imagemath_equal(self: _Operand, other: _Operand | float | None) -> _Operand: + return self.apply("eq", self, other, mode="I") + + +def imagemath_notequal(self: _Operand, other: _Operand | float | None) -> _Operand: + return self.apply("ne", self, other, mode="I") + + +def imagemath_min(self: _Operand, other: _Operand | float | None) -> _Operand: + return self.apply("min", self, other) + + +def imagemath_max(self: _Operand, other: _Operand | float | None) -> _Operand: + return self.apply("max", self, other) + + +def imagemath_convert(self: _Operand, mode: str) -> _Operand: + return _Operand(self.im.convert(mode)) + + +ops = { + "int": imagemath_int, + "float": imagemath_float, + "equal": imagemath_equal, + "notequal": imagemath_notequal, + "min": imagemath_min, + "max": imagemath_max, + "convert": imagemath_convert, +} + + +def lambda_eval( + expression: Callable[[dict[str, Any]], Any], + options: dict[str, Any] = {}, + **kw: Any, +) -> Any: + """ + Returns the result of an image function. + + :py:mod:`~PIL.ImageMath` only supports single-layer images. To process multi-band + images, use the :py:meth:`~PIL.Image.Image.split` method or + :py:func:`~PIL.Image.merge` function. + + :param expression: A function that receives a dictionary. + :param options: Values to add to the function's dictionary. You + can either use a dictionary, or one or more keyword + arguments. + :return: The expression result. This is usually an image object, but can + also be an integer, a floating point value, or a pixel tuple, + depending on the expression. + """ + + args: dict[str, Any] = ops.copy() + args.update(options) + args.update(kw) + for k, v in args.items(): + if hasattr(v, "im"): + args[k] = _Operand(v) + + out = expression(args) + try: + return out.im + except AttributeError: + return out + + +def unsafe_eval( + expression: str, + options: dict[str, Any] = {}, + **kw: Any, +) -> Any: + """ + Evaluates an image expression. This uses Python's ``eval()`` function to process + the expression string, and carries the security risks of doing so. It is not + recommended to process expressions without considering this. 
+ :py:meth:`~lambda_eval` is a more secure alternative. + + :py:mod:`~PIL.ImageMath` only supports single-layer images. To process multi-band + images, use the :py:meth:`~PIL.Image.Image.split` method or + :py:func:`~PIL.Image.merge` function. + + :param expression: A string containing a Python-style expression. + :param options: Values to add to the evaluation context. You + can either use a dictionary, or one or more keyword + arguments. + :return: The evaluated expression. This is usually an image object, but can + also be an integer, a floating point value, or a pixel tuple, + depending on the expression. + """ + + # build execution namespace + args: dict[str, Any] = ops.copy() + for k in list(options.keys()) + list(kw.keys()): + if "__" in k or hasattr(builtins, k): + msg = f"'{k}' not allowed" + raise ValueError(msg) + + args.update(options) + args.update(kw) + for k, v in args.items(): + if hasattr(v, "im"): + args[k] = _Operand(v) + + compiled_code = compile(expression, "", "eval") + + def scan(code: CodeType) -> None: + for const in code.co_consts: + if type(const) is type(compiled_code): + scan(const) + + for name in code.co_names: + if name not in args and name != "abs": + msg = f"'{name}' not allowed" + raise ValueError(msg) + + scan(compiled_code) + out = builtins.eval(expression, {"__builtins": {"abs": abs}}, args) + try: + return out.im + except AttributeError: + return out + + +def eval( + expression: str, + _dict: dict[str, Any] = {}, + **kw: Any, +) -> Any: + """ + Evaluates an image expression. + + Deprecated. Use lambda_eval() or unsafe_eval() instead. + + :param expression: A string containing a Python-style expression. + :param _dict: Values to add to the evaluation context. You + can either use a dictionary, or one or more keyword + arguments. + :return: The evaluated expression. This is usually an image object, but can + also be an integer, a floating point value, or a pixel tuple, + depending on the expression. + + .. deprecated:: 10.3.0 + """ + + deprecate( + "ImageMath.eval", + 12, + "ImageMath.lambda_eval or ImageMath.unsafe_eval", + ) + return unsafe_eval(expression, _dict, **kw) diff --git a/MLPY/Lib/site-packages/PIL/ImageMode.py b/MLPY/Lib/site-packages/PIL/ImageMode.py new file mode 100644 index 0000000000000000000000000000000000000000..21c041befe95057f5dcbde1af9ff6a1011211b3f --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageMode.py @@ -0,0 +1,92 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard mode descriptors +# +# History: +# 2006-03-20 fl Added +# +# Copyright (c) 2006 by Secret Labs AB. +# Copyright (c) 2006 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import sys +from functools import lru_cache +from typing import NamedTuple + +from ._deprecate import deprecate + + +class ModeDescriptor(NamedTuple): + """Wrapper for mode strings.""" + + mode: str + bands: tuple[str, ...] 
+ basemode: str + basetype: str + typestr: str + + def __str__(self) -> str: + return self.mode + + +@lru_cache +def getmode(mode: str) -> ModeDescriptor: + """Gets a mode descriptor for the given mode.""" + endian = "<" if sys.byteorder == "little" else ">" + + modes = { + # core modes + # Bits need to be extended to bytes + "1": ("L", "L", ("1",), "|b1"), + "L": ("L", "L", ("L",), "|u1"), + "I": ("L", "I", ("I",), f"{endian}i4"), + "F": ("L", "F", ("F",), f"{endian}f4"), + "P": ("P", "L", ("P",), "|u1"), + "RGB": ("RGB", "L", ("R", "G", "B"), "|u1"), + "RGBX": ("RGB", "L", ("R", "G", "B", "X"), "|u1"), + "RGBA": ("RGB", "L", ("R", "G", "B", "A"), "|u1"), + "CMYK": ("RGB", "L", ("C", "M", "Y", "K"), "|u1"), + "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr"), "|u1"), + # UNDONE - unsigned |u1i1i1 + "LAB": ("RGB", "L", ("L", "A", "B"), "|u1"), + "HSV": ("RGB", "L", ("H", "S", "V"), "|u1"), + # extra experimental modes + "RGBa": ("RGB", "L", ("R", "G", "B", "a"), "|u1"), + "BGR;15": ("RGB", "L", ("B", "G", "R"), "|u1"), + "BGR;16": ("RGB", "L", ("B", "G", "R"), "|u1"), + "BGR;24": ("RGB", "L", ("B", "G", "R"), "|u1"), + "LA": ("L", "L", ("L", "A"), "|u1"), + "La": ("L", "L", ("L", "a"), "|u1"), + "PA": ("RGB", "L", ("P", "A"), "|u1"), + } + if mode in modes: + if mode in ("BGR;15", "BGR;16", "BGR;24"): + deprecate(mode, 12) + base_mode, base_type, bands, type_str = modes[mode] + return ModeDescriptor(mode, bands, base_mode, base_type, type_str) + + mapping_modes = { + # I;16 == I;16L, and I;32 == I;32L + "I;16": "u2", + "I;16BS": ">i2", + "I;16N": f"{endian}u2", + "I;16NS": f"{endian}i2", + "I;32": "u4", + "I;32L": "i4", + "I;32LS": " +from __future__ import annotations + +import re + +from . import Image, _imagingmorph + +LUT_SIZE = 1 << 9 + +# fmt: off +ROTATION_MATRIX = [ + 6, 3, 0, + 7, 4, 1, + 8, 5, 2, +] +MIRROR_MATRIX = [ + 2, 1, 0, + 5, 4, 3, + 8, 7, 6, +] +# fmt: on + + +class LutBuilder: + """A class for building a MorphLut from a descriptive language + + The input patterns is a list of a strings sequences like these:: + + 4:(... + .1. + 111)->1 + + (whitespaces including linebreaks are ignored). The option 4 + describes a series of symmetry operations (in this case a + 4-rotation), the pattern is described by: + + - . or X - Ignore + - 1 - Pixel is on + - 0 - Pixel is off + + The result of the operation is described after "->" string. + + The default is to return the current pixel value, which is + returned if no other match is found. + + Operations: + + - 4 - 4 way rotation + - N - Negate + - 1 - Dummy op for no other operation (an op must always be given) + - M - Mirroring + + Example:: + + lb = LutBuilder(patterns = ["4:(... .1. 111)->1"]) + lut = lb.build_lut() + + """ + + def __init__( + self, patterns: list[str] | None = None, op_name: str | None = None + ) -> None: + if patterns is not None: + self.patterns = patterns + else: + self.patterns = [] + self.lut: bytearray | None = None + if op_name is not None: + known_patterns = { + "corner": ["1:(... ... ...)->0", "4:(00. 01. ...)->1"], + "dilation4": ["4:(... .0. .1.)->1"], + "dilation8": ["4:(... .0. .1.)->1", "4:(... .0. ..1)->1"], + "erosion4": ["4:(... .1. .0.)->0"], + "erosion8": ["4:(... .1. .0.)->0", "4:(... .1. ..0)->0"], + "edge": [ + "1:(... ... ...)->0", + "4:(.0. .1. ...)->1", + "4:(01. .1. ...)->1", + ], + } + if op_name not in known_patterns: + msg = f"Unknown pattern {op_name}!" 
+ raise Exception(msg) + + self.patterns = known_patterns[op_name] + + def add_patterns(self, patterns: list[str]) -> None: + self.patterns += patterns + + def build_default_lut(self) -> None: + symbols = [0, 1] + m = 1 << 4 # pos of current pixel + self.lut = bytearray(symbols[(i & m) > 0] for i in range(LUT_SIZE)) + + def get_lut(self) -> bytearray | None: + return self.lut + + def _string_permute(self, pattern: str, permutation: list[int]) -> str: + """string_permute takes a pattern and a permutation and returns the + string permuted according to the permutation list. + """ + assert len(permutation) == 9 + return "".join(pattern[p] for p in permutation) + + def _pattern_permute( + self, basic_pattern: str, options: str, basic_result: int + ) -> list[tuple[str, int]]: + """pattern_permute takes a basic pattern and its result and clones + the pattern according to the modifications described in the $options + parameter. It returns a list of all cloned patterns.""" + patterns = [(basic_pattern, basic_result)] + + # rotations + if "4" in options: + res = patterns[-1][1] + for i in range(4): + patterns.append( + (self._string_permute(patterns[-1][0], ROTATION_MATRIX), res) + ) + # mirror + if "M" in options: + n = len(patterns) + for pattern, res in patterns[:n]: + patterns.append((self._string_permute(pattern, MIRROR_MATRIX), res)) + + # negate + if "N" in options: + n = len(patterns) + for pattern, res in patterns[:n]: + # Swap 0 and 1 + pattern = pattern.replace("0", "Z").replace("1", "0").replace("Z", "1") + res = 1 - int(res) + patterns.append((pattern, res)) + + return patterns + + def build_lut(self) -> bytearray: + """Compile all patterns into a morphology lut. + + TBD :Build based on (file) morphlut:modify_lut + """ + self.build_default_lut() + assert self.lut is not None + patterns = [] + + # Parse and create symmetries of the patterns strings + for p in self.patterns: + m = re.search(r"(\w*):?\s*\((.+?)\)\s*->\s*(\d)", p.replace("\n", "")) + if not m: + msg = 'Syntax error in pattern "' + p + '"' + raise Exception(msg) + options = m.group(1) + pattern = m.group(2) + result = int(m.group(3)) + + # Get rid of spaces + pattern = pattern.replace(" ", "").replace("\n", "") + + patterns += self._pattern_permute(pattern, options, result) + + # compile the patterns into regular expressions for speed + compiled_patterns = [] + for pattern in patterns: + p = pattern[0].replace(".", "X").replace("X", "[01]") + compiled_patterns.append((re.compile(p), pattern[1])) + + # Step through table and find patterns that match. + # Note that all the patterns are searched. 
The last one + # caught overrides + for i in range(LUT_SIZE): + # Build the bit pattern + bitpattern = bin(i)[2:] + bitpattern = ("0" * (9 - len(bitpattern)) + bitpattern)[::-1] + + for pattern, r in compiled_patterns: + if pattern.match(bitpattern): + self.lut[i] = [0, 1][r] + + return self.lut + + +class MorphOp: + """A class for binary morphological operators""" + + def __init__( + self, + lut: bytearray | None = None, + op_name: str | None = None, + patterns: list[str] | None = None, + ) -> None: + """Create a binary morphological operator""" + self.lut = lut + if op_name is not None: + self.lut = LutBuilder(op_name=op_name).build_lut() + elif patterns is not None: + self.lut = LutBuilder(patterns=patterns).build_lut() + + def apply(self, image: Image.Image) -> tuple[int, Image.Image]: + """Run a single morphological operation on an image + + Returns a tuple of the number of changed pixels and the + morphed image""" + if self.lut is None: + msg = "No operator loaded" + raise Exception(msg) + + if image.mode != "L": + msg = "Image mode must be L" + raise ValueError(msg) + outimage = Image.new(image.mode, image.size, None) + count = _imagingmorph.apply(bytes(self.lut), image.im.id, outimage.im.id) + return count, outimage + + def match(self, image: Image.Image) -> list[tuple[int, int]]: + """Get a list of coordinates matching the morphological operation on + an image. + + Returns a list of tuples of (x,y) coordinates + of all matching pixels. See :ref:`coordinate-system`.""" + if self.lut is None: + msg = "No operator loaded" + raise Exception(msg) + + if image.mode != "L": + msg = "Image mode must be L" + raise ValueError(msg) + return _imagingmorph.match(bytes(self.lut), image.im.id) + + def get_on_pixels(self, image: Image.Image) -> list[tuple[int, int]]: + """Get a list of all turned on pixels in a binary image + + Returns a list of tuples of (x,y) coordinates + of all matching pixels. See :ref:`coordinate-system`.""" + + if image.mode != "L": + msg = "Image mode must be L" + raise ValueError(msg) + return _imagingmorph.get_on_pixels(image.im.id) + + def load_lut(self, filename: str) -> None: + """Load an operator from an mrl file""" + with open(filename, "rb") as f: + self.lut = bytearray(f.read()) + + if len(self.lut) != LUT_SIZE: + self.lut = None + msg = "Wrong size operator file!" + raise Exception(msg) + + def save_lut(self, filename: str) -> None: + """Save an operator to an mrl file""" + if self.lut is None: + msg = "No operator loaded" + raise Exception(msg) + with open(filename, "wb") as f: + f.write(self.lut) + + def set_lut(self, lut: bytearray | None) -> None: + """Set the lut from an external source""" + self.lut = lut diff --git a/MLPY/Lib/site-packages/PIL/ImageOps.py b/MLPY/Lib/site-packages/PIL/ImageOps.py new file mode 100644 index 0000000000000000000000000000000000000000..a1cf08981482dc131ede970053806a2698458dbd --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageOps.py @@ -0,0 +1,728 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard image operations +# +# History: +# 2001-10-20 fl Created +# 2001-10-23 fl Added autocontrast operator +# 2001-12-18 fl Added Kevin's fit operator +# 2004-03-14 fl Fixed potential division by zero in equalize +# 2005-05-05 fl Fixed equalize for low number of values +# +# Copyright (c) 2001-2004 by Secret Labs AB +# Copyright (c) 2001-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
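Editor's usage sketch for the ImageMorph.MorphOp class added in the hunk above (not part of the vendored source); "mask.png" is a placeholder for an "L"-mode, effectively binary image.

from PIL import Image, ImageMorph

im = Image.open("mask.png").convert("L").point(lambda p: 255 if p > 128 else 0)

op = ImageMorph.MorphOp(op_name="dilation8")   # LUT built from the built-in patterns
changed, dilated = op.apply(im)                # (number of changed pixels, new image)
print(changed, "pixels changed")

# A custom operator written in the LutBuilder pattern language
edge = ImageMorph.MorphOp(patterns=["1:(... ... ...)->0", "4:(.0. .1. ...)->1"])
_, outline = edge.apply(im)
outline.save("outline.png")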
+# +from __future__ import annotations + +import functools +import operator +import re +from typing import Protocol, Sequence, cast + +from . import ExifTags, Image, ImagePalette + +# +# helpers + + +def _border(border: int | tuple[int, ...]) -> tuple[int, int, int, int]: + if isinstance(border, tuple): + if len(border) == 2: + left, top = right, bottom = border + elif len(border) == 4: + left, top, right, bottom = border + else: + left = top = right = bottom = border + return left, top, right, bottom + + +def _color(color: str | int | tuple[int, ...], mode: str) -> int | tuple[int, ...]: + if isinstance(color, str): + from . import ImageColor + + color = ImageColor.getcolor(color, mode) + return color + + +def _lut(image: Image.Image, lut: list[int]) -> Image.Image: + if image.mode == "P": + # FIXME: apply to lookup table, not image data + msg = "mode P support coming soon" + raise NotImplementedError(msg) + elif image.mode in ("L", "RGB"): + if image.mode == "RGB" and len(lut) == 256: + lut = lut + lut + lut + return image.point(lut) + else: + msg = f"not supported for mode {image.mode}" + raise OSError(msg) + + +# +# actions + + +def autocontrast( + image: Image.Image, + cutoff: float | tuple[float, float] = 0, + ignore: int | Sequence[int] | None = None, + mask: Image.Image | None = None, + preserve_tone: bool = False, +) -> Image.Image: + """ + Maximize (normalize) image contrast. This function calculates a + histogram of the input image (or mask region), removes ``cutoff`` percent of the + lightest and darkest pixels from the histogram, and remaps the image + so that the darkest pixel becomes black (0), and the lightest + becomes white (255). + + :param image: The image to process. + :param cutoff: The percent to cut off from the histogram on the low and + high ends. Either a tuple of (low, high), or a single + number for both. + :param ignore: The background pixel value (use None for no background). + :param mask: Histogram used in contrast operation is computed using pixels + within the mask. If no mask is given the entire image is used + for histogram computation. + :param preserve_tone: Preserve image tone in Photoshop-like style autocontrast. + + .. versionadded:: 8.2.0 + + :return: An image. 
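Editor's usage sketch (not part of the upstream docstring or diff); "photo.jpg" is a placeholder input file.

from PIL import Image, ImageOps

with Image.open("photo.jpg") as im:
    # clip 2% of the darkest and lightest pixels, with Photoshop-like tone handling
    out = ImageOps.autocontrast(im, cutoff=2, preserve_tone=True)
    out.save("photo_autocontrast.jpg")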
+ """ + if preserve_tone: + histogram = image.convert("L").histogram(mask) + else: + histogram = image.histogram(mask) + + lut = [] + for layer in range(0, len(histogram), 256): + h = histogram[layer : layer + 256] + if ignore is not None: + # get rid of outliers + if isinstance(ignore, int): + h[ignore] = 0 + else: + for ix in ignore: + h[ix] = 0 + if cutoff: + # cut off pixels from both ends of the histogram + if not isinstance(cutoff, tuple): + cutoff = (cutoff, cutoff) + # get number of pixels + n = 0 + for ix in range(256): + n = n + h[ix] + # remove cutoff% pixels from the low end + cut = int(n * cutoff[0] // 100) + for lo in range(256): + if cut > h[lo]: + cut = cut - h[lo] + h[lo] = 0 + else: + h[lo] -= cut + cut = 0 + if cut <= 0: + break + # remove cutoff% samples from the high end + cut = int(n * cutoff[1] // 100) + for hi in range(255, -1, -1): + if cut > h[hi]: + cut = cut - h[hi] + h[hi] = 0 + else: + h[hi] -= cut + cut = 0 + if cut <= 0: + break + # find lowest/highest samples after preprocessing + for lo in range(256): + if h[lo]: + break + for hi in range(255, -1, -1): + if h[hi]: + break + if hi <= lo: + # don't bother + lut.extend(list(range(256))) + else: + scale = 255.0 / (hi - lo) + offset = -lo * scale + for ix in range(256): + ix = int(ix * scale + offset) + if ix < 0: + ix = 0 + elif ix > 255: + ix = 255 + lut.append(ix) + return _lut(image, lut) + + +def colorize( + image: Image.Image, + black: str | tuple[int, ...], + white: str | tuple[int, ...], + mid: str | int | tuple[int, ...] | None = None, + blackpoint: int = 0, + whitepoint: int = 255, + midpoint: int = 127, +) -> Image.Image: + """ + Colorize grayscale image. + This function calculates a color wedge which maps all black pixels in + the source image to the first color and all white pixels to the + second color. If ``mid`` is specified, it uses three-color mapping. + The ``black`` and ``white`` arguments should be RGB tuples or color names; + optionally you can use three-color mapping by also specifying ``mid``. + Mapping positions for any of the colors can be specified + (e.g. ``blackpoint``), where these parameters are the integer + value corresponding to where the corresponding color should be mapped. + These parameters must have logical order, such that + ``blackpoint <= midpoint <= whitepoint`` (if ``mid`` is specified). + + :param image: The image to colorize. + :param black: The color to use for black input pixels. + :param white: The color to use for white input pixels. + :param mid: The color to use for midtone input pixels. + :param blackpoint: an int value [0, 255] for the black mapping. + :param whitepoint: an int value [0, 255] for the white mapping. + :param midpoint: an int value [0, 255] for the midtone mapping. + :return: An image. 
+ """ + + # Initial asserts + assert image.mode == "L" + if mid is None: + assert 0 <= blackpoint <= whitepoint <= 255 + else: + assert 0 <= blackpoint <= midpoint <= whitepoint <= 255 + + # Define colors from arguments + rgb_black = cast(Sequence[int], _color(black, "RGB")) + rgb_white = cast(Sequence[int], _color(white, "RGB")) + rgb_mid = cast(Sequence[int], _color(mid, "RGB")) if mid is not None else None + + # Empty lists for the mapping + red = [] + green = [] + blue = [] + + # Create the low-end values + for i in range(0, blackpoint): + red.append(rgb_black[0]) + green.append(rgb_black[1]) + blue.append(rgb_black[2]) + + # Create the mapping (2-color) + if rgb_mid is None: + range_map = range(0, whitepoint - blackpoint) + + for i in range_map: + red.append( + rgb_black[0] + i * (rgb_white[0] - rgb_black[0]) // len(range_map) + ) + green.append( + rgb_black[1] + i * (rgb_white[1] - rgb_black[1]) // len(range_map) + ) + blue.append( + rgb_black[2] + i * (rgb_white[2] - rgb_black[2]) // len(range_map) + ) + + # Create the mapping (3-color) + else: + range_map1 = range(0, midpoint - blackpoint) + range_map2 = range(0, whitepoint - midpoint) + + for i in range_map1: + red.append( + rgb_black[0] + i * (rgb_mid[0] - rgb_black[0]) // len(range_map1) + ) + green.append( + rgb_black[1] + i * (rgb_mid[1] - rgb_black[1]) // len(range_map1) + ) + blue.append( + rgb_black[2] + i * (rgb_mid[2] - rgb_black[2]) // len(range_map1) + ) + for i in range_map2: + red.append(rgb_mid[0] + i * (rgb_white[0] - rgb_mid[0]) // len(range_map2)) + green.append( + rgb_mid[1] + i * (rgb_white[1] - rgb_mid[1]) // len(range_map2) + ) + blue.append(rgb_mid[2] + i * (rgb_white[2] - rgb_mid[2]) // len(range_map2)) + + # Create the high-end values + for i in range(0, 256 - whitepoint): + red.append(rgb_white[0]) + green.append(rgb_white[1]) + blue.append(rgb_white[2]) + + # Return converted image + image = image.convert("RGB") + return _lut(image, red + green + blue) + + +def contain( + image: Image.Image, size: tuple[int, int], method: int = Image.Resampling.BICUBIC +) -> Image.Image: + """ + Returns a resized version of the image, set to the maximum width and height + within the requested size, while maintaining the original aspect ratio. + + :param image: The image to resize. + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: Resampling method to use. Default is + :py:attr:`~PIL.Image.Resampling.BICUBIC`. + See :ref:`concept-filters`. + :return: An image. + """ + + im_ratio = image.width / image.height + dest_ratio = size[0] / size[1] + + if im_ratio != dest_ratio: + if im_ratio > dest_ratio: + new_height = round(image.height / image.width * size[0]) + if new_height != size[1]: + size = (size[0], new_height) + else: + new_width = round(image.width / image.height * size[1]) + if new_width != size[0]: + size = (new_width, size[1]) + return image.resize(size, resample=method) + + +def cover( + image: Image.Image, size: tuple[int, int], method: int = Image.Resampling.BICUBIC +) -> Image.Image: + """ + Returns a resized version of the image, so that the requested size is + covered, while maintaining the original aspect ratio. + + :param image: The image to resize. + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: Resampling method to use. Default is + :py:attr:`~PIL.Image.Resampling.BICUBIC`. + See :ref:`concept-filters`. + :return: An image. 
+ """ + + im_ratio = image.width / image.height + dest_ratio = size[0] / size[1] + + if im_ratio != dest_ratio: + if im_ratio < dest_ratio: + new_height = round(image.height / image.width * size[0]) + if new_height != size[1]: + size = (size[0], new_height) + else: + new_width = round(image.width / image.height * size[1]) + if new_width != size[0]: + size = (new_width, size[1]) + return image.resize(size, resample=method) + + +def pad( + image: Image.Image, + size: tuple[int, int], + method: int = Image.Resampling.BICUBIC, + color: str | int | tuple[int, ...] | None = None, + centering: tuple[float, float] = (0.5, 0.5), +) -> Image.Image: + """ + Returns a resized and padded version of the image, expanded to fill the + requested aspect ratio and size. + + :param image: The image to resize and crop. + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: Resampling method to use. Default is + :py:attr:`~PIL.Image.Resampling.BICUBIC`. + See :ref:`concept-filters`. + :param color: The background color of the padded image. + :param centering: Control the position of the original image within the + padded version. + + (0.5, 0.5) will keep the image centered + (0, 0) will keep the image aligned to the top left + (1, 1) will keep the image aligned to the bottom + right + :return: An image. + """ + + resized = contain(image, size, method) + if resized.size == size: + out = resized + else: + out = Image.new(image.mode, size, color) + if resized.palette: + out.putpalette(resized.getpalette()) + if resized.width != size[0]: + x = round((size[0] - resized.width) * max(0, min(centering[0], 1))) + out.paste(resized, (x, 0)) + else: + y = round((size[1] - resized.height) * max(0, min(centering[1], 1))) + out.paste(resized, (0, y)) + return out + + +def crop(image: Image.Image, border: int = 0) -> Image.Image: + """ + Remove border from image. The same amount of pixels are removed + from all four sides. This function works on all image modes. + + .. seealso:: :py:meth:`~PIL.Image.Image.crop` + + :param image: The image to crop. + :param border: The number of pixels to remove. + :return: An image. + """ + left, top, right, bottom = _border(border) + return image.crop((left, top, image.size[0] - right, image.size[1] - bottom)) + + +def scale( + image: Image.Image, factor: float, resample: int = Image.Resampling.BICUBIC +) -> Image.Image: + """ + Returns a rescaled image by a specific factor given in parameter. + A factor greater than 1 expands the image, between 0 and 1 contracts the + image. + + :param image: The image to rescale. + :param factor: The expansion factor, as a float. + :param resample: Resampling method to use. Default is + :py:attr:`~PIL.Image.Resampling.BICUBIC`. + See :ref:`concept-filters`. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + if factor == 1: + return image.copy() + elif factor <= 0: + msg = "the factor must be greater than 0" + raise ValueError(msg) + else: + size = (round(factor * image.width), round(factor * image.height)) + return image.resize(size, resample) + + +class SupportsGetMesh(Protocol): + """ + An object that supports the ``getmesh`` method, taking an image as an + argument, and returning a list of tuples. Each tuple contains two tuples, + the source box as a tuple of 4 integers, and a tuple of 8 integers for the + final quadrilateral, in order of top left, bottom left, bottom right, top + right. 
+ """ + + def getmesh( + self, image: Image.Image + ) -> list[ + tuple[tuple[int, int, int, int], tuple[int, int, int, int, int, int, int, int]] + ]: ... + + +def deform( + image: Image.Image, + deformer: SupportsGetMesh, + resample: int = Image.Resampling.BILINEAR, +) -> Image.Image: + """ + Deform the image. + + :param image: The image to deform. + :param deformer: A deformer object. Any object that implements a + ``getmesh`` method can be used. + :param resample: An optional resampling filter. Same values possible as + in the PIL.Image.transform function. + :return: An image. + """ + return image.transform( + image.size, Image.Transform.MESH, deformer.getmesh(image), resample + ) + + +def equalize(image: Image.Image, mask: Image.Image | None = None) -> Image.Image: + """ + Equalize the image histogram. This function applies a non-linear + mapping to the input image, in order to create a uniform + distribution of grayscale values in the output image. + + :param image: The image to equalize. + :param mask: An optional mask. If given, only the pixels selected by + the mask are included in the analysis. + :return: An image. + """ + if image.mode == "P": + image = image.convert("RGB") + h = image.histogram(mask) + lut = [] + for b in range(0, len(h), 256): + histo = [_f for _f in h[b : b + 256] if _f] + if len(histo) <= 1: + lut.extend(list(range(256))) + else: + step = (functools.reduce(operator.add, histo) - histo[-1]) // 255 + if not step: + lut.extend(list(range(256))) + else: + n = step // 2 + for i in range(256): + lut.append(n // step) + n = n + h[i + b] + return _lut(image, lut) + + +def expand( + image: Image.Image, + border: int | tuple[int, ...] = 0, + fill: str | int | tuple[int, ...] = 0, +) -> Image.Image: + """ + Add border to the image + + :param image: The image to expand. + :param border: Border width, in pixels. + :param fill: Pixel fill value (a color value). Default is 0 (black). + :return: An image. + """ + left, top, right, bottom = _border(border) + width = left + image.size[0] + right + height = top + image.size[1] + bottom + color = _color(fill, image.mode) + if image.palette: + palette = ImagePalette.ImagePalette(palette=image.getpalette()) + if isinstance(color, tuple) and (len(color) == 3 or len(color) == 4): + color = palette.getcolor(color) + else: + palette = None + out = Image.new(image.mode, (width, height), color) + if palette: + out.putpalette(palette.palette) + out.paste(image, (left, top)) + return out + + +def fit( + image: Image.Image, + size: tuple[int, int], + method: int = Image.Resampling.BICUBIC, + bleed: float = 0.0, + centering: tuple[float, float] = (0.5, 0.5), +) -> Image.Image: + """ + Returns a resized and cropped version of the image, cropped to the + requested aspect ratio and size. + + This function was contributed by Kevin Cazabon. + + :param image: The image to resize and crop. + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: Resampling method to use. Default is + :py:attr:`~PIL.Image.Resampling.BICUBIC`. + See :ref:`concept-filters`. + :param bleed: Remove a border around the outside of the image from all + four edges. The value is a decimal percentage (use 0.01 for + one percent). The default value is 0 (no border). + Cannot be greater than or equal to 0.5. + :param centering: Control the cropping position. Use (0.5, 0.5) for + center cropping (e.g. if cropping the width, take 50% off + of the left side, and therefore 50% off the right side). 
+ (0.0, 0.0) will crop from the top left corner (i.e. if + cropping the width, take all of the crop off of the right + side, and if cropping the height, take all of it off the + bottom). (1.0, 0.0) will crop from the bottom left + corner, etc. (i.e. if cropping the width, take all of the + crop off the left side, and if cropping the height take + none from the top, and therefore all off the bottom). + :return: An image. + """ + + # by Kevin Cazabon, Feb 17/2000 + # kevin@cazabon.com + # https://www.cazabon.com + + centering_x, centering_y = centering + + if not 0.0 <= centering_x <= 1.0: + centering_x = 0.5 + if not 0.0 <= centering_y <= 1.0: + centering_y = 0.5 + + if not 0.0 <= bleed < 0.5: + bleed = 0.0 + + # calculate the area to use for resizing and cropping, subtracting + # the 'bleed' around the edges + + # number of pixels to trim off on Top and Bottom, Left and Right + bleed_pixels = (bleed * image.size[0], bleed * image.size[1]) + + live_size = ( + image.size[0] - bleed_pixels[0] * 2, + image.size[1] - bleed_pixels[1] * 2, + ) + + # calculate the aspect ratio of the live_size + live_size_ratio = live_size[0] / live_size[1] + + # calculate the aspect ratio of the output image + output_ratio = size[0] / size[1] + + # figure out if the sides or top/bottom will be cropped off + if live_size_ratio == output_ratio: + # live_size is already the needed ratio + crop_width = live_size[0] + crop_height = live_size[1] + elif live_size_ratio >= output_ratio: + # live_size is wider than what's needed, crop the sides + crop_width = output_ratio * live_size[1] + crop_height = live_size[1] + else: + # live_size is taller than what's needed, crop the top and bottom + crop_width = live_size[0] + crop_height = live_size[0] / output_ratio + + # make the crop + crop_left = bleed_pixels[0] + (live_size[0] - crop_width) * centering_x + crop_top = bleed_pixels[1] + (live_size[1] - crop_height) * centering_y + + crop = (crop_left, crop_top, crop_left + crop_width, crop_top + crop_height) + + # resize the image and return it + return image.resize(size, method, box=crop) + + +def flip(image: Image.Image) -> Image.Image: + """ + Flip the image vertically (top to bottom). + + :param image: The image to flip. + :return: An image. + """ + return image.transpose(Image.Transpose.FLIP_TOP_BOTTOM) + + +def grayscale(image: Image.Image) -> Image.Image: + """ + Convert the image to grayscale. + + :param image: The image to convert. + :return: An image. + """ + return image.convert("L") + + +def invert(image: Image.Image) -> Image.Image: + """ + Invert (negate) the image. + + :param image: The image to invert. + :return: An image. + """ + lut = list(range(255, -1, -1)) + return image.point(lut) if image.mode == "1" else _lut(image, lut) + + +def mirror(image: Image.Image) -> Image.Image: + """ + Flip image horizontally (left to right). + + :param image: The image to mirror. + :return: An image. + """ + return image.transpose(Image.Transpose.FLIP_LEFT_RIGHT) + + +def posterize(image: Image.Image, bits: int) -> Image.Image: + """ + Reduce the number of bits for each color channel. + + :param image: The image to posterize. + :param bits: The number of bits to keep for each channel (1-8). + :return: An image. + """ + mask = ~(2 ** (8 - bits) - 1) + lut = [i & mask for i in range(256)] + return _lut(image, lut) + + +def solarize(image: Image.Image, threshold: int = 128) -> Image.Image: + """ + Invert all pixel values above a threshold. + + :param image: The image to solarize. 
+ :param threshold: All pixels above this grayscale level are inverted. + :return: An image. + """ + lut = [] + for i in range(256): + if i < threshold: + lut.append(i) + else: + lut.append(255 - i) + return _lut(image, lut) + + +def exif_transpose(image: Image.Image, *, in_place: bool = False) -> Image.Image | None: + """ + If an image has an EXIF Orientation tag, other than 1, transpose the image + accordingly, and remove the orientation data. + + :param image: The image to transpose. + :param in_place: Boolean. Keyword-only argument. + If ``True``, the original image is modified in-place, and ``None`` is returned. + If ``False`` (default), a new :py:class:`~PIL.Image.Image` object is returned + with the transposition applied. If there is no transposition, a copy of the + image will be returned. + """ + image.load() + image_exif = image.getexif() + orientation = image_exif.get(ExifTags.Base.Orientation, 1) + method = { + 2: Image.Transpose.FLIP_LEFT_RIGHT, + 3: Image.Transpose.ROTATE_180, + 4: Image.Transpose.FLIP_TOP_BOTTOM, + 5: Image.Transpose.TRANSPOSE, + 6: Image.Transpose.ROTATE_270, + 7: Image.Transpose.TRANSVERSE, + 8: Image.Transpose.ROTATE_90, + }.get(orientation) + if method is not None: + transposed_image = image.transpose(method) + if in_place: + image.im = transposed_image.im + image.pyaccess = None + image._size = transposed_image._size + exif_image = image if in_place else transposed_image + + exif = exif_image.getexif() + if ExifTags.Base.Orientation in exif: + del exif[ExifTags.Base.Orientation] + if "exif" in exif_image.info: + exif_image.info["exif"] = exif.tobytes() + elif "Raw profile type exif" in exif_image.info: + exif_image.info["Raw profile type exif"] = exif.tobytes().hex() + for key in ("XML:com.adobe.xmp", "xmp"): + if key in exif_image.info: + for pattern in ( + r'tiff:Orientation="([0-9])"', + r"([0-9])", + ): + value = exif_image.info[key] + exif_image.info[key] = ( + re.sub(pattern, "", value) + if isinstance(value, str) + else re.sub(pattern.encode(), b"", value) + ) + if not in_place: + return transposed_image + elif not in_place: + return image.copy() + return None diff --git a/MLPY/Lib/site-packages/PIL/ImagePalette.py b/MLPY/Lib/site-packages/PIL/ImagePalette.py new file mode 100644 index 0000000000000000000000000000000000000000..cee1c936e8c58ed89f6e6add510a7155657ddca5 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImagePalette.py @@ -0,0 +1,284 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image palette object +# +# History: +# 1996-03-11 fl Rewritten. +# 1997-01-03 fl Up and running. +# 1997-08-23 fl Added load hack +# 2001-04-16 fl Fixed randint shadow bug in random() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import array +from typing import IO, TYPE_CHECKING, Sequence + +from . import GimpGradientFile, GimpPaletteFile, ImageColor, PaletteFile + +if TYPE_CHECKING: + from . import Image + + +class ImagePalette: + """ + Color palette for palette mapped images + + :param mode: The mode to use for the palette. See: + :ref:`concept-modes`. Defaults to "RGB" + :param palette: An optional palette. If given, it must be a bytearray, + an array or a list of ints between 0-255. The list must consist of + all channels for one color followed by the next color (e.g. RGBRGBRGB). + Defaults to an empty palette. 
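Editor's usage sketch (not part of the upstream docstring or diff): building a small palette in the interleaved RGBRGB... layout described above and attaching it to a "P" image. The file name is a placeholder.

from PIL import Image, ImagePalette

# three colours, channels interleaved: black, white, red
data = [0, 0, 0, 255, 255, 255, 255, 0, 0]
palette = ImagePalette.ImagePalette("RGB", data)

im = Image.new("P", (16, 16), 2)      # fill with palette index 2 (red)
im.putpalette(palette)                # putpalette accepts an ImagePalette
im.convert("RGB").save("swatch.png")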
+ """ + + def __init__( + self, + mode: str = "RGB", + palette: Sequence[int] | bytes | bytearray | None = None, + ) -> None: + self.mode = mode + self.rawmode: str | None = None # if set, palette contains raw data + self.palette = palette or bytearray() + self.dirty: int | None = None + + @property + def palette(self) -> Sequence[int] | bytes | bytearray: + return self._palette + + @palette.setter + def palette(self, palette: Sequence[int] | bytes | bytearray) -> None: + self._colors: dict[tuple[int, ...], int] | None = None + self._palette = palette + + @property + def colors(self) -> dict[tuple[int, ...], int]: + if self._colors is None: + mode_len = len(self.mode) + self._colors = {} + for i in range(0, len(self.palette), mode_len): + color = tuple(self.palette[i : i + mode_len]) + if color in self._colors: + continue + self._colors[color] = i // mode_len + return self._colors + + @colors.setter + def colors(self, colors: dict[tuple[int, ...], int]) -> None: + self._colors = colors + + def copy(self) -> ImagePalette: + new = ImagePalette() + + new.mode = self.mode + new.rawmode = self.rawmode + if self.palette is not None: + new.palette = self.palette[:] + new.dirty = self.dirty + + return new + + def getdata(self) -> tuple[str, Sequence[int] | bytes | bytearray]: + """ + Get palette contents in format suitable for the low-level + ``im.putpalette`` primitive. + + .. warning:: This method is experimental. + """ + if self.rawmode: + return self.rawmode, self.palette + return self.mode, self.tobytes() + + def tobytes(self) -> bytes: + """Convert palette to bytes. + + .. warning:: This method is experimental. + """ + if self.rawmode: + msg = "palette contains raw palette data" + raise ValueError(msg) + if isinstance(self.palette, bytes): + return self.palette + arr = array.array("B", self.palette) + return arr.tobytes() + + # Declare tostring as an alias for tobytes + tostring = tobytes + + def _new_color_index( + self, image: Image.Image | None = None, e: Exception | None = None + ) -> int: + if not isinstance(self.palette, bytearray): + self._palette = bytearray(self.palette) + index = len(self.palette) // 3 + special_colors: tuple[int | tuple[int, ...] | None, ...] = () + if image: + special_colors = ( + image.info.get("background"), + image.info.get("transparency"), + ) + while index in special_colors: + index += 1 + if index >= 256: + if image: + # Search for an unused index + for i, count in reversed(list(enumerate(image.histogram()))): + if count == 0 and i not in special_colors: + index = i + break + if index >= 256: + msg = "cannot allocate more than 256 colors" + raise ValueError(msg) from e + return index + + def getcolor( + self, + color: tuple[int, ...], + image: Image.Image | None = None, + ) -> int: + """Given an rgb tuple, allocate palette entry. + + .. warning:: This method is experimental. 
+ """ + if self.rawmode: + msg = "palette contains raw palette data" + raise ValueError(msg) + if isinstance(color, tuple): + if self.mode == "RGB": + if len(color) == 4: + if color[3] != 255: + msg = "cannot add non-opaque RGBA color to RGB palette" + raise ValueError(msg) + color = color[:3] + elif self.mode == "RGBA": + if len(color) == 3: + color += (255,) + try: + return self.colors[color] + except KeyError as e: + # allocate new color slot + index = self._new_color_index(image, e) + assert isinstance(self._palette, bytearray) + self.colors[color] = index + if index * 3 < len(self.palette): + self._palette = ( + self._palette[: index * 3] + + bytes(color) + + self._palette[index * 3 + 3 :] + ) + else: + self._palette += bytes(color) + self.dirty = 1 + return index + else: + msg = f"unknown color specifier: {repr(color)}" # type: ignore[unreachable] + raise ValueError(msg) + + def save(self, fp: str | IO[str]) -> None: + """Save palette to text file. + + .. warning:: This method is experimental. + """ + if self.rawmode: + msg = "palette contains raw palette data" + raise ValueError(msg) + if isinstance(fp, str): + fp = open(fp, "w") + fp.write("# Palette\n") + fp.write(f"# Mode: {self.mode}\n") + for i in range(256): + fp.write(f"{i}") + for j in range(i * len(self.mode), (i + 1) * len(self.mode)): + try: + fp.write(f" {self.palette[j]}") + except IndexError: + fp.write(" 0") + fp.write("\n") + fp.close() + + +# -------------------------------------------------------------------- +# Internal + + +def raw(rawmode, data: Sequence[int] | bytes | bytearray) -> ImagePalette: + palette = ImagePalette() + palette.rawmode = rawmode + palette.palette = data + palette.dirty = 1 + return palette + + +# -------------------------------------------------------------------- +# Factories + + +def make_linear_lut(black: int, white: float) -> list[int]: + if black == 0: + return [int(white * i // 255) for i in range(256)] + + msg = "unavailable when black is non-zero" + raise NotImplementedError(msg) # FIXME + + +def make_gamma_lut(exp: float) -> list[int]: + return [int(((i / 255.0) ** exp) * 255.0 + 0.5) for i in range(256)] + + +def negative(mode: str = "RGB") -> ImagePalette: + palette = list(range(256 * len(mode))) + palette.reverse() + return ImagePalette(mode, [i // len(mode) for i in palette]) + + +def random(mode: str = "RGB") -> ImagePalette: + from random import randint + + palette = [randint(0, 255) for _ in range(256 * len(mode))] + return ImagePalette(mode, palette) + + +def sepia(white: str = "#fff0c0") -> ImagePalette: + bands = [make_linear_lut(0, band) for band in ImageColor.getrgb(white)] + return ImagePalette("RGB", [bands[i % 3][i // 3] for i in range(256 * 3)]) + + +def wedge(mode: str = "RGB") -> ImagePalette: + palette = list(range(256 * len(mode))) + return ImagePalette(mode, [i // len(mode) for i in palette]) + + +def load(filename: str) -> tuple[bytes, str]: + # FIXME: supports GIMP gradients only + + with open(filename, "rb") as fp: + paletteHandlers: list[ + type[ + GimpPaletteFile.GimpPaletteFile + | GimpGradientFile.GimpGradientFile + | PaletteFile.PaletteFile + ] + ] = [ + GimpPaletteFile.GimpPaletteFile, + GimpGradientFile.GimpGradientFile, + PaletteFile.PaletteFile, + ] + for paletteHandler in paletteHandlers: + try: + fp.seek(0) + lut = paletteHandler(fp).getpalette() + if lut: + break + except (SyntaxError, ValueError): + pass + else: + msg = "cannot load palette" + raise OSError(msg) + + return lut # data, rawmode diff --git a/MLPY/Lib/site-packages/PIL/ImagePath.py 
b/MLPY/Lib/site-packages/PIL/ImagePath.py new file mode 100644 index 0000000000000000000000000000000000000000..c986e909506c8b4ef538a433e9107682be9e67bc --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImagePath.py @@ -0,0 +1,20 @@ +# +# The Python Imaging Library +# $Id$ +# +# path interface +# +# History: +# 1996-11-04 fl Created +# 2002-04-14 fl Added documentation stub class +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from . import Image + +Path = Image.core.path diff --git a/MLPY/Lib/site-packages/PIL/ImageQt.py b/MLPY/Lib/site-packages/PIL/ImageQt.py new file mode 100644 index 0000000000000000000000000000000000000000..8d0e6b47f683d0ee2c207decb3e89f85f821fc4b --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageQt.py @@ -0,0 +1,205 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a simple Qt image interface. +# +# history: +# 2006-06-03 fl: created +# 2006-06-04 fl: inherit from QImage instead of wrapping it +# 2006-06-05 fl: removed toimage helper; move string support to ImageQt +# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com) +# +# Copyright (c) 2006 by Secret Labs AB +# Copyright (c) 2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import sys +from io import BytesIO +from typing import Callable + +from . import Image +from ._util import is_path + +qt_version: str | None +qt_versions = [ + ["6", "PyQt6"], + ["side6", "PySide6"], +] + +# If a version has already been imported, attempt it first +qt_versions.sort(key=lambda version: version[1] in sys.modules, reverse=True) +for version, qt_module in qt_versions: + try: + QBuffer: type + QIODevice: type + QImage: type + QPixmap: type + qRgba: Callable[[int, int, int, int], int] + if qt_module == "PyQt6": + from PyQt6.QtCore import QBuffer, QIODevice + from PyQt6.QtGui import QImage, QPixmap, qRgba + elif qt_module == "PySide6": + from PySide6.QtCore import QBuffer, QIODevice + from PySide6.QtGui import QImage, QPixmap, qRgba + except (ImportError, RuntimeError): + continue + qt_is_installed = True + qt_version = version + break +else: + qt_is_installed = False + qt_version = None + + +def rgb(r, g, b, a=255): + """(Internal) Turns an RGB color into a Qt compatible color integer.""" + # use qRgb to pack the colors, and then turn the resulting long + # into a negative integer with the same bitpattern. 
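+    # (Masking with 0xFFFFFFFF keeps the packed ARGB value in the unsigned
+    # 32-bit range expected for Qt color-table entries.)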
+ return qRgba(r, g, b, a) & 0xFFFFFFFF + + +def fromqimage(im): + """ + :param im: QImage or PIL ImageQt object + """ + buffer = QBuffer() + if qt_version == "6": + try: + qt_openmode = QIODevice.OpenModeFlag + except AttributeError: + qt_openmode = QIODevice.OpenMode + else: + qt_openmode = QIODevice + buffer.open(qt_openmode.ReadWrite) + # preserve alpha channel with png + # otherwise ppm is more friendly with Image.open + if im.hasAlphaChannel(): + im.save(buffer, "png") + else: + im.save(buffer, "ppm") + + b = BytesIO() + b.write(buffer.data()) + buffer.close() + b.seek(0) + + return Image.open(b) + + +def fromqpixmap(im): + return fromqimage(im) + + +def align8to32(bytes, width, mode): + """ + converts each scanline of data from 8 bit to 32 bit aligned + """ + + bits_per_pixel = {"1": 1, "L": 8, "P": 8, "I;16": 16}[mode] + + # calculate bytes per line and the extra padding if needed + bits_per_line = bits_per_pixel * width + full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8) + bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0) + + extra_padding = -bytes_per_line % 4 + + # already 32 bit aligned by luck + if not extra_padding: + return bytes + + new_data = [ + bytes[i * bytes_per_line : (i + 1) * bytes_per_line] + b"\x00" * extra_padding + for i in range(len(bytes) // bytes_per_line) + ] + + return b"".join(new_data) + + +def _toqclass_helper(im): + data = None + colortable = None + exclusive_fp = False + + # handle filename, if given instead of image name + if hasattr(im, "toUtf8"): + # FIXME - is this really the best way to do this? + im = str(im.toUtf8(), "utf-8") + if is_path(im): + im = Image.open(im) + exclusive_fp = True + + qt_format = QImage.Format if qt_version == "6" else QImage + if im.mode == "1": + format = qt_format.Format_Mono + elif im.mode == "L": + format = qt_format.Format_Indexed8 + colortable = [rgb(i, i, i) for i in range(256)] + elif im.mode == "P": + format = qt_format.Format_Indexed8 + palette = im.getpalette() + colortable = [rgb(*palette[i : i + 3]) for i in range(0, len(palette), 3)] + elif im.mode == "RGB": + # Populate the 4th channel with 255 + im = im.convert("RGBA") + + data = im.tobytes("raw", "BGRA") + format = qt_format.Format_RGB32 + elif im.mode == "RGBA": + data = im.tobytes("raw", "BGRA") + format = qt_format.Format_ARGB32 + elif im.mode == "I;16": + im = im.point(lambda i: i * 256) + + format = qt_format.Format_Grayscale16 + else: + if exclusive_fp: + im.close() + msg = f"unsupported image mode {repr(im.mode)}" + raise ValueError(msg) + + size = im.size + __data = data or align8to32(im.tobytes(), size[0], im.mode) + if exclusive_fp: + im.close() + return {"data": __data, "size": size, "format": format, "colortable": colortable} + + +if qt_is_installed: + + class ImageQt(QImage): + def __init__(self, im): + """ + An PIL image wrapper for Qt. This is a subclass of PyQt's QImage + class. + + :param im: A PIL Image object, or a file name (given either as + Python string or a PyQt string object). + """ + im_data = _toqclass_helper(im) + # must keep a reference, or Qt will crash! + # All QImage constructors that take data operate on an existing + # buffer, so this buffer has to hang on for the life of the image. 
+ # Fixes https://github.com/python-pillow/Pillow/issues/1370 + self.__data = im_data["data"] + super().__init__( + self.__data, + im_data["size"][0], + im_data["size"][1], + im_data["format"], + ) + if im_data["colortable"]: + self.setColorTable(im_data["colortable"]) + + +def toqimage(im) -> ImageQt: + return ImageQt(im) + + +def toqpixmap(im): + qimage = toqimage(im) + return QPixmap.fromImage(qimage) diff --git a/MLPY/Lib/site-packages/PIL/ImageSequence.py b/MLPY/Lib/site-packages/PIL/ImageSequence.py new file mode 100644 index 0000000000000000000000000000000000000000..11bd04be4e05ee6109301494bb759e567fb11c1e --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageSequence.py @@ -0,0 +1,86 @@ +# +# The Python Imaging Library. +# $Id$ +# +# sequence support classes +# +# history: +# 1997-02-20 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## +from __future__ import annotations + +from typing import Callable + +from . import Image + + +class Iterator: + """ + This class implements an iterator object that can be used to loop + over an image sequence. + + You can use the ``[]`` operator to access elements by index. This operator + will raise an :py:exc:`IndexError` if you try to access a nonexistent + frame. + + :param im: An image object. + """ + + def __init__(self, im: Image.Image): + if not hasattr(im, "seek"): + msg = "im must have seek method" + raise AttributeError(msg) + self.im = im + self.position = getattr(self.im, "_min_frame", 0) + + def __getitem__(self, ix: int) -> Image.Image: + try: + self.im.seek(ix) + return self.im + except EOFError as e: + msg = "end of sequence" + raise IndexError(msg) from e + + def __iter__(self) -> Iterator: + return self + + def __next__(self) -> Image.Image: + try: + self.im.seek(self.position) + self.position += 1 + return self.im + except EOFError as e: + msg = "end of sequence" + raise StopIteration(msg) from e + + +def all_frames( + im: Image.Image | list[Image.Image], + func: Callable[[Image.Image], Image.Image] | None = None, +) -> list[Image.Image]: + """ + Applies a given function to all frames in an image or a list of images. + The frames are returned as a list of separate images. + + :param im: An image, or a list of images. + :param func: The function to apply to all of the image frames. + :returns: A list of images. + """ + if not isinstance(im, list): + im = [im] + + ims = [] + for imSequence in im: + current = imSequence.tell() + + ims += [im_frame.copy() for im_frame in Iterator(imSequence)] + + imSequence.seek(current) + return [func(im) for im in ims] if func else ims diff --git a/MLPY/Lib/site-packages/PIL/ImageShow.py b/MLPY/Lib/site-packages/PIL/ImageShow.py new file mode 100644 index 0000000000000000000000000000000000000000..786d00672d03e0c54de9027d2161c894479b76fa --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageShow.py @@ -0,0 +1,363 @@ +# +# The Python Imaging Library. +# $Id$ +# +# im.show() drivers +# +# History: +# 2008-04-06 fl Created +# +# Copyright (c) Secret Labs AB 2008. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import abc +import os +import shutil +import subprocess +import sys +from shlex import quote +from typing import Any + +from . 
import Image + +_viewers = [] + + +def register(viewer, order: int = 1) -> None: + """ + The :py:func:`register` function is used to register additional viewers:: + + from PIL import ImageShow + ImageShow.register(MyViewer()) # MyViewer will be used as a last resort + ImageShow.register(MySecondViewer(), 0) # MySecondViewer will be prioritised + ImageShow.register(ImageShow.XVViewer(), 0) # XVViewer will be prioritised + + :param viewer: The viewer to be registered. + :param order: + Zero or a negative integer to prepend this viewer to the list, + a positive integer to append it. + """ + try: + if issubclass(viewer, Viewer): + viewer = viewer() + except TypeError: + pass # raised if viewer wasn't a class + if order > 0: + _viewers.append(viewer) + else: + _viewers.insert(0, viewer) + + +def show(image: Image.Image, title: str | None = None, **options: Any) -> bool: + r""" + Display a given image. + + :param image: An image object. + :param title: Optional title. Not all viewers can display the title. + :param \**options: Additional viewer options. + :returns: ``True`` if a suitable viewer was found, ``False`` otherwise. + """ + for viewer in _viewers: + if viewer.show(image, title=title, **options): + return True + return False + + +class Viewer: + """Base class for viewers.""" + + # main api + + def show(self, image: Image.Image, **options: Any) -> int: + """ + The main function for displaying an image. + Converts the given image to the target format and displays it. + """ + + if not ( + image.mode in ("1", "RGBA") + or (self.format == "PNG" and image.mode in ("I;16", "LA")) + ): + base = Image.getmodebase(image.mode) + if image.mode != base: + image = image.convert(base) + + return self.show_image(image, **options) + + # hook methods + + format: str | None = None + """The format to convert the image into.""" + options: dict[str, Any] = {} + """Additional options used to convert the image.""" + + def get_format(self, image: Image.Image) -> str | None: + """Return format name, or ``None`` to save as PGM/PPM.""" + return self.format + + def get_command(self, file: str, **options: Any) -> str: + """ + Returns the command used to display the file. + Not implemented in the base class. + """ + msg = "unavailable in base viewer" + raise NotImplementedError(msg) + + def save_image(self, image: Image.Image) -> str: + """Save to temporary file and return filename.""" + return image._dump(format=self.get_format(image), **self.options) + + def show_image(self, image: Image.Image, **options: Any) -> int: + """Display the given image.""" + return self.show_file(self.save_image(image), **options) + + def show_file(self, path: str, **options: Any) -> int: + """ + Display given file. + """ + if not os.path.exists(path): + raise FileNotFoundError + os.system(self.get_command(path, **options)) # nosec + return 1 + + +# -------------------------------------------------------------------- + + +class WindowsViewer(Viewer): + """The default viewer on Windows is the default system application for PNG files.""" + + format = "PNG" + options = {"compress_level": 1, "save_all": True} + + def get_command(self, file: str, **options: Any) -> str: + return ( + f'start "Pillow" /WAIT "{file}" ' + "&& ping -n 4 127.0.0.1 >NUL " + f'&& del /f "{file}"' + ) + + def show_file(self, path: str, **options: Any) -> int: + """ + Display given file. 
+ """ + if not os.path.exists(path): + raise FileNotFoundError + subprocess.Popen( + self.get_command(path, **options), + shell=True, + creationflags=getattr(subprocess, "CREATE_NO_WINDOW"), + ) # nosec + return 1 + + +if sys.platform == "win32": + register(WindowsViewer) + + +class MacViewer(Viewer): + """The default viewer on macOS using ``Preview.app``.""" + + format = "PNG" + options = {"compress_level": 1, "save_all": True} + + def get_command(self, file: str, **options: Any) -> str: + # on darwin open returns immediately resulting in the temp + # file removal while app is opening + command = "open -a Preview.app" + command = f"({command} {quote(file)}; sleep 20; rm -f {quote(file)})&" + return command + + def show_file(self, path: str, **options: Any) -> int: + """ + Display given file. + """ + if not os.path.exists(path): + raise FileNotFoundError + subprocess.call(["open", "-a", "Preview.app", path]) + executable = sys.executable or shutil.which("python3") + if executable: + subprocess.Popen( + [ + executable, + "-c", + "import os, sys, time; time.sleep(20); os.remove(sys.argv[1])", + path, + ] + ) + return 1 + + +if sys.platform == "darwin": + register(MacViewer) + + +class UnixViewer(Viewer): + format = "PNG" + options = {"compress_level": 1, "save_all": True} + + @abc.abstractmethod + def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]: + pass + + def get_command(self, file: str, **options: Any) -> str: + command = self.get_command_ex(file, **options)[0] + return f"{command} {quote(file)}" + + +class XDGViewer(UnixViewer): + """ + The freedesktop.org ``xdg-open`` command. + """ + + def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]: + command = executable = "xdg-open" + return command, executable + + def show_file(self, path: str, **options: Any) -> int: + """ + Display given file. + """ + if not os.path.exists(path): + raise FileNotFoundError + subprocess.Popen(["xdg-open", path]) + return 1 + + +class DisplayViewer(UnixViewer): + """ + The ImageMagick ``display`` command. + This viewer supports the ``title`` parameter. + """ + + def get_command_ex( + self, file: str, title: str | None = None, **options: Any + ) -> tuple[str, str]: + command = executable = "display" + if title: + command += f" -title {quote(title)}" + return command, executable + + def show_file(self, path: str, **options: Any) -> int: + """ + Display given file. + """ + if not os.path.exists(path): + raise FileNotFoundError + args = ["display"] + title = options.get("title") + if title: + args += ["-title", title] + args.append(path) + + subprocess.Popen(args) + return 1 + + +class GmDisplayViewer(UnixViewer): + """The GraphicsMagick ``gm display`` command.""" + + def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]: + executable = "gm" + command = "gm display" + return command, executable + + def show_file(self, path: str, **options: Any) -> int: + """ + Display given file. + """ + if not os.path.exists(path): + raise FileNotFoundError + subprocess.Popen(["gm", "display", path]) + return 1 + + +class EogViewer(UnixViewer): + """The GNOME Image Viewer ``eog`` command.""" + + def get_command_ex(self, file: str, **options: Any) -> tuple[str, str]: + executable = "eog" + command = "eog -n" + return command, executable + + def show_file(self, path: str, **options: Any) -> int: + """ + Display given file. 
+ """ + if not os.path.exists(path): + raise FileNotFoundError + subprocess.Popen(["eog", "-n", path]) + return 1 + + +class XVViewer(UnixViewer): + """ + The X Viewer ``xv`` command. + This viewer supports the ``title`` parameter. + """ + + def get_command_ex( + self, file: str, title: str | None = None, **options: Any + ) -> tuple[str, str]: + # note: xv is pretty outdated. most modern systems have + # imagemagick's display command instead. + command = executable = "xv" + if title: + command += f" -name {quote(title)}" + return command, executable + + def show_file(self, path: str, **options: Any) -> int: + """ + Display given file. + """ + if not os.path.exists(path): + raise FileNotFoundError + args = ["xv"] + title = options.get("title") + if title: + args += ["-name", title] + args.append(path) + + subprocess.Popen(args) + return 1 + + +if sys.platform not in ("win32", "darwin"): # unixoids + if shutil.which("xdg-open"): + register(XDGViewer) + if shutil.which("display"): + register(DisplayViewer) + if shutil.which("gm"): + register(GmDisplayViewer) + if shutil.which("eog"): + register(EogViewer) + if shutil.which("xv"): + register(XVViewer) + + +class IPythonViewer(Viewer): + """The viewer for IPython frontends.""" + + def show_image(self, image: Image.Image, **options: Any) -> int: + ipython_display(image) + return 1 + + +try: + from IPython.display import display as ipython_display +except ImportError: + pass +else: + register(IPythonViewer) + + +if __name__ == "__main__": + if len(sys.argv) < 2: + print("Syntax: python3 ImageShow.py imagefile [title]") + sys.exit() + + with Image.open(sys.argv[1]) as im: + print(show(im, *sys.argv[2:])) diff --git a/MLPY/Lib/site-packages/PIL/ImageStat.py b/MLPY/Lib/site-packages/PIL/ImageStat.py new file mode 100644 index 0000000000000000000000000000000000000000..eb4162f6bd4b641e7ea229850539902b2b1c0db0 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageStat.py @@ -0,0 +1,160 @@ +# +# The Python Imaging Library. +# $Id$ +# +# global image statistics +# +# History: +# 1996-04-05 fl Created +# 1997-05-21 fl Added mask; added rms, var, stddev attributes +# 1997-08-05 fl Added median +# 1998-07-05 hk Fixed integer overflow error +# +# Notes: +# This class shows how to implement delayed evaluation of attributes. +# To get a certain value, simply access the corresponding attribute. +# The __getattr__ dispatcher takes care of the rest. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996-97. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import math +from functools import cached_property + +from . import Image + + +class Stat: + def __init__( + self, image_or_list: Image.Image | list[int], mask: Image.Image | None = None + ) -> None: + """ + Calculate statistics for the given image. If a mask is included, + only the regions covered by that mask are included in the + statistics. You can also pass in a previously calculated histogram. + + :param image: A PIL image, or a precalculated histogram. + + .. note:: + + For a PIL image, calculations rely on the + :py:meth:`~PIL.Image.Image.histogram` method. The pixel counts are + grouped into 256 bins, even if the image has more than 8 bits per + channel. So ``I`` and ``F`` mode images have a maximum ``mean``, + ``median`` and ``rms`` of 255, and cannot have an ``extrema`` maximum + of more than 255. + + :param mask: An optional mask. 
+ """ + if isinstance(image_or_list, Image.Image): + self.h = image_or_list.histogram(mask) + elif isinstance(image_or_list, list): + self.h = image_or_list + else: + msg = "first argument must be image or list" # type: ignore[unreachable] + raise TypeError(msg) + self.bands = list(range(len(self.h) // 256)) + + @cached_property + def extrema(self) -> list[tuple[int, int]]: + """ + Min/max values for each band in the image. + + .. note:: + This relies on the :py:meth:`~PIL.Image.Image.histogram` method, and + simply returns the low and high bins used. This is correct for + images with 8 bits per channel, but fails for other modes such as + ``I`` or ``F``. Instead, use :py:meth:`~PIL.Image.Image.getextrema` to + return per-band extrema for the image. This is more correct and + efficient because, for non-8-bit modes, the histogram method uses + :py:meth:`~PIL.Image.Image.getextrema` to determine the bins used. + """ + + def minmax(histogram: list[int]) -> tuple[int, int]: + res_min, res_max = 255, 0 + for i in range(256): + if histogram[i]: + res_min = i + break + for i in range(255, -1, -1): + if histogram[i]: + res_max = i + break + return res_min, res_max + + return [minmax(self.h[i:]) for i in range(0, len(self.h), 256)] + + @cached_property + def count(self) -> list[int]: + """Total number of pixels for each band in the image.""" + return [sum(self.h[i : i + 256]) for i in range(0, len(self.h), 256)] + + @cached_property + def sum(self) -> list[float]: + """Sum of all pixels for each band in the image.""" + + v = [] + for i in range(0, len(self.h), 256): + layer_sum = 0.0 + for j in range(256): + layer_sum += j * self.h[i + j] + v.append(layer_sum) + return v + + @cached_property + def sum2(self) -> list[float]: + """Squared sum of all pixels for each band in the image.""" + + v = [] + for i in range(0, len(self.h), 256): + sum2 = 0.0 + for j in range(256): + sum2 += (j**2) * float(self.h[i + j]) + v.append(sum2) + return v + + @cached_property + def mean(self) -> list[float]: + """Average (arithmetic mean) pixel level for each band in the image.""" + return [self.sum[i] / self.count[i] for i in self.bands] + + @cached_property + def median(self) -> list[int]: + """Median pixel level for each band in the image.""" + + v = [] + for i in self.bands: + s = 0 + half = self.count[i] // 2 + b = i * 256 + for j in range(256): + s = s + self.h[b + j] + if s > half: + break + v.append(j) + return v + + @cached_property + def rms(self) -> list[float]: + """RMS (root-mean-square) for each band in the image.""" + return [math.sqrt(self.sum2[i] / self.count[i]) for i in self.bands] + + @cached_property + def var(self) -> list[float]: + """Variance for each band in the image.""" + return [ + (self.sum2[i] - (self.sum[i] ** 2.0) / self.count[i]) / self.count[i] + for i in self.bands + ] + + @cached_property + def stddev(self) -> list[float]: + """Standard deviation for each band in the image.""" + return [math.sqrt(self.var[i]) for i in self.bands] + + +Global = Stat # compatibility diff --git a/MLPY/Lib/site-packages/PIL/ImageTk.py b/MLPY/Lib/site-packages/PIL/ImageTk.py new file mode 100644 index 0000000000000000000000000000000000000000..87176ae54540fa79e194e2bff8bfacced9876d62 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageTk.py @@ -0,0 +1,284 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# a Tk display interface +# +# History: +# 96-04-08 fl Created +# 96-09-06 fl Added getimage method +# 96-11-01 fl Rewritten, removed image attribute and crop method +# 97-05-09 fl Use PyImagingPaste method instead of image type +# 97-05-12 fl Minor tweaks to match the IFUNC95 interface +# 97-05-17 fl Support the "pilbitmap" booster patch +# 97-06-05 fl Added file= and data= argument to image constructors +# 98-03-09 fl Added width and height methods to Image classes +# 98-07-02 fl Use default mode for "P" images without palette attribute +# 98-07-02 fl Explicitly destroy Tkinter image objects +# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch) +# 99-07-26 fl Automatically hook into Tkinter (if possible) +# 99-08-15 fl Hook uses _imagingtk instead of _imaging +# +# Copyright (c) 1997-1999 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import tkinter +from io import BytesIO + +from . import Image + +# -------------------------------------------------------------------- +# Check for Tkinter interface hooks + +_pilbitmap_ok = None + + +def _pilbitmap_check() -> int: + global _pilbitmap_ok + if _pilbitmap_ok is None: + try: + im = Image.new("1", (1, 1)) + tkinter.BitmapImage(data=f"PIL:{im.im.id}") + _pilbitmap_ok = 1 + except tkinter.TclError: + _pilbitmap_ok = 0 + return _pilbitmap_ok + + +def _get_image_from_kw(kw): + source = None + if "file" in kw: + source = kw.pop("file") + elif "data" in kw: + source = BytesIO(kw.pop("data")) + if source: + return Image.open(source) + + +def _pyimagingtkcall(command, photo, id): + tk = photo.tk + try: + tk.call(command, photo, id) + except tkinter.TclError: + # activate Tkinter hook + # may raise an error if it cannot attach to Tkinter + from . import _imagingtk + + _imagingtk.tkinit(tk.interpaddr()) + tk.call(command, photo, id) + + +# -------------------------------------------------------------------- +# PhotoImage + + +class PhotoImage: + """ + A Tkinter-compatible photo image. This can be used + everywhere Tkinter expects an image object. If the image is an RGBA + image, pixels having alpha 0 are treated as transparent. + + The constructor takes either a PIL image, or a mode and a size. + Alternatively, you can use the ``file`` or ``data`` options to initialize + the photo image object. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. + :param size: If the first argument is a mode string, this defines the size + of the image. + :keyword file: A filename to load the image from (using + ``Image.open(file)``). + :keyword data: An 8-bit string containing image data (as loaded from an + image file). 
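+
+    A minimal illustrative sketch (assumes ``root`` is an existing Tk root
+    window and ``im`` is a PIL image)::
+
+        photo = ImageTk.PhotoImage(im)
+        label = tkinter.Label(root, image=photo)
+        label.image = photo  # keep a reference so the image is not garbage collected
+        label.pack()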
+ """ + + def __init__(self, image=None, size=None, **kw): + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + if hasattr(image, "mode") and hasattr(image, "size"): + # got an image instead of a mode + mode = image.mode + if mode == "P": + # palette mapped data + image.apply_transparency() + image.load() + try: + mode = image.palette.mode + except AttributeError: + mode = "RGB" # default + size = image.size + kw["width"], kw["height"] = size + else: + mode = image + image = None + + if mode not in ["1", "L", "RGB", "RGBA"]: + mode = Image.getmodebase(mode) + + self.__mode = mode + self.__size = size + self.__photo = tkinter.PhotoImage(**kw) + self.tk = self.__photo.tk + if image: + self.paste(image) + + def __del__(self) -> None: + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except Exception: + pass # ignore internal errors + + def __str__(self) -> str: + """ + Get the Tkinter photo image identifier. This method is automatically + called by Tkinter whenever a PhotoImage object is passed to a Tkinter + method. + + :return: A Tkinter photo image identifier (a string). + """ + return str(self.__photo) + + def width(self) -> int: + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self) -> int: + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def paste(self, im: Image.Image) -> None: + """ + Paste a PIL image into the photo image. Note that this can + be very slow if the photo image is displayed. + + :param im: A PIL image. The size must match the target region. If the + mode does not match, the image is converted to the mode of + the bitmap image. + """ + # convert to blittable + im.load() + image = im.im + if image.isblock() and im.mode == self.__mode: + block = image + else: + block = image.new_block(self.__mode, im.size) + image.convert2(block, image) # convert directly between buffers + + _pyimagingtkcall("PyImagingPhoto", self.__photo, block.id) + + +# -------------------------------------------------------------------- +# BitmapImage + + +class BitmapImage: + """ + A Tkinter-compatible bitmap image. This can be used everywhere Tkinter + expects an image object. + + The given image must have mode "1". Pixels having value 0 are treated as + transparent. Options, if any, are passed on to Tkinter. The most commonly + used option is ``foreground``, which is used to specify the color for the + non-transparent parts. See the Tkinter documentation for information on + how to specify colours. + + :param image: A PIL image. + """ + + def __init__(self, image=None, **kw): + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + self.__mode = image.mode + self.__size = image.size + + if _pilbitmap_check(): + # fast way (requires the pilbitmap booster patch) + image.load() + kw["data"] = f"PIL:{image.im.id}" + self.__im = image # must keep a reference + else: + # slow but safe way + kw["data"] = image.tobitmap() + self.__photo = tkinter.BitmapImage(**kw) + + def __del__(self) -> None: + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except Exception: + pass # ignore internal errors + + def width(self) -> int: + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self) -> int: + """ + Get the height of the image. 
+ + :return: The height, in pixels. + """ + return self.__size[1] + + def __str__(self) -> str: + """ + Get the Tkinter bitmap image identifier. This method is automatically + called by Tkinter whenever a BitmapImage object is passed to a Tkinter + method. + + :return: A Tkinter bitmap image identifier (a string). + """ + return str(self.__photo) + + +def getimage(photo: PhotoImage) -> Image.Image: + """Copies the contents of a PhotoImage to a PIL image memory.""" + im = Image.new("RGBA", (photo.width(), photo.height())) + block = im.im + + _pyimagingtkcall("PyImagingPhotoGet", photo, block.id) + + return im + + +def _show(image, title): + """Helper for the Image.show method.""" + + class UI(tkinter.Label): + def __init__(self, master, im): + if im.mode == "1": + self.image = BitmapImage(im, foreground="white", master=master) + else: + self.image = PhotoImage(im, master=master) + super().__init__(master, image=self.image, bg="black", bd=0) + + if not tkinter._default_root: + msg = "tkinter not initialized" + raise OSError(msg) + top = tkinter.Toplevel() + if title: + top.title(title) + UI(top, image).pack() diff --git a/MLPY/Lib/site-packages/PIL/ImageTransform.py b/MLPY/Lib/site-packages/PIL/ImageTransform.py new file mode 100644 index 0000000000000000000000000000000000000000..b715b8df99a504b2a4c2f948d75ff146792a975a --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageTransform.py @@ -0,0 +1,135 @@ +# +# The Python Imaging Library. +# $Id$ +# +# transform wrappers +# +# History: +# 2002-04-08 fl Created +# +# Copyright (c) 2002 by Secret Labs AB +# Copyright (c) 2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from typing import Any, Sequence + +from . import Image + + +class Transform(Image.ImageTransformHandler): + """Base class for other transforms defined in :py:mod:`~PIL.ImageTransform`.""" + + method: Image.Transform + + def __init__(self, data: Sequence[Any]) -> None: + self.data = data + + def getdata(self) -> tuple[Image.Transform, Sequence[int]]: + return self.method, self.data + + def transform( + self, + size: tuple[int, int], + image: Image.Image, + **options: Any, + ) -> Image.Image: + """Perform the transform. Called from :py:meth:`.Image.transform`.""" + # can be overridden + method, data = self.getdata() + return image.transform(size, method, data, **options) + + +class AffineTransform(Transform): + """ + Define an affine image transform. + + This function takes a 6-tuple (a, b, c, d, e, f) which contain the first + two rows from an affine transform matrix. For each pixel (x, y) in the + output image, the new value is taken from a position (a x + b y + c, + d x + e y + f) in the input image, rounded to nearest pixel. + + This function can be used to scale, translate, rotate, and shear the + original image. + + See :py:meth:`.Image.transform` + + :param matrix: A 6-tuple (a, b, c, d, e, f) containing the first two rows + from an affine transform matrix. + """ + + method = Image.Transform.AFFINE + + +class PerspectiveTransform(Transform): + """ + Define a perspective image transform. + + This function takes an 8-tuple (a, b, c, d, e, f, g, h). For each pixel + (x, y) in the output image, the new value is taken from a position + ((a x + b y + c) / (g x + h y + 1), (d x + e y + f) / (g x + h y + 1)) in + the input image, rounded to nearest pixel. + + This function can be used to scale, translate, rotate, and shear the + original image. 
+ + See :py:meth:`.Image.transform` + + :param matrix: An 8-tuple (a, b, c, d, e, f, g, h). + """ + + method = Image.Transform.PERSPECTIVE + + +class ExtentTransform(Transform): + """ + Define a transform to extract a subregion from an image. + + Maps a rectangle (defined by two corners) from the image to a rectangle of + the given size. The resulting image will contain data sampled from between + the corners, such that (x0, y0) in the input image will end up at (0,0) in + the output image, and (x1, y1) at size. + + This method can be used to crop, stretch, shrink, or mirror an arbitrary + rectangle in the current image. It is slightly slower than crop, but about + as fast as a corresponding resize operation. + + See :py:meth:`.Image.transform` + + :param bbox: A 4-tuple (x0, y0, x1, y1) which specifies two points in the + input image's coordinate system. See :ref:`coordinate-system`. + """ + + method = Image.Transform.EXTENT + + +class QuadTransform(Transform): + """ + Define a quad image transform. + + Maps a quadrilateral (a region defined by four corners) from the image to a + rectangle of the given size. + + See :py:meth:`.Image.transform` + + :param xy: An 8-tuple (x0, y0, x1, y1, x2, y2, x3, y3) which contain the + upper left, lower left, lower right, and upper right corner of the + source quadrilateral. + """ + + method = Image.Transform.QUAD + + +class MeshTransform(Transform): + """ + Define a mesh image transform. A mesh transform consists of one or more + individual quad transforms. + + See :py:meth:`.Image.transform` + + :param data: A list of (bbox, quad) tuples. + """ + + method = Image.Transform.MESH diff --git a/MLPY/Lib/site-packages/PIL/ImageWin.py b/MLPY/Lib/site-packages/PIL/ImageWin.py new file mode 100644 index 0000000000000000000000000000000000000000..cf608664c43a918b7513e87510cb6b5c53bd4ee4 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImageWin.py @@ -0,0 +1,238 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a Windows DIB display interface +# +# History: +# 1996-05-20 fl Created +# 1996-09-20 fl Fixed subregion exposure +# 1997-09-21 fl Added draw primitive (for tzPrint) +# 2003-05-21 fl Added experimental Window/ImageWindow classes +# 2003-09-05 fl Added fromstring/tostring methods +# +# Copyright (c) Secret Labs AB 1997-2003. +# Copyright (c) Fredrik Lundh 1996-2003. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from . import Image + + +class HDC: + """ + Wraps an HDC integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods. + """ + + def __init__(self, dc: int) -> None: + self.dc = dc + + def __int__(self) -> int: + return self.dc + + +class HWND: + """ + Wraps an HWND integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods, instead of a DC. + """ + + def __init__(self, wnd: int) -> None: + self.wnd = wnd + + def __int__(self) -> int: + return self.wnd + + +class Dib: + """ + A Windows bitmap with the given mode and size. The mode can be one of "1", + "L", "P", or "RGB". + + If the display requires a palette, this constructor creates a suitable + palette and associates it with the image. For an "L" image, 128 graylevels + are allocated. For an "RGB" image, a 6x6x6 colour cube is used, together + with 20 graylevels. 
+ + To make sure that palettes work properly under Windows, you must call the + ``palette`` method upon certain events from Windows. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. The mode can be one of "1", + "L", "P", or "RGB". + :param size: If the first argument is a mode string, this + defines the size of the image. + """ + + def __init__( + self, image: Image.Image | str, size: tuple[int, int] | list[int] | None = None + ) -> None: + if isinstance(image, str): + mode = image + image = "" + else: + mode = image.mode + size = image.size + if mode not in ["1", "L", "P", "RGB"]: + mode = Image.getmodebase(mode) + self.image = Image.core.display(mode, size) + self.mode = mode + self.size = size + if image: + assert not isinstance(image, str) + self.paste(image) + + def expose(self, handle): + """ + Copy the bitmap contents to a device context. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. In PythonWin, you can use + ``CDC.GetHandleAttrib()`` to get a suitable handle. + """ + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.expose(dc) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.expose(handle) + return result + + def draw(self, handle, dst, src=None): + """ + Same as expose, but allows you to specify where to draw the image, and + what part of it to draw. + + The destination and source areas are given as 4-tuple rectangles. If + the source is omitted, the entire image is copied. If the source and + the destination have different sizes, the image is resized as + necessary. + """ + if not src: + src = (0, 0) + self.size + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.draw(dc, dst, src) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.draw(handle, dst, src) + return result + + def query_palette(self, handle): + """ + Installs the palette associated with the image in the given device + context. + + This method should be called upon **QUERYNEWPALETTE** and + **PALETTECHANGED** events from Windows. If this method returns a + non-zero value, one or more display palette entries were changed, and + the image should be redrawn. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. + :return: A true value if one or more entries were changed (this + indicates that the image should be redrawn). + """ + if isinstance(handle, HWND): + handle = self.image.getdc(handle) + try: + result = self.image.query_palette(handle) + finally: + self.image.releasedc(handle, handle) + else: + result = self.image.query_palette(handle) + return result + + def paste( + self, im: Image.Image, box: tuple[int, int, int, int] | None = None + ) -> None: + """ + Paste a PIL image into the bitmap image. + + :param im: A PIL image. The size must match the target region. + If the mode does not match, the image is converted to the + mode of the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and + lower pixel coordinate. See :ref:`coordinate-system`. If + None is given instead of a tuple, all of the image is + assumed. + """ + im.load() + if self.mode != im.mode: + im = im.convert(self.mode) + if box: + self.image.paste(im.im, box) + else: + self.image.paste(im.im) + + def frombytes(self, buffer: bytes) -> None: + """ + Load display memory contents from byte data. 
+ + :param buffer: A buffer containing display data (usually + data returned from :py:func:`~PIL.ImageWin.Dib.tobytes`) + """ + self.image.frombytes(buffer) + + def tobytes(self) -> bytes: + """ + Copy display memory contents to bytes object. + + :return: A bytes object containing display data. + """ + return self.image.tobytes() + + +class Window: + """Create a Window with the given title size.""" + + def __init__( + self, title: str = "PIL", width: int | None = None, height: int | None = None + ) -> None: + self.hwnd = Image.core.createwindow( + title, self.__dispatcher, width or 0, height or 0 + ) + + def __dispatcher(self, action, *args): + return getattr(self, f"ui_handle_{action}")(*args) + + def ui_handle_clear(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_damage(self, x0, y0, x1, y1): + pass + + def ui_handle_destroy(self) -> None: + pass + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_resize(self, width, height): + pass + + def mainloop(self) -> None: + Image.core.eventloop() + + +class ImageWindow(Window): + """Create an image window which displays the given image.""" + + def __init__(self, image, title="PIL"): + if not isinstance(image, Dib): + image = Dib(image) + self.image = image + width, height = image.size + super().__init__(title, width=width, height=height) + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + self.image.draw(dc, (x0, y0, x1, y1)) diff --git a/MLPY/Lib/site-packages/PIL/ImtImagePlugin.py b/MLPY/Lib/site-packages/PIL/ImtImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..11bc17c56b15c132649fa31547bab3aa7a05e807 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/ImtImagePlugin.py @@ -0,0 +1,103 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IM Tools support for PIL +# +# history: +# 1996-05-27 fl Created (read 8-bit images only) +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import re + +from . import Image, ImageFile + +# +# -------------------------------------------------------------------- + +field = re.compile(rb"([a-z]*) ([^ \r\n]*)") + + +## +# Image plugin for IM Tools images. + + +class ImtImageFile(ImageFile.ImageFile): + format = "IMT" + format_description = "IM Tools" + + def _open(self) -> None: + # Quick rejection: if there's not a LF among the first + # 100 bytes, this is (probably) not a text header. 
+ + assert self.fp is not None + + buffer = self.fp.read(100) + if b"\n" not in buffer: + msg = "not an IM file" + raise SyntaxError(msg) + + xsize = ysize = 0 + + while True: + if buffer: + s = buffer[:1] + buffer = buffer[1:] + else: + s = self.fp.read(1) + if not s: + break + + if s == b"\x0C": + # image data begins + self.tile = [ + ( + "raw", + (0, 0) + self.size, + self.fp.tell() - len(buffer), + (self.mode, 0, 1), + ) + ] + + break + + else: + # read key/value pair + if b"\n" not in buffer: + buffer += self.fp.read(100) + lines = buffer.split(b"\n") + s += lines.pop(0) + buffer = b"\n".join(lines) + if len(s) == 1 or len(s) > 100: + break + if s[0] == ord(b"*"): + continue # comment + + m = field.match(s) + if not m: + break + k, v = m.group(1, 2) + if k == b"width": + xsize = int(v) + self._size = xsize, ysize + elif k == b"height": + ysize = int(v) + self._size = xsize, ysize + elif k == b"pixel" and v == b"n8": + self._mode = "L" + + +# +# -------------------------------------------------------------------- + +Image.register_open(ImtImageFile.format, ImtImageFile) + +# +# no extension registered (".im" is simply too common) diff --git a/MLPY/Lib/site-packages/PIL/IptcImagePlugin.py b/MLPY/Lib/site-packages/PIL/IptcImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..6ad09e050a2069240fdbdd1efe6da3081f3afeb3 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/IptcImagePlugin.py @@ -0,0 +1,235 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IPTC/NAA file handling +# +# history: +# 1995-10-01 fl Created +# 1998-03-09 fl Cleaned up and added to PIL +# 2002-06-18 fl Added getiptcinfo helper +# +# Copyright (c) Secret Labs AB 1997-2002. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from io import BytesIO +from typing import Sequence + +from . import Image, ImageFile +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._deprecate import deprecate + +COMPRESSION = {1: "raw", 5: "jpeg"} + + +def __getattr__(name: str) -> bytes: + if name == "PAD": + deprecate("IptcImagePlugin.PAD", 12) + return b"\0\0\0\0" + msg = f"module '{__name__}' has no attribute '{name}'" + raise AttributeError(msg) + + +# +# Helpers + + +def _i(c: bytes) -> int: + return i32((b"\0\0\0\0" + c)[-4:]) + + +def _i8(c: int | bytes) -> int: + return c if isinstance(c, int) else c[0] + + +def i(c: bytes) -> int: + """.. deprecated:: 10.2.0""" + deprecate("IptcImagePlugin.i", 12) + return _i(c) + + +def dump(c: Sequence[int | bytes]) -> None: + """.. deprecated:: 10.2.0""" + deprecate("IptcImagePlugin.dump", 12) + for i in c: + print(f"{_i8(i):02x}", end=" ") + print() + + +## +# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields +# from TIFF and JPEG files, use the getiptcinfo function. 
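+#
+# A rough usage sketch (the file name is hypothetical; getiptcinfo is
+# defined at the end of this module):
+#
+#     from PIL import Image, IptcImagePlugin
+#
+#     with Image.open("photo.jpg") as im:
+#         iptc = IptcImagePlugin.getiptcinfo(im)
+#         if iptc is not None:
+#             caption = iptc.get((2, 120))  # IIM record 2:120, Caption/Abstract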
+ + +class IptcImageFile(ImageFile.ImageFile): + format = "IPTC" + format_description = "IPTC/NAA" + + def getint(self, key: tuple[int, int]) -> int: + return _i(self.info[key]) + + def field(self) -> tuple[tuple[int, int] | None, int]: + # + # get a IPTC field header + s = self.fp.read(5) + if not s.strip(b"\x00"): + return None, 0 + + tag = s[1], s[2] + + # syntax + if s[0] != 0x1C or tag[0] not in [1, 2, 3, 4, 5, 6, 7, 8, 9, 240]: + msg = "invalid IPTC/NAA file" + raise SyntaxError(msg) + + # field size + size = s[3] + if size > 132: + msg = "illegal field length in IPTC/NAA file" + raise OSError(msg) + elif size == 128: + size = 0 + elif size > 128: + size = _i(self.fp.read(size - 128)) + else: + size = i16(s, 3) + + return tag, size + + def _open(self) -> None: + # load descriptive fields + while True: + offset = self.fp.tell() + tag, size = self.field() + if not tag or tag == (8, 10): + break + if size: + tagdata = self.fp.read(size) + else: + tagdata = None + if tag in self.info: + if isinstance(self.info[tag], list): + self.info[tag].append(tagdata) + else: + self.info[tag] = [self.info[tag], tagdata] + else: + self.info[tag] = tagdata + + # mode + layers = self.info[(3, 60)][0] + component = self.info[(3, 60)][1] + if (3, 65) in self.info: + id = self.info[(3, 65)][0] - 1 + else: + id = 0 + if layers == 1 and not component: + self._mode = "L" + elif layers == 3 and component: + self._mode = "RGB"[id] + elif layers == 4 and component: + self._mode = "CMYK"[id] + + # size + self._size = self.getint((3, 20)), self.getint((3, 30)) + + # compression + try: + compression = COMPRESSION[self.getint((3, 120))] + except KeyError as e: + msg = "Unknown IPTC image compression" + raise OSError(msg) from e + + # tile + if tag == (8, 10): + self.tile = [("iptc", (0, 0) + self.size, offset, compression)] + + def load(self): + if len(self.tile) != 1 or self.tile[0][0] != "iptc": + return ImageFile.ImageFile.load(self) + + offset, compression = self.tile[0][2:] + + self.fp.seek(offset) + + # Copy image data to temporary file + o = BytesIO() + if compression == "raw": + # To simplify access to the extracted file, + # prepend a PPM header + o.write(b"P5\n%d %d\n255\n" % self.size) + while True: + type, size = self.field() + if type != (8, 10): + break + while size > 0: + s = self.fp.read(min(size, 8192)) + if not s: + break + o.write(s) + size -= len(s) + + with Image.open(o) as _im: + _im.load() + self.im = _im.im + + +Image.register_open(IptcImageFile.format, IptcImageFile) + +Image.register_extension(IptcImageFile.format, ".iim") + + +def getiptcinfo(im): + """ + Get IPTC information from TIFF, JPEG, or IPTC file. + + :param im: An image containing IPTC data. + :returns: A dictionary containing IPTC information, or None if + no IPTC information block was found. + """ + from . import JpegImagePlugin, TiffImagePlugin + + data = None + + if isinstance(im, IptcImageFile): + # return info dictionary right away + return im.info + + elif isinstance(im, JpegImagePlugin.JpegImageFile): + # extract the IPTC/NAA resource + photoshop = im.info.get("photoshop") + if photoshop: + data = photoshop.get(0x0404) + + elif isinstance(im, TiffImagePlugin.TiffImageFile): + # get raw data from the IPTC/NAA tag (PhotoShop tags the data + # as 4-byte integers, so we cannot use the get method...) 
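+            # tag.tagdata holds the raw, undecoded bytes for each tag, which is
+            # what the IPTC parser below expects.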
+ try: + data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK] + except (AttributeError, KeyError): + pass + + if data is None: + return None # no properties + + # create an IptcImagePlugin object without initializing it + class FakeImage: + pass + + im = FakeImage() + im.__class__ = IptcImageFile + + # parse the IPTC information chunk + im.info = {} + im.fp = BytesIO(data) + + try: + im._open() + except (IndexError, KeyError): + pass # expected failure + + return im.info diff --git a/MLPY/Lib/site-packages/PIL/Jpeg2KImagePlugin.py b/MLPY/Lib/site-packages/PIL/Jpeg2KImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..3ba2f751c4533a88ad814dd5e163c06f39acf445 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/Jpeg2KImagePlugin.py @@ -0,0 +1,408 @@ +# +# The Python Imaging Library +# $Id$ +# +# JPEG2000 file handling +# +# History: +# 2014-03-12 ajh Created +# 2021-06-30 rogermb Extract dpi information from the 'resc' header box +# +# Copyright (c) 2014 Coriolis Systems Limited +# Copyright (c) 2014 Alastair Houghton +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import io +import os +import struct +from typing import IO, Tuple, cast + +from . import Image, ImageFile, ImagePalette, _binary + + +class BoxReader: + """ + A small helper class to read fields stored in JPEG2000 header boxes + and to easily step into and read sub-boxes. + """ + + def __init__(self, fp, length=-1): + self.fp = fp + self.has_length = length >= 0 + self.length = length + self.remaining_in_box = -1 + + def _can_read(self, num_bytes: int) -> bool: + if self.has_length and self.fp.tell() + num_bytes > self.length: + # Outside box: ensure we don't read past the known file length + return False + if self.remaining_in_box >= 0: + # Inside box contents: ensure read does not go past box boundaries + return num_bytes <= self.remaining_in_box + else: + return True # No length known, just read + + def _read_bytes(self, num_bytes: int) -> bytes: + if not self._can_read(num_bytes): + msg = "Not enough data in header" + raise SyntaxError(msg) + + data = self.fp.read(num_bytes) + if len(data) < num_bytes: + msg = f"Expected to read {num_bytes} bytes but only got {len(data)}." 
+ raise OSError(msg) + + if self.remaining_in_box > 0: + self.remaining_in_box -= num_bytes + return data + + def read_fields(self, field_format: str) -> tuple[int | bytes, ...]: + size = struct.calcsize(field_format) + data = self._read_bytes(size) + return struct.unpack(field_format, data) + + def read_boxes(self) -> BoxReader: + size = self.remaining_in_box + data = self._read_bytes(size) + return BoxReader(io.BytesIO(data), size) + + def has_next_box(self) -> bool: + if self.has_length: + return self.fp.tell() + self.remaining_in_box < self.length + else: + return True + + def next_box_type(self) -> bytes: + # Skip the rest of the box if it has not been read + if self.remaining_in_box > 0: + self.fp.seek(self.remaining_in_box, os.SEEK_CUR) + self.remaining_in_box = -1 + + # Read the length and type of the next box + lbox, tbox = cast(Tuple[int, bytes], self.read_fields(">I4s")) + if lbox == 1: + lbox = cast(int, self.read_fields(">Q")[0]) + hlen = 16 + else: + hlen = 8 + + if lbox < hlen or not self._can_read(lbox - hlen): + msg = "Invalid header length" + raise SyntaxError(msg) + + self.remaining_in_box = lbox - hlen + return tbox + + +def _parse_codestream(fp) -> tuple[tuple[int, int], str]: + """Parse the JPEG 2000 codestream to extract the size and component + count from the SIZ marker segment, returning a PIL (size, mode) tuple.""" + + hdr = fp.read(2) + lsiz = _binary.i16be(hdr) + siz = hdr + fp.read(lsiz - 2) + lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, _, _, _, _, csiz = struct.unpack_from( + ">HHIIIIIIIIH", siz + ) + + size = (xsiz - xosiz, ysiz - yosiz) + if csiz == 1: + ssiz = struct.unpack_from(">B", siz, 38) + if (ssiz[0] & 0x7F) + 1 > 8: + mode = "I;16" + else: + mode = "L" + elif csiz == 2: + mode = "LA" + elif csiz == 3: + mode = "RGB" + elif csiz == 4: + mode = "RGBA" + else: + msg = "unable to determine J2K image mode" + raise SyntaxError(msg) + + return size, mode + + +def _res_to_dpi(num: int, denom: int, exp: int) -> float | None: + """Convert JPEG2000's (numerator, denominator, exponent-base-10) resolution, + calculated as (num / denom) * 10^exp and stored in dots per meter, + to floating-point dots per inch.""" + if denom == 0: + return None + return (254 * num * (10**exp)) / (10000 * denom) + + +def _parse_jp2_header(fp): + """Parse the JP2 header box to extract size, component count, + color space information, and optionally DPI information, + returning a (size, mode, mimetype, dpi) tuple.""" + + # Find the JP2 header box + reader = BoxReader(fp) + header = None + mimetype = None + while reader.has_next_box(): + tbox = reader.next_box_type() + + if tbox == b"jp2h": + header = reader.read_boxes() + break + elif tbox == b"ftyp": + if reader.read_fields(">4s")[0] == b"jpx ": + mimetype = "image/jpx" + + size = None + mode = None + bpc = None + nc = None + dpi = None # 2-tuple of DPI info, or None + palette = None + + while header.has_next_box(): + tbox = header.next_box_type() + + if tbox == b"ihdr": + height, width, nc, bpc = header.read_fields(">IIHB") + size = (width, height) + if nc == 1 and (bpc & 0x7F) > 8: + mode = "I;16" + elif nc == 1: + mode = "L" + elif nc == 2: + mode = "LA" + elif nc == 3: + mode = "RGB" + elif nc == 4: + mode = "RGBA" + elif tbox == b"colr" and nc == 4: + meth, _, _, enumcs = header.read_fields(">BBBI") + if meth == 1 and enumcs == 12: + mode = "CMYK" + elif tbox == b"pclr" and mode in ("L", "LA"): + ne, npc = header.read_fields(">HB") + bitdepths = header.read_fields(">" + ("B" * npc)) + if max(bitdepths) <= 8: + palette = 
ImagePalette.ImagePalette() + for i in range(ne): + palette.getcolor(header.read_fields(">" + ("B" * npc))) + mode = "P" if mode == "L" else "PA" + elif tbox == b"res ": + res = header.read_boxes() + while res.has_next_box(): + tres = res.next_box_type() + if tres == b"resc": + vrcn, vrcd, hrcn, hrcd, vrce, hrce = res.read_fields(">HHHHBB") + hres = _res_to_dpi(hrcn, hrcd, hrce) + vres = _res_to_dpi(vrcn, vrcd, vrce) + if hres is not None and vres is not None: + dpi = (hres, vres) + break + + if size is None or mode is None: + msg = "Malformed JP2 header" + raise SyntaxError(msg) + + return size, mode, mimetype, dpi, palette + + +## +# Image plugin for JPEG2000 images. + + +class Jpeg2KImageFile(ImageFile.ImageFile): + format = "JPEG2000" + format_description = "JPEG 2000 (ISO 15444)" + + def _open(self) -> None: + sig = self.fp.read(4) + if sig == b"\xff\x4f\xff\x51": + self.codec = "j2k" + self._size, self._mode = _parse_codestream(self.fp) + else: + sig = sig + self.fp.read(8) + + if sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a": + self.codec = "jp2" + header = _parse_jp2_header(self.fp) + self._size, self._mode, self.custom_mimetype, dpi, self.palette = header + if dpi is not None: + self.info["dpi"] = dpi + if self.fp.read(12).endswith(b"jp2c\xff\x4f\xff\x51"): + self._parse_comment() + else: + msg = "not a JPEG 2000 file" + raise SyntaxError(msg) + + self._reduce = 0 + self.layers = 0 + + fd = -1 + length = -1 + + try: + fd = self.fp.fileno() + length = os.fstat(fd).st_size + except Exception: + fd = -1 + try: + pos = self.fp.tell() + self.fp.seek(0, io.SEEK_END) + length = self.fp.tell() + self.fp.seek(pos) + except Exception: + length = -1 + + self.tile = [ + ( + "jpeg2k", + (0, 0) + self.size, + 0, + (self.codec, self._reduce, self.layers, fd, length), + ) + ] + + def _parse_comment(self) -> None: + hdr = self.fp.read(2) + length = _binary.i16be(hdr) + self.fp.seek(length - 2, os.SEEK_CUR) + + while True: + marker = self.fp.read(2) + if not marker: + break + typ = marker[1] + if typ in (0x90, 0xD9): + # Start of tile or end of codestream + break + hdr = self.fp.read(2) + length = _binary.i16be(hdr) + if typ == 0x64: + # Comment + self.info["comment"] = self.fp.read(length - 2)[2:] + break + else: + self.fp.seek(length - 2, os.SEEK_CUR) + + @property + def reduce(self): + # https://github.com/python-pillow/Pillow/issues/4343 found that the + # new Image 'reduce' method was shadowed by this plugin's 'reduce' + # property. 
This attempts to allow for both scenarios + return self._reduce or super().reduce + + @reduce.setter + def reduce(self, value): + self._reduce = value + + def load(self): + if self.tile and self._reduce: + power = 1 << self._reduce + adjust = power >> 1 + self._size = ( + int((self.size[0] + adjust) / power), + int((self.size[1] + adjust) / power), + ) + + # Update the reduce and layers settings + t = self.tile[0] + t3 = (t[3][0], self._reduce, self.layers, t[3][3], t[3][4]) + self.tile = [(t[0], (0, 0) + self.size, t[2], t3)] + + return ImageFile.ImageFile.load(self) + + +def _accept(prefix: bytes) -> bool: + return ( + prefix[:4] == b"\xff\x4f\xff\x51" + or prefix[:12] == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a" + ) + + +# ------------------------------------------------------------ +# Save support + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + # Get the keyword arguments + info = im.encoderinfo + + if isinstance(filename, str): + filename = filename.encode() + if filename.endswith(b".j2k") or info.get("no_jp2", False): + kind = "j2k" + else: + kind = "jp2" + + offset = info.get("offset", None) + tile_offset = info.get("tile_offset", None) + tile_size = info.get("tile_size", None) + quality_mode = info.get("quality_mode", "rates") + quality_layers = info.get("quality_layers", None) + if quality_layers is not None and not ( + isinstance(quality_layers, (list, tuple)) + and all( + isinstance(quality_layer, (int, float)) for quality_layer in quality_layers + ) + ): + msg = "quality_layers must be a sequence of numbers" + raise ValueError(msg) + + num_resolutions = info.get("num_resolutions", 0) + cblk_size = info.get("codeblock_size", None) + precinct_size = info.get("precinct_size", None) + irreversible = info.get("irreversible", False) + progression = info.get("progression", "LRCP") + cinema_mode = info.get("cinema_mode", "no") + mct = info.get("mct", 0) + signed = info.get("signed", False) + comment = info.get("comment") + if isinstance(comment, str): + comment = comment.encode() + plt = info.get("plt", False) + + fd = -1 + if hasattr(fp, "fileno"): + try: + fd = fp.fileno() + except Exception: + fd = -1 + + im.encoderconfig = ( + offset, + tile_offset, + tile_size, + quality_mode, + quality_layers, + num_resolutions, + cblk_size, + precinct_size, + irreversible, + progression, + cinema_mode, + mct, + signed, + fd, + comment, + plt, + ) + + ImageFile._save(im, fp, [("jpeg2k", (0, 0) + im.size, 0, kind)]) + + +# ------------------------------------------------------------ +# Registry stuff + + +Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept) +Image.register_save(Jpeg2KImageFile.format, _save) + +Image.register_extensions( + Jpeg2KImageFile.format, [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"] +) + +Image.register_mime(Jpeg2KImageFile.format, "image/jp2") diff --git a/MLPY/Lib/site-packages/PIL/JpegImagePlugin.py b/MLPY/Lib/site-packages/PIL/JpegImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..9ddf78fec20ae15f0a71d86d070c4cd82f134ada --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/JpegImagePlugin.py @@ -0,0 +1,861 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# JPEG (JFIF) file handling +# +# See "Digital Compression and Coding of Continuous-Tone Still Images, +# Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1) +# +# History: +# 1995-09-09 fl Created +# 1995-09-13 fl Added full parser +# 1996-03-25 fl Added hack to use the IJG command line utilities +# 1996-05-05 fl Workaround Photoshop 2.5 CMYK polarity bug +# 1996-05-28 fl Added draft support, JFIF version (0.1) +# 1996-12-30 fl Added encoder options, added progression property (0.2) +# 1997-08-27 fl Save mode 1 images as BW (0.3) +# 1998-07-12 fl Added YCbCr to draft and save methods (0.4) +# 1998-10-19 fl Don't hang on files using 16-bit DQT's (0.4.1) +# 2001-04-16 fl Extract DPI settings from JFIF files (0.4.2) +# 2002-07-01 fl Skip pad bytes before markers; identify Exif files (0.4.3) +# 2003-04-25 fl Added experimental EXIF decoder (0.5) +# 2003-06-06 fl Added experimental EXIF GPSinfo decoder +# 2003-09-13 fl Extract COM markers +# 2009-09-06 fl Added icc_profile support (from Florian Hoech) +# 2009-03-06 fl Changed CMYK handling; always use Adobe polarity (0.6) +# 2009-03-08 fl Added subsampling support (from Justin Huff). +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import array +import io +import math +import os +import struct +import subprocess +import sys +import tempfile +import warnings +from typing import IO, Any + +from . import Image, ImageFile +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import o8 +from ._binary import o16be as o16 +from .JpegPresets import presets + +# +# Parser + + +def Skip(self: JpegImageFile, marker: int) -> None: + n = i16(self.fp.read(2)) - 2 + ImageFile._safe_read(self.fp, n) + + +def APP(self, marker): + # + # Application marker. Store these in the APP dictionary. + # Also look for well-known application markers. + + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + + app = "APP%d" % (marker & 15) + + self.app[app] = s # compatibility + self.applist.append((app, s)) + + if marker == 0xFFE0 and s[:4] == b"JFIF": + # extract JFIF information + self.info["jfif"] = version = i16(s, 5) # version + self.info["jfif_version"] = divmod(version, 256) + # extract JFIF properties + try: + jfif_unit = s[7] + jfif_density = i16(s, 8), i16(s, 10) + except Exception: + pass + else: + if jfif_unit == 1: + self.info["dpi"] = jfif_density + self.info["jfif_unit"] = jfif_unit + self.info["jfif_density"] = jfif_density + elif marker == 0xFFE1 and s[:6] == b"Exif\0\0": + # extract EXIF information + if "exif" in self.info: + self.info["exif"] += s[6:] + else: + self.info["exif"] = s + self._exif_offset = self.fp.tell() - n + 6 + elif marker == 0xFFE1 and s[:29] == b"http://ns.adobe.com/xap/1.0/\x00": + self.info["xmp"] = s.split(b"\x00", 1)[1] + elif marker == 0xFFE2 and s[:5] == b"FPXR\0": + # extract FlashPix information (incomplete) + self.info["flashpix"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:12] == b"ICC_PROFILE\0": + # Since an ICC profile can be larger than the maximum size of + # a JPEG marker (64K), we need provisions to split it into + # multiple markers. 
The format defined by the ICC specifies + # one or more APP2 markers containing the following data: + # Identifying string ASCII "ICC_PROFILE\0" (12 bytes) + # Marker sequence number 1, 2, etc (1 byte) + # Number of markers Total of APP2's used (1 byte) + # Profile data (remainder of APP2 data) + # Decoders should use the marker sequence numbers to + # reassemble the profile, rather than assuming that the APP2 + # markers appear in the correct sequence. + self.icclist.append(s) + elif marker == 0xFFED and s[:14] == b"Photoshop 3.0\x00": + # parse the image resource block + offset = 14 + photoshop = self.info.setdefault("photoshop", {}) + while s[offset : offset + 4] == b"8BIM": + try: + offset += 4 + # resource code + code = i16(s, offset) + offset += 2 + # resource name (usually empty) + name_len = s[offset] + # name = s[offset+1:offset+1+name_len] + offset += 1 + name_len + offset += offset & 1 # align + # resource data block + size = i32(s, offset) + offset += 4 + data = s[offset : offset + size] + if code == 0x03ED: # ResolutionInfo + data = { + "XResolution": i32(data, 0) / 65536, + "DisplayedUnitsX": i16(data, 4), + "YResolution": i32(data, 8) / 65536, + "DisplayedUnitsY": i16(data, 12), + } + photoshop[code] = data + offset += size + offset += offset & 1 # align + except struct.error: + break # insufficient data + + elif marker == 0xFFEE and s[:5] == b"Adobe": + self.info["adobe"] = i16(s, 5) + # extract Adobe custom properties + try: + adobe_transform = s[11] + except IndexError: + pass + else: + self.info["adobe_transform"] = adobe_transform + elif marker == 0xFFE2 and s[:4] == b"MPF\0": + # extract MPO information + self.info["mp"] = s[4:] + # offset is current location minus buffer size + # plus constant header size + self.info["mpoffset"] = self.fp.tell() - n + 4 + + +def COM(self: JpegImageFile, marker: int) -> None: + # + # Comment marker. Store these in the APP dictionary. + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + + self.info["comment"] = s + self.app["COM"] = s # compatibility + self.applist.append(("COM", s)) + + +def SOF(self: JpegImageFile, marker: int) -> None: + # + # Start of frame marker. Defines the size and mode of the + # image. JPEG is colour blind, so we use some simple + # heuristics to map the number of layers to an appropriate + # mode. Note that this could be made a bit brighter, by + # looking for JFIF and Adobe APP markers. 
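# --- editor's note (illustrative sketch, not part of this patch) -----------
# The APP handler above describes how an ICC profile larger than one APP2
# marker (64K) is split into chunks that carry a 1-byte sequence number and a
# 1-byte total count after the b"ICC_PROFILE\0" signature; the "fixup icc
# profile" step in SOF below reassembles self.icclist on exactly that basis.
# The same reassembly as a standalone sketch, where `_reassemble_icc_profile`
# and `app2_payloads` are hypothetical names (a list of raw APP2 payloads):
def _reassemble_icc_profile(app2_payloads):
    chunks = sorted(
        (p for p in app2_payloads if p[:12] == b"ICC_PROFILE\0"),
        key=lambda p: p[12],  # marker sequence number (1, 2, ...)
    )
    if chunks and chunks[0][13] == len(chunks):  # declared marker count must match
        return b"".join(p[14:] for p in chunks)  # strip the 14-byte chunk header
    return None  # incomplete or inconsistent profile
# --- end editor's note ------------------------------------------------------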
+ + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + self._size = i16(s, 3), i16(s, 1) + + self.bits = s[0] + if self.bits != 8: + msg = f"cannot handle {self.bits}-bit layers" + raise SyntaxError(msg) + + self.layers = s[5] + if self.layers == 1: + self._mode = "L" + elif self.layers == 3: + self._mode = "RGB" + elif self.layers == 4: + self._mode = "CMYK" + else: + msg = f"cannot handle {self.layers}-layer images" + raise SyntaxError(msg) + + if marker in [0xFFC2, 0xFFC6, 0xFFCA, 0xFFCE]: + self.info["progressive"] = self.info["progression"] = 1 + + if self.icclist: + # fixup icc profile + self.icclist.sort() # sort by sequence number + if self.icclist[0][13] == len(self.icclist): + profile = [p[14:] for p in self.icclist] + icc_profile = b"".join(profile) + else: + icc_profile = None # wrong number of fragments + self.info["icc_profile"] = icc_profile + self.icclist = [] + + for i in range(6, len(s), 3): + t = s[i : i + 3] + # 4-tuples: id, vsamp, hsamp, qtable + self.layer.append((t[0], t[1] // 16, t[1] & 15, t[2])) + + +def DQT(self: JpegImageFile, marker: int) -> None: + # + # Define quantization table. Note that there might be more + # than one table in each marker. + + # FIXME: The quantization tables can be used to estimate the + # compression quality. + + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + while len(s): + v = s[0] + precision = 1 if (v // 16 == 0) else 2 # in bytes + qt_length = 1 + precision * 64 + if len(s) < qt_length: + msg = "bad quantization table marker" + raise SyntaxError(msg) + data = array.array("B" if precision == 1 else "H", s[1:qt_length]) + if sys.byteorder == "little" and precision > 1: + data.byteswap() # the values are always big-endian + self.quantization[v & 15] = [data[i] for i in zigzag_index] + s = s[qt_length:] + + +# +# JPEG marker table + +MARKER = { + 0xFFC0: ("SOF0", "Baseline DCT", SOF), + 0xFFC1: ("SOF1", "Extended Sequential DCT", SOF), + 0xFFC2: ("SOF2", "Progressive DCT", SOF), + 0xFFC3: ("SOF3", "Spatial lossless", SOF), + 0xFFC4: ("DHT", "Define Huffman table", Skip), + 0xFFC5: ("SOF5", "Differential sequential DCT", SOF), + 0xFFC6: ("SOF6", "Differential progressive DCT", SOF), + 0xFFC7: ("SOF7", "Differential spatial", SOF), + 0xFFC8: ("JPG", "Extension", None), + 0xFFC9: ("SOF9", "Extended sequential DCT (AC)", SOF), + 0xFFCA: ("SOF10", "Progressive DCT (AC)", SOF), + 0xFFCB: ("SOF11", "Spatial lossless DCT (AC)", SOF), + 0xFFCC: ("DAC", "Define arithmetic coding conditioning", Skip), + 0xFFCD: ("SOF13", "Differential sequential DCT (AC)", SOF), + 0xFFCE: ("SOF14", "Differential progressive DCT (AC)", SOF), + 0xFFCF: ("SOF15", "Differential spatial (AC)", SOF), + 0xFFD0: ("RST0", "Restart 0", None), + 0xFFD1: ("RST1", "Restart 1", None), + 0xFFD2: ("RST2", "Restart 2", None), + 0xFFD3: ("RST3", "Restart 3", None), + 0xFFD4: ("RST4", "Restart 4", None), + 0xFFD5: ("RST5", "Restart 5", None), + 0xFFD6: ("RST6", "Restart 6", None), + 0xFFD7: ("RST7", "Restart 7", None), + 0xFFD8: ("SOI", "Start of image", None), + 0xFFD9: ("EOI", "End of image", None), + 0xFFDA: ("SOS", "Start of scan", Skip), + 0xFFDB: ("DQT", "Define quantization table", DQT), + 0xFFDC: ("DNL", "Define number of lines", Skip), + 0xFFDD: ("DRI", "Define restart interval", Skip), + 0xFFDE: ("DHP", "Define hierarchical progression", SOF), + 0xFFDF: ("EXP", "Expand reference component", Skip), + 0xFFE0: ("APP0", "Application segment 0", APP), + 0xFFE1: ("APP1", "Application segment 1", APP), + 0xFFE2: ("APP2", "Application 
segment 2", APP), + 0xFFE3: ("APP3", "Application segment 3", APP), + 0xFFE4: ("APP4", "Application segment 4", APP), + 0xFFE5: ("APP5", "Application segment 5", APP), + 0xFFE6: ("APP6", "Application segment 6", APP), + 0xFFE7: ("APP7", "Application segment 7", APP), + 0xFFE8: ("APP8", "Application segment 8", APP), + 0xFFE9: ("APP9", "Application segment 9", APP), + 0xFFEA: ("APP10", "Application segment 10", APP), + 0xFFEB: ("APP11", "Application segment 11", APP), + 0xFFEC: ("APP12", "Application segment 12", APP), + 0xFFED: ("APP13", "Application segment 13", APP), + 0xFFEE: ("APP14", "Application segment 14", APP), + 0xFFEF: ("APP15", "Application segment 15", APP), + 0xFFF0: ("JPG0", "Extension 0", None), + 0xFFF1: ("JPG1", "Extension 1", None), + 0xFFF2: ("JPG2", "Extension 2", None), + 0xFFF3: ("JPG3", "Extension 3", None), + 0xFFF4: ("JPG4", "Extension 4", None), + 0xFFF5: ("JPG5", "Extension 5", None), + 0xFFF6: ("JPG6", "Extension 6", None), + 0xFFF7: ("JPG7", "Extension 7", None), + 0xFFF8: ("JPG8", "Extension 8", None), + 0xFFF9: ("JPG9", "Extension 9", None), + 0xFFFA: ("JPG10", "Extension 10", None), + 0xFFFB: ("JPG11", "Extension 11", None), + 0xFFFC: ("JPG12", "Extension 12", None), + 0xFFFD: ("JPG13", "Extension 13", None), + 0xFFFE: ("COM", "Comment", COM), +} + + +def _accept(prefix: bytes) -> bool: + # Magic number was taken from https://en.wikipedia.org/wiki/JPEG + return prefix[:3] == b"\xFF\xD8\xFF" + + +## +# Image plugin for JPEG and JFIF images. + + +class JpegImageFile(ImageFile.ImageFile): + format = "JPEG" + format_description = "JPEG (ISO 10918)" + + def _open(self): + s = self.fp.read(3) + + if not _accept(s): + msg = "not a JPEG file" + raise SyntaxError(msg) + s = b"\xFF" + + # Create attributes + self.bits = self.layers = 0 + + # JPEG specifics (internal) + self.layer = [] + self.huffman_dc = {} + self.huffman_ac = {} + self.quantization = {} + self.app = {} # compatibility + self.applist = [] + self.icclist = [] + + while True: + i = s[0] + if i == 0xFF: + s = s + self.fp.read(1) + i = i16(s) + else: + # Skip non-0xFF junk + s = self.fp.read(1) + continue + + if i in MARKER: + name, description, handler = MARKER[i] + if handler is not None: + handler(self, i) + if i == 0xFFDA: # start of scan + rawmode = self.mode + if self.mode == "CMYK": + rawmode = "CMYK;I" # assume adobe conventions + self.tile = [("jpeg", (0, 0) + self.size, 0, (rawmode, ""))] + # self.__offset = self.fp.tell() + break + s = self.fp.read(1) + elif i in {0, 0xFFFF}: + # padded marker or junk; move on + s = b"\xff" + elif i == 0xFF00: # Skip extraneous data (escaped 0xFF) + s = self.fp.read(1) + else: + msg = "no marker found" + raise SyntaxError(msg) + + self._read_dpi_from_exif() + + def load_read(self, read_bytes: int) -> bytes: + """ + internal: read more image data + For premature EOF and LOAD_TRUNCATED_IMAGES adds EOI marker + so libjpeg can finish decoding + """ + s = self.fp.read(read_bytes) + + if not s and ImageFile.LOAD_TRUNCATED_IMAGES and not hasattr(self, "_ended"): + # Premature EOF. 
+ # Pretend file is finished adding EOI marker + self._ended = True + return b"\xFF\xD9" + + return s + + def draft( + self, mode: str | None, size: tuple[int, int] | None + ) -> tuple[str, tuple[int, int, float, float]] | None: + if len(self.tile) != 1: + return None + + # Protect from second call + if self.decoderconfig: + return None + + d, e, o, a = self.tile[0] + scale = 1 + original_size = self.size + + if a[0] == "RGB" and mode in ["L", "YCbCr"]: + self._mode = mode + a = mode, "" + + if size: + scale = min(self.size[0] // size[0], self.size[1] // size[1]) + for s in [8, 4, 2, 1]: + if scale >= s: + break + e = ( + e[0], + e[1], + (e[2] - e[0] + s - 1) // s + e[0], + (e[3] - e[1] + s - 1) // s + e[1], + ) + self._size = ((self.size[0] + s - 1) // s, (self.size[1] + s - 1) // s) + scale = s + + self.tile = [(d, e, o, a)] + self.decoderconfig = (scale, 0) + + box = (0, 0, original_size[0] / scale, original_size[1] / scale) + return self.mode, box + + def load_djpeg(self) -> None: + # ALTERNATIVE: handle JPEGs via the IJG command line utilities + + f, path = tempfile.mkstemp() + os.close(f) + if os.path.exists(self.filename): + subprocess.check_call(["djpeg", "-outfile", path, self.filename]) + else: + try: + os.unlink(path) + except OSError: + pass + + msg = "Invalid Filename" + raise ValueError(msg) + + try: + with Image.open(path) as _im: + _im.load() + self.im = _im.im + finally: + try: + os.unlink(path) + except OSError: + pass + + self._mode = self.im.mode + self._size = self.im.size + + self.tile = [] + + def _getexif(self) -> dict[str, Any] | None: + return _getexif(self) + + def _read_dpi_from_exif(self) -> None: + # If DPI isn't in JPEG header, fetch from EXIF + if "dpi" in self.info or "exif" not in self.info: + return + try: + exif = self.getexif() + resolution_unit = exif[0x0128] + x_resolution = exif[0x011A] + try: + dpi = float(x_resolution[0]) / x_resolution[1] + except TypeError: + dpi = x_resolution + if math.isnan(dpi): + msg = "DPI is not a number" + raise ValueError(msg) + if resolution_unit == 3: # cm + # 1 dpcm = 2.54 dpi + dpi *= 2.54 + self.info["dpi"] = dpi, dpi + except ( + struct.error, # truncated EXIF + KeyError, # dpi not included + SyntaxError, # invalid/unreadable EXIF + TypeError, # dpi is an invalid float + ValueError, # dpi is an invalid float + ZeroDivisionError, # invalid dpi rational value + ): + self.info["dpi"] = 72, 72 + + def _getmp(self): + return _getmp(self) + + +def _getexif(self) -> dict[str, Any] | None: + if "exif" not in self.info: + return None + return self.getexif()._get_merged_dict() + + +def _getmp(self): + # Extract MP information. This method was inspired by the "highly + # experimental" _getexif version that's been in use for years now, + # itself based on the ImageFileDirectory class in the TIFF plugin. + + # The MP record essentially consists of a TIFF file embedded in a JPEG + # application marker. + try: + data = self.info["mp"] + except KeyError: + return None + file_contents = io.BytesIO(data) + head = file_contents.read(8) + endianness = ">" if head[:4] == b"\x4d\x4d\x00\x2a" else "<" + # process dictionary + from . 
import TiffImagePlugin + + try: + info = TiffImagePlugin.ImageFileDirectory_v2(head) + file_contents.seek(info.next) + info.load(file_contents) + mp = dict(info) + except Exception as e: + msg = "malformed MP Index (unreadable directory)" + raise SyntaxError(msg) from e + # it's an error not to have a number of images + try: + quant = mp[0xB001] + except KeyError as e: + msg = "malformed MP Index (no number of images)" + raise SyntaxError(msg) from e + # get MP entries + mpentries = [] + try: + rawmpentries = mp[0xB002] + for entrynum in range(0, quant): + unpackedentry = struct.unpack_from( + f"{endianness}LLLHH", rawmpentries, entrynum * 16 + ) + labels = ("Attribute", "Size", "DataOffset", "EntryNo1", "EntryNo2") + mpentry = dict(zip(labels, unpackedentry)) + mpentryattr = { + "DependentParentImageFlag": bool(mpentry["Attribute"] & (1 << 31)), + "DependentChildImageFlag": bool(mpentry["Attribute"] & (1 << 30)), + "RepresentativeImageFlag": bool(mpentry["Attribute"] & (1 << 29)), + "Reserved": (mpentry["Attribute"] & (3 << 27)) >> 27, + "ImageDataFormat": (mpentry["Attribute"] & (7 << 24)) >> 24, + "MPType": mpentry["Attribute"] & 0x00FFFFFF, + } + if mpentryattr["ImageDataFormat"] == 0: + mpentryattr["ImageDataFormat"] = "JPEG" + else: + msg = "unsupported picture format in MPO" + raise SyntaxError(msg) + mptypemap = { + 0x000000: "Undefined", + 0x010001: "Large Thumbnail (VGA Equivalent)", + 0x010002: "Large Thumbnail (Full HD Equivalent)", + 0x020001: "Multi-Frame Image (Panorama)", + 0x020002: "Multi-Frame Image: (Disparity)", + 0x020003: "Multi-Frame Image: (Multi-Angle)", + 0x030000: "Baseline MP Primary Image", + } + mpentryattr["MPType"] = mptypemap.get(mpentryattr["MPType"], "Unknown") + mpentry["Attribute"] = mpentryattr + mpentries.append(mpentry) + mp[0xB002] = mpentries + except KeyError as e: + msg = "malformed MP Index (bad MP Entry)" + raise SyntaxError(msg) from e + # Next we should try and parse the individual image unique ID list; + # we don't because I've never seen this actually used in a real MPO + # file and so can't test it. + return mp + + +# -------------------------------------------------------------------- +# stuff to save JPEG files + +RAWMODE = { + "1": "L", + "L": "L", + "RGB": "RGB", + "RGBX": "RGB", + "CMYK": "CMYK;I", # assume adobe conventions + "YCbCr": "YCbCr", +} + +# fmt: off +zigzag_index = ( + 0, 1, 5, 6, 14, 15, 27, 28, + 2, 4, 7, 13, 16, 26, 29, 42, + 3, 8, 12, 17, 25, 30, 41, 43, + 9, 11, 18, 24, 31, 40, 44, 53, + 10, 19, 23, 32, 39, 45, 52, 54, + 20, 22, 33, 38, 46, 51, 55, 60, + 21, 34, 37, 47, 50, 56, 59, 61, + 35, 36, 48, 49, 57, 58, 62, 63, +) + +samplings = { + (1, 1, 1, 1, 1, 1): 0, + (2, 1, 1, 1, 1, 1): 1, + (2, 2, 1, 1, 1, 1): 2, +} +# fmt: on + + +def get_sampling(im): + # There's no subsampling when images have only 1 layer + # (grayscale images) or when they are CMYK (4 layers), + # so set subsampling to the default value. + # + # NOTE: currently Pillow can't encode JPEG to YCCK format. + # If YCCK support is added in the future, subsampling code will have + # to be updated (here and in JpegEncode.c) to deal with 4 layers. 
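# --- editor's note (usage sketch, not part of this patch) -------------------
# As the comment above says, get_sampling() returns -1 for single-layer and
# CMYK images; otherwise the factors collected from im.layer are mapped
# through the `samplings` table below (0 -> 4:4:4, 1 -> 4:2:2, 2 -> 4:2:0,
# the same convention documented in JpegPresets).  "photo.jpg" is a
# hypothetical file name:
#
#     from PIL import Image
#     from PIL.JpegImagePlugin import get_sampling
#
#     with Image.open("photo.jpg") as im:  # im.layer is filled in during open
#         names = {0: "4:4:4", 1: "4:2:2", 2: "4:2:0"}
#         print(names.get(get_sampling(im), "n/a (grayscale, CMYK or unknown)"))
# -----------------------------------------------------------------------------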
+ if not hasattr(im, "layers") or im.layers in (1, 4): + return -1 + sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3] + return samplings.get(sampling, -1) + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if im.width == 0 or im.height == 0: + msg = "cannot write empty image as JPEG" + raise ValueError(msg) + + try: + rawmode = RAWMODE[im.mode] + except KeyError as e: + msg = f"cannot write mode {im.mode} as JPEG" + raise OSError(msg) from e + + info = im.encoderinfo + + dpi = [round(x) for x in info.get("dpi", (0, 0))] + + quality = info.get("quality", -1) + subsampling = info.get("subsampling", -1) + qtables = info.get("qtables") + + if quality == "keep": + quality = -1 + subsampling = "keep" + qtables = "keep" + elif quality in presets: + preset = presets[quality] + quality = -1 + subsampling = preset.get("subsampling", -1) + qtables = preset.get("quantization") + elif not isinstance(quality, int): + msg = "Invalid quality setting" + raise ValueError(msg) + else: + if subsampling in presets: + subsampling = presets[subsampling].get("subsampling", -1) + if isinstance(qtables, str) and qtables in presets: + qtables = presets[qtables].get("quantization") + + if subsampling == "4:4:4": + subsampling = 0 + elif subsampling == "4:2:2": + subsampling = 1 + elif subsampling == "4:2:0": + subsampling = 2 + elif subsampling == "4:1:1": + # For compatibility. Before Pillow 4.3, 4:1:1 actually meant 4:2:0. + # Set 4:2:0 if someone is still using that value. + subsampling = 2 + elif subsampling == "keep": + if im.format != "JPEG": + msg = "Cannot use 'keep' when original image is not a JPEG" + raise ValueError(msg) + subsampling = get_sampling(im) + + def validate_qtables(qtables): + if qtables is None: + return qtables + if isinstance(qtables, str): + try: + lines = [ + int(num) + for line in qtables.splitlines() + for num in line.split("#", 1)[0].split() + ] + except ValueError as e: + msg = "Invalid quantization table" + raise ValueError(msg) from e + else: + qtables = [lines[s : s + 64] for s in range(0, len(lines), 64)] + if isinstance(qtables, (tuple, list, dict)): + if isinstance(qtables, dict): + qtables = [ + qtables[key] for key in range(len(qtables)) if key in qtables + ] + elif isinstance(qtables, tuple): + qtables = list(qtables) + if not (0 < len(qtables) < 5): + msg = "None or too many quantization tables" + raise ValueError(msg) + for idx, table in enumerate(qtables): + try: + if len(table) != 64: + msg = "Invalid quantization table" + raise TypeError(msg) + table = array.array("H", table) + except TypeError as e: + msg = "Invalid quantization table" + raise ValueError(msg) from e + else: + qtables[idx] = list(table) + return qtables + + if qtables == "keep": + if im.format != "JPEG": + msg = "Cannot use 'keep' when original image is not a JPEG" + raise ValueError(msg) + qtables = getattr(im, "quantization", None) + qtables = validate_qtables(qtables) + + extra = info.get("extra", b"") + + MAX_BYTES_IN_MARKER = 65533 + icc_profile = info.get("icc_profile") + if icc_profile: + ICC_OVERHEAD_LEN = 14 + MAX_DATA_BYTES_IN_MARKER = MAX_BYTES_IN_MARKER - ICC_OVERHEAD_LEN + markers = [] + while icc_profile: + markers.append(icc_profile[:MAX_DATA_BYTES_IN_MARKER]) + icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:] + i = 1 + for marker in markers: + size = o16(2 + ICC_OVERHEAD_LEN + len(marker)) + extra += ( + b"\xFF\xE2" + + size + + b"ICC_PROFILE\0" + + o8(i) + + o8(len(markers)) + + marker + ) + i += 1 + + comment = info.get("comment", 
im.info.get("comment")) + + # "progressive" is the official name, but older documentation + # says "progression" + # FIXME: issue a warning if the wrong form is used (post-1.1.7) + progressive = info.get("progressive", False) or info.get("progression", False) + + optimize = info.get("optimize", False) + + exif = info.get("exif", b"") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() + if len(exif) > MAX_BYTES_IN_MARKER: + msg = "EXIF data is too long" + raise ValueError(msg) + + # get keyword arguments + im.encoderconfig = ( + quality, + progressive, + info.get("smooth", 0), + optimize, + info.get("keep_rgb", False), + info.get("streamtype", 0), + dpi[0], + dpi[1], + subsampling, + info.get("restart_marker_blocks", 0), + info.get("restart_marker_rows", 0), + qtables, + comment, + extra, + exif, + ) + + # if we optimize, libjpeg needs a buffer big enough to hold the whole image + # in a shot. Guessing on the size, at im.size bytes. (raw pixel size is + # channels*size, this is a value that's been used in a django patch. + # https://github.com/matthewwithanm/django-imagekit/issues/50 + bufsize = 0 + if optimize or progressive: + # CMYK can be bigger + if im.mode == "CMYK": + bufsize = 4 * im.size[0] * im.size[1] + # keep sets quality to -1, but the actual value may be high. + elif quality >= 95 or quality == -1: + bufsize = 2 * im.size[0] * im.size[1] + else: + bufsize = im.size[0] * im.size[1] + if exif: + bufsize += len(exif) + 5 + if extra: + bufsize += len(extra) + 1 + else: + # The EXIF info needs to be written as one block, + APP1, + one spare byte. + # Ensure that our buffer is big enough. Same with the icc_profile block. + bufsize = max(bufsize, len(exif) + 5, len(extra) + 1) + + ImageFile._save(im, fp, [("jpeg", (0, 0) + im.size, 0, rawmode)], bufsize) + + +def _save_cjpeg(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + # ALTERNATIVE: handle JPEGs via the IJG command line utilities. + tempfile = im._dump() + subprocess.check_call(["cjpeg", "-outfile", filename, tempfile]) + try: + os.unlink(tempfile) + except OSError: + pass + + +## +# Factory for making JPEG and MPO instances +def jpeg_factory(fp=None, filename=None): + im = JpegImageFile(fp, filename) + try: + mpheader = im._getmp() + if mpheader[45057] > 1: + for segment, content in im.applist: + if segment == "APP1" and b' hdrgm:Version="' in content: + # Ultra HDR images are not yet supported + return im + # It's actually an MPO + from .MpoImagePlugin import MpoImageFile + + # Don't reload everything, just convert it. + im = MpoImageFile.adopt(im, mpheader) + except (TypeError, IndexError): + # It is really a JPEG + pass + except SyntaxError: + warnings.warn( + "Image appears to be a malformed MPO file, it will be " + "interpreted as a base JPEG file" + ) + return im + + +# --------------------------------------------------------------------- +# Registry stuff + +Image.register_open(JpegImageFile.format, jpeg_factory, _accept) +Image.register_save(JpegImageFile.format, _save) + +Image.register_extensions(JpegImageFile.format, [".jfif", ".jpe", ".jpg", ".jpeg"]) + +Image.register_mime(JpegImageFile.format, "image/jpeg") diff --git a/MLPY/Lib/site-packages/PIL/JpegPresets.py b/MLPY/Lib/site-packages/PIL/JpegPresets.py new file mode 100644 index 0000000000000000000000000000000000000000..47346257dc34ada83356af8cf564bff71673c029 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/JpegPresets.py @@ -0,0 +1,242 @@ +""" +JPEG quality settings equivalent to the Photoshop settings. 
+Can be used when saving JPEG files. + +The following presets are available by default: +``web_low``, ``web_medium``, ``web_high``, ``web_very_high``, ``web_maximum``, +``low``, ``medium``, ``high``, ``maximum``. +More presets can be added to the :py:data:`presets` dict if needed. + +To apply the preset, specify:: + + quality="preset_name" + +To apply only the quantization table:: + + qtables="preset_name" + +To apply only the subsampling setting:: + + subsampling="preset_name" + +Example:: + + im.save("image_name.jpg", quality="web_high") + +Subsampling +----------- + +Subsampling is the practice of encoding images by implementing less resolution +for chroma information than for luma information. +(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling) + +Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and +4:2:0. + +You can get the subsampling of a JPEG with the +:func:`.JpegImagePlugin.get_sampling` function. + +In JPEG compressed data a JPEG marker is used instead of an EXIF tag. +(ref.: https://web.archive.org/web/20240227115053/https://exiv2.org/tags.html) + + +Quantization tables +------------------- + +They are values use by the DCT (Discrete cosine transform) to remove +*unnecessary* information from the image (the lossy part of the compression). +(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices, +https://en.wikipedia.org/wiki/JPEG#Quantization) + +You can get the quantization tables of a JPEG with:: + + im.quantization + +This will return a dict with a number of lists. You can pass this dict +directly as the qtables argument when saving a JPEG. + +The quantization table format in presets is a list with sublists. These formats +are interchangeable. + +Libjpeg ref.: +https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html + +""" + +from __future__ import annotations + +# fmt: off +presets = { + 'web_low': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [20, 16, 25, 39, 50, 46, 62, 68, + 16, 18, 23, 38, 38, 53, 65, 68, + 25, 23, 31, 38, 53, 65, 68, 68, + 39, 38, 38, 53, 65, 68, 68, 68, + 50, 38, 53, 65, 68, 68, 68, 68, + 46, 53, 65, 68, 68, 68, 68, 68, + 62, 65, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68], + [21, 25, 32, 38, 54, 68, 68, 68, + 25, 28, 24, 38, 54, 68, 68, 68, + 32, 24, 32, 43, 66, 68, 68, 68, + 38, 38, 43, 53, 68, 68, 68, 68, + 54, 54, 66, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68] + ]}, + 'web_medium': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [16, 11, 11, 16, 23, 27, 31, 30, + 11, 12, 12, 15, 20, 23, 23, 30, + 11, 12, 13, 16, 23, 26, 35, 47, + 16, 15, 16, 23, 26, 37, 47, 64, + 23, 20, 23, 26, 39, 51, 64, 64, + 27, 23, 26, 37, 51, 64, 64, 64, + 31, 23, 35, 47, 64, 64, 64, 64, + 30, 30, 47, 64, 64, 64, 64, 64], + [17, 15, 17, 21, 20, 26, 38, 48, + 15, 19, 18, 17, 20, 26, 35, 43, + 17, 18, 20, 22, 26, 30, 46, 53, + 21, 17, 22, 28, 30, 39, 53, 64, + 20, 20, 26, 30, 39, 48, 64, 64, + 26, 26, 30, 39, 48, 63, 64, 64, + 38, 35, 46, 53, 64, 64, 64, 64, + 48, 43, 53, 64, 64, 64, 64, 64] + ]}, + 'web_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 14, 19, + 6, 6, 6, 11, 12, 15, 19, 28, + 9, 8, 10, 12, 16, 20, 27, 31, + 11, 10, 12, 15, 20, 27, 31, 31, + 12, 12, 14, 19, 27, 31, 31, 31, + 16, 12, 19, 28, 31, 31, 31, 31], + [7, 7, 13, 24, 26, 31, 31, 31, + 7, 12, 16, 21, 31, 31, 31, 31, + 13, 16, 
17, 31, 31, 31, 31, 31, + 24, 21, 31, 31, 31, 31, 31, 31, + 26, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31] + ]}, + 'web_very_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 11, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 11, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'web_maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 2, + 1, 1, 1, 1, 1, 1, 2, 2, + 1, 1, 1, 1, 1, 2, 2, 3, + 1, 1, 1, 1, 2, 2, 3, 3, + 1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 2, 2, 3, 3, 3, 3], + [1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 1, 2, 3, 3, 3, 3, + 1, 1, 1, 3, 3, 3, 3, 3, + 2, 2, 3, 3, 3, 3, 3, 3, + 2, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3] + ]}, + 'low': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [18, 14, 14, 21, 30, 35, 34, 17, + 14, 16, 16, 19, 26, 23, 12, 12, + 14, 16, 17, 21, 23, 12, 12, 12, + 21, 19, 21, 23, 12, 12, 12, 12, + 30, 26, 23, 12, 12, 12, 12, 12, + 35, 23, 12, 12, 12, 12, 12, 12, + 34, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [20, 19, 22, 27, 20, 20, 17, 17, + 19, 25, 23, 14, 14, 12, 12, 12, + 22, 23, 14, 14, 12, 12, 12, 12, + 27, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'medium': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [12, 8, 8, 12, 17, 21, 24, 17, + 8, 9, 9, 11, 15, 19, 12, 12, + 8, 9, 10, 12, 19, 12, 12, 12, + 12, 11, 12, 21, 12, 12, 12, 12, + 17, 15, 19, 12, 12, 12, 12, 12, + 21, 19, 12, 12, 12, 12, 12, 12, + 24, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [13, 11, 13, 16, 20, 20, 17, 17, + 11, 14, 14, 14, 14, 12, 12, 12, + 13, 14, 14, 14, 12, 12, 12, 12, + 16, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 12, 12, + 6, 6, 6, 11, 12, 12, 12, 12, + 9, 8, 10, 12, 12, 12, 12, 12, + 11, 10, 12, 12, 12, 12, 12, 12, + 12, 12, 12, 12, 12, 12, 12, 12, + 16, 12, 12, 12, 12, 12, 12, 12], + [7, 7, 13, 24, 20, 20, 17, 17, + 7, 12, 16, 14, 14, 12, 12, 12, + 13, 16, 14, 14, 12, 12, 12, 12, + 24, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 10, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 10, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 
12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, +} +# fmt: on diff --git a/MLPY/Lib/site-packages/PIL/McIdasImagePlugin.py b/MLPY/Lib/site-packages/PIL/McIdasImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..d2a8f32e77a2d31e86dc338355159abdab3797c9 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/McIdasImagePlugin.py @@ -0,0 +1,78 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Basic McIdas support for PIL +# +# History: +# 1997-05-05 fl Created (8-bit images only) +# 2009-03-08 fl Added 16/32-bit support. +# +# Thanks to Richard Jones and Craig Swank for specs and samples. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import struct + +from . import Image, ImageFile + + +def _accept(prefix: bytes) -> bool: + return prefix[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04" + + +## +# Image plugin for McIdas area images. + + +class McIdasImageFile(ImageFile.ImageFile): + format = "MCIDAS" + format_description = "McIdas area file" + + def _open(self) -> None: + # parse area file directory + assert self.fp is not None + + s = self.fp.read(256) + if not _accept(s) or len(s) != 256: + msg = "not an McIdas area file" + raise SyntaxError(msg) + + self.area_descriptor_raw = s + self.area_descriptor = w = [0] + list(struct.unpack("!64i", s)) + + # get mode + if w[11] == 1: + mode = rawmode = "L" + elif w[11] == 2: + # FIXME: add memory map support + mode = "I" + rawmode = "I;16B" + elif w[11] == 4: + # FIXME: add memory map support + mode = "I" + rawmode = "I;32B" + else: + msg = "unsupported McIdas format" + raise SyntaxError(msg) + + self._mode = mode + self._size = w[10], w[9] + + offset = w[34] + w[15] + stride = w[15] + w[10] * w[11] * w[14] + + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))] + + +# -------------------------------------------------------------------- +# registry + +Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept) + +# no default extension diff --git a/MLPY/Lib/site-packages/PIL/MicImagePlugin.py b/MLPY/Lib/site-packages/PIL/MicImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..ecf27a867cb46b5c72128cdcad1ed0b4928d4dd2 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/MicImagePlugin.py @@ -0,0 +1,107 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Microsoft Image Composer support for PIL +# +# Notes: +# uses TiffImagePlugin.py to read the actual image streams +# +# History: +# 97-01-20 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import olefile + +from . import Image, TiffImagePlugin + +# +# -------------------------------------------------------------------- + + +def _accept(prefix: bytes) -> bool: + return prefix[:8] == olefile.MAGIC + + +## +# Image plugin for Microsoft's Image Composer file format. 
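# --- editor's note (usage sketch, not part of this patch) -------------------
# The class below exposes Pillow's usual multi-frame interface (n_frames,
# is_animated, seek/tell), with each frame served as a TIFF stream from the
# OLE container, so frames can be walked generically.  "composition.mic" is a
# hypothetical file name:
#
#     from PIL import Image, ImageSequence
#
#     with Image.open("composition.mic") as im:
#         for i, frame in enumerate(ImageSequence.Iterator(im)):
#             frame.convert("RGB").save(f"frame_{i}.png")
# -----------------------------------------------------------------------------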
+ + +class MicImageFile(TiffImagePlugin.TiffImageFile): + format = "MIC" + format_description = "Microsoft Image Composer" + _close_exclusive_fp_after_loading = False + + def _open(self) -> None: + # read the OLE directory and see if this is a likely + # to be a Microsoft Image Composer file + + try: + self.ole = olefile.OleFileIO(self.fp) + except OSError as e: + msg = "not an MIC file; invalid OLE file" + raise SyntaxError(msg) from e + + # find ACI subfiles with Image members (maybe not the + # best way to identify MIC files, but what the... ;-) + + self.images = [ + path + for path in self.ole.listdir() + if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image" + ] + + # if we didn't find any images, this is probably not + # an MIC file. + if not self.images: + msg = "not an MIC file; no image entries" + raise SyntaxError(msg) + + self.frame = -1 + self._n_frames = len(self.images) + self.is_animated = self._n_frames > 1 + + self.__fp = self.fp + self.seek(0) + + def seek(self, frame): + if not self._seek_check(frame): + return + try: + filename = self.images[frame] + except IndexError as e: + msg = "no such frame" + raise EOFError(msg) from e + + self.fp = self.ole.openstream(filename) + + TiffImagePlugin.TiffImageFile._open(self) + + self.frame = frame + + def tell(self) -> int: + return self.frame + + def close(self) -> None: + self.__fp.close() + self.ole.close() + super().close() + + def __exit__(self, *args: object) -> None: + self.__fp.close() + self.ole.close() + super().__exit__() + + +# +# -------------------------------------------------------------------- + +Image.register_open(MicImageFile.format, MicImageFile, _accept) + +Image.register_extension(MicImageFile.format, ".mic") diff --git a/MLPY/Lib/site-packages/PIL/MpegImagePlugin.py b/MLPY/Lib/site-packages/PIL/MpegImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..cf62cd87d1f56a58958ed17aec17f73583205a74 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/MpegImagePlugin.py @@ -0,0 +1,88 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPEG file handling +# +# History: +# 95-09-09 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from . import Image, ImageFile +from ._binary import i8 +from ._typing import SupportsRead + +# +# Bitstream parser + + +class BitStream: + def __init__(self, fp: SupportsRead[bytes]) -> None: + self.fp = fp + self.bits = 0 + self.bitbuffer = 0 + + def next(self) -> int: + return i8(self.fp.read(1)) + + def peek(self, bits: int) -> int: + while self.bits < bits: + c = self.next() + if c < 0: + self.bits = 0 + continue + self.bitbuffer = (self.bitbuffer << 8) + c + self.bits += 8 + return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1 + + def skip(self, bits: int) -> None: + while self.bits < bits: + self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1)) + self.bits += 8 + self.bits = self.bits - bits + + def read(self, bits: int) -> int: + v = self.peek(bits) + self.bits = self.bits - bits + return v + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == b"\x00\x00\x01\xb3" + + +## +# Image plugin for MPEG streams. This plugin can identify a stream, +# but it cannot read it. 
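# --- editor's note (illustrative sketch, not part of this patch) ------------
# The BitStream class above reads big-endian bit fields, which is how _open()
# below pulls the 32-bit start code and the two 12-bit size fields out of an
# MPEG sequence header.  A worked example against a hand-built (hypothetical)
# 352x288 header, intended to run as a standalone snippet:
import io

from PIL.MpegImagePlugin import BitStream

header = b"\x00\x00\x01\xb3" + bytes([0x16, 0x01, 0x20])
bits = BitStream(io.BytesIO(header))
assert bits.read(32) == 0x1B3  # sequence header start code, as in _accept()
assert (bits.read(12), bits.read(12)) == (352, 288)  # horizontal/vertical size
# --- end editor's note --------------------------------------------------------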
+ + +class MpegImageFile(ImageFile.ImageFile): + format = "MPEG" + format_description = "MPEG" + + def _open(self) -> None: + assert self.fp is not None + + s = BitStream(self.fp) + if s.read(32) != 0x1B3: + msg = "not an MPEG file" + raise SyntaxError(msg) + + self._mode = "RGB" + self._size = s.read(12), s.read(12) + + +# -------------------------------------------------------------------- +# Registry stuff + +Image.register_open(MpegImageFile.format, MpegImageFile, _accept) + +Image.register_extensions(MpegImageFile.format, [".mpg", ".mpeg"]) + +Image.register_mime(MpegImageFile.format, "video/mpeg") diff --git a/MLPY/Lib/site-packages/PIL/MpoImagePlugin.py b/MLPY/Lib/site-packages/PIL/MpoImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..0a3a0e4c01480c3507ea00e6e0635382697c1461 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/MpoImagePlugin.py @@ -0,0 +1,180 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPO file handling +# +# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the +# Camera & Imaging Products Association) +# +# The multi-picture object combines multiple JPEG images (with a modified EXIF +# data format) into a single file. While it can theoretically be used much like +# a GIF animation, it is commonly used to represent 3D photographs and is (as +# of this writing) the most commonly used format by 3D cameras. +# +# History: +# 2014-03-13 Feneric Created +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import itertools +import os +import struct +from typing import IO + +from . import ( + Image, + ImageSequence, + JpegImagePlugin, + TiffImagePlugin, +) +from ._binary import o32le + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + JpegImagePlugin._save(im, fp, filename) + + +def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + append_images = im.encoderinfo.get("append_images", []) + if not append_images and not getattr(im, "is_animated", False): + _save(im, fp, filename) + return + + mpf_offset = 28 + offsets: list[int] = [] + for imSequence in itertools.chain([im], append_images): + for im_frame in ImageSequence.Iterator(imSequence): + if not offsets: + # APP2 marker + im_frame.encoderinfo["extra"] = ( + b"\xFF\xE2" + struct.pack(">H", 6 + 82) + b"MPF\0" + b" " * 82 + ) + exif = im_frame.encoderinfo.get("exif") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() + im_frame.encoderinfo["exif"] = exif + if exif: + mpf_offset += 4 + len(exif) + + JpegImagePlugin._save(im_frame, fp, filename) + offsets.append(fp.tell()) + else: + im_frame.save(fp, "JPEG") + offsets.append(fp.tell() - offsets[-1]) + + ifd = TiffImagePlugin.ImageFileDirectory_v2() + ifd[0xB000] = b"0100" + ifd[0xB001] = len(offsets) + + mpentries = b"" + data_offset = 0 + for i, size in enumerate(offsets): + if i == 0: + mptype = 0x030000 # Baseline MP Primary Image + else: + mptype = 0x000000 # Undefined + mpentries += struct.pack(" None: + self.fp.seek(0) # prep the fp in order to pass the JPEG test + JpegImagePlugin.JpegImageFile._open(self) + self._after_jpeg_open() + + def _after_jpeg_open(self, mpheader=None): + self.mpinfo = mpheader if mpheader is not None else self._getmp() + self.n_frames = self.mpinfo[0xB001] + self.__mpoffsets = [ + mpent["DataOffset"] + self.info["mpoffset"] for mpent in self.mpinfo[0xB002] + ] + self.__mpoffsets[0] = 0 + # Note that the following assertion will only be invalid if something + # gets 
broken within JpegImagePlugin. + assert self.n_frames == len(self.__mpoffsets) + del self.info["mpoffset"] # no longer needed + self.is_animated = self.n_frames > 1 + self._fp = self.fp # FIXME: hack + self._fp.seek(self.__mpoffsets[0]) # get ready to read first frame + self.__frame = 0 + self.offset = 0 + # for now we can only handle reading and individual frame extraction + self.readonly = 1 + + def load_seek(self, pos: int) -> None: + self._fp.seek(pos) + + def seek(self, frame: int) -> None: + if not self._seek_check(frame): + return + self.fp = self._fp + self.offset = self.__mpoffsets[frame] + + original_exif = self.info.get("exif") + if "exif" in self.info: + del self.info["exif"] + + self.fp.seek(self.offset + 2) # skip SOI marker + if not self.fp.read(2): + msg = "No data found for frame" + raise ValueError(msg) + self.fp.seek(self.offset) + JpegImagePlugin.JpegImageFile._open(self) + if self.info.get("exif") != original_exif: + self._reload_exif() + + self.tile = [("jpeg", (0, 0) + self.size, self.offset, self.tile[0][-1])] + self.__frame = frame + + def tell(self) -> int: + return self.__frame + + @staticmethod + def adopt(jpeg_instance, mpheader=None): + """ + Transform the instance of JpegImageFile into + an instance of MpoImageFile. + After the call, the JpegImageFile is extended + to be an MpoImageFile. + + This is essentially useful when opening a JPEG + file that reveals itself as an MPO, to avoid + double call to _open. + """ + jpeg_instance.__class__ = MpoImageFile + jpeg_instance._after_jpeg_open(mpheader) + return jpeg_instance + + +# --------------------------------------------------------------------- +# Registry stuff + +# Note that since MPO shares a factory with JPEG, we do not need to do a +# separate registration for it here. +# Image.register_open(MpoImageFile.format, +# JpegImagePlugin.jpeg_factory, _accept) +Image.register_save(MpoImageFile.format, _save) +Image.register_save_all(MpoImageFile.format, _save_all) + +Image.register_extension(MpoImageFile.format, ".mpo") + +Image.register_mime(MpoImageFile.format, "image/mpo") diff --git a/MLPY/Lib/site-packages/PIL/MspImagePlugin.py b/MLPY/Lib/site-packages/PIL/MspImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..fea52dca8d6b9cc691a87be62e970ec92b6bdf76 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/MspImagePlugin.py @@ -0,0 +1,200 @@ +# +# The Python Imaging Library. +# +# MSP file handling +# +# This is the format used by the Paint program in Windows 1 and 2. +# +# History: +# 95-09-05 fl Created +# 97-01-03 fl Read/write MSP images +# 17-02-21 es Fixed RLE interpretation +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-97. +# Copyright (c) Eric Soroos 2017. +# +# See the README file for information on usage and redistribution. +# +# More info on this format: https://archive.org/details/gg243631 +# Page 313: +# Figure 205. Windows Paint Version 1: "DanM" Format +# Figure 206. Windows Paint Version 2: "LinS" Format. Used in Windows V2.03 +# +# See also: https://www.fileformat.info/format/mspaint/egff.htm +from __future__ import annotations + +import io +import struct +from typing import IO + +from . import Image, ImageFile +from ._binary import i16le as i16 +from ._binary import o16le as o16 + +# +# read MSP files + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] in [b"DanM", b"LinS"] + + +## +# Image plugin for Windows MSP images. This plugin supports both +# uncompressed (Windows 1.0). 
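# --- editor's note (illustrative sketch, not part of this patch) ------------
# The RLE scheme documented in the MspDecoder notes further down (RunType 0:
# read a count and a value; otherwise the byte itself is a literal run count)
# as a standalone row decoder; `_msp_rle_row` is a hypothetical helper name,
# and the assert checks it against the worked example given in those notes:
def _msp_rle_row(row: bytes) -> bytes:
    out = bytearray()
    idx = 0
    while idx < len(row):
        runtype = row[idx]
        idx += 1
        if runtype == 0:  # RunType 0: next bytes are RunCount, RunValue
            count, value = row[idx], row[idx + 1]
            out += bytes([value]) * count
            idx += 2
        else:  # non-zero RunType: copy that many literal bytes
            out += row[idx : idx + runtype]
            idx += runtype
    return bytes(out)

assert _msp_rle_row(bytes.fromhex("0003ff050001020304")) == bytes.fromhex(
    "ffffff0001020304"
)
# --- end editor's note --------------------------------------------------------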
+ + +class MspImageFile(ImageFile.ImageFile): + format = "MSP" + format_description = "Windows Paint" + + def _open(self) -> None: + # Header + assert self.fp is not None + + s = self.fp.read(32) + if not _accept(s): + msg = "not an MSP file" + raise SyntaxError(msg) + + # Header checksum + checksum = 0 + for i in range(0, 32, 2): + checksum = checksum ^ i16(s, i) + if checksum != 0: + msg = "bad MSP checksum" + raise SyntaxError(msg) + + self._mode = "1" + self._size = i16(s, 4), i16(s, 6) + + if s[:4] == b"DanM": + self.tile = [("raw", (0, 0) + self.size, 32, ("1", 0, 1))] + else: + self.tile = [("MSP", (0, 0) + self.size, 32, None)] + + +class MspDecoder(ImageFile.PyDecoder): + # The algo for the MSP decoder is from + # https://www.fileformat.info/format/mspaint/egff.htm + # cc-by-attribution -- That page references is taken from the + # Encyclopedia of Graphics File Formats and is licensed by + # O'Reilly under the Creative Common/Attribution license + # + # For RLE encoded files, the 32byte header is followed by a scan + # line map, encoded as one 16bit word of encoded byte length per + # line. + # + # NOTE: the encoded length of the line can be 0. This was not + # handled in the previous version of this encoder, and there's no + # mention of how to handle it in the documentation. From the few + # examples I've seen, I've assumed that it is a fill of the + # background color, in this case, white. + # + # + # Pseudocode of the decoder: + # Read a BYTE value as the RunType + # If the RunType value is zero + # Read next byte as the RunCount + # Read the next byte as the RunValue + # Write the RunValue byte RunCount times + # If the RunType value is non-zero + # Use this value as the RunCount + # Read and write the next RunCount bytes literally + # + # e.g.: + # 0x00 03 ff 05 00 01 02 03 04 + # would yield the bytes: + # 0xff ff ff 00 01 02 03 04 + # + # which are then interpreted as a bit packed mode '1' image + + _pulls_fd = True + + def decode(self, buffer: bytes) -> tuple[int, int]: + assert self.fd is not None + + img = io.BytesIO() + blank_line = bytearray((0xFF,) * ((self.state.xsize + 7) // 8)) + try: + self.fd.seek(32) + rowmap = struct.unpack_from( + f"<{self.state.ysize}H", self.fd.read(self.state.ysize * 2) + ) + except struct.error as e: + msg = "Truncated MSP file in row map" + raise OSError(msg) from e + + for x, rowlen in enumerate(rowmap): + try: + if rowlen == 0: + img.write(blank_line) + continue + row = self.fd.read(rowlen) + if len(row) != rowlen: + msg = f"Truncated MSP file, expected {rowlen} bytes on row {x}" + raise OSError(msg) + idx = 0 + while idx < rowlen: + runtype = row[idx] + idx += 1 + if runtype == 0: + (runcount, runval) = struct.unpack_from("Bc", row, idx) + img.write(runval * runcount) + idx += 2 + else: + runcount = runtype + img.write(row[idx : idx + runcount]) + idx += runcount + + except struct.error as e: + msg = f"Corrupted MSP file in row {x}" + raise OSError(msg) from e + + self.set_as_raw(img.getvalue(), ("1", 0, 1)) + + return -1, 0 + + +Image.register_decoder("MSP", MspDecoder) + + +# +# write MSP files (uncompressed only) + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if im.mode != "1": + msg = f"cannot write mode {im.mode} as MSP" + raise OSError(msg) + + # create MSP header + header = [0] * 16 + + header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1 + header[2], header[3] = im.size + header[4], header[5] = 1, 1 + header[6], header[7] = 1, 1 + header[8], header[9] = im.size + + checksum = 0 + for h in 
header: + checksum = checksum ^ h + header[12] = checksum # FIXME: is this the right field? + + # header + for h in header: + fp.write(o16(h)) + + # image body + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 32, ("1", 0, 1))]) + + +# +# registry + +Image.register_open(MspImageFile.format, MspImageFile, _accept) +Image.register_save(MspImageFile.format, _save) + +Image.register_extension(MspImageFile.format, ".msp") diff --git a/MLPY/Lib/site-packages/PIL/PSDraw.py b/MLPY/Lib/site-packages/PIL/PSDraw.py new file mode 100644 index 0000000000000000000000000000000000000000..3d3059d5eb61ad4f135c1fe33e866e8539995b59 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PSDraw.py @@ -0,0 +1,237 @@ +# +# The Python Imaging Library +# $Id$ +# +# Simple PostScript graphics interface +# +# History: +# 1996-04-20 fl Created +# 1999-01-10 fl Added gsave/grestore to image method +# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge) +# +# Copyright (c) 1997-2005 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING + +from . import EpsImagePlugin + +## +# Simple PostScript graphics interface. + + +class PSDraw: + """ + Sets up printing to the given file. If ``fp`` is omitted, + ``sys.stdout.buffer`` or ``sys.stdout`` is assumed. + """ + + def __init__(self, fp=None): + if not fp: + try: + fp = sys.stdout.buffer + except AttributeError: + fp = sys.stdout + self.fp = fp + + def begin_document(self, id: str | None = None) -> None: + """Set up printing of a document. (Write PostScript DSC header.)""" + # FIXME: incomplete + self.fp.write( + b"%!PS-Adobe-3.0\n" + b"save\n" + b"/showpage { } def\n" + b"%%EndComments\n" + b"%%BeginDocument\n" + ) + # self.fp.write(ERROR_PS) # debugging! + self.fp.write(EDROFF_PS) + self.fp.write(VDI_PS) + self.fp.write(b"%%EndProlog\n") + self.isofont: dict[bytes, int] = {} + + def end_document(self) -> None: + """Ends printing. (Write PostScript DSC footer.)""" + self.fp.write(b"%%EndDocument\nrestore showpage\n%%End\n") + if hasattr(self.fp, "flush"): + self.fp.flush() + + def setfont(self, font: str, size: int) -> None: + """ + Selects which font to use. + + :param font: A PostScript font name + :param size: Size in points. + """ + font_bytes = bytes(font, "UTF-8") + if font_bytes not in self.isofont: + # reencode font + self.fp.write( + b"/PSDraw-%s ISOLatin1Encoding /%s E\n" % (font_bytes, font_bytes) + ) + self.isofont[font_bytes] = 1 + # rough + self.fp.write(b"/F0 %d /PSDraw-%s F\n" % (size, font_bytes)) + + def line(self, xy0: tuple[int, int], xy1: tuple[int, int]) -> None: + """ + Draws a line between the two points. Coordinates are given in + PostScript point coordinates (72 points per inch, (0, 0) is the lower + left corner of the page). + """ + self.fp.write(b"%d %d %d %d Vl\n" % (*xy0, *xy1)) + + def rectangle(self, box: tuple[int, int, int, int]) -> None: + """ + Draws a rectangle. + + :param box: A tuple of four integers, specifying left, bottom, width and + height. + """ + self.fp.write(b"%d %d M 0 %d %d Vr\n" % box) + + def text(self, xy: tuple[int, int], text: str) -> None: + """ + Draws text at the given position. You must use + :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method. 
+ """ + text_bytes = bytes(text, "UTF-8") + text_bytes = b"\\(".join(text_bytes.split(b"(")) + text_bytes = b"\\)".join(text_bytes.split(b")")) + self.fp.write(b"%d %d M (%s) S\n" % (xy + (text_bytes,))) + + if TYPE_CHECKING: + from . import Image + + def image( + self, box: tuple[int, int, int, int], im: Image.Image, dpi: int | None = None + ) -> None: + """Draw a PIL image, centered in the given box.""" + # default resolution depends on mode + if not dpi: + if im.mode == "1": + dpi = 200 # fax + else: + dpi = 100 # grayscale + # image size (on paper) + x = im.size[0] * 72 / dpi + y = im.size[1] * 72 / dpi + # max allowed size + xmax = float(box[2] - box[0]) + ymax = float(box[3] - box[1]) + if x > xmax: + y = y * xmax / x + x = xmax + if y > ymax: + x = x * ymax / y + y = ymax + dx = (xmax - x) / 2 + box[0] + dy = (ymax - y) / 2 + box[1] + self.fp.write(b"gsave\n%f %f translate\n" % (dx, dy)) + if (x, y) != im.size: + # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) + sx = x / im.size[0] + sy = y / im.size[1] + self.fp.write(b"%f %f scale\n" % (sx, sy)) + EpsImagePlugin._save(im, self.fp, "", 0) + self.fp.write(b"\ngrestore\n") + + +# -------------------------------------------------------------------- +# PostScript driver + +# +# EDROFF.PS -- PostScript driver for Edroff 2 +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. +# + + +EDROFF_PS = b"""\ +/S { show } bind def +/P { moveto show } bind def +/M { moveto } bind def +/X { 0 rmoveto } bind def +/Y { 0 exch rmoveto } bind def +/E { findfont + dup maxlength dict begin + { + 1 index /FID ne { def } { pop pop } ifelse + } forall + /Encoding exch def + dup /FontName exch def + currentdict end definefont pop +} bind def +/F { findfont exch scalefont dup setfont + [ exch /setfont cvx ] cvx bind def +} bind def +""" + +# +# VDI.PS -- PostScript driver for VDI meta commands +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. 
+# + +VDI_PS = b"""\ +/Vm { moveto } bind def +/Va { newpath arcn stroke } bind def +/Vl { moveto lineto stroke } bind def +/Vc { newpath 0 360 arc closepath } bind def +/Vr { exch dup 0 rlineto + exch dup 0 exch rlineto + exch neg 0 rlineto + 0 exch neg rlineto + setgray fill } bind def +/Tm matrix def +/Ve { Tm currentmatrix pop + translate scale newpath 0 0 .5 0 360 arc closepath + Tm setmatrix +} bind def +/Vf { currentgray exch setgray fill setgray } bind def +""" + +# +# ERROR.PS -- Error handler +# +# History: +# 89-11-21 fl: created (pslist 1.10) +# + +ERROR_PS = b"""\ +/landscape false def +/errorBUF 200 string def +/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def +errordict begin /handleerror { + initmatrix /Courier findfont 10 scalefont setfont + newpath 72 720 moveto $error begin /newerror false def + (PostScript Error) show errorNL errorNL + (Error: ) show + /errorname load errorBUF cvs show errorNL errorNL + (Command: ) show + /command load dup type /stringtype ne { errorBUF cvs } if show + errorNL errorNL + (VMstatus: ) show + vmstatus errorBUF cvs show ( bytes available, ) show + errorBUF cvs show ( bytes used at level ) show + errorBUF cvs show errorNL errorNL + (Operand stargck: ) show errorNL /ostargck load { + dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL + } forall errorNL + (Execution stargck: ) show errorNL /estargck load { + dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL + } forall + end showpage +} def end +""" diff --git a/MLPY/Lib/site-packages/PIL/PaletteFile.py b/MLPY/Lib/site-packages/PIL/PaletteFile.py new file mode 100644 index 0000000000000000000000000000000000000000..70b22e601c977f7a9cb675832dcd7e2f31d0b179 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PaletteFile.py @@ -0,0 +1,54 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read simple, teragon-style palette files +# +# History: +# 97-08-23 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from typing import IO + +from ._binary import o8 + + +class PaletteFile: + """File handler for Teragon-style palette files.""" + + rawmode = "RGB" + + def __init__(self, fp: IO[bytes]) -> None: + palette = [o8(i) * 3 for i in range(256)] + + while True: + s = fp.readline() + + if not s: + break + if s[:1] == b"#": + continue + if len(s) > 100: + msg = "bad palette file" + raise SyntaxError(msg) + + v = [int(x) for x in s.split()] + try: + [i, r, g, b] = v + except ValueError: + [i, r] = v + g = b = r + + if 0 <= i <= 255: + palette[i] = o8(r) + o8(g) + o8(b) + + self.palette = b"".join(palette) + + def getpalette(self) -> tuple[bytes, str]: + return self.palette, self.rawmode diff --git a/MLPY/Lib/site-packages/PIL/PalmImagePlugin.py b/MLPY/Lib/site-packages/PIL/PalmImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..e9ffdc84a0b309ae818cfcb86f2a081d3b1cf0a7 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PalmImagePlugin.py @@ -0,0 +1,229 @@ +# +# The Python Imaging Library. +# $Id$ +# + +## +# Image plugin for Palm pixmap images (output only). +## +from __future__ import annotations + +from typing import IO + +from . 
import Image, ImageFile +from ._binary import o8 +from ._binary import o16be as o16b + +# fmt: off +_Palm8BitColormapValues = ( + (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255), + (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204), + (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204), + (255, 255, 153), (255, 204, 153), (255, 153, 153), (255, 102, 153), + (255, 51, 153), (255, 0, 153), (204, 255, 255), (204, 204, 255), + (204, 153, 255), (204, 102, 255), (204, 51, 255), (204, 0, 255), + (204, 255, 204), (204, 204, 204), (204, 153, 204), (204, 102, 204), + (204, 51, 204), (204, 0, 204), (204, 255, 153), (204, 204, 153), + (204, 153, 153), (204, 102, 153), (204, 51, 153), (204, 0, 153), + (153, 255, 255), (153, 204, 255), (153, 153, 255), (153, 102, 255), + (153, 51, 255), (153, 0, 255), (153, 255, 204), (153, 204, 204), + (153, 153, 204), (153, 102, 204), (153, 51, 204), (153, 0, 204), + (153, 255, 153), (153, 204, 153), (153, 153, 153), (153, 102, 153), + (153, 51, 153), (153, 0, 153), (102, 255, 255), (102, 204, 255), + (102, 153, 255), (102, 102, 255), (102, 51, 255), (102, 0, 255), + (102, 255, 204), (102, 204, 204), (102, 153, 204), (102, 102, 204), + (102, 51, 204), (102, 0, 204), (102, 255, 153), (102, 204, 153), + (102, 153, 153), (102, 102, 153), (102, 51, 153), (102, 0, 153), + (51, 255, 255), (51, 204, 255), (51, 153, 255), (51, 102, 255), + (51, 51, 255), (51, 0, 255), (51, 255, 204), (51, 204, 204), + (51, 153, 204), (51, 102, 204), (51, 51, 204), (51, 0, 204), + (51, 255, 153), (51, 204, 153), (51, 153, 153), (51, 102, 153), + (51, 51, 153), (51, 0, 153), (0, 255, 255), (0, 204, 255), + (0, 153, 255), (0, 102, 255), (0, 51, 255), (0, 0, 255), + (0, 255, 204), (0, 204, 204), (0, 153, 204), (0, 102, 204), + (0, 51, 204), (0, 0, 204), (0, 255, 153), (0, 204, 153), + (0, 153, 153), (0, 102, 153), (0, 51, 153), (0, 0, 153), + (255, 255, 102), (255, 204, 102), (255, 153, 102), (255, 102, 102), + (255, 51, 102), (255, 0, 102), (255, 255, 51), (255, 204, 51), + (255, 153, 51), (255, 102, 51), (255, 51, 51), (255, 0, 51), + (255, 255, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0), + (255, 51, 0), (255, 0, 0), (204, 255, 102), (204, 204, 102), + (204, 153, 102), (204, 102, 102), (204, 51, 102), (204, 0, 102), + (204, 255, 51), (204, 204, 51), (204, 153, 51), (204, 102, 51), + (204, 51, 51), (204, 0, 51), (204, 255, 0), (204, 204, 0), + (204, 153, 0), (204, 102, 0), (204, 51, 0), (204, 0, 0), + (153, 255, 102), (153, 204, 102), (153, 153, 102), (153, 102, 102), + (153, 51, 102), (153, 0, 102), (153, 255, 51), (153, 204, 51), + (153, 153, 51), (153, 102, 51), (153, 51, 51), (153, 0, 51), + (153, 255, 0), (153, 204, 0), (153, 153, 0), (153, 102, 0), + (153, 51, 0), (153, 0, 0), (102, 255, 102), (102, 204, 102), + (102, 153, 102), (102, 102, 102), (102, 51, 102), (102, 0, 102), + (102, 255, 51), (102, 204, 51), (102, 153, 51), (102, 102, 51), + (102, 51, 51), (102, 0, 51), (102, 255, 0), (102, 204, 0), + (102, 153, 0), (102, 102, 0), (102, 51, 0), (102, 0, 0), + (51, 255, 102), (51, 204, 102), (51, 153, 102), (51, 102, 102), + (51, 51, 102), (51, 0, 102), (51, 255, 51), (51, 204, 51), + (51, 153, 51), (51, 102, 51), (51, 51, 51), (51, 0, 51), + (51, 255, 0), (51, 204, 0), (51, 153, 0), (51, 102, 0), + (51, 51, 0), (51, 0, 0), (0, 255, 102), (0, 204, 102), + (0, 153, 102), (0, 102, 102), (0, 51, 102), (0, 0, 102), + (0, 255, 51), (0, 204, 51), (0, 153, 51), (0, 102, 51), + (0, 51, 51), (0, 0, 51), (0, 255, 0), (0, 204, 0), + (0, 153, 0), (0, 
102, 0), (0, 51, 0), (17, 17, 17), + (34, 34, 34), (68, 68, 68), (85, 85, 85), (119, 119, 119), + (136, 136, 136), (170, 170, 170), (187, 187, 187), (221, 221, 221), + (238, 238, 238), (192, 192, 192), (128, 0, 0), (128, 0, 128), + (0, 128, 0), (0, 128, 128), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0)) +# fmt: on + + +# so build a prototype image to be used for palette resampling +def build_prototype_image() -> Image.Image: + image = Image.new("L", (1, len(_Palm8BitColormapValues))) + image.putdata(list(range(len(_Palm8BitColormapValues)))) + palettedata: tuple[int, ...] = () + for colormapValue in _Palm8BitColormapValues: + palettedata += colormapValue + palettedata += (0, 0, 0) * (256 - len(_Palm8BitColormapValues)) + image.putpalette(palettedata) + return image + + +Palm8BitColormapImage = build_prototype_image() + +# OK, we now have in Palm8BitColormapImage, +# a "P"-mode image with the right palette +# +# -------------------------------------------------------------------- + +_FLAGS = {"custom-colormap": 0x4000, "is-compressed": 0x8000, "has-transparent": 0x2000} + +_COMPRESSION_TYPES = {"none": 0xFF, "rle": 0x01, "scanline": 0x00} + + +# +# -------------------------------------------------------------------- + +## +# (Internal) Image save plugin for the Palm format. + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if im.mode == "P": + # we assume this is a color Palm image with the standard colormap, + # unless the "info" dict has a "custom-colormap" field + + rawmode = "P" + bpp = 8 + version = 1 + + elif im.mode == "L": + if im.encoderinfo.get("bpp") in (1, 2, 4): + # this is 8-bit grayscale, so we shift it to get the high-order bits, + # and invert it because + # Palm does grayscale from white (0) to black (1) + bpp = im.encoderinfo["bpp"] + maxval = (1 << bpp) - 1 + shift = 8 - bpp + im = im.point(lambda x: maxval - (x >> shift)) + elif im.info.get("bpp") in (1, 2, 4): + # here we assume that even though the inherent mode is 8-bit grayscale, + # only the lower bpp bits are significant. + # We invert them to match the Palm. 
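+            # e.g. with bpp=2, maxval is 3: a stored 0 (black) becomes 3 and a
+            # stored 3 (white) becomes 0, matching the white (0) to black ramp
+            # described above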
+ bpp = im.info["bpp"] + maxval = (1 << bpp) - 1 + im = im.point(lambda x: maxval - (x & maxval)) + else: + msg = f"cannot write mode {im.mode} as Palm" + raise OSError(msg) + + # we ignore the palette here + im._mode = "P" + rawmode = f"P;{bpp}" + version = 1 + + elif im.mode == "1": + # monochrome -- write it inverted, as is the Palm standard + rawmode = "1;I" + bpp = 1 + version = 0 + + else: + msg = f"cannot write mode {im.mode} as Palm" + raise OSError(msg) + + # + # make sure image data is available + im.load() + + # write header + + cols = im.size[0] + rows = im.size[1] + + rowbytes = int((cols + (16 // bpp - 1)) / (16 // bpp)) * 2 + transparent_index = 0 + compression_type = _COMPRESSION_TYPES["none"] + + flags = 0 + if im.mode == "P" and "custom-colormap" in im.info: + flags = flags & _FLAGS["custom-colormap"] + colormapsize = 4 * 256 + 2 + colormapmode = im.palette.mode + colormap = im.getdata().getpalette() + else: + colormapsize = 0 + + if "offset" in im.info: + offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4 + else: + offset = 0 + + fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags)) + fp.write(o8(bpp)) + fp.write(o8(version)) + fp.write(o16b(offset)) + fp.write(o8(transparent_index)) + fp.write(o8(compression_type)) + fp.write(o16b(0)) # reserved by Palm + + # now write colormap if necessary + + if colormapsize > 0: + fp.write(o16b(256)) + for i in range(256): + fp.write(o8(i)) + if colormapmode == "RGB": + fp.write( + o8(colormap[3 * i]) + + o8(colormap[3 * i + 1]) + + o8(colormap[3 * i + 2]) + ) + elif colormapmode == "RGBA": + fp.write( + o8(colormap[4 * i]) + + o8(colormap[4 * i + 1]) + + o8(colormap[4 * i + 2]) + ) + + # now convert data to raw form + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, rowbytes, 1))]) + + if hasattr(fp, "flush"): + fp.flush() + + +# +# -------------------------------------------------------------------- + +Image.register_save("Palm", _save) + +Image.register_extension("Palm", ".palm") + +Image.register_mime("Palm", "image/palm") diff --git a/MLPY/Lib/site-packages/PIL/PcdImagePlugin.py b/MLPY/Lib/site-packages/PIL/PcdImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..421d1871e1a701a36faddbc26d95a7db3bb5613e --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PcdImagePlugin.py @@ -0,0 +1,66 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCD file handling +# +# History: +# 96-05-10 fl Created +# 96-05-27 fl Added draft mode (128x192, 256x384) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from . import Image, ImageFile + +## +# Image plugin for PhotoCD images. This plugin only reads the 768x512 +# image from the file; higher resolutions are encoded in a proprietary +# encoding. + + +class PcdImageFile(ImageFile.ImageFile): + format = "PCD" + format_description = "Kodak PhotoCD" + + def _open(self) -> None: + # rough + assert self.fp is not None + + self.fp.seek(2048) + s = self.fp.read(2048) + + if s[:4] != b"PCD_": + msg = "not a PCD file" + raise SyntaxError(msg) + + orientation = s[1538] & 3 + self.tile_post_rotate = None + if orientation == 1: + self.tile_post_rotate = 90 + elif orientation == 3: + self.tile_post_rotate = -90 + + self._mode = "RGB" + self._size = 768, 512 # FIXME: not correct for rotated images! 
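+        # the 768x512 base image data begins 96 * 2048 bytes into the file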
+ self.tile = [("pcd", (0, 0) + self.size, 96 * 2048, None)] + + def load_end(self) -> None: + if self.tile_post_rotate: + # Handle rotated PCDs + assert self.im is not None + + self.im = self.im.rotate(self.tile_post_rotate) + self._size = self.im.size + + +# +# registry + +Image.register_open(PcdImageFile.format, PcdImageFile) + +Image.register_extension(PcdImageFile.format, ".pcd") diff --git a/MLPY/Lib/site-packages/PIL/PcfFontFile.py b/MLPY/Lib/site-packages/PIL/PcfFontFile.py new file mode 100644 index 0000000000000000000000000000000000000000..24b3f26ca7d63029997e761a7719e753c771090d --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PcfFontFile.py @@ -0,0 +1,254 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library +# $Id$ +# +# portable compiled font file parser +# +# history: +# 1997-08-19 fl created +# 2003-09-13 fl fixed loading of unicode fonts +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import io +from typing import BinaryIO, Callable + +from . import FontFile, Image +from ._binary import i8 +from ._binary import i16be as b16 +from ._binary import i16le as l16 +from ._binary import i32be as b32 +from ._binary import i32le as l32 + +# -------------------------------------------------------------------- +# declarations + +PCF_MAGIC = 0x70636601 # "\x01fcp" + +PCF_PROPERTIES = 1 << 0 +PCF_ACCELERATORS = 1 << 1 +PCF_METRICS = 1 << 2 +PCF_BITMAPS = 1 << 3 +PCF_INK_METRICS = 1 << 4 +PCF_BDF_ENCODINGS = 1 << 5 +PCF_SWIDTHS = 1 << 6 +PCF_GLYPH_NAMES = 1 << 7 +PCF_BDF_ACCELERATORS = 1 << 8 + +BYTES_PER_ROW: list[Callable[[int], int]] = [ + lambda bits: ((bits + 7) >> 3), + lambda bits: ((bits + 15) >> 3) & ~1, + lambda bits: ((bits + 31) >> 3) & ~3, + lambda bits: ((bits + 63) >> 3) & ~7, +] + + +def sz(s: bytes, o: int) -> bytes: + return s[o : s.index(b"\0", o)] + + +class PcfFontFile(FontFile.FontFile): + """Font file plugin for the X11 PCF format.""" + + name = "name" + + def __init__(self, fp: BinaryIO, charset_encoding: str = "iso8859-1"): + self.charset_encoding = charset_encoding + + magic = l32(fp.read(4)) + if magic != PCF_MAGIC: + msg = "not a PCF file" + raise SyntaxError(msg) + + super().__init__() + + count = l32(fp.read(4)) + self.toc = {} + for i in range(count): + type = l32(fp.read(4)) + self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4)) + + self.fp = fp + + self.info = self._load_properties() + + metrics = self._load_metrics() + bitmaps = self._load_bitmaps(metrics) + encoding = self._load_encoding() + + # + # create glyph structure + + for ch, ix in enumerate(encoding): + if ix is not None: + ( + xsize, + ysize, + left, + right, + width, + ascent, + descent, + attributes, + ) = metrics[ix] + self.glyph[ch] = ( + (width, 0), + (left, descent - ysize, xsize + left, descent), + (0, 0, xsize, ysize), + bitmaps[ix], + ) + + def _getformat( + self, tag: int + ) -> tuple[BinaryIO, int, Callable[[bytes], int], Callable[[bytes], int]]: + format, size, offset = self.toc[tag] + + fp = self.fp + fp.seek(offset) + + format = l32(fp.read(4)) + + if format & 4: + i16, i32 = b16, b32 + else: + i16, i32 = l16, l32 + + return fp, format, i16, i32 + + def _load_properties(self) -> dict[bytes, bytes | int]: + # + # font properties + + properties = {} + + fp, format, i16, i32 = self._getformat(PCF_PROPERTIES) + + nprops = i32(fp.read(4)) + + # read property description + p = [(i32(fp.read(4)), 
i8(fp.read(1)), i32(fp.read(4))) for _ in range(nprops)] + + if nprops & 3: + fp.seek(4 - (nprops & 3), io.SEEK_CUR) # pad + + data = fp.read(i32(fp.read(4))) + + for k, s, v in p: + property_value: bytes | int = sz(data, v) if s else v + properties[sz(data, k)] = property_value + + return properties + + def _load_metrics(self) -> list[tuple[int, int, int, int, int, int, int, int]]: + # + # font metrics + + metrics: list[tuple[int, int, int, int, int, int, int, int]] = [] + + fp, format, i16, i32 = self._getformat(PCF_METRICS) + + append = metrics.append + + if (format & 0xFF00) == 0x100: + # "compressed" metrics + for i in range(i16(fp.read(2))): + left = i8(fp.read(1)) - 128 + right = i8(fp.read(1)) - 128 + width = i8(fp.read(1)) - 128 + ascent = i8(fp.read(1)) - 128 + descent = i8(fp.read(1)) - 128 + xsize = right - left + ysize = ascent + descent + append((xsize, ysize, left, right, width, ascent, descent, 0)) + + else: + # "jumbo" metrics + for i in range(i32(fp.read(4))): + left = i16(fp.read(2)) + right = i16(fp.read(2)) + width = i16(fp.read(2)) + ascent = i16(fp.read(2)) + descent = i16(fp.read(2)) + attributes = i16(fp.read(2)) + xsize = right - left + ysize = ascent + descent + append((xsize, ysize, left, right, width, ascent, descent, attributes)) + + return metrics + + def _load_bitmaps( + self, metrics: list[tuple[int, int, int, int, int, int, int, int]] + ) -> list[Image.Image]: + # + # bitmap data + + fp, format, i16, i32 = self._getformat(PCF_BITMAPS) + + nbitmaps = i32(fp.read(4)) + + if nbitmaps != len(metrics): + msg = "Wrong number of bitmaps" + raise OSError(msg) + + offsets = [i32(fp.read(4)) for _ in range(nbitmaps)] + + bitmap_sizes = [i32(fp.read(4)) for _ in range(4)] + + # byteorder = format & 4 # non-zero => MSB + bitorder = format & 8 # non-zero => MSB + padindex = format & 3 + + bitmapsize = bitmap_sizes[padindex] + offsets.append(bitmapsize) + + data = fp.read(bitmapsize) + + pad = BYTES_PER_ROW[padindex] + mode = "1;R" + if bitorder: + mode = "1" + + bitmaps = [] + for i in range(nbitmaps): + xsize, ysize = metrics[i][:2] + b, e = offsets[i : i + 2] + bitmaps.append( + Image.frombytes("1", (xsize, ysize), data[b:e], "raw", mode, pad(xsize)) + ) + + return bitmaps + + def _load_encoding(self) -> list[int | None]: + fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS) + + first_col, last_col = i16(fp.read(2)), i16(fp.read(2)) + first_row, last_row = i16(fp.read(2)), i16(fp.read(2)) + + i16(fp.read(2)) # default + + nencoding = (last_col - first_col + 1) * (last_row - first_row + 1) + + # map character code to bitmap index + encoding: list[int | None] = [None] * min(256, nencoding) + + encoding_offsets = [i16(fp.read(2)) for _ in range(nencoding)] + + for i in range(first_col, len(encoding)): + try: + encoding_offset = encoding_offsets[ + ord(bytearray([i]).decode(self.charset_encoding)) + ] + if encoding_offset != 0xFFFF: + encoding[i] = encoding_offset + except UnicodeDecodeError: + # character is not supported in selected encoding + pass + + return encoding diff --git a/MLPY/Lib/site-packages/PIL/PcxImagePlugin.py b/MLPY/Lib/site-packages/PIL/PcxImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..f5d121e0d99a8149264154b9f6c96affed27a8b9 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PcxImagePlugin.py @@ -0,0 +1,227 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCX file handling +# +# This format was originally used by ZSoft's popular PaintBrush +# program for the IBM PC. 
It is also supported by many MS-DOS and +# Windows applications, including the Windows PaintBrush program in +# Windows 3. +# +# history: +# 1995-09-01 fl Created +# 1996-05-20 fl Fixed RGB support +# 1997-01-03 fl Fixed 2-bit and 4-bit support +# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1) +# 1999-02-07 fl Added write support +# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust +# 2002-07-30 fl Seek from to current position, not beginning of file +# 2003-06-03 fl Extract DPI settings (info["dpi"]) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import io +import logging +from typing import IO + +from . import Image, ImageFile, ImagePalette +from ._binary import i16le as i16 +from ._binary import o8 +from ._binary import o16le as o16 + +logger = logging.getLogger(__name__) + + +def _accept(prefix: bytes) -> bool: + return prefix[0] == 10 and prefix[1] in [0, 2, 3, 5] + + +## +# Image plugin for Paintbrush images. + + +class PcxImageFile(ImageFile.ImageFile): + format = "PCX" + format_description = "Paintbrush" + + def _open(self) -> None: + # header + assert self.fp is not None + + s = self.fp.read(128) + if not _accept(s): + msg = "not a PCX file" + raise SyntaxError(msg) + + # image + bbox = i16(s, 4), i16(s, 6), i16(s, 8) + 1, i16(s, 10) + 1 + if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]: + msg = "bad PCX image size" + raise SyntaxError(msg) + logger.debug("BBox: %s %s %s %s", *bbox) + + # format + version = s[1] + bits = s[3] + planes = s[65] + provided_stride = i16(s, 66) + logger.debug( + "PCX version %s, bits %s, planes %s, stride %s", + version, + bits, + planes, + provided_stride, + ) + + self.info["dpi"] = i16(s, 12), i16(s, 14) + + if bits == 1 and planes == 1: + mode = rawmode = "1" + + elif bits == 1 and planes in (2, 4): + mode = "P" + rawmode = "P;%dL" % planes + self.palette = ImagePalette.raw("RGB", s[16:64]) + + elif version == 5 and bits == 8 and planes == 1: + mode = rawmode = "L" + # FIXME: hey, this doesn't work with the incremental loader !!! + self.fp.seek(-769, io.SEEK_END) + s = self.fp.read(769) + if len(s) == 769 and s[0] == 12: + # check if the palette is linear grayscale + for i in range(256): + if s[i * 3 + 1 : i * 3 + 4] != o8(i) * 3: + mode = rawmode = "P" + break + if mode == "P": + self.palette = ImagePalette.raw("RGB", s[1:]) + self.fp.seek(128) + + elif version == 5 and bits == 8 and planes == 3: + mode = "RGB" + rawmode = "RGB;L" + + else: + msg = "unknown PCX mode" + raise OSError(msg) + + self._mode = mode + self._size = bbox[2] - bbox[0], bbox[3] - bbox[1] + + # Don't trust the passed in stride. + # Calculate the approximate position for ourselves. 
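+        # e.g. a 321 pixel wide 1-bit image needs (321 * 1 + 7) // 8 = 41 bytes,
+        # rounded up to an even 42 below if the header disagrees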
+ # CVE-2020-35653 + stride = (self._size[0] * bits + 7) // 8 + + # While the specification states that this must be even, + # not all images follow this + if provided_stride != stride: + stride += stride % 2 + + bbox = (0, 0) + self.size + logger.debug("size: %sx%s", *self.size) + + self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))] + + +# -------------------------------------------------------------------- +# save PCX files + + +SAVE = { + # mode: (version, bits, planes, raw mode) + "1": (2, 1, 1, "1"), + "L": (5, 8, 1, "L"), + "P": (5, 8, 1, "P"), + "RGB": (5, 8, 3, "RGB;L"), +} + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + try: + version, bits, planes, rawmode = SAVE[im.mode] + except KeyError as e: + msg = f"Cannot save {im.mode} images as PCX" + raise ValueError(msg) from e + + # bytes per plane + stride = (im.size[0] * bits + 7) // 8 + # stride should be even + stride += stride % 2 + # Stride needs to be kept in sync with the PcxEncode.c version. + # Ideally it should be passed in in the state, but the bytes value + # gets overwritten. + + logger.debug( + "PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d", + im.size[0], + bits, + stride, + ) + + # under windows, we could determine the current screen size with + # "Image.core.display_mode()[1]", but I think that's overkill... + + screen = im.size + + dpi = 100, 100 + + # PCX header + fp.write( + o8(10) + + o8(version) + + o8(1) + + o8(bits) + + o16(0) + + o16(0) + + o16(im.size[0] - 1) + + o16(im.size[1] - 1) + + o16(dpi[0]) + + o16(dpi[1]) + + b"\0" * 24 + + b"\xFF" * 24 + + b"\0" + + o8(planes) + + o16(stride) + + o16(1) + + o16(screen[0]) + + o16(screen[1]) + + b"\0" * 54 + ) + + assert fp.tell() == 128 + + ImageFile._save(im, fp, [("pcx", (0, 0) + im.size, 0, (rawmode, bits * planes))]) + + if im.mode == "P": + # colour palette + assert im.im is not None + + fp.write(o8(12)) + palette = im.im.getpalette("RGB", "RGB") + palette += b"\x00" * (768 - len(palette)) + fp.write(palette) # 768 bytes + elif im.mode == "L": + # grayscale palette + fp.write(o8(12)) + for i in range(256): + fp.write(o8(i) * 3) + + +# -------------------------------------------------------------------- +# registry + + +Image.register_open(PcxImageFile.format, PcxImageFile, _accept) +Image.register_save(PcxImageFile.format, _save) + +Image.register_extension(PcxImageFile.format, ".pcx") + +Image.register_mime(PcxImageFile.format, "image/x-pcx") diff --git a/MLPY/Lib/site-packages/PIL/PdfImagePlugin.py b/MLPY/Lib/site-packages/PIL/PdfImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..9f9021c4598a3cfe8f6cdc29ae4dadadb962d1b8 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PdfImagePlugin.py @@ -0,0 +1,304 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PDF (Acrobat) file handling +# +# History: +# 1996-07-16 fl Created +# 1997-01-18 fl Fixed header +# 2004-02-21 fl Fixes for 1/L/CMYK images, etc. +# 2004-02-24 fl Fixes for 1 and P images. +# +# Copyright (c) 1997-2004 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996-1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## +# Image plugin for PDF images (output only). +## +from __future__ import annotations + +import io +import math +import os +import time +from typing import IO + +from . import Image, ImageFile, ImageSequence, PdfParser, __version__, features + +# +# -------------------------------------------------------------------- + +# object ids: +# 1. 
catalogue +# 2. pages +# 3. image +# 4. page +# 5. page contents + + +def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + _save(im, fp, filename, save_all=True) + + +## +# (Internal) Image save plugin for the PDF format. + + +def _write_image(im, filename, existing_pdf, image_refs): + # FIXME: Should replace ASCIIHexDecode with RunLengthDecode + # (packbits) or LZWDecode (tiff/lzw compression). Note that + # PDF 1.2 also supports Flatedecode (zip compression). + + params = None + decode = None + + # + # Get image characteristics + + width, height = im.size + + dict_obj = {"BitsPerComponent": 8} + if im.mode == "1": + if features.check("libtiff"): + filter = "CCITTFaxDecode" + dict_obj["BitsPerComponent"] = 1 + params = PdfParser.PdfArray( + [ + PdfParser.PdfDict( + { + "K": -1, + "BlackIs1": True, + "Columns": width, + "Rows": height, + } + ) + ] + ) + else: + filter = "DCTDecode" + dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceGray") + procset = "ImageB" # grayscale + elif im.mode == "L": + filter = "DCTDecode" + # params = f"<< /Predictor 15 /Columns {width-2} >>" + dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceGray") + procset = "ImageB" # grayscale + elif im.mode == "LA": + filter = "JPXDecode" + # params = f"<< /Predictor 15 /Columns {width-2} >>" + procset = "ImageB" # grayscale + dict_obj["SMaskInData"] = 1 + elif im.mode == "P": + filter = "ASCIIHexDecode" + palette = im.getpalette() + dict_obj["ColorSpace"] = [ + PdfParser.PdfName("Indexed"), + PdfParser.PdfName("DeviceRGB"), + len(palette) // 3 - 1, + PdfParser.PdfBinary(palette), + ] + procset = "ImageI" # indexed color + + if "transparency" in im.info: + smask = im.convert("LA").getchannel("A") + smask.encoderinfo = {} + + image_ref = _write_image(smask, filename, existing_pdf, image_refs)[0] + dict_obj["SMask"] = image_ref + elif im.mode == "RGB": + filter = "DCTDecode" + dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceRGB") + procset = "ImageC" # color images + elif im.mode == "RGBA": + filter = "JPXDecode" + procset = "ImageC" # color images + dict_obj["SMaskInData"] = 1 + elif im.mode == "CMYK": + filter = "DCTDecode" + dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceCMYK") + procset = "ImageC" # color images + decode = [1, 0, 1, 0, 1, 0, 1, 0] + else: + msg = f"cannot save mode {im.mode}" + raise ValueError(msg) + + # + # image + + op = io.BytesIO() + + if filter == "ASCIIHexDecode": + ImageFile._save(im, op, [("hex", (0, 0) + im.size, 0, im.mode)]) + elif filter == "CCITTFaxDecode": + im.save( + op, + "TIFF", + compression="group4", + # use a single strip + strip_size=math.ceil(width / 8) * height, + ) + elif filter == "DCTDecode": + Image.SAVE["JPEG"](im, op, filename) + elif filter == "JPXDecode": + del dict_obj["BitsPerComponent"] + Image.SAVE["JPEG2000"](im, op, filename) + else: + msg = f"unsupported PDF filter ({filter})" + raise ValueError(msg) + + stream = op.getvalue() + if filter == "CCITTFaxDecode": + stream = stream[8:] + filter = PdfParser.PdfArray([PdfParser.PdfName(filter)]) + else: + filter = PdfParser.PdfName(filter) + + image_ref = image_refs.pop(0) + existing_pdf.write_obj( + image_ref, + stream=stream, + Type=PdfParser.PdfName("XObject"), + Subtype=PdfParser.PdfName("Image"), + Width=width, # * 72.0 / x_resolution, + Height=height, # * 72.0 / y_resolution, + Filter=filter, + Decode=decode, + DecodeParms=params, + **dict_obj, + ) + + return image_ref, procset + + +def _save(im, fp, filename, save_all=False): + is_appending = im.encoderinfo.get("append", False) + 
if is_appending: + existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="r+b") + else: + existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="w+b") + + dpi = im.encoderinfo.get("dpi") + if dpi: + x_resolution = dpi[0] + y_resolution = dpi[1] + else: + x_resolution = y_resolution = im.encoderinfo.get("resolution", 72.0) + + info = { + "title": ( + None if is_appending else os.path.splitext(os.path.basename(filename))[0] + ), + "author": None, + "subject": None, + "keywords": None, + "creator": None, + "producer": None, + "creationDate": None if is_appending else time.gmtime(), + "modDate": None if is_appending else time.gmtime(), + } + for k, default in info.items(): + v = im.encoderinfo.get(k) if k in im.encoderinfo else default + if v: + existing_pdf.info[k[0].upper() + k[1:]] = v + + # + # make sure image data is available + im.load() + + existing_pdf.start_writing() + existing_pdf.write_header() + existing_pdf.write_comment(f"created by Pillow {__version__} PDF driver") + + # + # pages + ims = [im] + if save_all: + append_images = im.encoderinfo.get("append_images", []) + for append_im in append_images: + append_im.encoderinfo = im.encoderinfo.copy() + ims.append(append_im) + number_of_pages = 0 + image_refs = [] + page_refs = [] + contents_refs = [] + for im in ims: + im_number_of_pages = 1 + if save_all: + try: + im_number_of_pages = im.n_frames + except AttributeError: + # Image format does not have n_frames. + # It is a single frame image + pass + number_of_pages += im_number_of_pages + for i in range(im_number_of_pages): + image_refs.append(existing_pdf.next_object_id(0)) + if im.mode == "P" and "transparency" in im.info: + image_refs.append(existing_pdf.next_object_id(0)) + + page_refs.append(existing_pdf.next_object_id(0)) + contents_refs.append(existing_pdf.next_object_id(0)) + existing_pdf.pages.append(page_refs[-1]) + + # + # catalog and list of pages + existing_pdf.write_catalog() + + page_number = 0 + for im_sequence in ims: + im_pages = ImageSequence.Iterator(im_sequence) if save_all else [im_sequence] + for im in im_pages: + image_ref, procset = _write_image(im, filename, existing_pdf, image_refs) + + # + # page + + existing_pdf.write_page( + page_refs[page_number], + Resources=PdfParser.PdfDict( + ProcSet=[PdfParser.PdfName("PDF"), PdfParser.PdfName(procset)], + XObject=PdfParser.PdfDict(image=image_ref), + ), + MediaBox=[ + 0, + 0, + im.width * 72.0 / x_resolution, + im.height * 72.0 / y_resolution, + ], + Contents=contents_refs[page_number], + ) + + # + # page contents + + page_contents = b"q %f 0 0 %f 0 0 cm /image Do Q\n" % ( + im.width * 72.0 / x_resolution, + im.height * 72.0 / y_resolution, + ) + + existing_pdf.write_obj(contents_refs[page_number], stream=page_contents) + + page_number += 1 + + # + # trailer + existing_pdf.write_xref_and_trailer() + if hasattr(fp, "flush"): + fp.flush() + existing_pdf.close() + + +# +# -------------------------------------------------------------------- + + +Image.register_save("PDF", _save) +Image.register_save_all("PDF", _save_all) + +Image.register_extension("PDF", ".pdf") + +Image.register_mime("PDF", "application/pdf") diff --git a/MLPY/Lib/site-packages/PIL/PdfParser.py b/MLPY/Lib/site-packages/PIL/PdfParser.py new file mode 100644 index 0000000000000000000000000000000000000000..93d677e89e1e38f0b2583448f6eb3b62924d70ae --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PdfParser.py @@ -0,0 +1,1003 @@ +from __future__ import annotations + +import calendar +import codecs +import collections +import mmap 
+import os +import re +import time +import zlib +from typing import TYPE_CHECKING, Any, List, NamedTuple, Union + + +# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set +# on page 656 +def encode_text(s: str) -> bytes: + return codecs.BOM_UTF16_BE + s.encode("utf_16_be") + + +PDFDocEncoding = { + 0x16: "\u0017", + 0x18: "\u02D8", + 0x19: "\u02C7", + 0x1A: "\u02C6", + 0x1B: "\u02D9", + 0x1C: "\u02DD", + 0x1D: "\u02DB", + 0x1E: "\u02DA", + 0x1F: "\u02DC", + 0x80: "\u2022", + 0x81: "\u2020", + 0x82: "\u2021", + 0x83: "\u2026", + 0x84: "\u2014", + 0x85: "\u2013", + 0x86: "\u0192", + 0x87: "\u2044", + 0x88: "\u2039", + 0x89: "\u203A", + 0x8A: "\u2212", + 0x8B: "\u2030", + 0x8C: "\u201E", + 0x8D: "\u201C", + 0x8E: "\u201D", + 0x8F: "\u2018", + 0x90: "\u2019", + 0x91: "\u201A", + 0x92: "\u2122", + 0x93: "\uFB01", + 0x94: "\uFB02", + 0x95: "\u0141", + 0x96: "\u0152", + 0x97: "\u0160", + 0x98: "\u0178", + 0x99: "\u017D", + 0x9A: "\u0131", + 0x9B: "\u0142", + 0x9C: "\u0153", + 0x9D: "\u0161", + 0x9E: "\u017E", + 0xA0: "\u20AC", +} + + +def decode_text(b): + if b[: len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE: + return b[len(codecs.BOM_UTF16_BE) :].decode("utf_16_be") + else: + return "".join(PDFDocEncoding.get(byte, chr(byte)) for byte in b) + + +class PdfFormatError(RuntimeError): + """An error that probably indicates a syntactic or semantic error in the + PDF file structure""" + + pass + + +def check_format_condition(condition: bool, error_message: str) -> None: + if not condition: + raise PdfFormatError(error_message) + + +class IndirectReferenceTuple(NamedTuple): + object_id: int + generation: int + + +class IndirectReference(IndirectReferenceTuple): + def __str__(self) -> str: + return f"{self.object_id} {self.generation} R" + + def __bytes__(self) -> bytes: + return self.__str__().encode("us-ascii") + + def __eq__(self, other: object) -> bool: + if self.__class__ is not other.__class__: + return False + assert isinstance(other, IndirectReference) + return other.object_id == self.object_id and other.generation == self.generation + + def __ne__(self, other): + return not (self == other) + + def __hash__(self) -> int: + return hash((self.object_id, self.generation)) + + +class IndirectObjectDef(IndirectReference): + def __str__(self) -> str: + return f"{self.object_id} {self.generation} obj" + + +class XrefTable: + def __init__(self): + self.existing_entries = {} # object ID => (offset, generation) + self.new_entries = {} # object ID => (offset, generation) + self.deleted_entries = {0: 65536} # object ID => generation + self.reading_finished = False + + def __setitem__(self, key, value): + if self.reading_finished: + self.new_entries[key] = value + else: + self.existing_entries[key] = value + if key in self.deleted_entries: + del self.deleted_entries[key] + + def __getitem__(self, key): + try: + return self.new_entries[key] + except KeyError: + return self.existing_entries[key] + + def __delitem__(self, key): + if key in self.new_entries: + generation = self.new_entries[key][1] + 1 + del self.new_entries[key] + self.deleted_entries[key] = generation + elif key in self.existing_entries: + generation = self.existing_entries[key][1] + 1 + self.deleted_entries[key] = generation + elif key in self.deleted_entries: + generation = self.deleted_entries[key] + else: + msg = f"object ID {key} cannot be deleted because it doesn't exist" + raise IndexError(msg) + + def __contains__(self, key): + return key in self.existing_entries or key in self.new_entries + + def 
__len__(self) -> int: + return len( + set(self.existing_entries.keys()) + | set(self.new_entries.keys()) + | set(self.deleted_entries.keys()) + ) + + def keys(self): + return ( + set(self.existing_entries.keys()) - set(self.deleted_entries.keys()) + ) | set(self.new_entries.keys()) + + def write(self, f): + keys = sorted(set(self.new_entries.keys()) | set(self.deleted_entries.keys())) + deleted_keys = sorted(set(self.deleted_entries.keys())) + startxref = f.tell() + f.write(b"xref\n") + while keys: + # find a contiguous sequence of object IDs + prev = None + for index, key in enumerate(keys): + if prev is None or prev + 1 == key: + prev = key + else: + contiguous_keys = keys[:index] + keys = keys[index:] + break + else: + contiguous_keys = keys + keys = None + f.write(b"%d %d\n" % (contiguous_keys[0], len(contiguous_keys))) + for object_id in contiguous_keys: + if object_id in self.new_entries: + f.write(b"%010d %05d n \n" % self.new_entries[object_id]) + else: + this_deleted_object_id = deleted_keys.pop(0) + check_format_condition( + object_id == this_deleted_object_id, + f"expected the next deleted object ID to be {object_id}, " + f"instead found {this_deleted_object_id}", + ) + try: + next_in_linked_list = deleted_keys[0] + except IndexError: + next_in_linked_list = 0 + f.write( + b"%010d %05d f \n" + % (next_in_linked_list, self.deleted_entries[object_id]) + ) + return startxref + + +class PdfName: + def __init__(self, name): + if isinstance(name, PdfName): + self.name = name.name + elif isinstance(name, bytes): + self.name = name + else: + self.name = name.encode("us-ascii") + + def name_as_str(self) -> str: + return self.name.decode("us-ascii") + + def __eq__(self, other): + return ( + isinstance(other, PdfName) and other.name == self.name + ) or other == self.name + + def __hash__(self) -> int: + return hash(self.name) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({repr(self.name)})" + + @classmethod + def from_pdf_stream(cls, data): + return cls(PdfParser.interpret_name(data)) + + allowed_chars = set(range(33, 127)) - {ord(c) for c in "#%/()<>[]{}"} + + def __bytes__(self) -> bytes: + result = bytearray(b"/") + for b in self.name: + if b in self.allowed_chars: + result.append(b) + else: + result.extend(b"#%02X" % b) + return bytes(result) + + +class PdfArray(List[Any]): + def __bytes__(self) -> bytes: + return b"[ " + b" ".join(pdf_repr(x) for x in self) + b" ]" + + +if TYPE_CHECKING: + _DictBase = collections.UserDict[Union[str, bytes], Any] +else: + _DictBase = collections.UserDict + + +class PdfDict(_DictBase): + def __setattr__(self, key, value): + if key == "data": + collections.UserDict.__setattr__(self, key, value) + else: + self[key.encode("us-ascii")] = value + + def __getattr__(self, key): + try: + value = self[key.encode("us-ascii")] + except KeyError as e: + raise AttributeError(key) from e + if isinstance(value, bytes): + value = decode_text(value) + if key.endswith("Date"): + if value.startswith("D:"): + value = value[2:] + + relationship = "Z" + if len(value) > 17: + relationship = value[14] + offset = int(value[15:17]) * 60 + if len(value) > 20: + offset += int(value[18:20]) + + format = "%Y%m%d%H%M%S"[: len(value) - 2] + value = time.strptime(value[: len(format) + 2], format) + if relationship in ["+", "-"]: + offset *= 60 + if relationship == "+": + offset *= -1 + value = time.gmtime(calendar.timegm(value) + offset) + return value + + def __bytes__(self) -> bytes: + out = bytearray(b"<<") + for key, value in self.items(): + if value is None: 
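+                # None values are simply omitted from the written dictionary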
+ continue + value = pdf_repr(value) + out.extend(b"\n") + out.extend(bytes(PdfName(key))) + out.extend(b" ") + out.extend(value) + out.extend(b"\n>>") + return bytes(out) + + +class PdfBinary: + def __init__(self, data): + self.data = data + + def __bytes__(self) -> bytes: + return b"<%s>" % b"".join(b"%02X" % b for b in self.data) + + +class PdfStream: + def __init__(self, dictionary, buf): + self.dictionary = dictionary + self.buf = buf + + def decode(self): + try: + filter = self.dictionary.Filter + except AttributeError: + return self.buf + if filter == b"FlateDecode": + try: + expected_length = self.dictionary.DL + except AttributeError: + expected_length = self.dictionary.Length + return zlib.decompress(self.buf, bufsize=int(expected_length)) + else: + msg = f"stream filter {repr(self.dictionary.Filter)} unknown/unsupported" + raise NotImplementedError(msg) + + +def pdf_repr(x): + if x is True: + return b"true" + elif x is False: + return b"false" + elif x is None: + return b"null" + elif isinstance(x, (PdfName, PdfDict, PdfArray, PdfBinary)): + return bytes(x) + elif isinstance(x, (int, float)): + return str(x).encode("us-ascii") + elif isinstance(x, time.struct_time): + return b"(D:" + time.strftime("%Y%m%d%H%M%SZ", x).encode("us-ascii") + b")" + elif isinstance(x, dict): + return bytes(PdfDict(x)) + elif isinstance(x, list): + return bytes(PdfArray(x)) + elif isinstance(x, str): + return pdf_repr(encode_text(x)) + elif isinstance(x, bytes): + # XXX escape more chars? handle binary garbage + x = x.replace(b"\\", b"\\\\") + x = x.replace(b"(", b"\\(") + x = x.replace(b")", b"\\)") + return b"(" + x + b")" + else: + return bytes(x) + + +class PdfParser: + """Based on + https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf + Supports PDF up to 1.4 + """ + + def __init__(self, filename=None, f=None, buf=None, start_offset=0, mode="rb"): + if buf and f: + msg = "specify buf or f or filename, but not both buf and f" + raise RuntimeError(msg) + self.filename = filename + self.buf = buf + self.f = f + self.start_offset = start_offset + self.should_close_buf = False + self.should_close_file = False + if filename is not None and f is None: + self.f = f = open(filename, mode) + self.should_close_file = True + if f is not None: + self.buf = buf = self.get_buf_from_file(f) + self.should_close_buf = True + if not filename and hasattr(f, "name"): + self.filename = f.name + self.cached_objects = {} + if buf: + self.read_pdf_info() + else: + self.file_size_total = self.file_size_this = 0 + self.root = PdfDict() + self.root_ref = None + self.info = PdfDict() + self.info_ref = None + self.page_tree_root = {} + self.pages = [] + self.orig_pages = [] + self.pages_ref = None + self.last_xref_section_offset = None + self.trailer_dict = {} + self.xref_table = XrefTable() + self.xref_table.reading_finished = True + if f: + self.seek_end() + + def __enter__(self) -> PdfParser: + return self + + def __exit__(self, *args: object) -> None: + self.close() + + def start_writing(self) -> None: + self.close_buf() + self.seek_end() + + def close_buf(self) -> None: + try: + self.buf.close() + except AttributeError: + pass + self.buf = None + + def close(self) -> None: + if self.should_close_buf: + self.close_buf() + if self.f is not None and self.should_close_file: + self.f.close() + self.f = None + + def seek_end(self) -> None: + self.f.seek(0, os.SEEK_END) + + def write_header(self) -> None: + self.f.write(b"%PDF-1.4\n") + + def write_comment(self, s): + self.f.write(f"% {s}\n".encode()) 
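+
+    # A minimal, illustrative write sequence (hypothetical output name),
+    # following the same call order as PdfImagePlugin._save:
+    #
+    #     with PdfParser(filename="out.pdf", mode="w+b") as p:
+    #         p.start_writing()
+    #         p.write_header()
+    #         p.write_comment("created by an example script")
+    #         p.write_catalog()
+    #         p.write_xref_and_trailer()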
+ + def write_catalog(self) -> IndirectReference: + self.del_root() + self.root_ref = self.next_object_id(self.f.tell()) + self.pages_ref = self.next_object_id(0) + self.rewrite_pages() + self.write_obj(self.root_ref, Type=PdfName(b"Catalog"), Pages=self.pages_ref) + self.write_obj( + self.pages_ref, + Type=PdfName(b"Pages"), + Count=len(self.pages), + Kids=self.pages, + ) + return self.root_ref + + def rewrite_pages(self) -> None: + pages_tree_nodes_to_delete = [] + for i, page_ref in enumerate(self.orig_pages): + page_info = self.cached_objects[page_ref] + del self.xref_table[page_ref.object_id] + pages_tree_nodes_to_delete.append(page_info[PdfName(b"Parent")]) + if page_ref not in self.pages: + # the page has been deleted + continue + # make dict keys into strings for passing to write_page + stringified_page_info = {} + for key, value in page_info.items(): + # key should be a PdfName + stringified_page_info[key.name_as_str()] = value + stringified_page_info["Parent"] = self.pages_ref + new_page_ref = self.write_page(None, **stringified_page_info) + for j, cur_page_ref in enumerate(self.pages): + if cur_page_ref == page_ref: + # replace the page reference with the new one + self.pages[j] = new_page_ref + # delete redundant Pages tree nodes from xref table + for pages_tree_node_ref in pages_tree_nodes_to_delete: + while pages_tree_node_ref: + pages_tree_node = self.cached_objects[pages_tree_node_ref] + if pages_tree_node_ref.object_id in self.xref_table: + del self.xref_table[pages_tree_node_ref.object_id] + pages_tree_node_ref = pages_tree_node.get(b"Parent", None) + self.orig_pages = [] + + def write_xref_and_trailer(self, new_root_ref=None): + if new_root_ref: + self.del_root() + self.root_ref = new_root_ref + if self.info: + self.info_ref = self.write_obj(None, self.info) + start_xref = self.xref_table.write(self.f) + num_entries = len(self.xref_table) + trailer_dict = {b"Root": self.root_ref, b"Size": num_entries} + if self.last_xref_section_offset is not None: + trailer_dict[b"Prev"] = self.last_xref_section_offset + if self.info: + trailer_dict[b"Info"] = self.info_ref + self.last_xref_section_offset = start_xref + self.f.write( + b"trailer\n" + + bytes(PdfDict(trailer_dict)) + + b"\nstartxref\n%d\n%%%%EOF" % start_xref + ) + + def write_page(self, ref, *objs, **dict_obj): + if isinstance(ref, int): + ref = self.pages[ref] + if "Type" not in dict_obj: + dict_obj["Type"] = PdfName(b"Page") + if "Parent" not in dict_obj: + dict_obj["Parent"] = self.pages_ref + return self.write_obj(ref, *objs, **dict_obj) + + def write_obj(self, ref, *objs, **dict_obj): + f = self.f + if ref is None: + ref = self.next_object_id(f.tell()) + else: + self.xref_table[ref.object_id] = (f.tell(), ref.generation) + f.write(bytes(IndirectObjectDef(*ref))) + stream = dict_obj.pop("stream", None) + if stream is not None: + dict_obj["Length"] = len(stream) + if dict_obj: + f.write(pdf_repr(dict_obj)) + for obj in objs: + f.write(pdf_repr(obj)) + if stream is not None: + f.write(b"stream\n") + f.write(stream) + f.write(b"\nendstream\n") + f.write(b"endobj\n") + return ref + + def del_root(self) -> None: + if self.root_ref is None: + return + del self.xref_table[self.root_ref.object_id] + del self.xref_table[self.root[b"Pages"].object_id] + + @staticmethod + def get_buf_from_file(f): + if hasattr(f, "getbuffer"): + return f.getbuffer() + elif hasattr(f, "getvalue"): + return f.getvalue() + else: + try: + return mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) + except ValueError: # cannot mmap an empty file + 
return b"" + + def read_pdf_info(self) -> None: + self.file_size_total = len(self.buf) + self.file_size_this = self.file_size_total - self.start_offset + self.read_trailer() + self.root_ref = self.trailer_dict[b"Root"] + self.info_ref = self.trailer_dict.get(b"Info", None) + self.root = PdfDict(self.read_indirect(self.root_ref)) + if self.info_ref is None: + self.info = PdfDict() + else: + self.info = PdfDict(self.read_indirect(self.info_ref)) + check_format_condition(b"Type" in self.root, "/Type missing in Root") + check_format_condition( + self.root[b"Type"] == b"Catalog", "/Type in Root is not /Catalog" + ) + check_format_condition(b"Pages" in self.root, "/Pages missing in Root") + check_format_condition( + isinstance(self.root[b"Pages"], IndirectReference), + "/Pages in Root is not an indirect reference", + ) + self.pages_ref = self.root[b"Pages"] + self.page_tree_root = self.read_indirect(self.pages_ref) + self.pages = self.linearize_page_tree(self.page_tree_root) + # save the original list of page references + # in case the user modifies, adds or deletes some pages + # and we need to rewrite the pages and their list + self.orig_pages = self.pages[:] + + def next_object_id(self, offset=None): + try: + # TODO: support reuse of deleted objects + reference = IndirectReference(max(self.xref_table.keys()) + 1, 0) + except ValueError: + reference = IndirectReference(1, 0) + if offset is not None: + self.xref_table[reference.object_id] = (offset, 0) + return reference + + delimiter = rb"[][()<>{}/%]" + delimiter_or_ws = rb"[][()<>{}/%\000\011\012\014\015\040]" + whitespace = rb"[\000\011\012\014\015\040]" + whitespace_or_hex = rb"[\000\011\012\014\015\0400-9a-fA-F]" + whitespace_optional = whitespace + b"*" + whitespace_mandatory = whitespace + b"+" + # No "\012" aka "\n" or "\015" aka "\r": + whitespace_optional_no_nl = rb"[\000\011\014\040]*" + newline_only = rb"[\r\n]+" + newline = whitespace_optional_no_nl + newline_only + whitespace_optional_no_nl + re_trailer_end = re.compile( + whitespace_mandatory + + rb"trailer" + + whitespace_optional + + rb"<<(.*>>)" + + newline + + rb"startxref" + + newline + + rb"([0-9]+)" + + newline + + rb"%%EOF" + + whitespace_optional + + rb"$", + re.DOTALL, + ) + re_trailer_prev = re.compile( + whitespace_optional + + rb"trailer" + + whitespace_optional + + rb"<<(.*?>>)" + + newline + + rb"startxref" + + newline + + rb"([0-9]+)" + + newline + + rb"%%EOF" + + whitespace_optional, + re.DOTALL, + ) + + def read_trailer(self): + search_start_offset = len(self.buf) - 16384 + if search_start_offset < self.start_offset: + search_start_offset = self.start_offset + m = self.re_trailer_end.search(self.buf, search_start_offset) + check_format_condition(m, "trailer end not found") + # make sure we found the LAST trailer + last_match = m + while m: + last_match = m + m = self.re_trailer_end.search(self.buf, m.start() + 16) + if not m: + m = last_match + trailer_data = m.group(1) + self.last_xref_section_offset = int(m.group(2)) + self.trailer_dict = self.interpret_trailer(trailer_data) + self.xref_table = XrefTable() + self.read_xref_table(xref_section_offset=self.last_xref_section_offset) + if b"Prev" in self.trailer_dict: + self.read_prev_trailer(self.trailer_dict[b"Prev"]) + + def read_prev_trailer(self, xref_section_offset): + trailer_offset = self.read_xref_table(xref_section_offset=xref_section_offset) + m = self.re_trailer_prev.search( + self.buf[trailer_offset : trailer_offset + 16384] + ) + check_format_condition(m, "previous trailer not found") + trailer_data 
= m.group(1) + check_format_condition( + int(m.group(2)) == xref_section_offset, + "xref section offset in previous trailer doesn't match what was expected", + ) + trailer_dict = self.interpret_trailer(trailer_data) + if b"Prev" in trailer_dict: + self.read_prev_trailer(trailer_dict[b"Prev"]) + + re_whitespace_optional = re.compile(whitespace_optional) + re_name = re.compile( + whitespace_optional + + rb"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?=" + + delimiter_or_ws + + rb")" + ) + re_dict_start = re.compile(whitespace_optional + rb"<<") + re_dict_end = re.compile(whitespace_optional + rb">>" + whitespace_optional) + + @classmethod + def interpret_trailer(cls, trailer_data): + trailer = {} + offset = 0 + while True: + m = cls.re_name.match(trailer_data, offset) + if not m: + m = cls.re_dict_end.match(trailer_data, offset) + check_format_condition( + m and m.end() == len(trailer_data), + "name not found in trailer, remaining data: " + + repr(trailer_data[offset:]), + ) + break + key = cls.interpret_name(m.group(1)) + value, offset = cls.get_value(trailer_data, m.end()) + trailer[key] = value + check_format_condition( + b"Size" in trailer and isinstance(trailer[b"Size"], int), + "/Size not in trailer or not an integer", + ) + check_format_condition( + b"Root" in trailer and isinstance(trailer[b"Root"], IndirectReference), + "/Root not in trailer or not an indirect reference", + ) + return trailer + + re_hashes_in_name = re.compile(rb"([^#]*)(#([0-9a-fA-F]{2}))?") + + @classmethod + def interpret_name(cls, raw, as_text=False): + name = b"" + for m in cls.re_hashes_in_name.finditer(raw): + if m.group(3): + name += m.group(1) + bytearray.fromhex(m.group(3).decode("us-ascii")) + else: + name += m.group(1) + if as_text: + return name.decode("utf-8") + else: + return bytes(name) + + re_null = re.compile(whitespace_optional + rb"null(?=" + delimiter_or_ws + rb")") + re_true = re.compile(whitespace_optional + rb"true(?=" + delimiter_or_ws + rb")") + re_false = re.compile(whitespace_optional + rb"false(?=" + delimiter_or_ws + rb")") + re_int = re.compile( + whitespace_optional + rb"([-+]?[0-9]+)(?=" + delimiter_or_ws + rb")" + ) + re_real = re.compile( + whitespace_optional + + rb"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?=" + + delimiter_or_ws + + rb")" + ) + re_array_start = re.compile(whitespace_optional + rb"\[") + re_array_end = re.compile(whitespace_optional + rb"]") + re_string_hex = re.compile( + whitespace_optional + rb"<(" + whitespace_or_hex + rb"*)>" + ) + re_string_lit = re.compile(whitespace_optional + rb"\(") + re_indirect_reference = re.compile( + whitespace_optional + + rb"([-+]?[0-9]+)" + + whitespace_mandatory + + rb"([-+]?[0-9]+)" + + whitespace_mandatory + + rb"R(?=" + + delimiter_or_ws + + rb")" + ) + re_indirect_def_start = re.compile( + whitespace_optional + + rb"([-+]?[0-9]+)" + + whitespace_mandatory + + rb"([-+]?[0-9]+)" + + whitespace_mandatory + + rb"obj(?=" + + delimiter_or_ws + + rb")" + ) + re_indirect_def_end = re.compile( + whitespace_optional + rb"endobj(?=" + delimiter_or_ws + rb")" + ) + re_comment = re.compile( + rb"(" + whitespace_optional + rb"%[^\r\n]*" + newline + rb")*" + ) + re_stream_start = re.compile(whitespace_optional + rb"stream\r?\n") + re_stream_end = re.compile( + whitespace_optional + rb"endstream(?=" + delimiter_or_ws + rb")" + ) + + @classmethod + def get_value(cls, data, offset, expect_indirect=None, max_nesting=-1): + if max_nesting == 0: + return None, None + m = cls.re_comment.match(data, offset) + if m: + offset = m.end() + m = 
cls.re_indirect_def_start.match(data, offset) + if m: + check_format_condition( + int(m.group(1)) > 0, + "indirect object definition: object ID must be greater than 0", + ) + check_format_condition( + int(m.group(2)) >= 0, + "indirect object definition: generation must be non-negative", + ) + check_format_condition( + expect_indirect is None + or expect_indirect + == IndirectReference(int(m.group(1)), int(m.group(2))), + "indirect object definition different than expected", + ) + object, offset = cls.get_value(data, m.end(), max_nesting=max_nesting - 1) + if offset is None: + return object, None + m = cls.re_indirect_def_end.match(data, offset) + check_format_condition(m, "indirect object definition end not found") + return object, m.end() + check_format_condition( + not expect_indirect, "indirect object definition not found" + ) + m = cls.re_indirect_reference.match(data, offset) + if m: + check_format_condition( + int(m.group(1)) > 0, + "indirect object reference: object ID must be greater than 0", + ) + check_format_condition( + int(m.group(2)) >= 0, + "indirect object reference: generation must be non-negative", + ) + return IndirectReference(int(m.group(1)), int(m.group(2))), m.end() + m = cls.re_dict_start.match(data, offset) + if m: + offset = m.end() + result = {} + m = cls.re_dict_end.match(data, offset) + while not m: + key, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) + if offset is None: + return result, None + value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) + result[key] = value + if offset is None: + return result, None + m = cls.re_dict_end.match(data, offset) + offset = m.end() + m = cls.re_stream_start.match(data, offset) + if m: + try: + stream_len_str = result.get(b"Length") + stream_len = int(stream_len_str) + except (TypeError, ValueError) as e: + msg = f"bad or missing Length in stream dict ({stream_len_str})" + raise PdfFormatError(msg) from e + stream_data = data[m.end() : m.end() + stream_len] + m = cls.re_stream_end.match(data, m.end() + stream_len) + check_format_condition(m, "stream end not found") + offset = m.end() + result = PdfStream(PdfDict(result), stream_data) + else: + result = PdfDict(result) + return result, offset + m = cls.re_array_start.match(data, offset) + if m: + offset = m.end() + result = [] + m = cls.re_array_end.match(data, offset) + while not m: + value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) + result.append(value) + if offset is None: + return result, None + m = cls.re_array_end.match(data, offset) + return result, m.end() + m = cls.re_null.match(data, offset) + if m: + return None, m.end() + m = cls.re_true.match(data, offset) + if m: + return True, m.end() + m = cls.re_false.match(data, offset) + if m: + return False, m.end() + m = cls.re_name.match(data, offset) + if m: + return PdfName(cls.interpret_name(m.group(1))), m.end() + m = cls.re_int.match(data, offset) + if m: + return int(m.group(1)), m.end() + m = cls.re_real.match(data, offset) + if m: + # XXX Decimal instead of float??? 
+ return float(m.group(1)), m.end() + m = cls.re_string_hex.match(data, offset) + if m: + # filter out whitespace + hex_string = bytearray( + b for b in m.group(1) if b in b"0123456789abcdefABCDEF" + ) + if len(hex_string) % 2 == 1: + # append a 0 if the length is not even - yes, at the end + hex_string.append(ord(b"0")) + return bytearray.fromhex(hex_string.decode("us-ascii")), m.end() + m = cls.re_string_lit.match(data, offset) + if m: + return cls.get_literal_string(data, m.end()) + # return None, offset # fallback (only for debugging) + msg = f"unrecognized object: {repr(data[offset : offset + 32])}" + raise PdfFormatError(msg) + + re_lit_str_token = re.compile( + rb"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))" + ) + escaped_chars = { + b"n": b"\n", + b"r": b"\r", + b"t": b"\t", + b"b": b"\b", + b"f": b"\f", + b"(": b"(", + b")": b")", + b"\\": b"\\", + ord(b"n"): b"\n", + ord(b"r"): b"\r", + ord(b"t"): b"\t", + ord(b"b"): b"\b", + ord(b"f"): b"\f", + ord(b"("): b"(", + ord(b")"): b")", + ord(b"\\"): b"\\", + } + + @classmethod + def get_literal_string(cls, data, offset): + nesting_depth = 0 + result = bytearray() + for m in cls.re_lit_str_token.finditer(data, offset): + result.extend(data[offset : m.start()]) + if m.group(1): + result.extend(cls.escaped_chars[m.group(1)[1]]) + elif m.group(2): + result.append(int(m.group(2)[1:], 8)) + elif m.group(3): + pass + elif m.group(5): + result.extend(b"\n") + elif m.group(6): + result.extend(b"(") + nesting_depth += 1 + elif m.group(7): + if nesting_depth == 0: + return bytes(result), m.end() + result.extend(b")") + nesting_depth -= 1 + offset = m.end() + msg = "unfinished literal string" + raise PdfFormatError(msg) + + re_xref_section_start = re.compile(whitespace_optional + rb"xref" + newline) + re_xref_subsection_start = re.compile( + whitespace_optional + + rb"([0-9]+)" + + whitespace_mandatory + + rb"([0-9]+)" + + whitespace_optional + + newline_only + ) + re_xref_entry = re.compile(rb"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)") + + def read_xref_table(self, xref_section_offset): + subsection_found = False + m = self.re_xref_section_start.match( + self.buf, xref_section_offset + self.start_offset + ) + check_format_condition(m, "xref section start not found") + offset = m.end() + while True: + m = self.re_xref_subsection_start.match(self.buf, offset) + if not m: + check_format_condition( + subsection_found, "xref subsection start not found" + ) + break + subsection_found = True + offset = m.end() + first_object = int(m.group(1)) + num_objects = int(m.group(2)) + for i in range(first_object, first_object + num_objects): + m = self.re_xref_entry.match(self.buf, offset) + check_format_condition(m, "xref entry not found") + offset = m.end() + is_free = m.group(3) == b"f" + if not is_free: + generation = int(m.group(2)) + new_entry = (int(m.group(1)), generation) + if i not in self.xref_table: + self.xref_table[i] = new_entry + return offset + + def read_indirect(self, ref, max_nesting=-1): + offset, generation = self.xref_table[ref[0]] + check_format_condition( + generation == ref[1], + f"expected to find generation {ref[1]} for object ID {ref[0]} in xref " + f"table, instead found generation {generation} at offset {offset}", + ) + value = self.get_value( + self.buf, + offset + self.start_offset, + expect_indirect=IndirectReference(*ref), + max_nesting=max_nesting, + )[0] + self.cached_objects[ref] = value + return value + + def linearize_page_tree(self, node=None): + if node is None: + node = self.page_tree_root 
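+        # depth-first walk: /Pages nodes are recursed into, /Page leaves are
+        # collected in document order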
+ check_format_condition( + node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages" + ) + pages = [] + for kid in node[b"Kids"]: + kid_object = self.read_indirect(kid) + if kid_object[b"Type"] == b"Page": + pages.append(kid) + else: + pages.extend(self.linearize_page_tree(node=kid_object)) + return pages diff --git a/MLPY/Lib/site-packages/PIL/PixarImagePlugin.py b/MLPY/Lib/site-packages/PIL/PixarImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..f5d1730fedb859be87453f3b9aaee53dc930b3ba --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PixarImagePlugin.py @@ -0,0 +1,72 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIXAR raster support for PIL +# +# history: +# 97-01-29 fl Created +# +# notes: +# This is incomplete; it is based on a few samples created with +# Photoshop 2.5 and 3.0, and a summary description provided by +# Greg Coats . Hopefully, "L" and +# "RGBA" support will be added in future versions. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from . import Image, ImageFile +from ._binary import i16le as i16 + +# +# helpers + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == b"\200\350\000\000" + + +## +# Image plugin for PIXAR raster images. + + +class PixarImageFile(ImageFile.ImageFile): + format = "PIXAR" + format_description = "PIXAR raster image" + + def _open(self) -> None: + # assuming a 4-byte magic label + assert self.fp is not None + + s = self.fp.read(4) + if not _accept(s): + msg = "not a PIXAR file" + raise SyntaxError(msg) + + # read rest of header + s = s + self.fp.read(508) + + self._size = i16(s, 418), i16(s, 416) + + # get channel/depth descriptions + mode = i16(s, 424), i16(s, 426) + + if mode == (14, 2): + self._mode = "RGB" + # FIXME: to be continued... + + # create tile descriptor (assuming "dumped") + self.tile = [("raw", (0, 0) + self.size, 1024, (self.mode, 0, 1))] + + +# +# -------------------------------------------------------------------- + +Image.register_open(PixarImageFile.format, PixarImageFile, _accept) + +Image.register_extension(PixarImageFile.format, ".pxr") diff --git a/MLPY/Lib/site-packages/PIL/PngImagePlugin.py b/MLPY/Lib/site-packages/PIL/PngImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..e84715224e9939f45cc10bee6905d2b78521e74e --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PngImagePlugin.py @@ -0,0 +1,1489 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PNG support code +# +# See "PNG (Portable Network Graphics) Specification, version 1.0; +# W3C Recommendation", 1996-10-01, Thomas Boutell (ed.). 
+# +# history: +# 1996-05-06 fl Created (couldn't resist it) +# 1996-12-14 fl Upgraded, added read and verify support (0.2) +# 1996-12-15 fl Separate PNG stream parser +# 1996-12-29 fl Added write support, added getchunks +# 1996-12-30 fl Eliminated circular references in decoder (0.3) +# 1998-07-12 fl Read/write 16-bit images as mode I (0.4) +# 2001-02-08 fl Added transparency support (from Zircon) (0.5) +# 2001-04-16 fl Don't close data source in "open" method (0.6) +# 2004-02-24 fl Don't even pretend to support interlaced files (0.7) +# 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8) +# 2004-09-20 fl Added PngInfo chunk container +# 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev) +# 2008-08-13 fl Added tRNS support for RGB images +# 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech) +# 2009-03-08 fl Added zTXT support (from Lowell Alleman) +# 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua) +# +# Copyright (c) 1997-2009 by Secret Labs AB +# Copyright (c) 1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import itertools +import logging +import re +import struct +import warnings +import zlib +from enum import IntEnum +from typing import IO, TYPE_CHECKING, Any, NoReturn + +from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import o8 +from ._binary import o16be as o16 +from ._binary import o32be as o32 + +if TYPE_CHECKING: + from . import _imaging + +logger = logging.getLogger(__name__) + +is_cid = re.compile(rb"\w\w\w\w").match + + +_MAGIC = b"\211PNG\r\n\032\n" + + +_MODES = { + # supported bits/color combinations, and corresponding modes/rawmodes + # Grayscale + (1, 0): ("1", "1"), + (2, 0): ("L", "L;2"), + (4, 0): ("L", "L;4"), + (8, 0): ("L", "L"), + (16, 0): ("I;16", "I;16B"), + # Truecolour + (8, 2): ("RGB", "RGB"), + (16, 2): ("RGB", "RGB;16B"), + # Indexed-colour + (1, 3): ("P", "P;1"), + (2, 3): ("P", "P;2"), + (4, 3): ("P", "P;4"), + (8, 3): ("P", "P"), + # Grayscale with alpha + (8, 4): ("LA", "LA"), + (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available + # Truecolour with alpha + (8, 6): ("RGBA", "RGBA"), + (16, 6): ("RGBA", "RGBA;16B"), +} + + +_simple_palette = re.compile(b"^\xff*\x00\xff*$") + +MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK +""" +Maximum decompressed size for a iTXt or zTXt chunk. +Eliminates decompression bombs where compressed chunks can expand 1000x. +See :ref:`Text in PNG File Format`. +""" +MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK +""" +Set the maximum total text chunk size. +See :ref:`Text in PNG File Format`. +""" + + +# APNG frame disposal modes +class Disposal(IntEnum): + OP_NONE = 0 + """ + No disposal is done on this frame before rendering the next frame. + See :ref:`Saving APNG sequences`. + """ + OP_BACKGROUND = 1 + """ + This frame’s modified region is cleared to fully transparent black before rendering + the next frame. + See :ref:`Saving APNG sequences`. + """ + OP_PREVIOUS = 2 + """ + This frame’s modified region is reverted to the previous frame’s contents before + rendering the next frame. + See :ref:`Saving APNG sequences`. + """ + + +# APNG frame blend modes +class Blend(IntEnum): + OP_SOURCE = 0 + """ + All color components of this frame, including alpha, overwrite the previous output + image contents. + See :ref:`Saving APNG sequences`. 
+ """ + OP_OVER = 1 + """ + This frame should be alpha composited with the previous output image contents. + See :ref:`Saving APNG sequences`. + """ + + +def _safe_zlib_decompress(s): + dobj = zlib.decompressobj() + plaintext = dobj.decompress(s, MAX_TEXT_CHUNK) + if dobj.unconsumed_tail: + msg = "Decompressed Data Too Large" + raise ValueError(msg) + return plaintext + + +def _crc32(data, seed=0): + return zlib.crc32(data, seed) & 0xFFFFFFFF + + +# -------------------------------------------------------------------- +# Support classes. Suitable for PNG and related formats like MNG etc. + + +class ChunkStream: + def __init__(self, fp: IO[bytes]) -> None: + self.fp: IO[bytes] | None = fp + self.queue: list[tuple[bytes, int, int]] | None = [] + + def read(self) -> tuple[bytes, int, int]: + """Fetch a new chunk. Returns header information.""" + cid = None + + assert self.fp is not None + if self.queue: + cid, pos, length = self.queue.pop() + self.fp.seek(pos) + else: + s = self.fp.read(8) + cid = s[4:] + pos = self.fp.tell() + length = i32(s) + + if not is_cid(cid): + if not ImageFile.LOAD_TRUNCATED_IMAGES: + msg = f"broken PNG file (chunk {repr(cid)})" + raise SyntaxError(msg) + + return cid, pos, length + + def __enter__(self) -> ChunkStream: + return self + + def __exit__(self, *args: object) -> None: + self.close() + + def close(self) -> None: + self.queue = self.fp = None + + def push(self, cid: bytes, pos: int, length: int) -> None: + assert self.queue is not None + self.queue.append((cid, pos, length)) + + def call(self, cid, pos, length): + """Call the appropriate chunk handler""" + + logger.debug("STREAM %r %s %s", cid, pos, length) + return getattr(self, f"chunk_{cid.decode('ascii')}")(pos, length) + + def crc(self, cid: bytes, data: bytes) -> None: + """Read and verify checksum""" + + # Skip CRC checks for ancillary chunks if allowed to load truncated + # images + # 5th byte of first char is 1 [specs, section 5.4] + if ImageFile.LOAD_TRUNCATED_IMAGES and (cid[0] >> 5 & 1): + self.crc_skip(cid, data) + return + + assert self.fp is not None + try: + crc1 = _crc32(data, _crc32(cid)) + crc2 = i32(self.fp.read(4)) + if crc1 != crc2: + msg = f"broken PNG file (bad header checksum in {repr(cid)})" + raise SyntaxError(msg) + except struct.error as e: + msg = f"broken PNG file (incomplete checksum in {repr(cid)})" + raise SyntaxError(msg) from e + + def crc_skip(self, cid: bytes, data: bytes) -> None: + """Read checksum""" + + assert self.fp is not None + self.fp.read(4) + + def verify(self, endchunk: bytes = b"IEND") -> list[bytes]: + # Simple approach; just calculate checksum for all remaining + # blocks. Must be called directly after open. 
+ + cids = [] + + while True: + try: + cid, pos, length = self.read() + except struct.error as e: + msg = "truncated PNG file" + raise OSError(msg) from e + + if cid == endchunk: + break + self.crc(cid, ImageFile._safe_read(self.fp, length)) + cids.append(cid) + + return cids + + +class iTXt(str): + """ + Subclass of string to allow iTXt chunks to look like strings while + keeping their extra information + + """ + + lang: str | bytes | None + tkey: str | bytes | None + + @staticmethod + def __new__(cls, text, lang=None, tkey=None): + """ + :param cls: the class to use when creating the instance + :param text: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + """ + + self = str.__new__(cls, text) + self.lang = lang + self.tkey = tkey + return self + + +class PngInfo: + """ + PNG chunk container (for use with save(pnginfo=)) + + """ + + def __init__(self) -> None: + self.chunks: list[tuple[bytes, bytes, bool]] = [] + + def add(self, cid: bytes, data: bytes, after_idat: bool = False) -> None: + """Appends an arbitrary chunk. Use with caution. + + :param cid: a byte string, 4 bytes long. + :param data: a byte string of the encoded data + :param after_idat: for use with private chunks. Whether the chunk + should be written after IDAT + + """ + + self.chunks.append((cid, data, after_idat)) + + def add_itxt( + self, + key: str | bytes, + value: str | bytes, + lang: str | bytes = "", + tkey: str | bytes = "", + zip: bool = False, + ) -> None: + """Appends an iTXt chunk. + + :param key: latin-1 encodable text key name + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + :param zip: compression flag + + """ + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + if not isinstance(value, bytes): + value = value.encode("utf-8", "strict") + if not isinstance(lang, bytes): + lang = lang.encode("utf-8", "strict") + if not isinstance(tkey, bytes): + tkey = tkey.encode("utf-8", "strict") + + if zip: + self.add( + b"iTXt", + key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + zlib.compress(value), + ) + else: + self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + value) + + def add_text( + self, key: str | bytes, value: str | bytes | iTXt, zip: bool = False + ) -> None: + """Appends a text chunk. 
+ + :param key: latin-1 encodable text key name + :param value: value for this key, text or an + :py:class:`PIL.PngImagePlugin.iTXt` instance + :param zip: compression flag + + """ + if isinstance(value, iTXt): + return self.add_itxt( + key, + value, + value.lang if value.lang is not None else b"", + value.tkey if value.tkey is not None else b"", + zip=zip, + ) + + # The tEXt chunk stores latin-1 text + if not isinstance(value, bytes): + try: + value = value.encode("latin-1", "strict") + except UnicodeError: + return self.add_itxt(key, value, zip=zip) + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + + if zip: + self.add(b"zTXt", key + b"\0\0" + zlib.compress(value)) + else: + self.add(b"tEXt", key + b"\0" + value) + + +# -------------------------------------------------------------------- +# PNG image stream (IHDR/IEND) + + +class PngStream(ChunkStream): + def __init__(self, fp): + super().__init__(fp) + + # local copies of Image attributes + self.im_info = {} + self.im_text = {} + self.im_size = (0, 0) + self.im_mode = None + self.im_tile = None + self.im_palette = None + self.im_custom_mimetype = None + self.im_n_frames = None + self._seq_num = None + self.rewind_state = None + + self.text_memory = 0 + + def check_text_memory(self, chunklen: int) -> None: + self.text_memory += chunklen + if self.text_memory > MAX_TEXT_MEMORY: + msg = ( + "Too much memory used in text chunks: " + f"{self.text_memory}>MAX_TEXT_MEMORY" + ) + raise ValueError(msg) + + def save_rewind(self) -> None: + self.rewind_state = { + "info": self.im_info.copy(), + "tile": self.im_tile, + "seq_num": self._seq_num, + } + + def rewind(self) -> None: + self.im_info = self.rewind_state["info"].copy() + self.im_tile = self.rewind_state["tile"] + self._seq_num = self.rewind_state["seq_num"] + + def chunk_iCCP(self, pos: int, length: int) -> bytes: + # ICC profile + s = ImageFile._safe_read(self.fp, length) + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + i = s.find(b"\0") + logger.debug("iCCP profile name %r", s[:i]) + comp_method = s[i + 1] + logger.debug("Compression method %s", comp_method) + if comp_method != 0: + msg = f"Unknown compression method {comp_method} in iCCP chunk" + raise SyntaxError(msg) + try: + icc_profile = _safe_zlib_decompress(s[i + 2 :]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + icc_profile = None + else: + raise + except zlib.error: + icc_profile = None # FIXME + self.im_info["icc_profile"] = icc_profile + return s + + def chunk_IHDR(self, pos: int, length: int) -> bytes: + # image header + s = ImageFile._safe_read(self.fp, length) + if length < 13: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + msg = "Truncated IHDR chunk" + raise ValueError(msg) + self.im_size = i32(s, 0), i32(s, 4) + try: + self.im_mode, self.im_rawmode = _MODES[(s[8], s[9])] + except Exception: + pass + if s[12]: + self.im_info["interlace"] = 1 + if s[11]: + msg = "unknown filter category" + raise SyntaxError(msg) + return s + + def chunk_IDAT(self, pos: int, length: int) -> NoReturn: + # image data + if "bbox" in self.im_info: + tile = [("zip", self.im_info["bbox"], pos, self.im_rawmode)] + else: + if self.im_n_frames is not None: + self.im_info["default_image"] = True + tile = [("zip", (0, 0) + self.im_size, pos, self.im_rawmode)] + self.im_tile = tile + self.im_idat = length + msg = 
"image data found" + raise EOFError(msg) + + def chunk_IEND(self, pos: int, length: int) -> NoReturn: + msg = "end of PNG image" + raise EOFError(msg) + + def chunk_PLTE(self, pos: int, length: int) -> bytes: + # palette + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + self.im_palette = "RGB", s + return s + + def chunk_tRNS(self, pos: int, length: int) -> bytes: + # transparency + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + if _simple_palette.match(s): + # tRNS contains only one full-transparent entry, + # other entries are full opaque + i = s.find(b"\0") + if i >= 0: + self.im_info["transparency"] = i + else: + # otherwise, we have a byte string with one alpha value + # for each palette entry + self.im_info["transparency"] = s + elif self.im_mode in ("1", "L", "I;16"): + self.im_info["transparency"] = i16(s) + elif self.im_mode == "RGB": + self.im_info["transparency"] = i16(s), i16(s, 2), i16(s, 4) + return s + + def chunk_gAMA(self, pos: int, length: int) -> bytes: + # gamma setting + s = ImageFile._safe_read(self.fp, length) + self.im_info["gamma"] = i32(s) / 100000.0 + return s + + def chunk_cHRM(self, pos: int, length: int) -> bytes: + # chromaticity, 8 unsigned ints, actual value is scaled by 100,000 + # WP x,y, Red x,y, Green x,y Blue x,y + + s = ImageFile._safe_read(self.fp, length) + raw_vals = struct.unpack(">%dI" % (len(s) // 4), s) + self.im_info["chromaticity"] = tuple(elt / 100000.0 for elt in raw_vals) + return s + + def chunk_sRGB(self, pos: int, length: int) -> bytes: + # srgb rendering intent, 1 byte + # 0 perceptual + # 1 relative colorimetric + # 2 saturation + # 3 absolute colorimetric + + s = ImageFile._safe_read(self.fp, length) + if length < 1: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + msg = "Truncated sRGB chunk" + raise ValueError(msg) + self.im_info["srgb"] = s[0] + return s + + def chunk_pHYs(self, pos: int, length: int) -> bytes: + # pixels per unit + s = ImageFile._safe_read(self.fp, length) + if length < 9: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + msg = "Truncated pHYs chunk" + raise ValueError(msg) + px, py = i32(s, 0), i32(s, 4) + unit = s[8] + if unit == 1: # meter + dpi = px * 0.0254, py * 0.0254 + self.im_info["dpi"] = dpi + elif unit == 0: + self.im_info["aspect"] = px, py + return s + + def chunk_tEXt(self, pos: int, length: int) -> bytes: + # text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + # fallback for broken tEXt tags + k = s + v = b"" + if k: + k = k.decode("latin-1", "strict") + v_str = v.decode("latin-1", "replace") + + self.im_info[k] = v if k == "exif" else v_str + self.im_text[k] = v_str + self.check_text_memory(len(v_str)) + + return s + + def chunk_zTXt(self, pos: int, length: int) -> bytes: + # compressed text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + k = s + v = b"" + if v: + comp_method = v[0] + else: + comp_method = 0 + if comp_method != 0: + msg = f"Unknown compression method {comp_method} in zTXt chunk" + raise SyntaxError(msg) + try: + v = _safe_zlib_decompress(v[1:]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + v = b"" + else: + raise + except zlib.error: + v = b"" + + if k: + k = k.decode("latin-1", "strict") + v = v.decode("latin-1", "replace") + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_iTXt(self, pos: int, length: int) -> bytes: + # international text + r = s = 
ImageFile._safe_read(self.fp, length) + try: + k, r = r.split(b"\0", 1) + except ValueError: + return s + if len(r) < 2: + return s + cf, cm, r = r[0], r[1], r[2:] + try: + lang, tk, v = r.split(b"\0", 2) + except ValueError: + return s + if cf != 0: + if cm == 0: + try: + v = _safe_zlib_decompress(v) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + else: + raise + except zlib.error: + return s + else: + return s + if k == b"XML:com.adobe.xmp": + self.im_info["xmp"] = v + try: + k = k.decode("latin-1", "strict") + lang = lang.decode("utf-8", "strict") + tk = tk.decode("utf-8", "strict") + v = v.decode("utf-8", "strict") + except UnicodeError: + return s + + self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk) + self.check_text_memory(len(v)) + + return s + + def chunk_eXIf(self, pos: int, length: int) -> bytes: + s = ImageFile._safe_read(self.fp, length) + self.im_info["exif"] = b"Exif\x00\x00" + s + return s + + # APNG chunks + def chunk_acTL(self, pos: int, length: int) -> bytes: + s = ImageFile._safe_read(self.fp, length) + if length < 8: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + msg = "APNG contains truncated acTL chunk" + raise ValueError(msg) + if self.im_n_frames is not None: + self.im_n_frames = None + warnings.warn("Invalid APNG, will use default PNG image if possible") + return s + n_frames = i32(s) + if n_frames == 0 or n_frames > 0x80000000: + warnings.warn("Invalid APNG, will use default PNG image if possible") + return s + self.im_n_frames = n_frames + self.im_info["loop"] = i32(s, 4) + self.im_custom_mimetype = "image/apng" + return s + + def chunk_fcTL(self, pos: int, length: int) -> bytes: + s = ImageFile._safe_read(self.fp, length) + if length < 26: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + msg = "APNG contains truncated fcTL chunk" + raise ValueError(msg) + seq = i32(s) + if (self._seq_num is None and seq != 0) or ( + self._seq_num is not None and self._seq_num != seq - 1 + ): + msg = "APNG contains frame sequence errors" + raise SyntaxError(msg) + self._seq_num = seq + width, height = i32(s, 4), i32(s, 8) + px, py = i32(s, 12), i32(s, 16) + im_w, im_h = self.im_size + if px + width > im_w or py + height > im_h: + msg = "APNG contains invalid frames" + raise SyntaxError(msg) + self.im_info["bbox"] = (px, py, px + width, py + height) + delay_num, delay_den = i16(s, 20), i16(s, 22) + if delay_den == 0: + delay_den = 100 + self.im_info["duration"] = float(delay_num) / float(delay_den) * 1000 + self.im_info["disposal"] = s[24] + self.im_info["blend"] = s[25] + return s + + def chunk_fdAT(self, pos: int, length: int) -> bytes: + if length < 4: + if ImageFile.LOAD_TRUNCATED_IMAGES: + s = ImageFile._safe_read(self.fp, length) + return s + msg = "APNG contains truncated fDAT chunk" + raise ValueError(msg) + s = ImageFile._safe_read(self.fp, 4) + seq = i32(s) + if self._seq_num != seq - 1: + msg = "APNG contains frame sequence errors" + raise SyntaxError(msg) + self._seq_num = seq + return self.chunk_IDAT(pos + 4, length - 4) + + +# -------------------------------------------------------------------- +# PNG reader + + +def _accept(prefix: bytes) -> bool: + return prefix[:8] == _MAGIC + + +## +# Image plugin for PNG images. 
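+# A minimal reader-side sketch (illustrative only; the plugin is normally
+# reached through the public ``Image.open`` API rather than by instantiating
+# the class below directly, and the file name is a placeholder):
+#
+#     >>> from PIL import Image
+#     >>> im = Image.open("example.png")   # placeholder path
+#     >>> im.info.get("dpi")               # from the pHYs chunk, if present
+#     >>> im.info.get("gamma")             # from the gAMA chunk, if present
+#     >>> im.text                          # tEXt/zTXt/iTXt values gathered by PngStream
+#     >>> if im.is_animated:               # True for APNG files with more than one frame
+#     ...     im.seek(im.n_frames - 1)     # frames are addressed with seek()/tell()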
+ + +class PngImageFile(ImageFile.ImageFile): + format = "PNG" + format_description = "Portable network graphics" + + def _open(self) -> None: + if not _accept(self.fp.read(8)): + msg = "not a PNG file" + raise SyntaxError(msg) + self._fp = self.fp + self.__frame = 0 + + # + # Parse headers up to the first IDAT or fDAT chunk + + self.private_chunks: list[tuple[bytes, bytes] | tuple[bytes, bytes, bool]] = [] + self.png: PngStream | None = PngStream(self.fp) + + while True: + # + # get next chunk + + cid, pos, length = self.png.read() + + try: + s = self.png.call(cid, pos, length) + except EOFError: + break + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + s = ImageFile._safe_read(self.fp, length) + if cid[1:2].islower(): + self.private_chunks.append((cid, s)) + + self.png.crc(cid, s) + + # + # Copy relevant attributes from the PngStream. An alternative + # would be to let the PngStream class modify these attributes + # directly, but that introduces circular references which are + # difficult to break if things go wrong in the decoder... + # (believe me, I've tried ;-) + + self._mode = self.png.im_mode + self._size = self.png.im_size + self.info = self.png.im_info + self._text = None + self.tile = self.png.im_tile + self.custom_mimetype = self.png.im_custom_mimetype + self.n_frames = self.png.im_n_frames or 1 + self.default_image = self.info.get("default_image", False) + + if self.png.im_palette: + rawmode, data = self.png.im_palette + self.palette = ImagePalette.raw(rawmode, data) + + if cid == b"fdAT": + self.__prepare_idat = length - 4 + else: + self.__prepare_idat = length # used by load_prepare() + + if self.png.im_n_frames is not None: + self._close_exclusive_fp_after_loading = False + self.png.save_rewind() + self.__rewind_idat = self.__prepare_idat + self.__rewind = self._fp.tell() + if self.default_image: + # IDAT chunk contains default image and not first animation frame + self.n_frames += 1 + self._seek(0) + self.is_animated = self.n_frames > 1 + + @property + def text(self): + # experimental + if self._text is None: + # iTxt, tEXt and zTXt chunks may appear at the end of the file + # So load the file to ensure that they are read + if self.is_animated: + frame = self.__frame + # for APNG, seek to the final frame before loading + self.seek(self.n_frames - 1) + self.load() + if self.is_animated: + self.seek(frame) + return self._text + + def verify(self) -> None: + """Verify PNG file""" + + if self.fp is None: + msg = "verify must be called directly after open" + raise RuntimeError(msg) + + # back up to beginning of IDAT block + self.fp.seek(self.tile[0][2] - 8) + + assert self.png is not None + self.png.verify() + self.png.close() + + if self._exclusive_fp: + self.fp.close() + self.fp = None + + def seek(self, frame: int) -> None: + if not self._seek_check(frame): + return + if frame < self.__frame: + self._seek(0, True) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError as e: + self.seek(last_frame) + msg = "no more images in APNG file" + raise EOFError(msg) from e + + def _seek(self, frame: int, rewind: bool = False) -> None: + assert self.png is not None + + self.dispose: _imaging.ImagingCore | None + if frame == 0: + if rewind: + self._fp.seek(self.__rewind) + self.png.rewind() + self.__prepare_idat = self.__rewind_idat + self.im = None + if self.pyaccess: + self.pyaccess = None + self.info = self.png.im_info + self.tile = self.png.im_tile + self.fp = self._fp + self._prev_im = 
None + self.dispose = None + self.default_image = self.info.get("default_image", False) + self.dispose_op = self.info.get("disposal") + self.blend_op = self.info.get("blend") + self.dispose_extent = self.info.get("bbox") + self.__frame = 0 + else: + if frame != self.__frame + 1: + msg = f"cannot seek to frame {frame}" + raise ValueError(msg) + + # ensure previous frame was loaded + self.load() + + if self.dispose: + self.im.paste(self.dispose, self.dispose_extent) + self._prev_im = self.im.copy() + + self.fp = self._fp + + # advance to the next frame + if self.__prepare_idat: + ImageFile._safe_read(self.fp, self.__prepare_idat) + self.__prepare_idat = 0 + frame_start = False + while True: + self.fp.read(4) # CRC + + try: + cid, pos, length = self.png.read() + except (struct.error, SyntaxError): + break + + if cid == b"IEND": + msg = "No more images in APNG file" + raise EOFError(msg) + if cid == b"fcTL": + if frame_start: + # there must be at least one fdAT chunk between fcTL chunks + msg = "APNG missing frame data" + raise SyntaxError(msg) + frame_start = True + + try: + self.png.call(cid, pos, length) + except UnicodeDecodeError: + break + except EOFError: + if cid == b"fdAT": + length -= 4 + if frame_start: + self.__prepare_idat = length + break + ImageFile._safe_read(self.fp, length) + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + ImageFile._safe_read(self.fp, length) + + self.__frame = frame + self.tile = self.png.im_tile + self.dispose_op = self.info.get("disposal") + self.blend_op = self.info.get("blend") + self.dispose_extent = self.info.get("bbox") + + if not self.tile: + msg = "image not found in APNG frame" + raise EOFError(msg) + + # setup frame disposal (actual disposal done when needed in the next _seek()) + if self._prev_im is None and self.dispose_op == Disposal.OP_PREVIOUS: + self.dispose_op = Disposal.OP_BACKGROUND + + self.dispose = None + if self.dispose_op == Disposal.OP_PREVIOUS: + if self._prev_im: + self.dispose = self._prev_im.copy() + self.dispose = self._crop(self.dispose, self.dispose_extent) + elif self.dispose_op == Disposal.OP_BACKGROUND: + self.dispose = Image.core.fill(self.mode, self.size) + self.dispose = self._crop(self.dispose, self.dispose_extent) + + def tell(self) -> int: + return self.__frame + + def load_prepare(self) -> None: + """internal: prepare to read PNG file""" + + if self.info.get("interlace"): + self.decoderconfig = self.decoderconfig + (1,) + + self.__idat = self.__prepare_idat # used by load_read() + ImageFile.ImageFile.load_prepare(self) + + def load_read(self, read_bytes: int) -> bytes: + """internal: read more image data""" + + assert self.png is not None + while self.__idat == 0: + # end of chunk, skip forward to next one + + self.fp.read(4) # CRC + + cid, pos, length = self.png.read() + + if cid not in [b"IDAT", b"DDAT", b"fdAT"]: + self.png.push(cid, pos, length) + return b"" + + if cid == b"fdAT": + try: + self.png.call(cid, pos, length) + except EOFError: + pass + self.__idat = length - 4 # sequence_num has already been read + else: + self.__idat = length # empty chunks are allowed + + # read more data from this chunk + if read_bytes <= 0: + read_bytes = self.__idat + else: + read_bytes = min(read_bytes, self.__idat) + + self.__idat = self.__idat - read_bytes + + return self.fp.read(read_bytes) + + def load_end(self) -> None: + """internal: finished reading image data""" + assert self.png is not None + if self.__idat != 0: + self.fp.read(self.__idat) + while True: + self.fp.read(4) # CRC + + 
try: + cid, pos, length = self.png.read() + except (struct.error, SyntaxError): + break + + if cid == b"IEND": + break + elif cid == b"fcTL" and self.is_animated: + # start of the next frame, stop reading + self.__prepare_idat = 0 + self.png.push(cid, pos, length) + break + + try: + self.png.call(cid, pos, length) + except UnicodeDecodeError: + break + except EOFError: + if cid == b"fdAT": + length -= 4 + try: + ImageFile._safe_read(self.fp, length) + except OSError as e: + if ImageFile.LOAD_TRUNCATED_IMAGES: + break + else: + raise e + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + s = ImageFile._safe_read(self.fp, length) + if cid[1:2].islower(): + self.private_chunks.append((cid, s, True)) + self._text = self.png.im_text + if not self.is_animated: + self.png.close() + self.png = None + else: + if self._prev_im and self.blend_op == Blend.OP_OVER: + updated = self._crop(self.im, self.dispose_extent) + if self.im.mode == "RGB" and "transparency" in self.info: + mask = updated.convert_transparent( + "RGBA", self.info["transparency"] + ) + else: + mask = updated.convert("RGBA") + self._prev_im.paste(updated, self.dispose_extent, mask) + self.im = self._prev_im + if self.pyaccess: + self.pyaccess = None + + def _getexif(self) -> dict[str, Any] | None: + if "exif" not in self.info: + self.load() + if "exif" not in self.info and "Raw profile type exif" not in self.info: + return None + return self.getexif()._get_merged_dict() + + def getexif(self) -> Image.Exif: + if "exif" not in self.info: + self.load() + + return super().getexif() + + +# -------------------------------------------------------------------- +# PNG writer + +_OUTMODES = { + # supported PIL modes, and corresponding rawmode, bit depth and color type + "1": ("1", b"\x01", b"\x00"), + "L;1": ("L;1", b"\x01", b"\x00"), + "L;2": ("L;2", b"\x02", b"\x00"), + "L;4": ("L;4", b"\x04", b"\x00"), + "L": ("L", b"\x08", b"\x00"), + "LA": ("LA", b"\x08", b"\x04"), + "I": ("I;16B", b"\x10", b"\x00"), + "I;16": ("I;16B", b"\x10", b"\x00"), + "I;16B": ("I;16B", b"\x10", b"\x00"), + "P;1": ("P;1", b"\x01", b"\x03"), + "P;2": ("P;2", b"\x02", b"\x03"), + "P;4": ("P;4", b"\x04", b"\x03"), + "P": ("P", b"\x08", b"\x03"), + "RGB": ("RGB", b"\x08", b"\x02"), + "RGBA": ("RGBA", b"\x08", b"\x06"), +} + + +def putchunk(fp, cid, *data): + """Write a PNG chunk (including CRC field)""" + + data = b"".join(data) + + fp.write(o32(len(data)) + cid) + fp.write(data) + crc = _crc32(data, _crc32(cid)) + fp.write(o32(crc)) + + +class _idat: + # wrap output from the encoder in IDAT chunks + + def __init__(self, fp, chunk): + self.fp = fp + self.chunk = chunk + + def write(self, data: bytes) -> None: + self.chunk(self.fp, b"IDAT", data) + + +class _fdat: + # wrap encoder output in fdAT chunks + + def __init__(self, fp, chunk, seq_num): + self.fp = fp + self.chunk = chunk + self.seq_num = seq_num + + def write(self, data: bytes) -> None: + self.chunk(self.fp, b"fdAT", o32(self.seq_num), data) + self.seq_num += 1 + + +def _write_multiple_frames(im, fp, chunk, mode, rawmode, default_image, append_images): + duration = im.encoderinfo.get("duration") + loop = im.encoderinfo.get("loop", im.info.get("loop", 0)) + disposal = im.encoderinfo.get("disposal", im.info.get("disposal", Disposal.OP_NONE)) + blend = im.encoderinfo.get("blend", im.info.get("blend", Blend.OP_SOURCE)) + + if default_image: + chain = itertools.chain(append_images) + else: + chain = itertools.chain([im], append_images) + + im_frames = [] + frame_count = 0 + for im_seq in 
chain: + for im_frame in ImageSequence.Iterator(im_seq): + if im_frame.mode == mode: + im_frame = im_frame.copy() + else: + im_frame = im_frame.convert(mode) + encoderinfo = im.encoderinfo.copy() + if isinstance(duration, (list, tuple)): + encoderinfo["duration"] = duration[frame_count] + elif duration is None and "duration" in im_frame.info: + encoderinfo["duration"] = im_frame.info["duration"] + if isinstance(disposal, (list, tuple)): + encoderinfo["disposal"] = disposal[frame_count] + if isinstance(blend, (list, tuple)): + encoderinfo["blend"] = blend[frame_count] + frame_count += 1 + + if im_frames: + previous = im_frames[-1] + prev_disposal = previous["encoderinfo"].get("disposal") + prev_blend = previous["encoderinfo"].get("blend") + if prev_disposal == Disposal.OP_PREVIOUS and len(im_frames) < 2: + prev_disposal = Disposal.OP_BACKGROUND + + if prev_disposal == Disposal.OP_BACKGROUND: + base_im = previous["im"].copy() + dispose = Image.core.fill("RGBA", im.size, (0, 0, 0, 0)) + bbox = previous["bbox"] + if bbox: + dispose = dispose.crop(bbox) + else: + bbox = (0, 0) + im.size + base_im.paste(dispose, bbox) + elif prev_disposal == Disposal.OP_PREVIOUS: + base_im = im_frames[-2]["im"] + else: + base_im = previous["im"] + delta = ImageChops.subtract_modulo( + im_frame.convert("RGBA"), base_im.convert("RGBA") + ) + bbox = delta.getbbox(alpha_only=False) + if ( + not bbox + and prev_disposal == encoderinfo.get("disposal") + and prev_blend == encoderinfo.get("blend") + and "duration" in encoderinfo + ): + previous["encoderinfo"]["duration"] += encoderinfo["duration"] + continue + else: + bbox = None + im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo}) + + if len(im_frames) == 1 and not default_image: + return im_frames[0]["im"] + + # animation control + chunk( + fp, + b"acTL", + o32(len(im_frames)), # 0: num_frames + o32(loop), # 4: num_plays + ) + + # default image IDAT (if it exists) + if default_image: + if im.mode != mode: + im = im.convert(mode) + ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)]) + + seq_num = 0 + for frame, frame_data in enumerate(im_frames): + im_frame = frame_data["im"] + if not frame_data["bbox"]: + bbox = (0, 0) + im_frame.size + else: + bbox = frame_data["bbox"] + im_frame = im_frame.crop(bbox) + size = im_frame.size + encoderinfo = frame_data["encoderinfo"] + frame_duration = int(round(encoderinfo.get("duration", 0))) + frame_disposal = encoderinfo.get("disposal", disposal) + frame_blend = encoderinfo.get("blend", blend) + # frame control + chunk( + fp, + b"fcTL", + o32(seq_num), # sequence_number + o32(size[0]), # width + o32(size[1]), # height + o32(bbox[0]), # x_offset + o32(bbox[1]), # y_offset + o16(frame_duration), # delay_numerator + o16(1000), # delay_denominator + o8(frame_disposal), # dispose_op + o8(frame_blend), # blend_op + ) + seq_num += 1 + # frame data + if frame == 0 and not default_image: + # first frame must be in IDAT chunks for backwards compatibility + ImageFile._save( + im_frame, + _idat(fp, chunk), + [("zip", (0, 0) + im_frame.size, 0, rawmode)], + ) + else: + fdat_chunks = _fdat(fp, chunk, seq_num) + ImageFile._save( + im_frame, + fdat_chunks, + [("zip", (0, 0) + im_frame.size, 0, rawmode)], + ) + seq_num = fdat_chunks.seq_num + + +def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + _save(im, fp, filename, save_all=True) + + +def _save(im, fp, filename, chunk=putchunk, save_all=False): + # save an image to disk (called by the save method) + + if save_all: 
+ default_image = im.encoderinfo.get( + "default_image", im.info.get("default_image") + ) + modes = set() + sizes = set() + append_images = im.encoderinfo.get("append_images", []) + for im_seq in itertools.chain([im], append_images): + for im_frame in ImageSequence.Iterator(im_seq): + modes.add(im_frame.mode) + sizes.add(im_frame.size) + for mode in ("RGBA", "RGB", "P"): + if mode in modes: + break + else: + mode = modes.pop() + size = tuple(max(frame_size[i] for frame_size in sizes) for i in range(2)) + else: + size = im.size + mode = im.mode + + outmode = mode + if mode == "P": + # + # attempt to minimize storage requirements for palette images + if "bits" in im.encoderinfo: + # number of bits specified by user + colors = min(1 << im.encoderinfo["bits"], 256) + else: + # check palette contents + if im.palette: + colors = max(min(len(im.palette.getdata()[1]) // 3, 256), 1) + else: + colors = 256 + + if colors <= 16: + if colors <= 2: + bits = 1 + elif colors <= 4: + bits = 2 + else: + bits = 4 + outmode += f";{bits}" + + # encoder options + im.encoderconfig = ( + im.encoderinfo.get("optimize", False), + im.encoderinfo.get("compress_level", -1), + im.encoderinfo.get("compress_type", -1), + im.encoderinfo.get("dictionary", b""), + ) + + # get the corresponding PNG mode + try: + rawmode, bit_depth, color_type = _OUTMODES[outmode] + except KeyError as e: + msg = f"cannot write mode {mode} as PNG" + raise OSError(msg) from e + + # + # write minimal PNG file + + fp.write(_MAGIC) + + chunk( + fp, + b"IHDR", + o32(size[0]), # 0: size + o32(size[1]), + bit_depth, + color_type, + b"\0", # 10: compression + b"\0", # 11: filter category + b"\0", # 12: interlace flag + ) + + chunks = [b"cHRM", b"gAMA", b"sBIT", b"sRGB", b"tIME"] + + icc = im.encoderinfo.get("icc_profile", im.info.get("icc_profile")) + if icc: + # ICC profile + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + name = b"ICC Profile" + data = name + b"\0\0" + zlib.compress(icc) + chunk(fp, b"iCCP", data) + + # You must either have sRGB or iCCP. + # Disallow sRGB chunks when an iCCP-chunk has been emitted. 
+ chunks.remove(b"sRGB") + + info = im.encoderinfo.get("pnginfo") + if info: + chunks_multiple_allowed = [b"sPLT", b"iTXt", b"tEXt", b"zTXt"] + for info_chunk in info.chunks: + cid, data = info_chunk[:2] + if cid in chunks: + chunks.remove(cid) + chunk(fp, cid, data) + elif cid in chunks_multiple_allowed: + chunk(fp, cid, data) + elif cid[1:2].islower(): + # Private chunk + after_idat = len(info_chunk) == 3 and info_chunk[2] + if not after_idat: + chunk(fp, cid, data) + + if im.mode == "P": + palette_byte_number = colors * 3 + palette_bytes = im.im.getpalette("RGB")[:palette_byte_number] + while len(palette_bytes) < palette_byte_number: + palette_bytes += b"\0" + chunk(fp, b"PLTE", palette_bytes) + + transparency = im.encoderinfo.get("transparency", im.info.get("transparency", None)) + + if transparency or transparency == 0: + if im.mode == "P": + # limit to actual palette size + alpha_bytes = colors + if isinstance(transparency, bytes): + chunk(fp, b"tRNS", transparency[:alpha_bytes]) + else: + transparency = max(0, min(255, transparency)) + alpha = b"\xFF" * transparency + b"\0" + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + elif im.mode in ("1", "L", "I", "I;16"): + transparency = max(0, min(65535, transparency)) + chunk(fp, b"tRNS", o16(transparency)) + elif im.mode == "RGB": + red, green, blue = transparency + chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue)) + else: + if "transparency" in im.encoderinfo: + # don't bother with transparency if it's an RGBA + # and it's in the info dict. It's probably just stale. + msg = "cannot use transparency for this mode" + raise OSError(msg) + else: + if im.mode == "P" and im.im.getpalettemode() == "RGBA": + alpha = im.im.getpalette("RGBA", "A") + alpha_bytes = colors + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + + dpi = im.encoderinfo.get("dpi") + if dpi: + chunk( + fp, + b"pHYs", + o32(int(dpi[0] / 0.0254 + 0.5)), + o32(int(dpi[1] / 0.0254 + 0.5)), + b"\x01", + ) + + if info: + chunks = [b"bKGD", b"hIST"] + for info_chunk in info.chunks: + cid, data = info_chunk[:2] + if cid in chunks: + chunks.remove(cid) + chunk(fp, cid, data) + + exif = im.encoderinfo.get("exif") + if exif: + if isinstance(exif, Image.Exif): + exif = exif.tobytes(8) + if exif.startswith(b"Exif\x00\x00"): + exif = exif[6:] + chunk(fp, b"eXIf", exif) + + if save_all: + im = _write_multiple_frames( + im, fp, chunk, mode, rawmode, default_image, append_images + ) + if im: + ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)]) + + if info: + for info_chunk in info.chunks: + cid, data = info_chunk[:2] + if cid[1:2].islower(): + # Private chunk + after_idat = len(info_chunk) == 3 and info_chunk[2] + if after_idat: + chunk(fp, cid, data) + + chunk(fp, b"IEND", b"") + + if hasattr(fp, "flush"): + fp.flush() + + +# -------------------------------------------------------------------- +# PNG chunk converter + + +def getchunks(im, **params): + """Return a list of PNG chunks representing this image.""" + + class collector: + data = [] + + def write(self, data: bytes) -> None: + pass + + def append(self, chunk: bytes) -> None: + self.data.append(chunk) + + def append(fp, cid, *data): + data = b"".join(data) + crc = o32(_crc32(data, _crc32(cid))) + fp.append((cid, data, crc)) + + fp = collector() + + try: + im.encoderinfo = params + _save(im, fp, None, append) + finally: + del im.encoderinfo + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(PngImageFile.format, PngImageFile, _accept) 
+Image.register_save(PngImageFile.format, _save) +Image.register_save_all(PngImageFile.format, _save_all) + +Image.register_extensions(PngImageFile.format, [".png", ".apng"]) + +Image.register_mime(PngImageFile.format, "image/png") diff --git a/MLPY/Lib/site-packages/PIL/PpmImagePlugin.py b/MLPY/Lib/site-packages/PIL/PpmImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..64112368b15dbe1d10e94ccc0b158b0b0362e5cb --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PpmImagePlugin.py @@ -0,0 +1,371 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PPM support for PIL +# +# History: +# 96-03-24 fl Created +# 98-03-06 fl Write RGBA images (as RGB, that is) +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import math +from typing import IO + +from . import Image, ImageFile +from ._binary import i16be as i16 +from ._binary import o8 +from ._binary import o32le as o32 + +# +# -------------------------------------------------------------------- + +b_whitespace = b"\x20\x09\x0a\x0b\x0c\x0d" + +MODES = { + # standard + b"P1": "1", + b"P2": "L", + b"P3": "RGB", + b"P4": "1", + b"P5": "L", + b"P6": "RGB", + # extensions + b"P0CMYK": "CMYK", + b"Pf": "F", + # PIL extensions (for test purposes only) + b"PyP": "P", + b"PyRGBA": "RGBA", + b"PyCMYK": "CMYK", +} + + +def _accept(prefix: bytes) -> bool: + return prefix[0:1] == b"P" and prefix[1] in b"0123456fy" + + +## +# Image plugin for PBM, PGM, and PPM images. + + +class PpmImageFile(ImageFile.ImageFile): + format = "PPM" + format_description = "Pbmplus image" + + def _read_magic(self) -> bytes: + assert self.fp is not None + + magic = b"" + # read until whitespace or longest available magic number + for _ in range(6): + c = self.fp.read(1) + if not c or c in b_whitespace: + break + magic += c + return magic + + def _read_token(self) -> bytes: + assert self.fp is not None + + token = b"" + while len(token) <= 10: # read until next whitespace or limit of 10 characters + c = self.fp.read(1) + if not c: + break + elif c in b_whitespace: # token ended + if not token: + # skip whitespace at start + continue + break + elif c == b"#": + # ignores rest of the line; stops at CR, LF or EOF + while self.fp.read(1) not in b"\r\n": + pass + continue + token += c + if not token: + # Token was not even 1 byte + msg = "Reached EOF while reading header" + raise ValueError(msg) + elif len(token) > 10: + msg = f"Token too long in file header: {token.decode()}" + raise ValueError(msg) + return token + + def _open(self) -> None: + assert self.fp is not None + + magic_number = self._read_magic() + try: + mode = MODES[magic_number] + except KeyError: + msg = "not a PPM file" + raise SyntaxError(msg) + self._mode = mode + + if magic_number in (b"P1", b"P4"): + self.custom_mimetype = "image/x-portable-bitmap" + elif magic_number in (b"P2", b"P5"): + self.custom_mimetype = "image/x-portable-graymap" + elif magic_number in (b"P3", b"P6"): + self.custom_mimetype = "image/x-portable-pixmap" + + self._size = int(self._read_token()), int(self._read_token()) + + decoder_name = "raw" + if magic_number in (b"P1", b"P2", b"P3"): + decoder_name = "ppm_plain" + + args: str | tuple[str | int, ...] 
+ if mode == "1": + args = "1;I" + elif mode == "F": + scale = float(self._read_token()) + if scale == 0.0 or not math.isfinite(scale): + msg = "scale must be finite and non-zero" + raise ValueError(msg) + self.info["scale"] = abs(scale) + + rawmode = "F;32F" if scale < 0 else "F;32BF" + args = (rawmode, 0, -1) + else: + maxval = int(self._read_token()) + if not 0 < maxval < 65536: + msg = "maxval must be greater than 0 and less than 65536" + raise ValueError(msg) + if maxval > 255 and mode == "L": + self._mode = "I" + + rawmode = mode + if decoder_name != "ppm_plain": + # If maxval matches a bit depth, use the raw decoder directly + if maxval == 65535 and mode == "L": + rawmode = "I;16B" + elif maxval != 255: + decoder_name = "ppm" + + args = rawmode if decoder_name == "raw" else (rawmode, maxval) + self.tile = [(decoder_name, (0, 0) + self.size, self.fp.tell(), args)] + + +# +# -------------------------------------------------------------------- + + +class PpmPlainDecoder(ImageFile.PyDecoder): + _pulls_fd = True + _comment_spans: bool + + def _read_block(self) -> bytes: + assert self.fd is not None + + return self.fd.read(ImageFile.SAFEBLOCK) + + def _find_comment_end(self, block: bytes, start: int = 0) -> int: + a = block.find(b"\n", start) + b = block.find(b"\r", start) + return min(a, b) if a * b > 0 else max(a, b) # lowest nonnegative index (or -1) + + def _ignore_comments(self, block: bytes) -> bytes: + if self._comment_spans: + # Finish current comment + while block: + comment_end = self._find_comment_end(block) + if comment_end != -1: + # Comment ends in this block + # Delete tail of comment + block = block[comment_end + 1 :] + break + else: + # Comment spans whole block + # So read the next block, looking for the end + block = self._read_block() + + # Search for any further comments + self._comment_spans = False + while True: + comment_start = block.find(b"#") + if comment_start == -1: + # No comment found + break + comment_end = self._find_comment_end(block, comment_start) + if comment_end != -1: + # Comment ends in this block + # Delete comment + block = block[:comment_start] + block[comment_end + 1 :] + else: + # Comment continues to next block(s) + block = block[:comment_start] + self._comment_spans = True + break + return block + + def _decode_bitonal(self) -> bytearray: + """ + This is a separate method because in the plain PBM format, all data tokens are + exactly one byte, so the inter-token whitespace is optional. 
+ """ + data = bytearray() + total_bytes = self.state.xsize * self.state.ysize + + while len(data) != total_bytes: + block = self._read_block() # read next block + if not block: + # eof + break + + block = self._ignore_comments(block) + + tokens = b"".join(block.split()) + for token in tokens: + if token not in (48, 49): + msg = b"Invalid token for this mode: %s" % bytes([token]) + raise ValueError(msg) + data = (data + tokens)[:total_bytes] + invert = bytes.maketrans(b"01", b"\xFF\x00") + return data.translate(invert) + + def _decode_blocks(self, maxval: int) -> bytearray: + data = bytearray() + max_len = 10 + out_byte_count = 4 if self.mode == "I" else 1 + out_max = 65535 if self.mode == "I" else 255 + bands = Image.getmodebands(self.mode) + total_bytes = self.state.xsize * self.state.ysize * bands * out_byte_count + + half_token = b"" + while len(data) != total_bytes: + block = self._read_block() # read next block + if not block: + if half_token: + block = bytearray(b" ") # flush half_token + else: + # eof + break + + block = self._ignore_comments(block) + + if half_token: + block = half_token + block # stitch half_token to new block + half_token = b"" + + tokens = block.split() + + if block and not block[-1:].isspace(): # block might split token + half_token = tokens.pop() # save half token for later + if len(half_token) > max_len: # prevent buildup of half_token + msg = ( + b"Token too long found in data: %s" % half_token[: max_len + 1] + ) + raise ValueError(msg) + + for token in tokens: + if len(token) > max_len: + msg = b"Token too long found in data: %s" % token[: max_len + 1] + raise ValueError(msg) + value = int(token) + if value < 0: + msg_str = f"Channel value is negative: {value}" + raise ValueError(msg_str) + if value > maxval: + msg_str = f"Channel value too large for this mode: {value}" + raise ValueError(msg_str) + value = round(value / maxval * out_max) + data += o32(value) if self.mode == "I" else o8(value) + if len(data) == total_bytes: # finished! 
+ break + return data + + def decode(self, buffer: bytes) -> tuple[int, int]: + self._comment_spans = False + if self.mode == "1": + data = self._decode_bitonal() + rawmode = "1;8" + else: + maxval = self.args[-1] + data = self._decode_blocks(maxval) + rawmode = "I;32" if self.mode == "I" else self.mode + self.set_as_raw(bytes(data), rawmode) + return -1, 0 + + +class PpmDecoder(ImageFile.PyDecoder): + _pulls_fd = True + + def decode(self, buffer: bytes) -> tuple[int, int]: + assert self.fd is not None + + data = bytearray() + maxval = self.args[-1] + in_byte_count = 1 if maxval < 256 else 2 + out_byte_count = 4 if self.mode == "I" else 1 + out_max = 65535 if self.mode == "I" else 255 + bands = Image.getmodebands(self.mode) + dest_length = self.state.xsize * self.state.ysize * bands * out_byte_count + while len(data) < dest_length: + pixels = self.fd.read(in_byte_count * bands) + if len(pixels) < in_byte_count * bands: + # eof + break + for b in range(bands): + value = ( + pixels[b] if in_byte_count == 1 else i16(pixels, b * in_byte_count) + ) + value = min(out_max, round(value / maxval * out_max)) + data += o32(value) if self.mode == "I" else o8(value) + rawmode = "I;32" if self.mode == "I" else self.mode + self.set_as_raw(bytes(data), rawmode) + return -1, 0 + + +# +# -------------------------------------------------------------------- + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if im.mode == "1": + rawmode, head = "1;I", b"P4" + elif im.mode == "L": + rawmode, head = "L", b"P5" + elif im.mode == "I": + rawmode, head = "I;16B", b"P5" + elif im.mode in ("RGB", "RGBA"): + rawmode, head = "RGB", b"P6" + elif im.mode == "F": + rawmode, head = "F;32F", b"Pf" + else: + msg = f"cannot write mode {im.mode} as PPM" + raise OSError(msg) + fp.write(head + b"\n%d %d\n" % im.size) + if head == b"P6": + fp.write(b"255\n") + elif head == b"P5": + if rawmode == "L": + fp.write(b"255\n") + else: + fp.write(b"65535\n") + elif head == b"Pf": + fp.write(b"-1.0\n") + row_order = -1 if im.mode == "F" else 1 + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, row_order))]) + + +# +# -------------------------------------------------------------------- + + +Image.register_open(PpmImageFile.format, PpmImageFile, _accept) +Image.register_save(PpmImageFile.format, _save) + +Image.register_decoder("ppm", PpmDecoder) +Image.register_decoder("ppm_plain", PpmPlainDecoder) + +Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm", ".pnm", ".pfm"]) + +Image.register_mime(PpmImageFile.format, "image/x-portable-anymap") diff --git a/MLPY/Lib/site-packages/PIL/PsdImagePlugin.py b/MLPY/Lib/site-packages/PIL/PsdImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..f0b99f1e6786cd2bc6238762a440fbd320fb21fe --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PsdImagePlugin.py @@ -0,0 +1,326 @@ +# +# The Python Imaging Library +# $Id$ +# +# Adobe PSD 2.5/3.0 file handling +# +# History: +# 1995-09-01 fl Created +# 1997-01-03 fl Read most PSD images +# 1997-01-18 fl Fixed P and CMYK support +# 2001-10-21 fl Added seek/tell support (for layers) +# +# Copyright (c) 1997-2001 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import io +from functools import cached_property + +from . 
import Image, ImageFile, ImagePalette +from ._binary import i8 +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import si16be as si16 +from ._binary import si32be as si32 + +MODES = { + # (photoshop mode, bits) -> (pil mode, required channels) + (0, 1): ("1", 1), + (0, 8): ("L", 1), + (1, 8): ("L", 1), + (2, 8): ("P", 1), + (3, 8): ("RGB", 3), + (4, 8): ("CMYK", 4), + (7, 8): ("L", 1), # FIXME: multilayer + (8, 8): ("L", 1), # duotone + (9, 8): ("LAB", 3), +} + + +# --------------------------------------------------------------------. +# read PSD images + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == b"8BPS" + + +## +# Image plugin for Photoshop images. + + +class PsdImageFile(ImageFile.ImageFile): + format = "PSD" + format_description = "Adobe Photoshop" + _close_exclusive_fp_after_loading = False + + def _open(self) -> None: + read = self.fp.read + + # + # header + + s = read(26) + if not _accept(s) or i16(s, 4) != 1: + msg = "not a PSD file" + raise SyntaxError(msg) + + psd_bits = i16(s, 22) + psd_channels = i16(s, 12) + psd_mode = i16(s, 24) + + mode, channels = MODES[(psd_mode, psd_bits)] + + if channels > psd_channels: + msg = "not enough channels" + raise OSError(msg) + if mode == "RGB" and psd_channels == 4: + mode = "RGBA" + channels = 4 + + self._mode = mode + self._size = i32(s, 18), i32(s, 14) + + # + # color mode data + + size = i32(read(4)) + if size: + data = read(size) + if mode == "P" and size == 768: + self.palette = ImagePalette.raw("RGB;L", data) + + # + # image resources + + self.resources = [] + + size = i32(read(4)) + if size: + # load resources + end = self.fp.tell() + size + while self.fp.tell() < end: + read(4) # signature + id = i16(read(2)) + name = read(i8(read(1))) + if not (len(name) & 1): + read(1) # padding + data = read(i32(read(4))) + if len(data) & 1: + read(1) # padding + self.resources.append((id, name, data)) + if id == 1039: # ICC profile + self.info["icc_profile"] = data + + # + # layer and mask information + + self._layers_position = None + + size = i32(read(4)) + if size: + end = self.fp.tell() + size + size = i32(read(4)) + if size: + self._layers_position = self.fp.tell() + self._layers_size = size + self.fp.seek(end) + self._n_frames: int | None = None + + # + # image descriptor + + self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels) + + # keep the file open + self._fp = self.fp + self.frame = 1 + self._min_frame = 1 + + @cached_property + def layers(self): + layers = [] + if self._layers_position is not None: + self._fp.seek(self._layers_position) + _layer_data = io.BytesIO(ImageFile._safe_read(self._fp, self._layers_size)) + layers = _layerinfo(_layer_data, self._layers_size) + self._n_frames = len(layers) + return layers + + @property + def n_frames(self) -> int: + if self._n_frames is None: + self._n_frames = len(self.layers) + return self._n_frames + + @property + def is_animated(self) -> bool: + return len(self.layers) > 1 + + def seek(self, layer: int) -> None: + if not self._seek_check(layer): + return + + # seek to given layer (1..max) + try: + _, mode, _, tile = self.layers[layer - 1] + self._mode = mode + self.tile = tile + self.frame = layer + self.fp = self._fp + except IndexError as e: + msg = "no such layer" + raise EOFError(msg) from e + + def tell(self) -> int: + # return layer number (0=image, 1..max=layers) + return self.frame + + +def _layerinfo(fp, ct_bytes): + # read layerinfo block + layers = [] + + def read(size): + return ImageFile._safe_read(fp, size) + 
+ ct = si16(read(2)) + + # sanity check + if ct_bytes < (abs(ct) * 20): + msg = "Layer block too short for number of layers requested" + raise SyntaxError(msg) + + for _ in range(abs(ct)): + # bounding box + y0 = si32(read(4)) + x0 = si32(read(4)) + y1 = si32(read(4)) + x1 = si32(read(4)) + + # image info + mode = [] + ct_types = i16(read(2)) + if ct_types > 4: + fp.seek(ct_types * 6 + 12, io.SEEK_CUR) + size = i32(read(4)) + fp.seek(size, io.SEEK_CUR) + continue + + for _ in range(ct_types): + type = i16(read(2)) + + if type == 65535: + m = "A" + else: + m = "RGBA"[type] + + mode.append(m) + read(4) # size + + # figure out the image mode + mode.sort() + if mode == ["R"]: + mode = "L" + elif mode == ["B", "G", "R"]: + mode = "RGB" + elif mode == ["A", "B", "G", "R"]: + mode = "RGBA" + else: + mode = None # unknown + + # skip over blend flags and extra information + read(12) # filler + name = "" + size = i32(read(4)) # length of the extra data field + if size: + data_end = fp.tell() + size + + length = i32(read(4)) + if length: + fp.seek(length - 16, io.SEEK_CUR) + + length = i32(read(4)) + if length: + fp.seek(length, io.SEEK_CUR) + + length = i8(read(1)) + if length: + # Don't know the proper encoding, + # Latin-1 should be a good guess + name = read(length).decode("latin-1", "replace") + + fp.seek(data_end) + layers.append((name, mode, (x0, y0, x1, y1))) + + # get tiles + for i, (name, mode, bbox) in enumerate(layers): + tile = [] + for m in mode: + t = _maketile(fp, m, bbox, 1) + if t: + tile.extend(t) + layers[i] = name, mode, bbox, tile + + return layers + + +def _maketile(file, mode, bbox, channels): + tile = None + read = file.read + + compression = i16(read(2)) + + xsize = bbox[2] - bbox[0] + ysize = bbox[3] - bbox[1] + + offset = file.tell() + + if compression == 0: + # + # raw compression + tile = [] + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append(("raw", bbox, offset, layer)) + offset = offset + xsize * ysize + + elif compression == 1: + # + # packbits compression + i = 0 + tile = [] + bytecount = read(channels * ysize * 2) + offset = file.tell() + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append(("packbits", bbox, offset, layer)) + for y in range(ysize): + offset = offset + i16(bytecount, i) + i += 2 + + file.seek(offset) + + if offset & 1: + read(1) # padding + + return tile + + +# -------------------------------------------------------------------- +# registry + + +Image.register_open(PsdImageFile.format, PsdImageFile, _accept) + +Image.register_extension(PsdImageFile.format, ".psd") + +Image.register_mime(PsdImageFile.format, "image/vnd.adobe.photoshop") diff --git a/MLPY/Lib/site-packages/PIL/PyAccess.py b/MLPY/Lib/site-packages/PIL/PyAccess.py new file mode 100644 index 0000000000000000000000000000000000000000..1c1bb31444443a0fd261fb33a2262775958c219c --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/PyAccess.py @@ -0,0 +1,381 @@ +# +# The Python Imaging Library +# Pillow fork +# +# Python implementation of the PixelAccess Object +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# Copyright (c) 2013 Eric Soroos +# +# See the README file for information on usage and redistribution +# + +# Notes: +# +# * Implements the pixel access object following Access.c +# * Taking only the tuple form, which is used from python. 
+# * Fill.c uses the integer form, but it's still going to use the old +# Access.c implementation. +# +from __future__ import annotations + +import logging +import sys +from typing import TYPE_CHECKING + +from ._deprecate import deprecate + +FFI: type +try: + from cffi import FFI + + defs = """ + struct Pixel_RGBA { + unsigned char r,g,b,a; + }; + struct Pixel_I16 { + unsigned char l,r; + }; + """ + ffi = FFI() + ffi.cdef(defs) +except ImportError as ex: + # Allow error import for doc purposes, but error out when accessing + # anything in core. + from ._util import DeferredError + + FFI = ffi = DeferredError.new(ex) + +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from . import Image + + +class PyAccess: + def __init__(self, img: Image.Image, readonly: bool = False) -> None: + deprecate("PyAccess", 11) + vals = dict(img.im.unsafe_ptrs) + self.readonly = readonly + self.image8 = ffi.cast("unsigned char **", vals["image8"]) + self.image32 = ffi.cast("int **", vals["image32"]) + self.image = ffi.cast("unsigned char **", vals["image"]) + self.xsize, self.ysize = img.im.size + self._img = img + + # Keep pointer to im object to prevent dereferencing. + self._im = img.im + if self._im.mode in ("P", "PA"): + self._palette = img.palette + + # Debugging is polluting test traces, only useful here + # when hacking on PyAccess + # logger.debug("%s", vals) + self._post_init() + + def _post_init(self) -> None: + pass + + def __setitem__( + self, + xy: tuple[int, int] | list[int], + color: float | tuple[int, ...] | list[int], + ) -> None: + """ + Modifies the pixel at x,y. The color is given as a single + numerical value for single band images, and a tuple for + multi-band images. In addition to this, RGB and RGBA tuples + are accepted for P and PA images. + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :param color: The pixel value. + """ + if self.readonly: + msg = "Attempt to putpixel a read only image" + raise ValueError(msg) + (x, y) = xy + if x < 0: + x = self.xsize + x + if y < 0: + y = self.ysize + y + (x, y) = self.check_xy((x, y)) + + if ( + self._im.mode in ("P", "PA") + and isinstance(color, (list, tuple)) + and len(color) in [3, 4] + ): + # RGB or RGBA value for a P or PA image + if self._im.mode == "PA": + alpha = color[3] if len(color) == 4 else 255 + color = color[:3] + palette_index = self._palette.getcolor(color, self._img) + color = (palette_index, alpha) if self._im.mode == "PA" else palette_index + + return self.set_pixel(x, y, color) + + def __getitem__(self, xy: tuple[int, int] | list[int]) -> float | tuple[int, ...]: + """ + Returns the pixel at x,y. The pixel is returned as a single + value for single band images or a tuple for multiple band + images + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :returns: a pixel value for single band images, a tuple of + pixel values for multiband images. + """ + (x, y) = xy + if x < 0: + x = self.xsize + x + if y < 0: + y = self.ysize + y + (x, y) = self.check_xy((x, y)) + return self.get_pixel(x, y) + + putpixel = __setitem__ + getpixel = __getitem__ + + def check_xy(self, xy: tuple[int, int]) -> tuple[int, int]: + (x, y) = xy + if not (0 <= x < self.xsize and 0 <= y < self.ysize): + msg = "pixel location out of range" + raise ValueError(msg) + return xy + + def get_pixel(self, x: int, y: int) -> float | tuple[int, ...]: + raise NotImplementedError() + + def set_pixel( + self, x: int, y: int, color: float | tuple[int, ...] 
| list[int] + ) -> None: + raise NotImplementedError() + + +class _PyAccess32_2(PyAccess): + """PA, LA, stored in first and last bytes of a 32 bit word""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x: int, y: int) -> tuple[int, int]: + pixel = self.pixels[y][x] + return pixel.r, pixel.a + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.a = min(color[1], 255) + + +class _PyAccess32_3(PyAccess): + """RGB and friends, stored in the first three bytes of a 32 bit word""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x: int, y: int) -> tuple[int, int, int]: + pixel = self.pixels[y][x] + return pixel.r, pixel.g, pixel.b + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = 255 + + +class _PyAccess32_4(PyAccess): + """RGBA etc, all 4 bytes of a 32 bit word""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x: int, y: int) -> tuple[int, int, int, int]: + pixel = self.pixels[y][x] + return pixel.r, pixel.g, pixel.b, pixel.a + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = min(color[3], 255) + + +class _PyAccess8(PyAccess): + """1, L, P, 8 bit images stored as uint8""" + + def _post_init(self, *args, **kwargs): + self.pixels = self.image8 + + def get_pixel(self, x: int, y: int) -> int: + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 255) + except TypeError: + # tuple + self.pixels[y][x] = min(color[0], 255) + + +class _PyAccessI16_N(PyAccess): + """I;16 access, native bitendian without conversion""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("unsigned short **", self.image) + + def get_pixel(self, x: int, y: int) -> int: + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 65535) + except TypeError: + # tuple + self.pixels[y][x] = min(color[0], 65535) + + +class _PyAccessI16_L(PyAccess): + """I;16L access, with conversion""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_I16 **", self.image) + + def get_pixel(self, x: int, y: int) -> int: + pixel = self.pixels[y][x] + return pixel.l + pixel.r * 256 + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except TypeError: + color = min(color[0], 65535) + + pixel.l = color & 0xFF + pixel.r = color >> 8 + + +class _PyAccessI16_B(PyAccess): + """I;16B access, with conversion""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_I16 **", self.image) + + def get_pixel(self, x: int, y: int) -> int: + pixel = self.pixels[y][x] + return pixel.l * 256 + pixel.r + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except Exception: + color = min(color[0], 65535) + + pixel.l = color >> 8 + pixel.r = color & 0xFF + + +class _PyAccessI32_N(PyAccess): + """Signed Int32 access, native endian""" + + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def 
get_pixel(self, x: int, y: int) -> int: + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + self.pixels[y][x] = color + + +class _PyAccessI32_Swap(PyAccess): + """I;32L/B access, with byteswapping conversion""" + + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def reverse(self, i): + orig = ffi.new("int *", i) + chars = ffi.cast("unsigned char *", orig) + chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], chars[1], chars[0] + return ffi.cast("int *", chars)[0] + + def get_pixel(self, x: int, y: int) -> int: + return self.reverse(self.pixels[y][x]) + + def set_pixel(self, x, y, color): + self.pixels[y][x] = self.reverse(color) + + +class _PyAccessF(PyAccess): + """32 bit float access""" + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("float **", self.image32) + + def get_pixel(self, x: int, y: int) -> float: + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # not a tuple + self.pixels[y][x] = color + except TypeError: + # tuple + self.pixels[y][x] = color[0] + + +mode_map = { + "1": _PyAccess8, + "L": _PyAccess8, + "P": _PyAccess8, + "I;16N": _PyAccessI16_N, + "LA": _PyAccess32_2, + "La": _PyAccess32_2, + "PA": _PyAccess32_2, + "RGB": _PyAccess32_3, + "LAB": _PyAccess32_3, + "HSV": _PyAccess32_3, + "YCbCr": _PyAccess32_3, + "RGBA": _PyAccess32_4, + "RGBa": _PyAccess32_4, + "RGBX": _PyAccess32_4, + "CMYK": _PyAccess32_4, + "F": _PyAccessF, + "I": _PyAccessI32_N, +} + +if sys.byteorder == "little": + mode_map["I;16"] = _PyAccessI16_N + mode_map["I;16L"] = _PyAccessI16_N + mode_map["I;16B"] = _PyAccessI16_B + + mode_map["I;32L"] = _PyAccessI32_N + mode_map["I;32B"] = _PyAccessI32_Swap +else: + mode_map["I;16"] = _PyAccessI16_L + mode_map["I;16L"] = _PyAccessI16_L + mode_map["I;16B"] = _PyAccessI16_N + + mode_map["I;32L"] = _PyAccessI32_Swap + mode_map["I;32B"] = _PyAccessI32_N + + +def new(img: Image.Image, readonly: bool = False) -> PyAccess | None: + access_type = mode_map.get(img.mode, None) + if not access_type: + logger.debug("PyAccess Not Implemented: %s", img.mode) + return None + return access_type(img, readonly) diff --git a/MLPY/Lib/site-packages/PIL/QoiImagePlugin.py b/MLPY/Lib/site-packages/PIL/QoiImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..7c27819edf227aac3c1a6589862137314e9ba4bb --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/QoiImagePlugin.py @@ -0,0 +1,115 @@ +# +# The Python Imaging Library. +# +# QOI support for PIL +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import os + +from . 
import Image, ImageFile +from ._binary import i32be as i32 + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] == b"qoif" + + +class QoiImageFile(ImageFile.ImageFile): + format = "QOI" + format_description = "Quite OK Image" + + def _open(self) -> None: + if not _accept(self.fp.read(4)): + msg = "not a QOI file" + raise SyntaxError(msg) + + self._size = tuple(i32(self.fp.read(4)) for i in range(2)) + + channels = self.fp.read(1)[0] + self._mode = "RGB" if channels == 3 else "RGBA" + + self.fp.seek(1, os.SEEK_CUR) # colorspace + self.tile = [("qoi", (0, 0) + self._size, self.fp.tell(), None)] + + +class QoiDecoder(ImageFile.PyDecoder): + _pulls_fd = True + _previous_pixel: bytes | bytearray | None = None + _previously_seen_pixels: dict[int, bytes | bytearray] = {} + + def _add_to_previous_pixels(self, value: bytes | bytearray) -> None: + self._previous_pixel = value + + r, g, b, a = value + hash_value = (r * 3 + g * 5 + b * 7 + a * 11) % 64 + self._previously_seen_pixels[hash_value] = value + + def decode(self, buffer: bytes) -> tuple[int, int]: + assert self.fd is not None + + self._previously_seen_pixels = {} + self._add_to_previous_pixels(bytearray((0, 0, 0, 255))) + + data = bytearray() + bands = Image.getmodebands(self.mode) + dest_length = self.state.xsize * self.state.ysize * bands + while len(data) < dest_length: + byte = self.fd.read(1)[0] + value: bytes | bytearray + if byte == 0b11111110 and self._previous_pixel: # QOI_OP_RGB + value = bytearray(self.fd.read(3)) + self._previous_pixel[3:] + elif byte == 0b11111111: # QOI_OP_RGBA + value = self.fd.read(4) + else: + op = byte >> 6 + if op == 0: # QOI_OP_INDEX + op_index = byte & 0b00111111 + value = self._previously_seen_pixels.get( + op_index, bytearray((0, 0, 0, 0)) + ) + elif op == 1 and self._previous_pixel: # QOI_OP_DIFF + value = bytearray( + ( + (self._previous_pixel[0] + ((byte & 0b00110000) >> 4) - 2) + % 256, + (self._previous_pixel[1] + ((byte & 0b00001100) >> 2) - 2) + % 256, + (self._previous_pixel[2] + (byte & 0b00000011) - 2) % 256, + self._previous_pixel[3], + ) + ) + elif op == 2 and self._previous_pixel: # QOI_OP_LUMA + second_byte = self.fd.read(1)[0] + diff_green = (byte & 0b00111111) - 32 + diff_red = ((second_byte & 0b11110000) >> 4) - 8 + diff_blue = (second_byte & 0b00001111) - 8 + + value = bytearray( + tuple( + (self._previous_pixel[i] + diff_green + diff) % 256 + for i, diff in enumerate((diff_red, 0, diff_blue)) + ) + ) + value += self._previous_pixel[3:] + elif op == 3 and self._previous_pixel: # QOI_OP_RUN + run_length = (byte & 0b00111111) + 1 + value = self._previous_pixel + if bands == 3: + value = value[:3] + data += value * run_length + continue + self._add_to_previous_pixels(value) + + if bands == 3: + value = value[:3] + data += value + self.set_as_raw(data) + return -1, 0 + + +Image.register_open(QoiImageFile.format, QoiImageFile, _accept) +Image.register_decoder("qoi", QoiDecoder) +Image.register_extension(QoiImageFile.format, ".qoi") diff --git a/MLPY/Lib/site-packages/PIL/SgiImagePlugin.py b/MLPY/Lib/site-packages/PIL/SgiImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..a93a40ce17eaff3da58979e334ead58beddd23a2 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/SgiImagePlugin.py @@ -0,0 +1,238 @@ +# +# The Python Imaging Library. +# $Id$ +# +# SGI image file handling +# +# See "The SGI Image File Format (Draft version 0.97)", Paul Haeberli. 
+# +# +# +# History: +# 2017-22-07 mb Add RLE decompression +# 2016-16-10 mb Add save method without compression +# 1995-09-10 fl Created +# +# Copyright (c) 2016 by Mickael Bonfill. +# Copyright (c) 2008 by Karsten Hiddemann. +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1995 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import os +import struct +from typing import IO + +from . import Image, ImageFile +from ._binary import i16be as i16 +from ._binary import o8 + + +def _accept(prefix: bytes) -> bool: + return len(prefix) >= 2 and i16(prefix) == 474 + + +MODES = { + (1, 1, 1): "L", + (1, 2, 1): "L", + (2, 1, 1): "L;16B", + (2, 2, 1): "L;16B", + (1, 3, 3): "RGB", + (2, 3, 3): "RGB;16B", + (1, 3, 4): "RGBA", + (2, 3, 4): "RGBA;16B", +} + + +## +# Image plugin for SGI images. +class SgiImageFile(ImageFile.ImageFile): + format = "SGI" + format_description = "SGI Image File Format" + + def _open(self) -> None: + # HEAD + assert self.fp is not None + + headlen = 512 + s = self.fp.read(headlen) + + if not _accept(s): + msg = "Not an SGI image file" + raise ValueError(msg) + + # compression : verbatim or RLE + compression = s[2] + + # bpc : 1 or 2 bytes (8bits or 16bits) + bpc = s[3] + + # dimension : 1, 2 or 3 (depending on xsize, ysize and zsize) + dimension = i16(s, 4) + + # xsize : width + xsize = i16(s, 6) + + # ysize : height + ysize = i16(s, 8) + + # zsize : channels count + zsize = i16(s, 10) + + # layout + layout = bpc, dimension, zsize + + # determine mode from bits/zsize + rawmode = "" + try: + rawmode = MODES[layout] + except KeyError: + pass + + if rawmode == "": + msg = "Unsupported SGI image mode" + raise ValueError(msg) + + self._size = xsize, ysize + self._mode = rawmode.split(";")[0] + if self.mode == "RGB": + self.custom_mimetype = "image/rgb" + + # orientation -1 : scanlines begins at the bottom-left corner + orientation = -1 + + # decoder info + if compression == 0: + pagesize = xsize * ysize * bpc + if bpc == 2: + self.tile = [ + ("SGI16", (0, 0) + self.size, headlen, (self.mode, 0, orientation)) + ] + else: + self.tile = [] + offset = headlen + for layer in self.mode: + self.tile.append( + ("raw", (0, 0) + self.size, offset, (layer, 0, orientation)) + ) + offset += pagesize + elif compression == 1: + self.tile = [ + ("sgi_rle", (0, 0) + self.size, headlen, (rawmode, orientation, bpc)) + ] + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if im.mode not in {"RGB", "RGBA", "L"}: + msg = "Unsupported SGI image mode" + raise ValueError(msg) + + # Get the keyword arguments + info = im.encoderinfo + + # Byte-per-pixel precision, 1 = 8bits per pixel + bpc = info.get("bpc", 1) + + if bpc not in (1, 2): + msg = "Unsupported number of bytes per pixel" + raise ValueError(msg) + + # Flip the image, since the origin of SGI file is the bottom-left corner + orientation = -1 + # Define the file as SGI File Format + magic_number = 474 + # Run-Length Encoding Compression - Unsupported at this time + rle = 0 + + # Number of dimensions (x,y,z) + dim = 3 + # X Dimension = width / Y Dimension = height + x, y = im.size + if im.mode == "L" and y == 1: + dim = 1 + elif im.mode == "L": + dim = 2 + # Z Dimension: Number of channels + z = len(im.mode) + + if dim in {1, 2}: + z = 1 + + # assert we've got the right number of bands. 
+ if len(im.getbands()) != z: + msg = f"incorrect number of bands in SGI write: {z} vs {len(im.getbands())}" + raise ValueError(msg) + + # Minimum Byte value + pinmin = 0 + # Maximum Byte value (255 = 8bits per pixel) + pinmax = 255 + # Image name (79 characters max, truncated below in write) + img_name = os.path.splitext(os.path.basename(filename))[0] + if isinstance(img_name, str): + img_name = img_name.encode("ascii", "ignore") + # Standard representation of pixel in the file + colormap = 0 + fp.write(struct.pack(">h", magic_number)) + fp.write(o8(rle)) + fp.write(o8(bpc)) + fp.write(struct.pack(">H", dim)) + fp.write(struct.pack(">H", x)) + fp.write(struct.pack(">H", y)) + fp.write(struct.pack(">H", z)) + fp.write(struct.pack(">l", pinmin)) + fp.write(struct.pack(">l", pinmax)) + fp.write(struct.pack("4s", b"")) # dummy + fp.write(struct.pack("79s", img_name)) # truncates to 79 chars + fp.write(struct.pack("s", b"")) # force null byte after img_name + fp.write(struct.pack(">l", colormap)) + fp.write(struct.pack("404s", b"")) # dummy + + rawmode = "L" + if bpc == 2: + rawmode = "L;16B" + + for channel in im.split(): + fp.write(channel.tobytes("raw", rawmode, 0, orientation)) + + if hasattr(fp, "flush"): + fp.flush() + + +class SGI16Decoder(ImageFile.PyDecoder): + _pulls_fd = True + + def decode(self, buffer: bytes) -> tuple[int, int]: + assert self.fd is not None + assert self.im is not None + + rawmode, stride, orientation = self.args + pagesize = self.state.xsize * self.state.ysize + zsize = len(self.mode) + self.fd.seek(512) + + for band in range(zsize): + channel = Image.new("L", (self.state.xsize, self.state.ysize)) + channel.frombytes( + self.fd.read(2 * pagesize), "raw", "L;16B", stride, orientation + ) + self.im.putband(channel.im, band) + + return -1, 0 + + +# +# registry + + +Image.register_decoder("SGI16", SGI16Decoder) +Image.register_open(SgiImageFile.format, SgiImageFile, _accept) +Image.register_save(SgiImageFile.format, _save) +Image.register_mime(SgiImageFile.format, "image/sgi") + +Image.register_extensions(SgiImageFile.format, [".bw", ".rgb", ".rgba", ".sgi"]) + +# End of file diff --git a/MLPY/Lib/site-packages/PIL/SpiderImagePlugin.py b/MLPY/Lib/site-packages/PIL/SpiderImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..e37d874a93844770a0e9ce8039966ffcb18fbce9 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/SpiderImagePlugin.py @@ -0,0 +1,325 @@ +# +# The Python Imaging Library. +# +# SPIDER image file handling +# +# History: +# 2004-08-02 Created BB +# 2006-03-02 added save method +# 2006-03-13 added support for stack images +# +# Copyright (c) 2004 by Health Research Inc. (HRI) RENSSELAER, NY 12144. +# Copyright (c) 2004 by William Baxter. +# Copyright (c) 2004 by Secret Labs AB. +# Copyright (c) 2004 by Fredrik Lundh. +# + +## +# Image plugin for the Spider image format. This format is used +# by the SPIDER software, in processing image data from electron +# microscopy and tomography. +## + +# +# SpiderImagePlugin.py +# +# The Spider image format is used by SPIDER software, in processing +# image data from electron microscopy and tomography. +# +# Spider home page: +# https://spider.wadsworth.org/spider_doc/spider/docs/spider.html +# +# Details about the Spider image format: +# https://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html +# +from __future__ import annotations + +import os +import struct +import sys +from typing import IO, TYPE_CHECKING, Any, Tuple, cast + +from . 
import Image, ImageFile + + +def isInt(f: Any) -> int: + try: + i = int(f) + if f - i == 0: + return 1 + else: + return 0 + except (ValueError, OverflowError): + return 0 + + +iforms = [1, 3, -11, -12, -21, -22] + + +# There is no magic number to identify Spider files, so just check a +# series of header locations to see if they have reasonable values. +# Returns no. of bytes in the header, if it is a valid Spider header, +# otherwise returns 0 + + +def isSpiderHeader(t: tuple[float, ...]) -> int: + h = (99,) + t # add 1 value so can use spider header index start=1 + # header values 1,2,5,12,13,22,23 should be integers + for i in [1, 2, 5, 12, 13, 22, 23]: + if not isInt(h[i]): + return 0 + # check iform + iform = int(h[5]) + if iform not in iforms: + return 0 + # check other header values + labrec = int(h[13]) # no. records in file header + labbyt = int(h[22]) # total no. of bytes in header + lenbyt = int(h[23]) # record length in bytes + if labbyt != (labrec * lenbyt): + return 0 + # looks like a valid header + return labbyt + + +def isSpiderImage(filename: str) -> int: + with open(filename, "rb") as fp: + f = fp.read(92) # read 23 * 4 bytes + t = struct.unpack(">23f", f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + t = struct.unpack("<23f", f) # little-endian + hdrlen = isSpiderHeader(t) + return hdrlen + + +class SpiderImageFile(ImageFile.ImageFile): + format = "SPIDER" + format_description = "Spider 2D image" + _close_exclusive_fp_after_loading = False + + def _open(self) -> None: + # check header + n = 27 * 4 # read 27 float values + f = self.fp.read(n) + + try: + self.bigendian = 1 + t = struct.unpack(">27f", f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + self.bigendian = 0 + t = struct.unpack("<27f", f) # little-endian + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + msg = "not a valid Spider file" + raise SyntaxError(msg) + except struct.error as e: + msg = "not a valid Spider file" + raise SyntaxError(msg) from e + + h = (99,) + t # add 1 value : spider header index starts at 1 + iform = int(h[5]) + if iform != 1: + msg = "not a Spider 2D image" + raise SyntaxError(msg) + + self._size = int(h[12]), int(h[2]) # size in pixels (width, height) + self.istack = int(h[24]) + self.imgnumber = int(h[27]) + + if self.istack == 0 and self.imgnumber == 0: + # stk=0, img=0: a regular 2D image + offset = hdrlen + self._nimages = 1 + elif self.istack > 0 and self.imgnumber == 0: + # stk>0, img=0: Opening the stack for the first time + self.imgbytes = int(h[12]) * int(h[2]) * 4 + self.hdrlen = hdrlen + self._nimages = int(h[26]) + # Point to the first image in the stack + offset = hdrlen * 2 + self.imgnumber = 1 + elif self.istack == 0 and self.imgnumber > 0: + # stk=0, img>0: an image within the stack + offset = hdrlen + self.stkoffset + self.istack = 2 # So Image knows it's still a stack + else: + msg = "inconsistent stack header values" + raise SyntaxError(msg) + + if self.bigendian: + self.rawmode = "F;32BF" + else: + self.rawmode = "F;32F" + self._mode = "F" + + self.tile = [("raw", (0, 0) + self.size, offset, (self.rawmode, 0, 1))] + self._fp = self.fp # FIXME: hack + + @property + def n_frames(self) -> int: + return self._nimages + + @property + def is_animated(self) -> bool: + return self._nimages > 1 + + # 1st image index is zero (although SPIDER imgnumber starts at 1) + def tell(self) -> int: + if self.imgnumber < 1: + return 0 + else: + return self.imgnumber - 1 + + def seek(self, frame: int) -> None: + if self.istack == 0: + msg 
= "attempt to seek in a non-stack file" + raise EOFError(msg) + if not self._seek_check(frame): + return + self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes) + self.fp = self._fp + self.fp.seek(self.stkoffset) + self._open() + + # returns a byte image after rescaling to 0..255 + def convert2byte(self, depth: int = 255) -> Image.Image: + extrema = self.getextrema() + assert isinstance(extrema[0], float) + minimum, maximum = cast(Tuple[float, float], extrema) + m: float = 1 + if maximum != minimum: + m = depth / (maximum - minimum) + b = -m * minimum + return self.point(lambda i: i * m + b).convert("L") + + if TYPE_CHECKING: + from . import ImageTk + + # returns a ImageTk.PhotoImage object, after rescaling to 0..255 + def tkPhotoImage(self) -> ImageTk.PhotoImage: + from . import ImageTk + + return ImageTk.PhotoImage(self.convert2byte(), palette=256) + + +# -------------------------------------------------------------------- +# Image series + + +# given a list of filenames, return a list of images +def loadImageSeries(filelist: list[str] | None = None) -> list[SpiderImageFile] | None: + """create a list of :py:class:`~PIL.Image.Image` objects for use in a montage""" + if filelist is None or len(filelist) < 1: + return None + + imglist = [] + for img in filelist: + if not os.path.exists(img): + print(f"unable to find {img}") + continue + try: + with Image.open(img) as im: + im = im.convert2byte() + except Exception: + if not isSpiderImage(img): + print(f"{img} is not a Spider image file") + continue + im.info["filename"] = img + imglist.append(im) + return imglist + + +# -------------------------------------------------------------------- +# For saving images in Spider format + + +def makeSpiderHeader(im: Image.Image) -> list[bytes]: + nsam, nrow = im.size + lenbyt = nsam * 4 # There are labrec records in the header + labrec = int(1024 / lenbyt) + if 1024 % lenbyt != 0: + labrec += 1 + labbyt = labrec * lenbyt + nvalues = int(labbyt / 4) + if nvalues < 23: + return [] + + hdr = [0.0] * nvalues + + # NB these are Fortran indices + hdr[1] = 1.0 # nslice (=1 for an image) + hdr[2] = float(nrow) # number of rows per slice + hdr[3] = float(nrow) # number of records in the image + hdr[5] = 1.0 # iform for 2D image + hdr[12] = float(nsam) # number of pixels per line + hdr[13] = float(labrec) # number of records in file header + hdr[22] = float(labbyt) # total number of bytes in header + hdr[23] = float(lenbyt) # record length in bytes + + # adjust for Fortran indexing + hdr = hdr[1:] + hdr.append(0.0) + # pack binary data into a string + return [struct.pack("f", v) for v in hdr] + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if im.mode[0] != "F": + im = im.convert("F") + + hdr = makeSpiderHeader(im) + if len(hdr) < 256: + msg = "Error creating Spider header" + raise OSError(msg) + + # write the SPIDER header + fp.writelines(hdr) + + rawmode = "F;32NF" # 32-bit native floating point + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))]) + + +def _save_spider(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + # get the filename extension and register it with Image + filename_ext = os.path.splitext(filename)[1] + ext = filename_ext.decode() if isinstance(filename_ext, bytes) else filename_ext + Image.register_extension(SpiderImageFile.format, ext) + _save(im, fp, filename) + + +# -------------------------------------------------------------------- + + +Image.register_open(SpiderImageFile.format, SpiderImageFile) 
+Image.register_save(SpiderImageFile.format, _save_spider) + +if __name__ == "__main__": + if len(sys.argv) < 2: + print("Syntax: python3 SpiderImagePlugin.py [infile] [outfile]") + sys.exit() + + filename = sys.argv[1] + if not isSpiderImage(filename): + print("input image must be in Spider format") + sys.exit() + + with Image.open(filename) as im: + print(f"image: {im}") + print(f"format: {im.format}") + print(f"size: {im.size}") + print(f"mode: {im.mode}") + print("max, min: ", end=" ") + print(im.getextrema()) + + if len(sys.argv) > 2: + outfile = sys.argv[2] + + # perform some image operation + im = im.transpose(Image.Transpose.FLIP_LEFT_RIGHT) + print( + f"saving a flipped version of {os.path.basename(filename)} " + f"as {outfile} " + ) + im.save(outfile, SpiderImageFile.format) diff --git a/MLPY/Lib/site-packages/PIL/SunImagePlugin.py b/MLPY/Lib/site-packages/PIL/SunImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..d9dc4ab8d5d5f06ccb1d0b8b806e27a8d57bcc7d --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/SunImagePlugin.py @@ -0,0 +1,141 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Sun image file handling +# +# History: +# 1995-09-10 fl Created +# 1996-05-28 fl Fixed 32-bit alignment +# 1998-12-29 fl Import ImagePalette module +# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault) +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995-1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +from . import Image, ImageFile, ImagePalette +from ._binary import i32be as i32 + + +def _accept(prefix: bytes) -> bool: + return len(prefix) >= 4 and i32(prefix) == 0x59A66A95 + + +## +# Image plugin for Sun raster files. + + +class SunImageFile(ImageFile.ImageFile): + format = "SUN" + format_description = "Sun Raster File" + + def _open(self) -> None: + # The Sun Raster file header is 32 bytes in length + # and has the following format: + + # typedef struct _SunRaster + # { + # DWORD MagicNumber; /* Magic (identification) number */ + # DWORD Width; /* Width of image in pixels */ + # DWORD Height; /* Height of image in pixels */ + # DWORD Depth; /* Number of bits per pixel */ + # DWORD Length; /* Size of image data in bytes */ + # DWORD Type; /* Type of raster file */ + # DWORD ColorMapType; /* Type of color map */ + # DWORD ColorMapLength; /* Size of the color map in bytes */ + # } SUNRASTER; + + assert self.fp is not None + + # HEAD + s = self.fp.read(32) + if not _accept(s): + msg = "not an SUN raster file" + raise SyntaxError(msg) + + offset = 32 + + self._size = i32(s, 4), i32(s, 8) + + depth = i32(s, 12) + # data_length = i32(s, 16) # unreliable, ignore. 
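+        # (Old-style Sun rasters may store 0 here, so the data size is
+        # derived from the width, height, depth and stride instead.)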
+ file_type = i32(s, 20) + palette_type = i32(s, 24) # 0: None, 1: RGB, 2: Raw/arbitrary + palette_length = i32(s, 28) + + if depth == 1: + self._mode, rawmode = "1", "1;I" + elif depth == 4: + self._mode, rawmode = "L", "L;4" + elif depth == 8: + self._mode = rawmode = "L" + elif depth == 24: + if file_type == 3: + self._mode, rawmode = "RGB", "RGB" + else: + self._mode, rawmode = "RGB", "BGR" + elif depth == 32: + if file_type == 3: + self._mode, rawmode = "RGB", "RGBX" + else: + self._mode, rawmode = "RGB", "BGRX" + else: + msg = "Unsupported Mode/Bit Depth" + raise SyntaxError(msg) + + if palette_length: + if palette_length > 1024: + msg = "Unsupported Color Palette Length" + raise SyntaxError(msg) + + if palette_type != 1: + msg = "Unsupported Palette Type" + raise SyntaxError(msg) + + offset = offset + palette_length + self.palette = ImagePalette.raw("RGB;L", self.fp.read(palette_length)) + if self.mode == "L": + self._mode = "P" + rawmode = rawmode.replace("L", "P") + + # 16 bit boundaries on stride + stride = ((self.size[0] * depth + 15) // 16) * 2 + + # file type: Type is the version (or flavor) of the bitmap + # file. The following values are typically found in the Type + # field: + # 0000h Old + # 0001h Standard + # 0002h Byte-encoded + # 0003h RGB format + # 0004h TIFF format + # 0005h IFF format + # FFFFh Experimental + + # Old and standard are the same, except for the length tag. + # byte-encoded is run-length-encoded + # RGB looks similar to standard, but RGB byte order + # TIFF and IFF mean that they were converted from T/IFF + # Experimental means that it's something else. + # (https://www.fileformat.info/format/sunraster/egff.htm) + + if file_type in (0, 1, 3, 4, 5): + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride))] + elif file_type == 2: + self.tile = [("sun_rle", (0, 0) + self.size, offset, rawmode)] + else: + msg = "Unsupported Sun Raster file type" + raise SyntaxError(msg) + + +# +# registry + + +Image.register_open(SunImageFile.format, SunImageFile, _accept) + +Image.register_extension(SunImageFile.format, ".ras") diff --git a/MLPY/Lib/site-packages/PIL/TarIO.py b/MLPY/Lib/site-packages/PIL/TarIO.py new file mode 100644 index 0000000000000000000000000000000000000000..4267e3b82a477ca1478a10311157b458f1ced868 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/TarIO.py @@ -0,0 +1,67 @@ +# +# The Python Imaging Library. +# $Id$ +# +# read files from within a tar file +# +# History: +# 95-06-18 fl Created +# 96-05-28 fl Open files in binary mode +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-96. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import io + +from . import ContainerIO + + +class TarIO(ContainerIO.ContainerIO[bytes]): + """A file object that provides read access to a given member of a TAR file.""" + + def __init__(self, tarfile: str, file: str) -> None: + """ + Create file object. + + :param tarfile: Name of TAR file. + :param file: Name of member file. 
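+
+        A usage sketch (file names are hypothetical)::
+
+            from PIL import Image, TarIO
+
+            fp = TarIO.TarIO("images.tar", "hopper.jpg")
+            im = Image.open(fp)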
+ """ + self.fh = open(tarfile, "rb") + + while True: + s = self.fh.read(512) + if len(s) != 512: + msg = "unexpected end of tar file" + raise OSError(msg) + + name = s[:100].decode("utf-8") + i = name.find("\0") + if i == 0: + msg = "cannot find subfile" + raise OSError(msg) + if i > 0: + name = name[:i] + + size = int(s[124:135], 8) + + if file == name: + break + + self.fh.seek((size + 511) & (~511), io.SEEK_CUR) + + # Open region + super().__init__(self.fh, self.fh.tell(), size) + + # Context manager support + def __enter__(self) -> TarIO: + return self + + def __exit__(self, *args: object) -> None: + self.close() + + def close(self) -> None: + self.fh.close() diff --git a/MLPY/Lib/site-packages/PIL/TgaImagePlugin.py b/MLPY/Lib/site-packages/PIL/TgaImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..06c048bbefa0bb1fb92e858ebe12c6dc644153a2 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/TgaImagePlugin.py @@ -0,0 +1,262 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TGA file handling +# +# History: +# 95-09-01 fl created (reads 24-bit files only) +# 97-01-04 fl support more TGA versions, including compressed images +# 98-07-04 fl fixed orientation and alpha layer bugs +# 98-09-11 fl fixed orientation for runlength decoder +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import warnings +from typing import IO + +from . import Image, ImageFile, ImagePalette +from ._binary import i16le as i16 +from ._binary import o8 +from ._binary import o16le as o16 + +# +# -------------------------------------------------------------------- +# Read RGA file + + +MODES = { + # map imagetype/depth to rawmode + (1, 8): "P", + (3, 1): "1", + (3, 8): "L", + (3, 16): "LA", + (2, 16): "BGRA;15Z", + (2, 24): "BGR", + (2, 32): "BGRA", +} + + +## +# Image plugin for Targa files. + + +class TgaImageFile(ImageFile.ImageFile): + format = "TGA" + format_description = "Targa" + + def _open(self) -> None: + # process header + assert self.fp is not None + + s = self.fp.read(18) + + id_len = s[0] + + colormaptype = s[1] + imagetype = s[2] + + depth = s[16] + + flags = s[17] + + self._size = i16(s, 12), i16(s, 14) + + # validate header fields + if ( + colormaptype not in (0, 1) + or self.size[0] <= 0 + or self.size[1] <= 0 + or depth not in (1, 8, 16, 24, 32) + ): + msg = "not a TGA file" + raise SyntaxError(msg) + + # image mode + if imagetype in (3, 11): + self._mode = "L" + if depth == 1: + self._mode = "1" # ??? 
+ elif depth == 16: + self._mode = "LA" + elif imagetype in (1, 9): + self._mode = "P" if colormaptype else "L" + elif imagetype in (2, 10): + self._mode = "RGB" if depth == 24 else "RGBA" + else: + msg = "unknown TGA mode" + raise SyntaxError(msg) + + # orientation + orientation = flags & 0x30 + self._flip_horizontally = orientation in [0x10, 0x30] + if orientation in [0x20, 0x30]: + orientation = 1 + elif orientation in [0, 0x10]: + orientation = -1 + else: + msg = "unknown TGA orientation" + raise SyntaxError(msg) + + self.info["orientation"] = orientation + + if imagetype & 8: + self.info["compression"] = "tga_rle" + + if id_len: + self.info["id_section"] = self.fp.read(id_len) + + if colormaptype: + # read palette + start, size, mapdepth = i16(s, 3), i16(s, 5), s[7] + if mapdepth == 16: + self.palette = ImagePalette.raw( + "BGRA;15Z", bytes(2 * start) + self.fp.read(2 * size) + ) + self.palette.mode = "RGBA" + elif mapdepth == 24: + self.palette = ImagePalette.raw( + "BGR", bytes(3 * start) + self.fp.read(3 * size) + ) + elif mapdepth == 32: + self.palette = ImagePalette.raw( + "BGRA", bytes(4 * start) + self.fp.read(4 * size) + ) + else: + msg = "unknown TGA map depth" + raise SyntaxError(msg) + + # setup tile descriptor + try: + rawmode = MODES[(imagetype & 7, depth)] + if imagetype & 8: + # compressed + self.tile = [ + ( + "tga_rle", + (0, 0) + self.size, + self.fp.tell(), + (rawmode, orientation, depth), + ) + ] + else: + self.tile = [ + ( + "raw", + (0, 0) + self.size, + self.fp.tell(), + (rawmode, 0, orientation), + ) + ] + except KeyError: + pass # cannot decode + + def load_end(self) -> None: + if self._flip_horizontally: + assert self.im is not None + self.im = self.im.transpose(Image.Transpose.FLIP_LEFT_RIGHT) + + +# +# -------------------------------------------------------------------- +# Write TGA file + + +SAVE = { + "1": ("1", 1, 0, 3), + "L": ("L", 8, 0, 3), + "LA": ("LA", 16, 0, 3), + "P": ("P", 8, 1, 1), + "RGB": ("BGR", 24, 0, 2), + "RGBA": ("BGRA", 32, 0, 2), +} + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + try: + rawmode, bits, colormaptype, imagetype = SAVE[im.mode] + except KeyError as e: + msg = f"cannot write mode {im.mode} as TGA" + raise OSError(msg) from e + + if "rle" in im.encoderinfo: + rle = im.encoderinfo["rle"] + else: + compression = im.encoderinfo.get("compression", im.info.get("compression")) + rle = compression == "tga_rle" + if rle: + imagetype += 8 + + id_section = im.encoderinfo.get("id_section", im.info.get("id_section", "")) + id_len = len(id_section) + if id_len > 255: + id_len = 255 + id_section = id_section[:255] + warnings.warn("id_section has been trimmed to 255 characters") + + if colormaptype: + assert im.im is not None + palette = im.im.getpalette("RGB", "BGR") + colormaplength, colormapentry = len(palette) // 3, 24 + else: + colormaplength, colormapentry = 0, 0 + + if im.mode in ("LA", "RGBA"): + flags = 8 + else: + flags = 0 + + orientation = im.encoderinfo.get("orientation", im.info.get("orientation", -1)) + if orientation > 0: + flags = flags | 0x20 + + fp.write( + o8(id_len) + + o8(colormaptype) + + o8(imagetype) + + o16(0) # colormapfirst + + o16(colormaplength) + + o8(colormapentry) + + o16(0) + + o16(0) + + o16(im.size[0]) + + o16(im.size[1]) + + o8(bits) + + o8(flags) + ) + + if id_section: + fp.write(id_section) + + if colormaptype: + fp.write(palette) + + if rle: + ImageFile._save( + im, fp, [("tga_rle", (0, 0) + im.size, 0, (rawmode, orientation))] + ) + else: + ImageFile._save( + im, fp, 
[("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))] + ) + + # write targa version 2 footer + fp.write(b"\000" * 8 + b"TRUEVISION-XFILE." + b"\000") + + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(TgaImageFile.format, TgaImageFile) +Image.register_save(TgaImageFile.format, _save) + +Image.register_extensions(TgaImageFile.format, [".tga", ".icb", ".vda", ".vst"]) + +Image.register_mime(TgaImageFile.format, "image/x-tga") diff --git a/MLPY/Lib/site-packages/PIL/TiffImagePlugin.py b/MLPY/Lib/site-packages/PIL/TiffImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..ca9e2b99024b01416adbf8beffdb038b8fb1ab32 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/TiffImagePlugin.py @@ -0,0 +1,2200 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF file handling +# +# TIFF is a flexible, if somewhat aged, image file format originally +# defined by Aldus. Although TIFF supports a wide variety of pixel +# layouts and compression methods, the name doesn't really stand for +# "thousands of incompatible file formats," it just feels that way. +# +# To read TIFF data from a stream, the stream must be seekable. For +# progressive decoding, make sure to use TIFF files where the tag +# directory is placed first in the file. +# +# History: +# 1995-09-01 fl Created +# 1996-05-04 fl Handle JPEGTABLES tag +# 1996-05-18 fl Fixed COLORMAP support +# 1997-01-05 fl Fixed PREDICTOR support +# 1997-08-27 fl Added support for rational tags (from Perry Stoll) +# 1998-01-10 fl Fixed seek/tell (from Jan Blom) +# 1998-07-15 fl Use private names for internal variables +# 1999-06-13 fl Rewritten for PIL 1.0 (1.0) +# 2000-10-11 fl Additional fixes for Python 2.0 (1.1) +# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2) +# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3) +# 2001-12-18 fl Added workaround for broken Matrox library +# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart) +# 2003-05-19 fl Check FILLORDER tag +# 2003-09-26 fl Added RGBa support +# 2004-02-24 fl Added DPI support; fixed rational write support +# 2005-02-07 fl Added workaround for broken Corel Draw 10 files +# 2006-01-09 fl Added support for float/double tags (from Russell Nelson) +# +# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import io +import itertools +import logging +import math +import os +import struct +import warnings +from collections.abc import MutableMapping +from fractions import Fraction +from numbers import Number, Rational +from typing import IO, TYPE_CHECKING, Any, Callable, NoReturn + +from . import ExifTags, Image, ImageFile, ImageOps, ImagePalette, TiffTags +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import o8 +from ._deprecate import deprecate +from .TiffTags import TYPES + +logger = logging.getLogger(__name__) + +# Set these to true to force use of libtiff for reading or writing. 
+READ_LIBTIFF = False +WRITE_LIBTIFF = False +IFD_LEGACY_API = True +STRIP_SIZE = 65536 + +II = b"II" # little-endian (Intel style) +MM = b"MM" # big-endian (Motorola style) + +# +# -------------------------------------------------------------------- +# Read TIFF files + +# a few tag names, just to make the code below a bit more readable +OSUBFILETYPE = 255 +IMAGEWIDTH = 256 +IMAGELENGTH = 257 +BITSPERSAMPLE = 258 +COMPRESSION = 259 +PHOTOMETRIC_INTERPRETATION = 262 +FILLORDER = 266 +IMAGEDESCRIPTION = 270 +STRIPOFFSETS = 273 +SAMPLESPERPIXEL = 277 +ROWSPERSTRIP = 278 +STRIPBYTECOUNTS = 279 +X_RESOLUTION = 282 +Y_RESOLUTION = 283 +PLANAR_CONFIGURATION = 284 +RESOLUTION_UNIT = 296 +TRANSFERFUNCTION = 301 +SOFTWARE = 305 +DATE_TIME = 306 +ARTIST = 315 +PREDICTOR = 317 +COLORMAP = 320 +TILEWIDTH = 322 +TILELENGTH = 323 +TILEOFFSETS = 324 +TILEBYTECOUNTS = 325 +SUBIFD = 330 +EXTRASAMPLES = 338 +SAMPLEFORMAT = 339 +JPEGTABLES = 347 +YCBCRSUBSAMPLING = 530 +REFERENCEBLACKWHITE = 532 +COPYRIGHT = 33432 +IPTC_NAA_CHUNK = 33723 # newsphoto properties +PHOTOSHOP_CHUNK = 34377 # photoshop properties +ICCPROFILE = 34675 +EXIFIFD = 34665 +XMP = 700 +JPEGQUALITY = 65537 # pseudo-tag by libtiff + +# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java +IMAGEJ_META_DATA_BYTE_COUNTS = 50838 +IMAGEJ_META_DATA = 50839 + +COMPRESSION_INFO = { + # Compression => pil compression name + 1: "raw", + 2: "tiff_ccitt", + 3: "group3", + 4: "group4", + 5: "tiff_lzw", + 6: "tiff_jpeg", # obsolete + 7: "jpeg", + 8: "tiff_adobe_deflate", + 32771: "tiff_raw_16", # 16-bit padding + 32773: "packbits", + 32809: "tiff_thunderscan", + 32946: "tiff_deflate", + 34676: "tiff_sgilog", + 34677: "tiff_sgilog24", + 34925: "lzma", + 50000: "zstd", + 50001: "webp", +} + +COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()} + +OPEN_INFO = { + # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample, + # ExtraSamples) => mode, rawmode + (II, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (II, 1, (1,), 1, (1,), ()): ("1", "1"), + (MM, 1, (1,), 1, (1,), ()): ("1", "1"), + (II, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (II, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (II, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + (II, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (II, 1, (1,), 1, (8,), ()): ("L", "L"), + (MM, 1, (1,), 1, (8,), ()): ("L", "L"), + (II, 1, (2,), 1, (8,), ()): ("L", "L"), + (MM, 1, (2,), 1, (8,), ()): ("L", "L"), + (II, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"), + (II, 0, (1,), 1, 
(16,), ()): ("I;16", "I;16"), + (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"), + (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"), + (II, 1, (1,), 2, (16,), ()): ("I;16", "I;16R"), + (II, 1, (2,), 1, (16,), ()): ("I", "I;16S"), + (MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"), + (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"), + (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"), + (MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"), + (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGB", "RGBX"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGB", "RGBX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGB", "RGBXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGB", "RGBXX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGB", "RGBXXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGB", "RGBXXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"), + (MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGB", "RGBX;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGB", "RGBX;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"), + (II, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (II, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (II, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (II, 3, 
(1,), 1, (8,), ()): ("P", "P"), + (MM, 3, (1,), 1, (8,), ()): ("P", "P"), + (II, 3, (1,), 1, (8, 8), (0,)): ("P", "PX"), + (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (II, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), + (MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), + (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), + (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), + (II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"), + (II, 6, (1,), 1, (8,), ()): ("L", "L"), + (MM, 6, (1,), 1, (8,), ()): ("L", "L"), + # JPEG compressed images handled by LibTiff and auto-converted to RGBX + # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel + (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"), + (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"), + (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), + (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), +} + +MAX_SAMPLESPERPIXEL = max(len(key_tp[4]) for key_tp in OPEN_INFO) + +PREFIXES = [ + b"MM\x00\x2A", # Valid TIFF header with big-endian byte order + b"II\x2A\x00", # Valid TIFF header with little-endian byte order + b"MM\x2A\x00", # Invalid TIFF header, assume big-endian + b"II\x00\x2A", # Invalid TIFF header, assume little-endian + b"MM\x00\x2B", # BigTIFF with big-endian byte order + b"II\x2B\x00", # BigTIFF with little-endian byte order +] + +if not getattr(Image.core, "libtiff_support_custom_tags", True): + deprecate("Support for LibTIFF earlier than version 4", 12) + + +def _accept(prefix: bytes) -> bool: + return prefix[:4] in PREFIXES + + +def _limit_rational(val, max_val): + inv = abs(val) > 1 + n_d = IFDRational(1 / val if inv else val).limit_rational(max_val) + return n_d[::-1] if inv else n_d + + +def _limit_signed_rational(val, max_val, min_val): + frac = Fraction(val) + n_d = frac.numerator, frac.denominator + + if min(n_d) < min_val: + n_d = _limit_rational(val, abs(min_val)) + + if max(n_d) > max_val: + val = Fraction(*n_d) + n_d = _limit_rational(val, max_val) + + return n_d + + +## +# Wrapper for TIFF IFDs. + +_load_dispatch = {} +_write_dispatch = {} + + +def _delegate(op): + def delegate(self, *args): + return getattr(self._val, op)(*args) + + return delegate + + +class IFDRational(Rational): + """Implements a rational class where 0/0 is a legal value to match + the in the wild use of exif rationals. + + e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used + """ + + """ If the denominator is 0, store this as a float('nan'), otherwise store + as a fractions.Fraction(). 
Delegate as appropriate + + """ + + __slots__ = ("_numerator", "_denominator", "_val") + + def __init__(self, value, denominator=1): + """ + :param value: either an integer numerator, a + float/rational/other number, or an IFDRational + :param denominator: Optional integer denominator + """ + if isinstance(value, IFDRational): + self._numerator = value.numerator + self._denominator = value.denominator + self._val = value._val + return + + if isinstance(value, Fraction): + self._numerator = value.numerator + self._denominator = value.denominator + else: + self._numerator = value + self._denominator = denominator + + if denominator == 0: + self._val = float("nan") + elif denominator == 1: + self._val = Fraction(value) + else: + self._val = Fraction(value, denominator) + + @property + def numerator(self): + return self._numerator + + @property + def denominator(self): + return self._denominator + + def limit_rational(self, max_denominator): + """ + + :param max_denominator: Integer, the maximum denominator value + :returns: Tuple of (numerator, denominator) + """ + + if self.denominator == 0: + return self.numerator, self.denominator + + f = self._val.limit_denominator(max_denominator) + return f.numerator, f.denominator + + def __repr__(self) -> str: + return str(float(self._val)) + + def __hash__(self) -> int: + return self._val.__hash__() + + def __eq__(self, other: object) -> bool: + val = self._val + if isinstance(other, IFDRational): + other = other._val + if isinstance(other, float): + val = float(val) + return val == other + + def __getstate__(self): + return [self._val, self._numerator, self._denominator] + + def __setstate__(self, state): + IFDRational.__init__(self, 0) + _val, _numerator, _denominator = state + self._val = _val + self._numerator = _numerator + self._denominator = _denominator + + """ a = ['add','radd', 'sub', 'rsub', 'mul', 'rmul', + 'truediv', 'rtruediv', 'floordiv', 'rfloordiv', + 'mod','rmod', 'pow','rpow', 'pos', 'neg', + 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'bool', + 'ceil', 'floor', 'round'] + print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a)) + """ + + __add__ = _delegate("__add__") + __radd__ = _delegate("__radd__") + __sub__ = _delegate("__sub__") + __rsub__ = _delegate("__rsub__") + __mul__ = _delegate("__mul__") + __rmul__ = _delegate("__rmul__") + __truediv__ = _delegate("__truediv__") + __rtruediv__ = _delegate("__rtruediv__") + __floordiv__ = _delegate("__floordiv__") + __rfloordiv__ = _delegate("__rfloordiv__") + __mod__ = _delegate("__mod__") + __rmod__ = _delegate("__rmod__") + __pow__ = _delegate("__pow__") + __rpow__ = _delegate("__rpow__") + __pos__ = _delegate("__pos__") + __neg__ = _delegate("__neg__") + __abs__ = _delegate("__abs__") + __trunc__ = _delegate("__trunc__") + __lt__ = _delegate("__lt__") + __gt__ = _delegate("__gt__") + __le__ = _delegate("__le__") + __ge__ = _delegate("__ge__") + __bool__ = _delegate("__bool__") + __ceil__ = _delegate("__ceil__") + __floor__ = _delegate("__floor__") + __round__ = _delegate("__round__") + # Python >= 3.11 + if hasattr(Fraction, "__int__"): + __int__ = _delegate("__int__") + + +def _register_loader(idx, size): + def decorator(func): + from .TiffTags import TYPES + + if func.__name__.startswith("load_"): + TYPES[idx] = func.__name__[5:].replace("_", " ") + _load_dispatch[idx] = size, func # noqa: F821 + return func + + return decorator + + +def _register_writer(idx): + def decorator(func): + _write_dispatch[idx] = func # noqa: F821 + return func + + return decorator + + +def 
_register_basic(idx_fmt_name): + from .TiffTags import TYPES + + idx, fmt, name = idx_fmt_name + TYPES[idx] = name + size = struct.calcsize(f"={fmt}") + _load_dispatch[idx] = ( # noqa: F821 + size, + lambda self, data, legacy_api=True: ( + self._unpack(f"{len(data) // size}{fmt}", data) + ), + ) + _write_dispatch[idx] = lambda self, *values: ( # noqa: F821 + b"".join(self._pack(fmt, value) for value in values) + ) + + +if TYPE_CHECKING: + _IFDv2Base = MutableMapping[int, Any] +else: + _IFDv2Base = MutableMapping + + +class ImageFileDirectory_v2(_IFDv2Base): + """This class represents a TIFF tag directory. To speed things up, we + don't decode tags unless they're asked for. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v2() + ifd[key] = 'Some Data' + ifd.tagtype[key] = TiffTags.ASCII + print(ifd[key]) + 'Some Data' + + Individual values are returned as the strings or numbers, sequences are + returned as tuples of the values. + + The tiff metadata type of each item is stored in a dictionary of + tag types in + :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types + are read from a tiff file, guessed from the type added, or added + manually. + + Data Structures: + + * ``self.tagtype = {}`` + + * Key: numerical TIFF tag number + * Value: integer corresponding to the data type from + :py:data:`.TiffTags.TYPES` + + .. versionadded:: 3.0.0 + + 'Internal' data structures: + + * ``self._tags_v2 = {}`` + + * Key: numerical TIFF tag number + * Value: decoded data, as tuple for multiple values + + * ``self._tagdata = {}`` + + * Key: numerical TIFF tag number + * Value: undecoded byte string from file + + * ``self._tags_v1 = {}`` + + * Key: numerical TIFF tag number + * Value: decoded data in the v1 format + + Tags will be found in the private attributes ``self._tagdata``, and in + ``self._tags_v2`` once decoded. + + ``self.legacy_api`` is a value for internal use, and shouldn't be changed + from outside code. In cooperation with + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`, if ``legacy_api`` + is true, then decoded tags will be populated into both ``_tags_v1`` and + ``_tags_v2``. ``_tags_v2`` will be used if this IFD is used in the TIFF + save routine. Tags should be read from ``_tags_v1`` if + ``legacy_api == true``. + + """ + + _load_dispatch: dict[int, Callable[[ImageFileDirectory_v2, bytes, bool], Any]] = {} + _write_dispatch: dict[int, Callable[..., Any]] = {} + + def __init__( + self, + ifh: bytes = b"II\052\0\0\0\0\0", + prefix: bytes | None = None, + group: int | None = None, + ) -> None: + """Initialize an ImageFileDirectory. + + To construct an ImageFileDirectory from a real file, pass the 8-byte + magic header to the constructor. To only set the endianness, pass it + as the 'prefix' keyword argument. + + :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets + endianness. + :param prefix: Override the endianness of the file. 
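+        :param group: Optional group identifier, stored as ``self.group`` and
+            passed to TiffTags.lookup() so that group-specific tag definitions
+            (e.g. Exif sub-IFD tags) can be resolved.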
+ """ + if not _accept(ifh): + msg = f"not a TIFF file (header {repr(ifh)} not valid)" + raise SyntaxError(msg) + self._prefix = prefix if prefix is not None else ifh[:2] + if self._prefix == MM: + self._endian = ">" + elif self._prefix == II: + self._endian = "<" + else: + msg = "not a TIFF IFD" + raise SyntaxError(msg) + self._bigtiff = ifh[2] == 43 + self.group = group + self.tagtype: dict[int, int] = {} + """ Dictionary of tag types """ + self.reset() + (self.next,) = ( + self._unpack("Q", ifh[8:]) if self._bigtiff else self._unpack("L", ifh[4:]) + ) + self._legacy_api = False + + prefix = property(lambda self: self._prefix) + offset = property(lambda self: self._offset) + + @property + def legacy_api(self) -> bool: + return self._legacy_api + + @legacy_api.setter + def legacy_api(self, value: bool) -> NoReturn: + msg = "Not allowing setting of legacy api" + raise Exception(msg) + + def reset(self) -> None: + self._tags_v1: dict[int, Any] = {} # will remain empty if legacy_api is false + self._tags_v2: dict[int, Any] = {} # main tag storage + self._tagdata: dict[int, bytes] = {} + self.tagtype = {} # added 2008-06-05 by Florian Hoech + self._next = None + self._offset = None + + def __str__(self) -> str: + return str(dict(self)) + + def named(self): + """ + :returns: dict of name|key: value + + Returns the complete tag dictionary, with named tags where possible. + """ + return { + TiffTags.lookup(code, self.group).name: value + for code, value in self.items() + } + + def __len__(self) -> int: + return len(set(self._tagdata) | set(self._tags_v2)) + + def __getitem__(self, tag): + if tag not in self._tags_v2: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + self[tag] = handler(self, data, self.legacy_api) # check type + val = self._tags_v2[tag] + if self.legacy_api and not isinstance(val, (tuple, bytes)): + val = (val,) + return val + + def __contains__(self, tag): + return tag in self._tags_v2 or tag in self._tagdata + + def __setitem__(self, tag, value): + self._setitem(tag, value, self.legacy_api) + + def _setitem(self, tag, value, legacy_api): + basetypes = (Number, bytes, str) + + info = TiffTags.lookup(tag, self.group) + values = [value] if isinstance(value, basetypes) else value + + if tag not in self.tagtype: + if info.type: + self.tagtype[tag] = info.type + else: + self.tagtype[tag] = TiffTags.UNDEFINED + if all(isinstance(v, IFDRational) for v in values): + self.tagtype[tag] = ( + TiffTags.RATIONAL + if all(v >= 0 for v in values) + else TiffTags.SIGNED_RATIONAL + ) + elif all(isinstance(v, int) for v in values): + if all(0 <= v < 2**16 for v in values): + self.tagtype[tag] = TiffTags.SHORT + elif all(-(2**15) < v < 2**15 for v in values): + self.tagtype[tag] = TiffTags.SIGNED_SHORT + else: + self.tagtype[tag] = ( + TiffTags.LONG + if all(v >= 0 for v in values) + else TiffTags.SIGNED_LONG + ) + elif all(isinstance(v, float) for v in values): + self.tagtype[tag] = TiffTags.DOUBLE + elif all(isinstance(v, str) for v in values): + self.tagtype[tag] = TiffTags.ASCII + elif all(isinstance(v, bytes) for v in values): + self.tagtype[tag] = TiffTags.BYTE + + if self.tagtype[tag] == TiffTags.UNDEFINED: + values = [ + v.encode("ascii", "replace") if isinstance(v, str) else v + for v in values + ] + elif self.tagtype[tag] == TiffTags.RATIONAL: + values = [float(v) if isinstance(v, int) else v for v in values] + + is_ifd = self.tagtype[tag] == TiffTags.LONG and isinstance(values, dict) + if not is_ifd: + values = 
tuple(info.cvt_enum(value) for value in values) + + dest = self._tags_v1 if legacy_api else self._tags_v2 + + # Three branches: + # Spec'd length == 1, Actual length 1, store as element + # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed. + # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple. + # Don't mess with the legacy api, since it's frozen. + if not is_ifd and ( + (info.length == 1) + or self.tagtype[tag] == TiffTags.BYTE + or (info.length is None and len(values) == 1 and not legacy_api) + ): + # Don't mess with the legacy api, since it's frozen. + if legacy_api and self.tagtype[tag] in [ + TiffTags.RATIONAL, + TiffTags.SIGNED_RATIONAL, + ]: # rationals + values = (values,) + try: + (dest[tag],) = values + except ValueError: + # We've got a builtin tag with 1 expected entry + warnings.warn( + f"Metadata Warning, tag {tag} had too many entries: " + f"{len(values)}, expected 1" + ) + dest[tag] = values[0] + + else: + # Spec'd length > 1 or undefined + # Unspec'd, and length > 1 + dest[tag] = values + + def __delitem__(self, tag: int) -> None: + self._tags_v2.pop(tag, None) + self._tags_v1.pop(tag, None) + self._tagdata.pop(tag, None) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v2)) + + def _unpack(self, fmt, data): + return struct.unpack(self._endian + fmt, data) + + def _pack(self, fmt, *values): + return struct.pack(self._endian + fmt, *values) + + list( + map( + _register_basic, + [ + (TiffTags.SHORT, "H", "short"), + (TiffTags.LONG, "L", "long"), + (TiffTags.SIGNED_BYTE, "b", "signed byte"), + (TiffTags.SIGNED_SHORT, "h", "signed short"), + (TiffTags.SIGNED_LONG, "l", "signed long"), + (TiffTags.FLOAT, "f", "float"), + (TiffTags.DOUBLE, "d", "double"), + (TiffTags.IFD, "L", "long"), + (TiffTags.LONG8, "Q", "long8"), + ], + ) + ) + + @_register_loader(1, 1) # Basic type, except for the legacy API. + def load_byte(self, data, legacy_api=True): + return data + + @_register_writer(1) # Basic type, except for the legacy API. 
+ def write_byte(self, data): + if isinstance(data, IFDRational): + data = int(data) + if isinstance(data, int): + data = bytes((data,)) + return data + + @_register_loader(2, 1) + def load_string(self, data, legacy_api=True): + if data.endswith(b"\0"): + data = data[:-1] + return data.decode("latin-1", "replace") + + @_register_writer(2) + def write_string(self, value): + # remerge of https://github.com/python-pillow/Pillow/pull/1416 + if isinstance(value, int): + value = str(value) + if not isinstance(value, bytes): + value = value.encode("ascii", "replace") + return value + b"\0" + + @_register_loader(5, 8) + def load_rational(self, data, legacy_api=True): + vals = self._unpack(f"{len(data) // 4}L", data) + + def combine(a, b): + return (a, b) if legacy_api else IFDRational(a, b) + + return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(5) + def write_rational(self, *values): + return b"".join( + self._pack("2L", *_limit_rational(frac, 2**32 - 1)) for frac in values + ) + + @_register_loader(7, 1) + def load_undefined(self, data, legacy_api=True): + return data + + @_register_writer(7) + def write_undefined(self, value): + if isinstance(value, IFDRational): + value = int(value) + if isinstance(value, int): + value = str(value).encode("ascii", "replace") + return value + + @_register_loader(10, 8) + def load_signed_rational(self, data, legacy_api=True): + vals = self._unpack(f"{len(data) // 4}l", data) + + def combine(a, b): + return (a, b) if legacy_api else IFDRational(a, b) + + return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(10) + def write_signed_rational(self, *values): + return b"".join( + self._pack("2l", *_limit_signed_rational(frac, 2**31 - 1, -(2**31))) + for frac in values + ) + + def _ensure_read(self, fp, size): + ret = fp.read(size) + if len(ret) != size: + msg = ( + "Corrupt EXIF data. " + f"Expecting to read {size} bytes but only got {len(ret)}. " + ) + raise OSError(msg) + return ret + + def load(self, fp): + self.reset() + self._offset = fp.tell() + + try: + tag_count = ( + self._unpack("Q", self._ensure_read(fp, 8)) + if self._bigtiff + else self._unpack("H", self._ensure_read(fp, 2)) + )[0] + for i in range(tag_count): + tag, typ, count, data = ( + self._unpack("HHQ8s", self._ensure_read(fp, 20)) + if self._bigtiff + else self._unpack("HHL4s", self._ensure_read(fp, 12)) + ) + + tagname = TiffTags.lookup(tag, self.group).name + typname = TYPES.get(typ, "unknown") + msg = f"tag: {tagname} ({tag}) - type: {typname} ({typ})" + + try: + unit_size, handler = self._load_dispatch[typ] + except KeyError: + logger.debug("%s - unsupported type %s", msg, typ) + continue # ignore unsupported type + size = count * unit_size + if size > (8 if self._bigtiff else 4): + here = fp.tell() + (offset,) = self._unpack("Q" if self._bigtiff else "L", data) + msg += f" Tag Location: {here} - Data Location: {offset}" + fp.seek(offset) + data = ImageFile._safe_read(fp, size) + fp.seek(here) + else: + data = data[:size] + + if len(data) != size: + warnings.warn( + "Possibly corrupt EXIF data. " + f"Expecting to read {size} bytes but only got {len(data)}." 
+ f" Skipping tag {tag}" + ) + logger.debug(msg) + continue + + if not data: + logger.debug(msg) + continue + + self._tagdata[tag] = data + self.tagtype[tag] = typ + + msg += " - value: " + ( + "" % size if size > 32 else repr(data) + ) + logger.debug(msg) + + (self.next,) = ( + self._unpack("Q", self._ensure_read(fp, 8)) + if self._bigtiff + else self._unpack("L", self._ensure_read(fp, 4)) + ) + except OSError as msg: + warnings.warn(str(msg)) + return + + def tobytes(self, offset=0): + # FIXME What about tagdata? + result = self._pack("H", len(self._tags_v2)) + + entries = [] + offset = offset + len(result) + len(self._tags_v2) * 12 + 4 + stripoffsets = None + + # pass 1: convert tags to binary format + # always write tags in ascending order + for tag, value in sorted(self._tags_v2.items()): + if tag == STRIPOFFSETS: + stripoffsets = len(entries) + typ = self.tagtype.get(tag) + logger.debug("Tag %s, Type: %s, Value: %s", tag, typ, repr(value)) + is_ifd = typ == TiffTags.LONG and isinstance(value, dict) + if is_ifd: + if self._endian == "<": + ifh = b"II\x2A\x00\x08\x00\x00\x00" + else: + ifh = b"MM\x00\x2A\x00\x00\x00\x08" + ifd = ImageFileDirectory_v2(ifh, group=tag) + values = self._tags_v2[tag] + for ifd_tag, ifd_value in values.items(): + ifd[ifd_tag] = ifd_value + data = ifd.tobytes(offset) + else: + values = value if isinstance(value, tuple) else (value,) + data = self._write_dispatch[typ](self, *values) + + tagname = TiffTags.lookup(tag, self.group).name + typname = "ifd" if is_ifd else TYPES.get(typ, "unknown") + msg = f"save: {tagname} ({tag}) - type: {typname} ({typ})" + msg += " - value: " + ( + "" % len(data) if len(data) >= 16 else str(values) + ) + logger.debug(msg) + + # count is sum of lengths for string and arbitrary data + if is_ifd: + count = 1 + elif typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]: + count = len(data) + else: + count = len(values) + # figure out if data fits into the entry + if len(data) <= 4: + entries.append((tag, typ, count, data.ljust(4, b"\0"), b"")) + else: + entries.append((tag, typ, count, self._pack("L", offset), data)) + offset += (len(data) + 1) // 2 * 2 # pad to word + + # update strip offset data to point beyond auxiliary data + if stripoffsets is not None: + tag, typ, count, value, data = entries[stripoffsets] + if data: + msg = "multistrip support not yet implemented" + raise NotImplementedError(msg) + value = self._pack("L", self._unpack("L", value)[0] + offset) + entries[stripoffsets] = tag, typ, count, value, data + + # pass 2: write entries to file + for tag, typ, count, value, data in entries: + logger.debug("%s %s %s %s %s", tag, typ, count, repr(value), repr(data)) + result += self._pack("HHL4s", tag, typ, count, value) + + # -- overwrite here for multi-page -- + result += b"\0\0\0\0" # end of entries + + # pass 3: write auxiliary data to file + for tag, typ, count, value, data in entries: + result += data + if len(data) & 1: + result += b"\0" + + return result + + def save(self, fp): + if fp.tell() == 0: # skip TIFF header on subsequent pages + # tiff header -- PIL always starts the first IFD at offset 8 + fp.write(self._prefix + self._pack("HL", 42, 8)) + + offset = fp.tell() + result = self.tobytes(offset) + fp.write(result) + return offset + len(result) + + +ImageFileDirectory_v2._load_dispatch = _load_dispatch +ImageFileDirectory_v2._write_dispatch = _write_dispatch +for idx, name in TYPES.items(): + name = name.replace(" ", "_") + setattr(ImageFileDirectory_v2, f"load_{name}", _load_dispatch[idx][1]) + 
setattr(ImageFileDirectory_v2, f"write_{name}", _write_dispatch[idx]) +del _load_dispatch, _write_dispatch, idx, name + + +# Legacy ImageFileDirectory support. +class ImageFileDirectory_v1(ImageFileDirectory_v2): + """This class represents the **legacy** interface to a TIFF tag directory. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v1() + ifd[key] = 'Some Data' + ifd.tagtype[key] = TiffTags.ASCII + print(ifd[key]) + ('Some Data',) + + Also contains a dictionary of tag types as read from the tiff image file, + :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`. + + Values are returned as a tuple. + + .. deprecated:: 3.0.0 + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._legacy_api = True + + tags = property(lambda self: self._tags_v1) + tagdata = property(lambda self: self._tagdata) + + # defined in ImageFileDirectory_v2 + tagtype: dict[int, int] + """Dictionary of tag types""" + + @classmethod + def from_v2(cls, original): + """Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + + """ + + ifd = cls(prefix=original.prefix) + ifd._tagdata = original._tagdata + ifd.tagtype = original.tagtype + ifd.next = original.next # an indicator for multipage tiffs + return ifd + + def to_v2(self) -> ImageFileDirectory_v2: + """Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + + """ + + ifd = ImageFileDirectory_v2(prefix=self.prefix) + ifd._tagdata = dict(self._tagdata) + ifd.tagtype = dict(self.tagtype) + ifd._tags_v2 = dict(self._tags_v2) + return ifd + + def __contains__(self, tag): + return tag in self._tags_v1 or tag in self._tagdata + + def __len__(self) -> int: + return len(set(self._tagdata) | set(self._tags_v1)) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v1)) + + def __setitem__(self, tag, value): + for legacy_api in (False, True): + self._setitem(tag, value, legacy_api) + + def __getitem__(self, tag): + if tag not in self._tags_v1: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + for legacy in (False, True): + self._setitem(tag, handler(self, data, legacy), legacy) + val = self._tags_v1[tag] + if not isinstance(val, (tuple, bytes)): + val = (val,) + return val + + +# undone -- switch this pointer when IFD_LEGACY_API == False +ImageFileDirectory = ImageFileDirectory_v1 + + +## +# Image plugin for TIFF files. 
+ + +class TiffImageFile(ImageFile.ImageFile): + format = "TIFF" + format_description = "Adobe TIFF" + _close_exclusive_fp_after_loading = False + + def __init__(self, fp=None, filename=None): + self.tag_v2 = None + """ Image file directory (tag dictionary) """ + + self.tag = None + """ Legacy tag entries """ + + super().__init__(fp, filename) + + def _open(self) -> None: + """Open the first image in a TIFF file""" + + # Header + ifh = self.fp.read(8) + if ifh[2] == 43: + ifh += self.fp.read(8) + + self.tag_v2 = ImageFileDirectory_v2(ifh) + + # legacy IFD entries will be filled in later + self.ifd = None + + # setup frame pointers + self.__first = self.__next = self.tag_v2.next + self.__frame = -1 + self._fp = self.fp + self._frame_pos: list[int] = [] + self._n_frames: int | None = None + + logger.debug("*** TiffImageFile._open ***") + logger.debug("- __first: %s", self.__first) + logger.debug("- ifh: %s", repr(ifh)) # Use repr to avoid str(bytes) + + # and load the first frame + self._seek(0) + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + self._seek(len(self._frame_pos)) + while self._n_frames is None: + self._seek(self.tell() + 1) + self.seek(current) + return self._n_frames + + def seek(self, frame: int) -> None: + """Select a given frame as current image""" + if not self._seek_check(frame): + return + self._seek(frame) + # Create a new core image object on second and + # subsequent frames in the image. Image may be + # different size/mode. + Image._decompression_bomb_check(self.size) + self.im = Image.core.new(self.mode, self.size) + + def _seek(self, frame: int) -> None: + self.fp = self._fp + + # reset buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + self.fp.tell() + + while len(self._frame_pos) <= frame: + if not self.__next: + msg = "no more images in TIFF file" + raise EOFError(msg) + logger.debug( + "Seeking to frame %s, on frame %s, __next %s, location: %s", + frame, + self.__frame, + self.__next, + self.fp.tell(), + ) + if self.__next >= 2**63: + msg = "Unable to seek to frame" + raise ValueError(msg) + self.fp.seek(self.__next) + self._frame_pos.append(self.__next) + logger.debug("Loading tags, location: %s", self.fp.tell()) + self.tag_v2.load(self.fp) + if self.tag_v2.next in self._frame_pos: + # This IFD has already been processed + # Declare this to be the end of the image + self.__next = 0 + else: + self.__next = self.tag_v2.next + if self.__next == 0: + self._n_frames = frame + 1 + if len(self._frame_pos) == 1: + self.is_animated = self.__next != 0 + self.__frame += 1 + self.fp.seek(self._frame_pos[frame]) + self.tag_v2.load(self.fp) + if XMP in self.tag_v2: + self.info["xmp"] = self.tag_v2[XMP] + elif "xmp" in self.info: + del self.info["xmp"] + self._reload_exif() + # fill the legacy tag/ifd entries + self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2) + self.__frame = frame + self._setup() + + def tell(self) -> int: + """Return the current frame number""" + return self.__frame + + def get_photoshop_blocks(self): + """ + Returns a dictionary of Photoshop "Image Resource Blocks". + The keys are the image resource ID. For more information, see + https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577409_pgfId-1037727 + + :returns: Photoshop "Image Resource Blocks" in a dictionary. 
+ """ + blocks = {} + val = self.tag_v2.get(ExifTags.Base.ImageResources) + if val: + while val[:4] == b"8BIM": + id = i16(val[4:6]) + n = math.ceil((val[6] + 1) / 2) * 2 + size = i32(val[6 + n : 10 + n]) + data = val[10 + n : 10 + n + size] + blocks[id] = {"data": data} + + val = val[math.ceil((10 + n + size) / 2) * 2 :] + return blocks + + def load(self): + if self.tile and self.use_load_libtiff: + return self._load_libtiff() + return super().load() + + def load_end(self) -> None: + # allow closing if we're on the first frame, there's no next + # This is the ImageFile.load path only, libtiff specific below. + if not self.is_animated: + self._close_exclusive_fp_after_loading = True + + # reset buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + self.fp.tell() + + # load IFD data from fp before it is closed + exif = self.getexif() + for key in TiffTags.TAGS_V2_GROUPS: + if key not in exif: + continue + exif.get_ifd(key) + + ImageOps.exif_transpose(self, in_place=True) + if ExifTags.Base.Orientation in self.tag_v2: + del self.tag_v2[ExifTags.Base.Orientation] + + def _load_libtiff(self): + """Overload method triggered when we detect a compressed tiff + Calls out to libtiff""" + + Image.Image.load(self) + + self.load_prepare() + + if not len(self.tile) == 1: + msg = "Not exactly one tile" + raise OSError(msg) + + # (self._compression, (extents tuple), + # 0, (rawmode, self._compression, fp)) + extents = self.tile[0][1] + args = list(self.tile[0][3]) + + # To be nice on memory footprint, if there's a + # file descriptor, use that instead of reading + # into a string in python. + try: + fp = hasattr(self.fp, "fileno") and self.fp.fileno() + # flush the file descriptor, prevents error on pypy 2.4+ + # should also eliminate the need for fp.tell + # in _seek + if hasattr(self.fp, "flush"): + self.fp.flush() + except OSError: + # io.BytesIO have a fileno, but returns an OSError if + # it doesn't use a file descriptor. + fp = False + + if fp: + args[2] = fp + + decoder = Image._getdecoder( + self.mode, "libtiff", tuple(args), self.decoderconfig + ) + try: + decoder.setimage(self.im, extents) + except ValueError as e: + msg = "Couldn't set the image" + raise OSError(msg) from e + + close_self_fp = self._exclusive_fp and not self.is_animated + if hasattr(self.fp, "getvalue"): + # We've got a stringio like thing passed in. Yay for all in memory. + # The decoder needs the entire file in one shot, so there's not + # a lot we can do here other than give it the entire file. + # unless we could do something like get the address of the + # underlying string for stringio. + # + # Rearranging for supporting byteio items, since they have a fileno + # that returns an OSError if there's no underlying fp. Easier to + # deal with here by reordering. + logger.debug("have getvalue. just sending in a string from getvalue") + n, err = decoder.decode(self.fp.getvalue()) + elif fp: + # we've got a actual file on disk, pass in the fp. + logger.debug("have fileno, calling fileno version of the decoder.") + if not close_self_fp: + self.fp.seek(0) + # 4 bytes, otherwise the trace might error out + n, err = decoder.decode(b"fpfp") + else: + # we have something else. + logger.debug("don't have fileno or getvalue. just reading") + self.fp.seek(0) + # UNDONE -- so much for that buffer size thing. 
+ n, err = decoder.decode(self.fp.read()) + + self.tile = [] + self.readonly = 0 + + self.load_end() + + if close_self_fp: + self.fp.close() + self.fp = None # might be shared + + if err < 0: + raise OSError(err) + + return Image.Image.load(self) + + def _setup(self): + """Setup this image object based on current tags""" + + if 0xBC01 in self.tag_v2: + msg = "Windows Media Photo files not yet supported" + raise OSError(msg) + + # extract relevant tags + self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)] + self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1) + + # photometric is a required tag, but not everyone is reading + # the specification + photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0) + + # old style jpeg compression images most certainly are YCbCr + if self._compression == "tiff_jpeg": + photo = 6 + + fillorder = self.tag_v2.get(FILLORDER, 1) + + logger.debug("*** Summary ***") + logger.debug("- compression: %s", self._compression) + logger.debug("- photometric_interpretation: %s", photo) + logger.debug("- planar_configuration: %s", self._planar_configuration) + logger.debug("- fill_order: %s", fillorder) + logger.debug("- YCbCr subsampling: %s", self.tag.get(YCBCRSUBSAMPLING)) + + # size + xsize = int(self.tag_v2.get(IMAGEWIDTH)) + ysize = int(self.tag_v2.get(IMAGELENGTH)) + self._size = xsize, ysize + + logger.debug("- size: %s", self.size) + + sample_format = self.tag_v2.get(SAMPLEFORMAT, (1,)) + if len(sample_format) > 1 and max(sample_format) == min(sample_format) == 1: + # SAMPLEFORMAT is properly per band, so an RGB image will + # be (1,1,1). But, we don't support per band pixel types, + # and anything more than one band is a uint8. So, just + # take the first element. Revisit this if adding support + # for more exotic images. + sample_format = (1,) + + bps_tuple = self.tag_v2.get(BITSPERSAMPLE, (1,)) + extra_tuple = self.tag_v2.get(EXTRASAMPLES, ()) + if photo in (2, 6, 8): # RGB, YCbCr, LAB + bps_count = 3 + elif photo == 5: # CMYK + bps_count = 4 + else: + bps_count = 1 + bps_count += len(extra_tuple) + bps_actual_count = len(bps_tuple) + samples_per_pixel = self.tag_v2.get( + SAMPLESPERPIXEL, + 3 if self._compression == "tiff_jpeg" and photo in (2, 6) else 1, + ) + + if samples_per_pixel > MAX_SAMPLESPERPIXEL: + # DOS check, samples_per_pixel can be a Long, and we extend the tuple below + logger.error( + "More samples per pixel than can be decoded: %s", samples_per_pixel + ) + msg = "Invalid value for samples per pixel" + raise SyntaxError(msg) + + if samples_per_pixel < bps_actual_count: + # If a file has more values in bps_tuple than expected, + # remove the excess. + bps_tuple = bps_tuple[:samples_per_pixel] + elif samples_per_pixel > bps_actual_count and bps_actual_count == 1: + # If a file has only one value in bps_tuple, when it should have more, + # presume it is the same number of bits for all of the samples. 
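+            # e.g. a file declaring BitsPerSample == (8,) with three samples
+            # per pixel is treated as (8, 8, 8)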
+ bps_tuple = bps_tuple * samples_per_pixel + + if len(bps_tuple) != samples_per_pixel: + msg = "unknown data organization" + raise SyntaxError(msg) + + # mode: check photometric interpretation and bits per pixel + key = ( + self.tag_v2.prefix, + photo, + sample_format, + fillorder, + bps_tuple, + extra_tuple, + ) + logger.debug("format key: %s", key) + try: + self._mode, rawmode = OPEN_INFO[key] + except KeyError as e: + logger.debug("- unsupported format") + msg = "unknown pixel mode" + raise SyntaxError(msg) from e + + logger.debug("- raw mode: %s", rawmode) + logger.debug("- pil mode: %s", self.mode) + + self.info["compression"] = self._compression + + xres = self.tag_v2.get(X_RESOLUTION, 1) + yres = self.tag_v2.get(Y_RESOLUTION, 1) + + if xres and yres: + resunit = self.tag_v2.get(RESOLUTION_UNIT) + if resunit == 2: # dots per inch + self.info["dpi"] = (xres, yres) + elif resunit == 3: # dots per centimeter. convert to dpi + self.info["dpi"] = (xres * 2.54, yres * 2.54) + elif resunit is None: # used to default to 1, but now 2) + self.info["dpi"] = (xres, yres) + # For backward compatibility, + # we also preserve the old behavior + self.info["resolution"] = xres, yres + else: # No absolute unit of measurement + self.info["resolution"] = xres, yres + + # build tile descriptors + x = y = layer = 0 + self.tile = [] + self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw" + if self.use_load_libtiff: + # Decoder expects entire file as one tile. + # There's a buffer size limit in load (64k) + # so large g4 images will fail if we use that + # function. + # + # Setup the one tile for the whole image, then + # use the _load_libtiff function. + + # libtiff handles the fillmode for us, so 1;IR should + # actually be 1;I. Including the R double reverses the + # bits, so stripes of the image are reversed. See + # https://github.com/python-pillow/Pillow/issues/279 + if fillorder == 2: + # Replace fillorder with fillorder=1 + key = key[:3] + (1,) + key[4:] + logger.debug("format key: %s", key) + # this should always work, since all the + # fillorder==2 modes have a corresponding + # fillorder=1 mode + self._mode, rawmode = OPEN_INFO[key] + # libtiff always returns the bytes in native order. + # we're expecting image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. 
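+            # e.g. both "I;16B" and "I;16L" are mapped to "I;16N" below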
+ if rawmode == "I;16": + rawmode = "I;16N" + if ";16B" in rawmode: + rawmode = rawmode.replace(";16B", ";16N") + if ";16L" in rawmode: + rawmode = rawmode.replace(";16L", ";16N") + + # YCbCr images with new jpeg compression with pixels in one plane + # unpacked straight into RGB values + if ( + photo == 6 + and self._compression == "jpeg" + and self._planar_configuration == 1 + ): + rawmode = "RGB" + + # Offset in the tile tuple is 0, we go from 0,0 to + # w,h, and we only do this once -- eds + a = (rawmode, self._compression, False, self.tag_v2.offset) + self.tile.append(("libtiff", (0, 0, xsize, ysize), 0, a)) + + elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2: + # striped image + if STRIPOFFSETS in self.tag_v2: + offsets = self.tag_v2[STRIPOFFSETS] + h = self.tag_v2.get(ROWSPERSTRIP, ysize) + w = self.size[0] + else: + # tiled image + offsets = self.tag_v2[TILEOFFSETS] + w = self.tag_v2.get(TILEWIDTH) + h = self.tag_v2.get(TILELENGTH) + + for offset in offsets: + if x + w > xsize: + stride = w * sum(bps_tuple) / 8 # bytes per line + else: + stride = 0 + + tile_rawmode = rawmode + if self._planar_configuration == 2: + # each band on it's own layer + tile_rawmode = rawmode[layer] + # adjust stride width accordingly + stride /= bps_count + + a = (tile_rawmode, int(stride), 1) + self.tile.append( + ( + self._compression, + (x, y, min(x + w, xsize), min(y + h, ysize)), + offset, + a, + ) + ) + x = x + w + if x >= self.size[0]: + x, y = 0, y + h + if y >= self.size[1]: + x = y = 0 + layer += 1 + else: + logger.debug("- unsupported data organization") + msg = "unknown data organization" + raise SyntaxError(msg) + + # Fix up info. + if ICCPROFILE in self.tag_v2: + self.info["icc_profile"] = self.tag_v2[ICCPROFILE] + + # fixup palette descriptor + + if self.mode in ["P", "PA"]: + palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]] + self.palette = ImagePalette.raw("RGB;L", b"".join(palette)) + + +# +# -------------------------------------------------------------------- +# Write TIFF files + +# little endian is default except for image modes with +# explicit big endian byte-order + +SAVE_INFO = { + # mode => rawmode, byteorder, photometrics, + # sampleformat, bitspersample, extra + "1": ("1", II, 1, 1, (1,), None), + "L": ("L", II, 1, 1, (8,), None), + "LA": ("LA", II, 1, 1, (8, 8), 2), + "P": ("P", II, 3, 1, (8,), None), + "PA": ("PA", II, 3, 1, (8, 8), 2), + "I": ("I;32S", II, 1, 2, (32,), None), + "I;16": ("I;16", II, 1, 1, (16,), None), + "I;16S": ("I;16S", II, 1, 2, (16,), None), + "F": ("F;32F", II, 1, 3, (32,), None), + "RGB": ("RGB", II, 2, 1, (8, 8, 8), None), + "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0), + "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2), + "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None), + "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None), + "LAB": ("LAB", II, 8, 1, (8, 8, 8), None), + "I;32BS": ("I;32BS", MM, 1, 2, (32,), None), + "I;16B": ("I;16B", MM, 1, 1, (16,), None), + "I;16BS": ("I;16BS", MM, 1, 2, (16,), None), + "F;32BF": ("F;32BF", MM, 1, 3, (32,), None), +} + + +def _save(im, fp, filename): + try: + rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode] + except KeyError as e: + msg = f"cannot write mode {im.mode} as TIFF" + raise OSError(msg) from e + + ifd = ImageFileDirectory_v2(prefix=prefix) + + encoderinfo = im.encoderinfo + encoderconfig = im.encoderconfig + try: + compression = encoderinfo["compression"] + except KeyError: + compression = im.info.get("compression") + if isinstance(compression, int): + # compression value may be 
from BMP. Ignore it + compression = None + if compression is None: + compression = "raw" + elif compression == "tiff_jpeg": + # OJPEG is obsolete, so use new-style JPEG compression instead + compression = "jpeg" + elif compression == "tiff_deflate": + compression = "tiff_adobe_deflate" + + libtiff = WRITE_LIBTIFF or compression != "raw" + + # required for color libtiff images + ifd[PLANAR_CONFIGURATION] = 1 + + ifd[IMAGEWIDTH] = im.size[0] + ifd[IMAGELENGTH] = im.size[1] + + # write any arbitrary tags passed in as an ImageFileDirectory + if "tiffinfo" in encoderinfo: + info = encoderinfo["tiffinfo"] + elif "exif" in encoderinfo: + info = encoderinfo["exif"] + if isinstance(info, bytes): + exif = Image.Exif() + exif.load(info) + info = exif + else: + info = {} + logger.debug("Tiffinfo Keys: %s", list(info)) + if isinstance(info, ImageFileDirectory_v1): + info = info.to_v2() + for key in info: + if isinstance(info, Image.Exif) and key in TiffTags.TAGS_V2_GROUPS: + ifd[key] = info.get_ifd(key) + else: + ifd[key] = info.get(key) + try: + ifd.tagtype[key] = info.tagtype[key] + except Exception: + pass # might not be an IFD. Might not have populated type + + legacy_ifd = {} + if hasattr(im, "tag"): + legacy_ifd = im.tag.to_v2() + + supplied_tags = {**legacy_ifd, **getattr(im, "tag_v2", {})} + for tag in ( + # IFD offset that may not be correct in the saved image + EXIFIFD, + # Determined by the image format and should not be copied from legacy_ifd. + SAMPLEFORMAT, + ): + if tag in supplied_tags: + del supplied_tags[tag] + + # additions written by Greg Couch, gregc@cgl.ucsf.edu + # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com + if hasattr(im, "tag_v2"): + # preserve tags from original TIFF image file + for key in ( + RESOLUTION_UNIT, + X_RESOLUTION, + Y_RESOLUTION, + IPTC_NAA_CHUNK, + PHOTOSHOP_CHUNK, + XMP, + ): + if key in im.tag_v2: + if key == IPTC_NAA_CHUNK and im.tag_v2.tagtype[key] not in ( + TiffTags.BYTE, + TiffTags.UNDEFINED, + ): + del supplied_tags[key] + else: + ifd[key] = im.tag_v2[key] + ifd.tagtype[key] = im.tag_v2.tagtype[key] + + # preserve ICC profile (should also work when saving other formats + # which support profiles as TIFF) -- 2008-06-06 Florian Hoech + icc = encoderinfo.get("icc_profile", im.info.get("icc_profile")) + if icc: + ifd[ICCPROFILE] = icc + + for key, name in [ + (IMAGEDESCRIPTION, "description"), + (X_RESOLUTION, "resolution"), + (Y_RESOLUTION, "resolution"), + (X_RESOLUTION, "x_resolution"), + (Y_RESOLUTION, "y_resolution"), + (RESOLUTION_UNIT, "resolution_unit"), + (SOFTWARE, "software"), + (DATE_TIME, "date_time"), + (ARTIST, "artist"), + (COPYRIGHT, "copyright"), + ]: + if name in encoderinfo: + ifd[key] = encoderinfo[name] + + dpi = encoderinfo.get("dpi") + if dpi: + ifd[RESOLUTION_UNIT] = 2 + ifd[X_RESOLUTION] = dpi[0] + ifd[Y_RESOLUTION] = dpi[1] + + if bits != (1,): + ifd[BITSPERSAMPLE] = bits + if len(bits) != 1: + ifd[SAMPLESPERPIXEL] = len(bits) + if extra is not None: + ifd[EXTRASAMPLES] = extra + if format != 1: + ifd[SAMPLEFORMAT] = format + + if PHOTOMETRIC_INTERPRETATION not in ifd: + ifd[PHOTOMETRIC_INTERPRETATION] = photo + elif im.mode in ("1", "L") and ifd[PHOTOMETRIC_INTERPRETATION] == 0: + if im.mode == "1": + inverted_im = im.copy() + px = inverted_im.load() + for y in range(inverted_im.height): + for x in range(inverted_im.width): + px[x, y] = 0 if px[x, y] == 255 else 255 + im = inverted_im + else: + im = ImageOps.invert(im) + + if im.mode in ["P", "PA"]: + lut = im.im.getpalette("RGB", "RGB;L") + colormap = [] 
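+        # The TIFF ColorMap tag expects all red values first, then all greens,
+        # then all blues, each scaled from 8 to 16 bits and padded to 256
+        # entries per channel.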
+ colors = len(lut) // 3 + for i in range(3): + colormap += [v * 256 for v in lut[colors * i : colors * (i + 1)]] + colormap += [0] * (256 - colors) + ifd[COLORMAP] = colormap + # data orientation + w, h = ifd[IMAGEWIDTH], ifd[IMAGELENGTH] + stride = len(bits) * ((w * bits[0] + 7) // 8) + if ROWSPERSTRIP not in ifd: + # aim for given strip size (64 KB by default) when using libtiff writer + if libtiff: + im_strip_size = encoderinfo.get("strip_size", STRIP_SIZE) + rows_per_strip = 1 if stride == 0 else min(im_strip_size // stride, h) + # JPEG encoder expects multiple of 8 rows + if compression == "jpeg": + rows_per_strip = min(((rows_per_strip + 7) // 8) * 8, h) + else: + rows_per_strip = h + if rows_per_strip == 0: + rows_per_strip = 1 + ifd[ROWSPERSTRIP] = rows_per_strip + strip_byte_counts = 1 if stride == 0 else stride * ifd[ROWSPERSTRIP] + strips_per_image = (h + ifd[ROWSPERSTRIP] - 1) // ifd[ROWSPERSTRIP] + if strip_byte_counts >= 2**16: + ifd.tagtype[STRIPBYTECOUNTS] = TiffTags.LONG + ifd[STRIPBYTECOUNTS] = (strip_byte_counts,) * (strips_per_image - 1) + ( + stride * h - strip_byte_counts * (strips_per_image - 1), + ) + ifd[STRIPOFFSETS] = tuple( + range(0, strip_byte_counts * strips_per_image, strip_byte_counts) + ) # this is adjusted by IFD writer + # no compression by default: + ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1) + + if im.mode == "YCbCr": + for tag, value in { + YCBCRSUBSAMPLING: (1, 1), + REFERENCEBLACKWHITE: (0, 255, 128, 255, 128, 255), + }.items(): + ifd.setdefault(tag, value) + + blocklist = [TILEWIDTH, TILELENGTH, TILEOFFSETS, TILEBYTECOUNTS] + if libtiff: + if "quality" in encoderinfo: + quality = encoderinfo["quality"] + if not isinstance(quality, int) or quality < 0 or quality > 100: + msg = "Invalid quality setting" + raise ValueError(msg) + if compression != "jpeg": + msg = "quality setting only supported for 'jpeg' compression" + raise ValueError(msg) + ifd[JPEGQUALITY] = quality + + logger.debug("Saving using libtiff encoder") + logger.debug("Items: %s", sorted(ifd.items())) + _fp = 0 + if hasattr(fp, "fileno"): + try: + fp.seek(0) + _fp = os.dup(fp.fileno()) + except io.UnsupportedOperation: + pass + + # optional types for non core tags + types = {} + # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library + # based on the data in the strip. + # OSUBFILETYPE is deprecated. + # The other tags expect arrays with a certain length (fixed or depending on + # BITSPERSAMPLE, etc), passing arrays with a different length will result in + # segfaults. Block these tags until we add extra validation. + # SUBIFD may also cause a segfault. + blocklist += [ + OSUBFILETYPE, + REFERENCEBLACKWHITE, + STRIPBYTECOUNTS, + STRIPOFFSETS, + TRANSFERFUNCTION, + SUBIFD, + ] + + # bits per sample is a single short in the tiff directory, not a list. + atts = {BITSPERSAMPLE: bits[0]} + # Merge the ones that we have with (optional) more bits from + # the original file, e.g x,y resolution so that we can + # save(load('')) == original file. + for tag, value in itertools.chain(ifd.items(), supplied_tags.items()): + # Libtiff can only process certain core items without adding + # them to the custom dictionary. + # Custom items are supported for int, float, unicode, string and byte + # values. Other types and tuples require a tagtype. 
+ if tag not in TiffTags.LIBTIFF_CORE: + if not getattr(Image.core, "libtiff_support_custom_tags", False): + continue + + if tag in ifd.tagtype: + types[tag] = ifd.tagtype[tag] + elif not (isinstance(value, (int, float, str, bytes))): + continue + else: + type = TiffTags.lookup(tag).type + if type: + types[tag] = type + if tag not in atts and tag not in blocklist: + if isinstance(value, str): + atts[tag] = value.encode("ascii", "replace") + b"\0" + elif isinstance(value, IFDRational): + atts[tag] = float(value) + else: + atts[tag] = value + + if SAMPLEFORMAT in atts and len(atts[SAMPLEFORMAT]) == 1: + atts[SAMPLEFORMAT] = atts[SAMPLEFORMAT][0] + + logger.debug("Converted items: %s", sorted(atts.items())) + + # libtiff always expects the bytes in native order. + # we're storing image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. + if im.mode in ("I;16B", "I;16"): + rawmode = "I;16N" + + # Pass tags as sorted list so that the tags are set in a fixed order. + # This is required by libtiff for some tags. For example, the JPEGQUALITY + # pseudo tag requires that the COMPRESS tag was already set. + tags = list(atts.items()) + tags.sort() + a = (rawmode, compression, _fp, filename, tags, types) + encoder = Image._getencoder(im.mode, "libtiff", a, encoderconfig) + encoder.setimage(im.im, (0, 0) + im.size) + while True: + # undone, change to self.decodermaxblock: + errcode, data = encoder.encode(16 * 1024)[1:] + if not _fp: + fp.write(data) + if errcode: + break + if _fp: + try: + os.close(_fp) + except OSError: + pass + if errcode < 0: + msg = f"encoder error {errcode} when writing image file" + raise OSError(msg) + + else: + for tag in blocklist: + del ifd[tag] + offset = ifd.save(fp) + + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))] + ) + + # -- helper for multi-page save -- + if "_debug_multipage" in encoderinfo: + # just to access o32 and o16 (using correct byte order) + im._debug_multipage = ifd + + +class AppendingTiffWriter: + fieldSizes = [ + 0, # None + 1, # byte + 1, # ascii + 2, # short + 4, # long + 8, # rational + 1, # sbyte + 1, # undefined + 2, # sshort + 4, # slong + 8, # srational + 4, # float + 8, # double + 4, # ifd + 2, # unicode + 4, # complex + 8, # long8 + ] + + Tags = { + 273, # StripOffsets + 288, # FreeOffsets + 324, # TileOffsets + 519, # JPEGQTables + 520, # JPEGDCTables + 521, # JPEGACTables + } + + def __init__(self, fn, new=False): + if hasattr(fn, "read"): + self.f = fn + self.close_fp = False + else: + self.name = fn + self.close_fp = True + try: + self.f = open(fn, "w+b" if new else "r+b") + except OSError: + self.f = open(fn, "w+b") + self.beginning = self.f.tell() + self.setup() + + def setup(self) -> None: + # Reset everything. + self.f.seek(self.beginning, os.SEEK_SET) + + self.whereToWriteNewIFDOffset = None + self.offsetOfNewPage = 0 + + self.IIMM = iimm = self.f.read(4) + if not iimm: + # empty file - first page + self.isFirst = True + return + + self.isFirst = False + if iimm == b"II\x2a\x00": + self.setEndian("<") + elif iimm == b"MM\x00\x2a": + self.setEndian(">") + else: + msg = "Invalid TIFF file header" + raise RuntimeError(msg) + + self.skipIFDs() + self.goToEnd() + + def finalize(self) -> None: + if self.isFirst: + return + + # fix offsets + self.f.seek(self.offsetOfNewPage) + + iimm = self.f.read(4) + if not iimm: + # Make it easy to finish a frame without committing to a new one. 
+ return + + if iimm != self.IIMM: + msg = "IIMM of new page doesn't match IIMM of first page" + raise RuntimeError(msg) + + ifd_offset = self.readLong() + ifd_offset += self.offsetOfNewPage + self.f.seek(self.whereToWriteNewIFDOffset) + self.writeLong(ifd_offset) + self.f.seek(ifd_offset) + self.fixIFD() + + def newFrame(self) -> None: + # Call this to finish a frame. + self.finalize() + self.setup() + + def __enter__(self) -> AppendingTiffWriter: + return self + + def __exit__(self, *args: object) -> None: + if self.close_fp: + self.close() + + def tell(self) -> int: + return self.f.tell() - self.offsetOfNewPage + + def seek(self, offset, whence=io.SEEK_SET): + if whence == os.SEEK_SET: + offset += self.offsetOfNewPage + + self.f.seek(offset, whence) + return self.tell() + + def goToEnd(self) -> None: + self.f.seek(0, os.SEEK_END) + pos = self.f.tell() + + # pad to 16 byte boundary + pad_bytes = 16 - pos % 16 + if 0 < pad_bytes < 16: + self.f.write(bytes(pad_bytes)) + self.offsetOfNewPage = self.f.tell() + + def setEndian(self, endian: str) -> None: + self.endian = endian + self.longFmt = f"{self.endian}L" + self.shortFmt = f"{self.endian}H" + self.tagFormat = f"{self.endian}HHL" + + def skipIFDs(self) -> None: + while True: + ifd_offset = self.readLong() + if ifd_offset == 0: + self.whereToWriteNewIFDOffset = self.f.tell() - 4 + break + + self.f.seek(ifd_offset) + num_tags = self.readShort() + self.f.seek(num_tags * 12, os.SEEK_CUR) + + def write(self, data: bytes) -> int | None: + return self.f.write(data) + + def readShort(self) -> int: + (value,) = struct.unpack(self.shortFmt, self.f.read(2)) + return value + + def readLong(self) -> int: + (value,) = struct.unpack(self.longFmt, self.f.read(4)) + return value + + def rewriteLastShortToLong(self, value: int) -> None: + self.f.seek(-2, os.SEEK_CUR) + bytes_written = self.f.write(struct.pack(self.longFmt, value)) + if bytes_written is not None and bytes_written != 4: + msg = f"wrote only {bytes_written} bytes but wanted 4" + raise RuntimeError(msg) + + def rewriteLastShort(self, value: int) -> None: + self.f.seek(-2, os.SEEK_CUR) + bytes_written = self.f.write(struct.pack(self.shortFmt, value)) + if bytes_written is not None and bytes_written != 2: + msg = f"wrote only {bytes_written} bytes but wanted 2" + raise RuntimeError(msg) + + def rewriteLastLong(self, value: int) -> None: + self.f.seek(-4, os.SEEK_CUR) + bytes_written = self.f.write(struct.pack(self.longFmt, value)) + if bytes_written is not None and bytes_written != 4: + msg = f"wrote only {bytes_written} bytes but wanted 4" + raise RuntimeError(msg) + + def writeShort(self, value: int) -> None: + bytes_written = self.f.write(struct.pack(self.shortFmt, value)) + if bytes_written is not None and bytes_written != 2: + msg = f"wrote only {bytes_written} bytes but wanted 2" + raise RuntimeError(msg) + + def writeLong(self, value: int) -> None: + bytes_written = self.f.write(struct.pack(self.longFmt, value)) + if bytes_written is not None and bytes_written != 4: + msg = f"wrote only {bytes_written} bytes but wanted 4" + raise RuntimeError(msg) + + def close(self) -> None: + self.finalize() + self.f.close() + + def fixIFD(self) -> None: + num_tags = self.readShort() + + for i in range(num_tags): + tag, field_type, count = struct.unpack(self.tagFormat, self.f.read(8)) + + field_size = self.fieldSizes[field_type] + total_size = field_size * count + is_local = total_size <= 4 + offset: int | None + if not is_local: + offset = self.readLong() + self.offsetOfNewPage + 
self.rewriteLastLong(offset) + + if tag in self.Tags: + cur_pos = self.f.tell() + + if is_local: + self.fixOffsets( + count, isShort=(field_size == 2), isLong=(field_size == 4) + ) + self.f.seek(cur_pos + 4) + else: + self.f.seek(offset) + self.fixOffsets( + count, isShort=(field_size == 2), isLong=(field_size == 4) + ) + self.f.seek(cur_pos) + + offset = cur_pos = None + + elif is_local: + # skip the locally stored value that is not an offset + self.f.seek(4, os.SEEK_CUR) + + def fixOffsets( + self, count: int, isShort: bool = False, isLong: bool = False + ) -> None: + if not isShort and not isLong: + msg = "offset is neither short nor long" + raise RuntimeError(msg) + + for i in range(count): + offset = self.readShort() if isShort else self.readLong() + offset += self.offsetOfNewPage + if isShort and offset >= 65536: + # offset is now too large - we must convert shorts to longs + if count != 1: + msg = "not implemented" + raise RuntimeError(msg) # XXX TODO + + # simple case - the offset is just one and therefore it is + # local (not referenced with another offset) + self.rewriteLastShortToLong(offset) + self.f.seek(-10, os.SEEK_CUR) + self.writeShort(TiffTags.LONG) # rewrite the type to LONG + self.f.seek(8, os.SEEK_CUR) + elif isShort: + self.rewriteLastShort(offset) + else: + self.rewriteLastLong(offset) + + +def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + encoderinfo = im.encoderinfo.copy() + encoderconfig = im.encoderconfig + append_images = list(encoderinfo.get("append_images", [])) + if not hasattr(im, "n_frames") and not append_images: + return _save(im, fp, filename) + + cur_idx = im.tell() + try: + with AppendingTiffWriter(fp) as tf: + for ims in [im] + append_images: + ims.encoderinfo = encoderinfo + ims.encoderconfig = encoderconfig + if not hasattr(ims, "n_frames"): + nfr = 1 + else: + nfr = ims.n_frames + + for idx in range(nfr): + ims.seek(idx) + ims.load() + _save(ims, tf, filename) + tf.newFrame() + finally: + im.seek(cur_idx) + + +# +# -------------------------------------------------------------------- +# Register + +Image.register_open(TiffImageFile.format, TiffImageFile, _accept) +Image.register_save(TiffImageFile.format, _save) +Image.register_save_all(TiffImageFile.format, _save_all) + +Image.register_extensions(TiffImageFile.format, [".tif", ".tiff"]) + +Image.register_mime(TiffImageFile.format, "image/tiff") diff --git a/MLPY/Lib/site-packages/PIL/TiffTags.py b/MLPY/Lib/site-packages/PIL/TiffTags.py new file mode 100644 index 0000000000000000000000000000000000000000..8b0d19a13457a0ad964e698bddf64e7266771402 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/TiffTags.py @@ -0,0 +1,555 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF tags +# +# This module provides clear-text names for various well-known +# TIFF tags. the TIFF codec works just fine without it. +# +# Copyright (c) Secret Labs AB 1999. +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known TIFF tags. 
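+#
+# A short illustration (values taken from the tables defined below) of how the
+# lookup() helper in this module resolves tag metadata:
+#
+#     from PIL import TiffTags
+#
+#     TiffTags.lookup(256).name              # "ImageWidth"
+#     TiffTags.lookup(2, group=34853).name   # "GPSLatitude"
+#     TiffTags.lookup(60000).name            # "unknown" for unregistered tags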
+## +from __future__ import annotations + +from typing import NamedTuple + + +class _TagInfo(NamedTuple): + value: int | None + name: str + type: int | None + length: int | None + enum: dict[str, int] + + +class TagInfo(_TagInfo): + __slots__: list[str] = [] + + def __new__(cls, value=None, name="unknown", type=None, length=None, enum=None): + return super().__new__(cls, value, name, type, length, enum or {}) + + def cvt_enum(self, value): + # Using get will call hash(value), which can be expensive + # for some types (e.g. Fraction). Since self.enum is rarely + # used, it's usually better to test it first. + return self.enum.get(value, value) if self.enum else value + + +def lookup(tag, group=None): + """ + :param tag: Integer tag number + :param group: Which :py:data:`~PIL.TiffTags.TAGS_V2_GROUPS` to look in + + .. versionadded:: 8.3.0 + + :returns: Taginfo namedtuple, From the ``TAGS_V2`` info if possible, + otherwise just populating the value and name from ``TAGS``. + If the tag is not recognized, "unknown" is returned for the name + + """ + + if group is not None: + info = TAGS_V2_GROUPS[group].get(tag) if group in TAGS_V2_GROUPS else None + else: + info = TAGS_V2.get(tag) + return info or TagInfo(tag, TAGS.get(tag, "unknown")) + + +## +# Map tag numbers to tag info. +# +# id: (Name, Type, Length[, enum_values]) +# +# The length here differs from the length in the tiff spec. For +# numbers, the tiff spec is for the number of fields returned. We +# agree here. For string-like types, the tiff spec uses the length of +# field in bytes. In Pillow, we are using the number of expected +# fields, in general 1 for string-like types. + + +BYTE = 1 +ASCII = 2 +SHORT = 3 +LONG = 4 +RATIONAL = 5 +SIGNED_BYTE = 6 +UNDEFINED = 7 +SIGNED_SHORT = 8 +SIGNED_LONG = 9 +SIGNED_RATIONAL = 10 +FLOAT = 11 +DOUBLE = 12 +IFD = 13 +LONG8 = 16 + +_tags_v2 = { + 254: ("NewSubfileType", LONG, 1), + 255: ("SubfileType", SHORT, 1), + 256: ("ImageWidth", LONG, 1), + 257: ("ImageLength", LONG, 1), + 258: ("BitsPerSample", SHORT, 0), + 259: ( + "Compression", + SHORT, + 1, + { + "Uncompressed": 1, + "CCITT 1d": 2, + "Group 3 Fax": 3, + "Group 4 Fax": 4, + "LZW": 5, + "JPEG": 6, + "PackBits": 32773, + }, + ), + 262: ( + "PhotometricInterpretation", + SHORT, + 1, + { + "WhiteIsZero": 0, + "BlackIsZero": 1, + "RGB": 2, + "RGB Palette": 3, + "Transparency Mask": 4, + "CMYK": 5, + "YCbCr": 6, + "CieLAB": 8, + "CFA": 32803, # TIFF/EP, Adobe DNG + "LinearRaw": 32892, # Adobe DNG + }, + ), + 263: ("Threshholding", SHORT, 1), + 264: ("CellWidth", SHORT, 1), + 265: ("CellLength", SHORT, 1), + 266: ("FillOrder", SHORT, 1), + 269: ("DocumentName", ASCII, 1), + 270: ("ImageDescription", ASCII, 1), + 271: ("Make", ASCII, 1), + 272: ("Model", ASCII, 1), + 273: ("StripOffsets", LONG, 0), + 274: ("Orientation", SHORT, 1), + 277: ("SamplesPerPixel", SHORT, 1), + 278: ("RowsPerStrip", LONG, 1), + 279: ("StripByteCounts", LONG, 0), + 280: ("MinSampleValue", SHORT, 0), + 281: ("MaxSampleValue", SHORT, 0), + 282: ("XResolution", RATIONAL, 1), + 283: ("YResolution", RATIONAL, 1), + 284: ("PlanarConfiguration", SHORT, 1, {"Contiguous": 1, "Separate": 2}), + 285: ("PageName", ASCII, 1), + 286: ("XPosition", RATIONAL, 1), + 287: ("YPosition", RATIONAL, 1), + 288: ("FreeOffsets", LONG, 1), + 289: ("FreeByteCounts", LONG, 1), + 290: ("GrayResponseUnit", SHORT, 1), + 291: ("GrayResponseCurve", SHORT, 0), + 292: ("T4Options", LONG, 1), + 293: ("T6Options", LONG, 1), + 296: ("ResolutionUnit", SHORT, 1, {"none": 1, "inch": 2, "cm": 3}), + 297: 
("PageNumber", SHORT, 2), + 301: ("TransferFunction", SHORT, 0), + 305: ("Software", ASCII, 1), + 306: ("DateTime", ASCII, 1), + 315: ("Artist", ASCII, 1), + 316: ("HostComputer", ASCII, 1), + 317: ("Predictor", SHORT, 1, {"none": 1, "Horizontal Differencing": 2}), + 318: ("WhitePoint", RATIONAL, 2), + 319: ("PrimaryChromaticities", RATIONAL, 6), + 320: ("ColorMap", SHORT, 0), + 321: ("HalftoneHints", SHORT, 2), + 322: ("TileWidth", LONG, 1), + 323: ("TileLength", LONG, 1), + 324: ("TileOffsets", LONG, 0), + 325: ("TileByteCounts", LONG, 0), + 330: ("SubIFDs", LONG, 0), + 332: ("InkSet", SHORT, 1), + 333: ("InkNames", ASCII, 1), + 334: ("NumberOfInks", SHORT, 1), + 336: ("DotRange", SHORT, 0), + 337: ("TargetPrinter", ASCII, 1), + 338: ("ExtraSamples", SHORT, 0), + 339: ("SampleFormat", SHORT, 0), + 340: ("SMinSampleValue", DOUBLE, 0), + 341: ("SMaxSampleValue", DOUBLE, 0), + 342: ("TransferRange", SHORT, 6), + 347: ("JPEGTables", UNDEFINED, 1), + # obsolete JPEG tags + 512: ("JPEGProc", SHORT, 1), + 513: ("JPEGInterchangeFormat", LONG, 1), + 514: ("JPEGInterchangeFormatLength", LONG, 1), + 515: ("JPEGRestartInterval", SHORT, 1), + 517: ("JPEGLosslessPredictors", SHORT, 0), + 518: ("JPEGPointTransforms", SHORT, 0), + 519: ("JPEGQTables", LONG, 0), + 520: ("JPEGDCTables", LONG, 0), + 521: ("JPEGACTables", LONG, 0), + 529: ("YCbCrCoefficients", RATIONAL, 3), + 530: ("YCbCrSubSampling", SHORT, 2), + 531: ("YCbCrPositioning", SHORT, 1), + 532: ("ReferenceBlackWhite", RATIONAL, 6), + 700: ("XMP", BYTE, 0), + 33432: ("Copyright", ASCII, 1), + 33723: ("IptcNaaInfo", UNDEFINED, 1), + 34377: ("PhotoshopInfo", BYTE, 0), + # FIXME add more tags here + 34665: ("ExifIFD", LONG, 1), + 34675: ("ICCProfile", UNDEFINED, 1), + 34853: ("GPSInfoIFD", LONG, 1), + 36864: ("ExifVersion", UNDEFINED, 1), + 37724: ("ImageSourceData", UNDEFINED, 1), + 40965: ("InteroperabilityIFD", LONG, 1), + 41730: ("CFAPattern", UNDEFINED, 1), + # MPInfo + 45056: ("MPFVersion", UNDEFINED, 1), + 45057: ("NumberOfImages", LONG, 1), + 45058: ("MPEntry", UNDEFINED, 1), + 45059: ("ImageUIDList", UNDEFINED, 0), # UNDONE, check + 45060: ("TotalFrames", LONG, 1), + 45313: ("MPIndividualNum", LONG, 1), + 45569: ("PanOrientation", LONG, 1), + 45570: ("PanOverlap_H", RATIONAL, 1), + 45571: ("PanOverlap_V", RATIONAL, 1), + 45572: ("BaseViewpointNum", LONG, 1), + 45573: ("ConvergenceAngle", SIGNED_RATIONAL, 1), + 45574: ("BaselineLength", RATIONAL, 1), + 45575: ("VerticalDivergence", SIGNED_RATIONAL, 1), + 45576: ("AxisDistance_X", SIGNED_RATIONAL, 1), + 45577: ("AxisDistance_Y", SIGNED_RATIONAL, 1), + 45578: ("AxisDistance_Z", SIGNED_RATIONAL, 1), + 45579: ("YawAngle", SIGNED_RATIONAL, 1), + 45580: ("PitchAngle", SIGNED_RATIONAL, 1), + 45581: ("RollAngle", SIGNED_RATIONAL, 1), + 40960: ("FlashPixVersion", UNDEFINED, 1), + 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}), + 50780: ("BestQualityScale", RATIONAL, 1), + 50838: ("ImageJMetaDataByteCounts", LONG, 0), # Can be more than one + 50839: ("ImageJMetaData", UNDEFINED, 1), # see Issue #2006 +} +TAGS_V2_GROUPS = { + # ExifIFD + 34665: { + 36864: ("ExifVersion", UNDEFINED, 1), + 40960: ("FlashPixVersion", UNDEFINED, 1), + 40965: ("InteroperabilityIFD", LONG, 1), + 41730: ("CFAPattern", UNDEFINED, 1), + }, + # GPSInfoIFD + 34853: { + 0: ("GPSVersionID", BYTE, 4), + 1: ("GPSLatitudeRef", ASCII, 2), + 2: ("GPSLatitude", RATIONAL, 3), + 3: ("GPSLongitudeRef", ASCII, 2), + 4: ("GPSLongitude", RATIONAL, 3), + 5: ("GPSAltitudeRef", BYTE, 1), + 6: ("GPSAltitude", RATIONAL, 1), + 7: 
("GPSTimeStamp", RATIONAL, 3), + 8: ("GPSSatellites", ASCII, 0), + 9: ("GPSStatus", ASCII, 2), + 10: ("GPSMeasureMode", ASCII, 2), + 11: ("GPSDOP", RATIONAL, 1), + 12: ("GPSSpeedRef", ASCII, 2), + 13: ("GPSSpeed", RATIONAL, 1), + 14: ("GPSTrackRef", ASCII, 2), + 15: ("GPSTrack", RATIONAL, 1), + 16: ("GPSImgDirectionRef", ASCII, 2), + 17: ("GPSImgDirection", RATIONAL, 1), + 18: ("GPSMapDatum", ASCII, 0), + 19: ("GPSDestLatitudeRef", ASCII, 2), + 20: ("GPSDestLatitude", RATIONAL, 3), + 21: ("GPSDestLongitudeRef", ASCII, 2), + 22: ("GPSDestLongitude", RATIONAL, 3), + 23: ("GPSDestBearingRef", ASCII, 2), + 24: ("GPSDestBearing", RATIONAL, 1), + 25: ("GPSDestDistanceRef", ASCII, 2), + 26: ("GPSDestDistance", RATIONAL, 1), + 27: ("GPSProcessingMethod", UNDEFINED, 0), + 28: ("GPSAreaInformation", UNDEFINED, 0), + 29: ("GPSDateStamp", ASCII, 11), + 30: ("GPSDifferential", SHORT, 1), + }, + # InteroperabilityIFD + 40965: {1: ("InteropIndex", ASCII, 1), 2: ("InteropVersion", UNDEFINED, 1)}, +} + +# Legacy Tags structure +# these tags aren't included above, but were in the previous versions +TAGS = { + 347: "JPEGTables", + 700: "XMP", + # Additional Exif Info + 32932: "Wang Annotation", + 33434: "ExposureTime", + 33437: "FNumber", + 33445: "MD FileTag", + 33446: "MD ScalePixel", + 33447: "MD ColorTable", + 33448: "MD LabName", + 33449: "MD SampleInfo", + 33450: "MD PrepDate", + 33451: "MD PrepTime", + 33452: "MD FileUnits", + 33550: "ModelPixelScaleTag", + 33723: "IptcNaaInfo", + 33918: "INGR Packet Data Tag", + 33919: "INGR Flag Registers", + 33920: "IrasB Transformation Matrix", + 33922: "ModelTiepointTag", + 34264: "ModelTransformationTag", + 34377: "PhotoshopInfo", + 34735: "GeoKeyDirectoryTag", + 34736: "GeoDoubleParamsTag", + 34737: "GeoAsciiParamsTag", + 34850: "ExposureProgram", + 34852: "SpectralSensitivity", + 34855: "ISOSpeedRatings", + 34856: "OECF", + 34864: "SensitivityType", + 34865: "StandardOutputSensitivity", + 34866: "RecommendedExposureIndex", + 34867: "ISOSpeed", + 34868: "ISOSpeedLatitudeyyy", + 34869: "ISOSpeedLatitudezzz", + 34908: "HylaFAX FaxRecvParams", + 34909: "HylaFAX FaxSubAddress", + 34910: "HylaFAX FaxRecvTime", + 36864: "ExifVersion", + 36867: "DateTimeOriginal", + 36868: "DateTimeDigitized", + 37121: "ComponentsConfiguration", + 37122: "CompressedBitsPerPixel", + 37724: "ImageSourceData", + 37377: "ShutterSpeedValue", + 37378: "ApertureValue", + 37379: "BrightnessValue", + 37380: "ExposureBiasValue", + 37381: "MaxApertureValue", + 37382: "SubjectDistance", + 37383: "MeteringMode", + 37384: "LightSource", + 37385: "Flash", + 37386: "FocalLength", + 37396: "SubjectArea", + 37500: "MakerNote", + 37510: "UserComment", + 37520: "SubSec", + 37521: "SubSecTimeOriginal", + 37522: "SubsecTimeDigitized", + 40960: "FlashPixVersion", + 40961: "ColorSpace", + 40962: "PixelXDimension", + 40963: "PixelYDimension", + 40964: "RelatedSoundFile", + 40965: "InteroperabilityIFD", + 41483: "FlashEnergy", + 41484: "SpatialFrequencyResponse", + 41486: "FocalPlaneXResolution", + 41487: "FocalPlaneYResolution", + 41488: "FocalPlaneResolutionUnit", + 41492: "SubjectLocation", + 41493: "ExposureIndex", + 41495: "SensingMethod", + 41728: "FileSource", + 41729: "SceneType", + 41730: "CFAPattern", + 41985: "CustomRendered", + 41986: "ExposureMode", + 41987: "WhiteBalance", + 41988: "DigitalZoomRatio", + 41989: "FocalLengthIn35mmFilm", + 41990: "SceneCaptureType", + 41991: "GainControl", + 41992: "Contrast", + 41993: "Saturation", + 41994: "Sharpness", + 41995: "DeviceSettingDescription", + 41996: 
"SubjectDistanceRange", + 42016: "ImageUniqueID", + 42032: "CameraOwnerName", + 42033: "BodySerialNumber", + 42034: "LensSpecification", + 42035: "LensMake", + 42036: "LensModel", + 42037: "LensSerialNumber", + 42112: "GDAL_METADATA", + 42113: "GDAL_NODATA", + 42240: "Gamma", + 50215: "Oce Scanjob Description", + 50216: "Oce Application Selector", + 50217: "Oce Identification Number", + 50218: "Oce ImageLogic Characteristics", + # Adobe DNG + 50706: "DNGVersion", + 50707: "DNGBackwardVersion", + 50708: "UniqueCameraModel", + 50709: "LocalizedCameraModel", + 50710: "CFAPlaneColor", + 50711: "CFALayout", + 50712: "LinearizationTable", + 50713: "BlackLevelRepeatDim", + 50714: "BlackLevel", + 50715: "BlackLevelDeltaH", + 50716: "BlackLevelDeltaV", + 50717: "WhiteLevel", + 50718: "DefaultScale", + 50719: "DefaultCropOrigin", + 50720: "DefaultCropSize", + 50721: "ColorMatrix1", + 50722: "ColorMatrix2", + 50723: "CameraCalibration1", + 50724: "CameraCalibration2", + 50725: "ReductionMatrix1", + 50726: "ReductionMatrix2", + 50727: "AnalogBalance", + 50728: "AsShotNeutral", + 50729: "AsShotWhiteXY", + 50730: "BaselineExposure", + 50731: "BaselineNoise", + 50732: "BaselineSharpness", + 50733: "BayerGreenSplit", + 50734: "LinearResponseLimit", + 50735: "CameraSerialNumber", + 50736: "LensInfo", + 50737: "ChromaBlurRadius", + 50738: "AntiAliasStrength", + 50740: "DNGPrivateData", + 50778: "CalibrationIlluminant1", + 50779: "CalibrationIlluminant2", + 50784: "Alias Layer Metadata", +} + +TAGS_V2: dict[int, TagInfo] = {} + + +def _populate(): + for k, v in _tags_v2.items(): + # Populate legacy structure. + TAGS[k] = v[0] + if len(v) == 4: + for sk, sv in v[3].items(): + TAGS[(k, sv)] = sk + + TAGS_V2[k] = TagInfo(k, *v) + + for tags in TAGS_V2_GROUPS.values(): + for k, v in tags.items(): + tags[k] = TagInfo(k, *v) + + +_populate() +## +# Map type numbers to type names -- defined in ImageFileDirectory. + +TYPES: dict[int, str] = {} + +# +# These tags are handled by default in libtiff, without +# adding to the custom dictionary. From tif_dir.c, searching for +# case TIFFTAG in the _TIFFVSetField function: +# Line: item. 
+# 148: case TIFFTAG_SUBFILETYPE: +# 151: case TIFFTAG_IMAGEWIDTH: +# 154: case TIFFTAG_IMAGELENGTH: +# 157: case TIFFTAG_BITSPERSAMPLE: +# 181: case TIFFTAG_COMPRESSION: +# 202: case TIFFTAG_PHOTOMETRIC: +# 205: case TIFFTAG_THRESHHOLDING: +# 208: case TIFFTAG_FILLORDER: +# 214: case TIFFTAG_ORIENTATION: +# 221: case TIFFTAG_SAMPLESPERPIXEL: +# 228: case TIFFTAG_ROWSPERSTRIP: +# 238: case TIFFTAG_MINSAMPLEVALUE: +# 241: case TIFFTAG_MAXSAMPLEVALUE: +# 244: case TIFFTAG_SMINSAMPLEVALUE: +# 247: case TIFFTAG_SMAXSAMPLEVALUE: +# 250: case TIFFTAG_XRESOLUTION: +# 256: case TIFFTAG_YRESOLUTION: +# 262: case TIFFTAG_PLANARCONFIG: +# 268: case TIFFTAG_XPOSITION: +# 271: case TIFFTAG_YPOSITION: +# 274: case TIFFTAG_RESOLUTIONUNIT: +# 280: case TIFFTAG_PAGENUMBER: +# 284: case TIFFTAG_HALFTONEHINTS: +# 288: case TIFFTAG_COLORMAP: +# 294: case TIFFTAG_EXTRASAMPLES: +# 298: case TIFFTAG_MATTEING: +# 305: case TIFFTAG_TILEWIDTH: +# 316: case TIFFTAG_TILELENGTH: +# 327: case TIFFTAG_TILEDEPTH: +# 333: case TIFFTAG_DATATYPE: +# 344: case TIFFTAG_SAMPLEFORMAT: +# 361: case TIFFTAG_IMAGEDEPTH: +# 364: case TIFFTAG_SUBIFD: +# 376: case TIFFTAG_YCBCRPOSITIONING: +# 379: case TIFFTAG_YCBCRSUBSAMPLING: +# 383: case TIFFTAG_TRANSFERFUNCTION: +# 389: case TIFFTAG_REFERENCEBLACKWHITE: +# 393: case TIFFTAG_INKNAMES: + +# Following pseudo-tags are also handled by default in libtiff: +# TIFFTAG_JPEGQUALITY 65537 + +# some of these are not in our TAGS_V2 dict and were included from tiff.h + +# This list also exists in encode.c +LIBTIFF_CORE = { + 255, + 256, + 257, + 258, + 259, + 262, + 263, + 266, + 274, + 277, + 278, + 280, + 281, + 340, + 341, + 282, + 283, + 284, + 286, + 287, + 296, + 297, + 321, + 320, + 338, + 32995, + 322, + 323, + 32998, + 32996, + 339, + 32997, + 330, + 531, + 530, + 301, + 532, + 333, + # as above + 269, # this has been in our tests forever, and works + 65537, +} + +LIBTIFF_CORE.remove(255) # We don't have support for subfiletypes +LIBTIFF_CORE.remove(322) # We don't have support for writing tiled images with libtiff +LIBTIFF_CORE.remove(323) # Tiled images +LIBTIFF_CORE.remove(333) # Ink Names either + +# Note to advanced users: There may be combinations of these +# parameters and values that when added properly, will work and +# produce valid tiff images that may work in your application. +# It is safe to add and remove tags from this set from Pillow's point +# of view so long as you test against libtiff. diff --git a/MLPY/Lib/site-packages/PIL/WalImageFile.py b/MLPY/Lib/site-packages/PIL/WalImageFile.py new file mode 100644 index 0000000000000000000000000000000000000000..25c02b13f218bfaf2b610a27d77ffae5d19e4aea --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/WalImageFile.py @@ -0,0 +1,124 @@ +# +# The Python Imaging Library. +# $Id$ +# +# WAL file handling +# +# History: +# 2003-04-23 fl created +# +# Copyright (c) 2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +""" +This reader is based on the specification available from: +https://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml +and has been tested with a few sample files found using google. + +.. note:: + This format cannot be automatically recognized, so the reader + is not registered for use with :py:func:`PIL.Image.open()`. + To open a WAL file, use the :py:func:`PIL.WalImageFile.open()` function instead. +""" +from __future__ import annotations + +from . 
import Image, ImageFile +from ._binary import i32le as i32 + + +class WalImageFile(ImageFile.ImageFile): + format = "WAL" + format_description = "Quake2 Texture" + + def _open(self) -> None: + self._mode = "P" + + # read header fields + header = self.fp.read(32 + 24 + 32 + 12) + self._size = i32(header, 32), i32(header, 36) + Image._decompression_bomb_check(self.size) + + # load pixel data + offset = i32(header, 40) + self.fp.seek(offset) + + # strings are null-terminated + self.info["name"] = header[:32].split(b"\0", 1)[0] + next_name = header[56 : 56 + 32].split(b"\0", 1)[0] + if next_name: + self.info["next_name"] = next_name + + def load(self): + if not self.im: + self.im = Image.core.new(self.mode, self.size) + self.frombytes(self.fp.read(self.size[0] * self.size[1])) + self.putpalette(quake2palette) + return Image.Image.load(self) + + +def open(filename): + """ + Load texture from a Quake2 WAL texture file. + + By default, a Quake2 standard palette is attached to the texture. + To override the palette, use the :py:func:`PIL.Image.Image.putpalette()` method. + + :param filename: WAL file name, or an opened file handle. + :returns: An image instance. + """ + return WalImageFile(filename) + + +quake2palette = ( + # default palette taken from piffo 0.93 by Hans Häggström + b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e" + b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f" + b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c" + b"\x24\x1e\x13\x22\x1c\x12\x20\x1b\x12\x1f\x1a\x10\x1d\x19\x10\x1b" + b"\x17\x0f\x1a\x16\x0f\x18\x14\x0d\x17\x13\x0d\x16\x12\x0d\x14\x10" + b"\x0b\x13\x0f\x0b\x10\x0d\x0a\x0f\x0b\x0a\x0d\x0b\x07\x0b\x0a\x07" + b"\x23\x23\x26\x22\x22\x25\x22\x20\x23\x21\x1f\x22\x20\x1e\x20\x1f" + b"\x1d\x1e\x1d\x1b\x1c\x1b\x1a\x1a\x1a\x19\x19\x18\x17\x17\x17\x16" + b"\x16\x14\x14\x14\x13\x13\x13\x10\x10\x10\x0f\x0f\x0f\x0d\x0d\x0d" + b"\x2d\x28\x20\x29\x24\x1c\x27\x22\x1a\x25\x1f\x17\x38\x2e\x1e\x31" + b"\x29\x1a\x2c\x25\x17\x26\x20\x14\x3c\x30\x14\x37\x2c\x13\x33\x28" + b"\x12\x2d\x24\x10\x28\x1f\x0f\x22\x1a\x0b\x1b\x14\x0a\x13\x0f\x07" + b"\x31\x1a\x16\x30\x17\x13\x2e\x16\x10\x2c\x14\x0d\x2a\x12\x0b\x27" + b"\x0f\x0a\x25\x0f\x07\x21\x0d\x01\x1e\x0b\x01\x1c\x0b\x01\x1a\x0b" + b"\x01\x18\x0a\x01\x16\x0a\x01\x13\x0a\x01\x10\x07\x01\x0d\x07\x01" + b"\x29\x23\x1e\x27\x21\x1c\x26\x20\x1b\x25\x1f\x1a\x23\x1d\x19\x21" + b"\x1c\x18\x20\x1b\x17\x1e\x19\x16\x1c\x18\x14\x1b\x17\x13\x19\x14" + b"\x10\x17\x13\x0f\x14\x10\x0d\x12\x0f\x0b\x0f\x0b\x0a\x0b\x0a\x07" + b"\x26\x1a\x0f\x23\x19\x0f\x20\x17\x0f\x1c\x16\x0f\x19\x13\x0d\x14" + b"\x10\x0b\x10\x0d\x0a\x0b\x0a\x07\x33\x22\x1f\x35\x29\x26\x37\x2f" + b"\x2d\x39\x35\x34\x37\x39\x3a\x33\x37\x39\x30\x34\x36\x2b\x31\x34" + b"\x27\x2e\x31\x22\x2b\x2f\x1d\x28\x2c\x17\x25\x2a\x0f\x20\x26\x0d" + b"\x1e\x25\x0b\x1c\x22\x0a\x1b\x20\x07\x19\x1e\x07\x17\x1b\x07\x14" + b"\x18\x01\x12\x16\x01\x0f\x12\x01\x0b\x0d\x01\x07\x0a\x01\x01\x01" + b"\x2c\x21\x21\x2a\x1f\x1f\x29\x1d\x1d\x27\x1c\x1c\x26\x1a\x1a\x24" + b"\x18\x18\x22\x17\x17\x21\x16\x16\x1e\x13\x13\x1b\x12\x12\x18\x10" + b"\x10\x16\x0d\x0d\x12\x0b\x0b\x0d\x0a\x0a\x0a\x07\x07\x01\x01\x01" + b"\x2e\x30\x29\x2d\x2e\x27\x2b\x2c\x26\x2a\x2a\x24\x28\x29\x23\x27" + b"\x27\x21\x26\x26\x1f\x24\x24\x1d\x22\x22\x1c\x1f\x1f\x1a\x1c\x1c" + b"\x18\x19\x19\x16\x17\x17\x13\x13\x13\x10\x0f\x0f\x0d\x0b\x0b\x0a" + b"\x30\x1e\x1b\x2d\x1c\x19\x2c\x1a\x17\x2a\x19\x14\x28\x17\x13\x26" + b"\x16\x10\x24\x13\x0f\x21\x12\x0d\x1f\x10\x0b\x1c\x0f\x0a\x19\x0d" + 
b"\x0a\x16\x0b\x07\x12\x0a\x07\x0f\x07\x01\x0a\x01\x01\x01\x01\x01" + b"\x28\x29\x38\x26\x27\x36\x25\x26\x34\x24\x24\x31\x22\x22\x2f\x20" + b"\x21\x2d\x1e\x1f\x2a\x1d\x1d\x27\x1b\x1b\x25\x19\x19\x21\x17\x17" + b"\x1e\x14\x14\x1b\x13\x12\x17\x10\x0f\x13\x0d\x0b\x0f\x0a\x07\x07" + b"\x2f\x32\x29\x2d\x30\x26\x2b\x2e\x24\x29\x2c\x21\x27\x2a\x1e\x25" + b"\x28\x1c\x23\x26\x1a\x21\x25\x18\x1e\x22\x14\x1b\x1f\x10\x19\x1c" + b"\x0d\x17\x1a\x0a\x13\x17\x07\x10\x13\x01\x0d\x0f\x01\x0a\x0b\x01" + b"\x01\x3f\x01\x13\x3c\x0b\x1b\x39\x10\x20\x35\x14\x23\x31\x17\x23" + b"\x2d\x18\x23\x29\x18\x3f\x3f\x3f\x3f\x3f\x39\x3f\x3f\x31\x3f\x3f" + b"\x2a\x3f\x3f\x20\x3f\x3f\x14\x3f\x3c\x12\x3f\x39\x0f\x3f\x35\x0b" + b"\x3f\x32\x07\x3f\x2d\x01\x3d\x2a\x01\x3b\x26\x01\x39\x21\x01\x37" + b"\x1d\x01\x34\x1a\x01\x32\x16\x01\x2f\x12\x01\x2d\x0f\x01\x2a\x0b" + b"\x01\x27\x07\x01\x23\x01\x01\x1d\x01\x01\x17\x01\x01\x10\x01\x01" + b"\x3d\x01\x01\x19\x19\x3f\x3f\x01\x01\x01\x01\x3f\x16\x16\x13\x10" + b"\x10\x0f\x0d\x0d\x0b\x3c\x2e\x2a\x36\x27\x20\x30\x21\x18\x29\x1b" + b"\x10\x3c\x39\x37\x37\x32\x2f\x31\x2c\x28\x2b\x26\x21\x30\x22\x20" +) diff --git a/MLPY/Lib/site-packages/PIL/WebPImagePlugin.py b/MLPY/Lib/site-packages/PIL/WebPImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..05f6fc7973e5faa034e6cd6d7aa8472943a6e76a --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/WebPImagePlugin.py @@ -0,0 +1,363 @@ +from __future__ import annotations + +from io import BytesIO +from typing import IO, Any + +from . import Image, ImageFile + +try: + from . import _webp + + SUPPORTED = True +except ImportError: + SUPPORTED = False + + +_VALID_WEBP_MODES = {"RGBX": True, "RGBA": True, "RGB": True} + +_VALID_WEBP_LEGACY_MODES = {"RGB": True, "RGBA": True} + +_VP8_MODES_BY_IDENTIFIER = { + b"VP8 ": "RGB", + b"VP8X": "RGBA", + b"VP8L": "RGBA", # lossless +} + + +def _accept(prefix: bytes) -> bool | str: + is_riff_file_format = prefix[:4] == b"RIFF" + is_webp_file = prefix[8:12] == b"WEBP" + is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER + + if is_riff_file_format and is_webp_file and is_valid_vp8_mode: + if not SUPPORTED: + return ( + "image file could not be identified because WEBP support not installed" + ) + return True + return False + + +class WebPImageFile(ImageFile.ImageFile): + format = "WEBP" + format_description = "WebP image" + __loaded = 0 + __logical_frame = 0 + + def _open(self) -> None: + if not _webp.HAVE_WEBPANIM: + # Legacy mode + data, width, height, self._mode, icc_profile, exif = _webp.WebPDecode( + self.fp.read() + ) + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + self._size = width, height + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.mode)] + self.n_frames = 1 + self.is_animated = False + return + + # Use the newer AnimDecoder API to parse the (possibly) animated file, + # and access muxed chunks like ICC/EXIF/XMP. 
+ self._decoder = _webp.WebPAnimDecoder(self.fp.read()) + + # Get info from decoder + width, height, loop_count, bgcolor, frame_count, mode = self._decoder.get_info() + self._size = width, height + self.info["loop"] = loop_count + bg_a, bg_r, bg_g, bg_b = ( + (bgcolor >> 24) & 0xFF, + (bgcolor >> 16) & 0xFF, + (bgcolor >> 8) & 0xFF, + bgcolor & 0xFF, + ) + self.info["background"] = (bg_r, bg_g, bg_b, bg_a) + self.n_frames = frame_count + self.is_animated = self.n_frames > 1 + self._mode = "RGB" if mode == "RGBX" else mode + self.rawmode = mode + self.tile = [] + + # Attempt to read ICC / EXIF / XMP chunks from file + icc_profile = self._decoder.get_chunk("ICCP") + exif = self._decoder.get_chunk("EXIF") + xmp = self._decoder.get_chunk("XMP ") + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + if xmp: + self.info["xmp"] = xmp + + # Initialize seek state + self._reset(reset=False) + + def _getexif(self) -> dict[str, Any] | None: + if "exif" not in self.info: + return None + return self.getexif()._get_merged_dict() + + def seek(self, frame: int) -> None: + if not self._seek_check(frame): + return + + # Set logical frame to requested position + self.__logical_frame = frame + + def _reset(self, reset: bool = True) -> None: + if reset: + self._decoder.reset() + self.__physical_frame = 0 + self.__loaded = -1 + self.__timestamp = 0 + + def _get_next(self): + # Get next frame + ret = self._decoder.get_next() + self.__physical_frame += 1 + + # Check if an error occurred + if ret is None: + self._reset() # Reset just to be safe + self.seek(0) + msg = "failed to decode next frame in WebP file" + raise EOFError(msg) + + # Compute duration + data, timestamp = ret + duration = timestamp - self.__timestamp + self.__timestamp = timestamp + + # libwebp gives frame end, adjust to start of frame + timestamp -= duration + return data, timestamp, duration + + def _seek(self, frame: int) -> None: + if self.__physical_frame == frame: + return # Nothing to do + if frame < self.__physical_frame: + self._reset() # Rewind to beginning + while self.__physical_frame < frame: + self._get_next() # Advance to the requested frame + + def load(self): + if _webp.HAVE_WEBPANIM: + if self.__loaded != self.__logical_frame: + self._seek(self.__logical_frame) + + # We need to load the image data for this frame + data, timestamp, duration = self._get_next() + self.info["timestamp"] = timestamp + self.info["duration"] = duration + self.__loaded = self.__logical_frame + + # Set tile + if self.fp and self._exclusive_fp: + self.fp.close() + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.rawmode)] + + return super().load() + + def load_seek(self, pos: int) -> None: + pass + + def tell(self) -> int: + if not _webp.HAVE_WEBPANIM: + return super().tell() + + return self.__logical_frame + + +def _save_all(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + encoderinfo = im.encoderinfo.copy() + append_images = list(encoderinfo.get("append_images", [])) + + # If total frame count is 1, then save using the legacy API, which + # will preserve non-alpha modes + total = 0 + for ims in [im] + append_images: + total += getattr(ims, "n_frames", 1) + if total == 1: + _save(im, fp, filename) + return + + background: int | tuple[int, ...] 
= (0, 0, 0, 0) + if "background" in encoderinfo: + background = encoderinfo["background"] + elif "background" in im.info: + background = im.info["background"] + if isinstance(background, int): + # GifImagePlugin stores a global color table index in + # info["background"]. So it must be converted to an RGBA value + palette = im.getpalette() + if palette: + r, g, b = palette[background * 3 : (background + 1) * 3] + background = (r, g, b, 255) + else: + background = (background, background, background, 255) + + duration = im.encoderinfo.get("duration", im.info.get("duration", 0)) + loop = im.encoderinfo.get("loop", 0) + minimize_size = im.encoderinfo.get("minimize_size", False) + kmin = im.encoderinfo.get("kmin", None) + kmax = im.encoderinfo.get("kmax", None) + allow_mixed = im.encoderinfo.get("allow_mixed", False) + verbose = False + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + alpha_quality = im.encoderinfo.get("alpha_quality", 100) + method = im.encoderinfo.get("method", 0) + icc_profile = im.encoderinfo.get("icc_profile") or "" + exif = im.encoderinfo.get("exif", "") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() + xmp = im.encoderinfo.get("xmp", "") + if allow_mixed: + lossless = False + + # Sensible keyframe defaults are from gif2webp.c script + if kmin is None: + kmin = 9 if lossless else 3 + if kmax is None: + kmax = 17 if lossless else 5 + + # Validate background color + if ( + not isinstance(background, (list, tuple)) + or len(background) != 4 + or not all(0 <= v < 256 for v in background) + ): + msg = f"Background color is not an RGBA tuple clamped to (0-255): {background}" + raise OSError(msg) + + # Convert to packed uint + bg_r, bg_g, bg_b, bg_a = background + background = (bg_a << 24) | (bg_r << 16) | (bg_g << 8) | (bg_b << 0) + + # Setup the WebP animation encoder + enc = _webp.WebPAnimEncoder( + im.size[0], + im.size[1], + background, + loop, + minimize_size, + kmin, + kmax, + allow_mixed, + verbose, + ) + + # Add each frame + frame_idx = 0 + timestamp = 0 + cur_idx = im.tell() + try: + for ims in [im] + append_images: + # Get # of frames in this image + nfr = getattr(ims, "n_frames", 1) + + for idx in range(nfr): + ims.seek(idx) + ims.load() + + # Make sure image mode is supported + frame = ims + rawmode = ims.mode + if ims.mode not in _VALID_WEBP_MODES: + alpha = ( + "A" in ims.mode + or "a" in ims.mode + or (ims.mode == "P" and "A" in ims.im.getpalettemode()) + ) + rawmode = "RGBA" if alpha else "RGB" + frame = ims.convert(rawmode) + + if rawmode == "RGB": + # For faster conversion, use RGBX + rawmode = "RGBX" + + # Append the frame to the animation encoder + enc.add( + frame.tobytes("raw", rawmode), + round(timestamp), + frame.size[0], + frame.size[1], + rawmode, + lossless, + quality, + alpha_quality, + method, + ) + + # Update timestamp and frame index + if isinstance(duration, (list, tuple)): + timestamp += duration[frame_idx] + else: + timestamp += duration + frame_idx += 1 + + finally: + im.seek(cur_idx) + + # Force encoder to flush frames + enc.add(None, round(timestamp), 0, 0, "", lossless, quality, alpha_quality, 0) + + # Get the final output from the encoder + data = enc.assemble(icc_profile, exif, xmp) + if data is None: + msg = "cannot write file as WebP (encoder returned None)" + raise OSError(msg) + + fp.write(data) + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + 
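+    # Note (editorial): encoderinfo is populated from the keyword arguments
+    # passed to Image.save(), e.g. im.save("out.webp", quality=90, lossless=True).
+    # "quality" and "alpha_quality" below are the usual libwebp 0-100 factors.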
alpha_quality = im.encoderinfo.get("alpha_quality", 100) + icc_profile = im.encoderinfo.get("icc_profile") or "" + exif = im.encoderinfo.get("exif", b"") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() + if exif.startswith(b"Exif\x00\x00"): + exif = exif[6:] + xmp = im.encoderinfo.get("xmp", "") + method = im.encoderinfo.get("method", 4) + exact = 1 if im.encoderinfo.get("exact") else 0 + + if im.mode not in _VALID_WEBP_LEGACY_MODES: + im = im.convert("RGBA" if im.has_transparency_data else "RGB") + + data = _webp.WebPEncode( + im.tobytes(), + im.size[0], + im.size[1], + lossless, + float(quality), + float(alpha_quality), + im.mode, + icc_profile, + method, + exact, + exif, + xmp, + ) + if data is None: + msg = "cannot write file as WebP (encoder returned None)" + raise OSError(msg) + + fp.write(data) + + +Image.register_open(WebPImageFile.format, WebPImageFile, _accept) +if SUPPORTED: + Image.register_save(WebPImageFile.format, _save) + if _webp.HAVE_WEBPANIM: + Image.register_save_all(WebPImageFile.format, _save_all) + Image.register_extension(WebPImageFile.format, ".webp") + Image.register_mime(WebPImageFile.format, "image/webp") diff --git a/MLPY/Lib/site-packages/PIL/WmfImagePlugin.py b/MLPY/Lib/site-packages/PIL/WmfImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..191935228dfe35be8d0123db377dc92919f726ab --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/WmfImagePlugin.py @@ -0,0 +1,181 @@ +# +# The Python Imaging Library +# $Id$ +# +# WMF stub codec +# +# history: +# 1996-12-14 fl Created +# 2004-02-22 fl Turned into a stub driver +# 2004-02-23 fl Added EMF support +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# +# WMF/EMF reference documentation: +# https://winprotocoldoc.blob.core.windows.net/productionwindowsarchives/MS-WMF/[MS-WMF].pdf +# http://wvware.sourceforge.net/caolan/index.html +# http://wvware.sourceforge.net/caolan/ora-wmf.html +from __future__ import annotations + +from typing import IO + +from . import Image, ImageFile +from ._binary import i16le as word +from ._binary import si16le as short +from ._binary import si32le as _long + +_handler = None + + +def register_handler(handler: ImageFile.StubHandler | None) -> None: + """ + Install application-specific WMF image handler. + + :param handler: Handler object. + """ + global _handler + _handler = handler + + +if hasattr(Image.core, "drawwmf"): + # install default handler (windows only) + + class WmfHandler(ImageFile.StubHandler): + def open(self, im: ImageFile.StubImageFile) -> None: + im._mode = "RGB" + self.bbox = im.info["wmf_bbox"] + + def load(self, im: ImageFile.StubImageFile) -> Image.Image: + im.fp.seek(0) # rewind + return Image.frombytes( + "RGB", + im.size, + Image.core.drawwmf(im.fp.read(), im.size, self.bbox), + "raw", + "BGR", + (im.size[0] * 3 + 3) & -4, + -1, + ) + + register_handler(WmfHandler()) + +# +# -------------------------------------------------------------------- +# Read WMF file + + +def _accept(prefix: bytes) -> bool: + return ( + prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or prefix[:4] == b"\x01\x00\x00\x00" + ) + + +## +# Image plugin for Windows metafiles. 
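Editor's note, not part of the vendored file: outside Windows there is no built-in Image.core.drawwmf handler, so WMF/EMF files open only as stubs until an application registers its own renderer via register_handler() above. A minimal sketch of that hook follows; BlankWmfHandler and the file name are hypothetical, and a real handler would rasterize the metafile bytes instead of returning a blank canvas.

from PIL import Image, ImageFile, WmfImagePlugin

class BlankWmfHandler(ImageFile.StubHandler):
    """Illustrative stub: every metafile loads as a blank RGB canvas."""

    def open(self, im):
        # Called from WmfStubImageFile._open(); decide the target mode here.
        im._mode = "RGB"

    def load(self, im):
        # A real implementation would rasterize im.fp.read() here.
        return Image.new("RGB", im.size, "white")

WmfImagePlugin.register_handler(BlankWmfHandler())

with Image.open("drawing.wmf") as im:   # "drawing.wmf" is a hypothetical file
    im.load()                           # returns the placeholder rendering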
+ + +class WmfStubImageFile(ImageFile.StubImageFile): + format = "WMF" + format_description = "Windows Metafile" + + def _open(self) -> None: + self._inch = None + + # check placable header + s = self.fp.read(80) + + if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00": + # placeable windows metafile + + # get units per inch + self._inch = word(s, 14) + + # get bounding box + x0 = short(s, 6) + y0 = short(s, 8) + x1 = short(s, 10) + y1 = short(s, 12) + + # normalize size to 72 dots per inch + self.info["dpi"] = 72 + size = ( + (x1 - x0) * self.info["dpi"] // self._inch, + (y1 - y0) * self.info["dpi"] // self._inch, + ) + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + # sanity check (standard metafile header) + if s[22:26] != b"\x01\x00\t\x00": + msg = "Unsupported WMF file format" + raise SyntaxError(msg) + + elif s[:4] == b"\x01\x00\x00\x00" and s[40:44] == b" EMF": + # enhanced metafile + + # get bounding box + x0 = _long(s, 8) + y0 = _long(s, 12) + x1 = _long(s, 16) + y1 = _long(s, 20) + + # get frame (in 0.01 millimeter units) + frame = _long(s, 24), _long(s, 28), _long(s, 32), _long(s, 36) + + size = x1 - x0, y1 - y0 + + # calculate dots per inch from bbox and frame + xdpi = 2540.0 * (x1 - y0) / (frame[2] - frame[0]) + ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1]) + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + if xdpi == ydpi: + self.info["dpi"] = xdpi + else: + self.info["dpi"] = xdpi, ydpi + + else: + msg = "Unsupported file format" + raise SyntaxError(msg) + + self._mode = "RGB" + self._size = size + + loader = self._load() + if loader: + loader.open(self) + + def _load(self) -> ImageFile.StubHandler | None: + return _handler + + def load(self, dpi=None): + if dpi is not None and self._inch is not None: + self.info["dpi"] = dpi + x0, y0, x1, y1 = self.info["wmf_bbox"] + self._size = ( + (x1 - x0) * self.info["dpi"] // self._inch, + (y1 - y0) * self.info["dpi"] // self._inch, + ) + return super().load() + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if _handler is None or not hasattr(_handler, "save"): + msg = "WMF save handler not installed" + raise OSError(msg) + _handler.save(im, fp, filename) + + +# +# -------------------------------------------------------------------- +# Registry stuff + + +Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept) +Image.register_save(WmfStubImageFile.format, _save) + +Image.register_extensions(WmfStubImageFile.format, [".wmf", ".emf"]) diff --git a/MLPY/Lib/site-packages/PIL/XVThumbImagePlugin.py b/MLPY/Lib/site-packages/PIL/XVThumbImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..eb86cd67125af3c33af54af95233aa9a47ba77d1 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/XVThumbImagePlugin.py @@ -0,0 +1,81 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XV Thumbnail file handler by Charles E. "Gene" Cash +# (gcash@magicnet.net) +# +# see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV, +# available from ftp://ftp.cis.upenn.edu/pub/xv/ +# +# history: +# 98-08-15 cec created (b/w only) +# 98-12-09 cec added color palette +# 98-12-28 fl added to PIL (with only a few very minor modifications) +# +# To do: +# FIXME: make save work (this requires quantization support) +# +from __future__ import annotations + +from . 
import Image, ImageFile, ImagePalette + from ._binary import o8 + + _MAGIC = b"P7 332" + + # standard color palette for thumbnails (RGB332) + PALETTE = b"" + for r in range(8): + for g in range(8): + for b in range(4): + PALETTE = PALETTE + ( + o8((r * 255) // 7) + o8((g * 255) // 7) + o8((b * 255) // 3) + ) + + + def _accept(prefix: bytes) -> bool: + return prefix[:6] == _MAGIC + + + ## + # Image plugin for XV thumbnail images. + + + class XVThumbImageFile(ImageFile.ImageFile): + format = "XVThumb" + format_description = "XV thumbnail image" + + def _open(self) -> None: + # check magic + assert self.fp is not None + + if not _accept(self.fp.read(6)): + msg = "not an XV thumbnail file" + raise SyntaxError(msg) + + # Skip to beginning of next line + self.fp.readline() + + # skip info comments + while True: + s = self.fp.readline() + if not s: + msg = "Unexpected EOF reading XV thumbnail file" + raise SyntaxError(msg) + if s[0] != 35: # ie. when not a comment: '#' + break + + # parse header line (already read) + s = s.strip().split() + + self._mode = "P" + self._size = int(s[0]), int(s[1]) + + self.palette = ImagePalette.raw("RGB", PALETTE) + + self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1))] + + +# -------------------------------------------------------------------- + +Image.register_open(XVThumbImageFile.format, XVThumbImageFile, _accept) diff --git a/MLPY/Lib/site-packages/PIL/XbmImagePlugin.py b/MLPY/Lib/site-packages/PIL/XbmImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..25b8bd92afe4ca03f9828cd8c906cab08b931528 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/XbmImagePlugin.py @@ -0,0 +1,98 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XBM File handling +# +# History: +# 1995-09-08 fl Created +# 1996-11-01 fl Added save support +# 1997-07-07 fl Made header parser more tolerant +# 1997-07-22 fl Fixed yet another parser bug +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) +# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog) +# 2004-02-24 fl Allow some whitespace before first #define +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import re +from typing import IO + +from . import Image, ImageFile + +# XBM header +xbm_head = re.compile( + rb"\s*#define[ \t]+.*_width[ \t]+(?P<width>[0-9]+)[\r\n]+" + b"#define[ \t]+.*_height[ \t]+(?P<height>[0-9]+)[\r\n]+" + b"(?P<hotspot>" + b"#define[ \t]+[^_]*_x_hot[ \t]+(?P<xhot>[0-9]+)[\r\n]+" + b"#define[ \t]+[^_]*_y_hot[ \t]+(?P<yhot>[0-9]+)[\r\n]+" + b")?" + rb"[\000-\377]*_bits\[]" +) + + +def _accept(prefix: bytes) -> bool: + return prefix.lstrip()[:7] == b"#define" + + +## +# Image plugin for X11 bitmaps.
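Editor's note, not part of the vendored file: a small sketch of the header shape the internal xbm_head pattern above expects; the byte string mirrors what _save() below writes out.

from PIL.XbmImagePlugin import xbm_head

header = (
    b"#define im_width 8\n"
    b"#define im_height 8\n"
    b"static char im_bits[] = {\n"
)
m = xbm_head.match(header)
assert m is not None
print(m.group("width"), m.group("height"))   # b'8' b'8'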
+ + +class XbmImageFile(ImageFile.ImageFile): + format = "XBM" + format_description = "X11 Bitmap" + + def _open(self) -> None: + assert self.fp is not None + + m = xbm_head.match(self.fp.read(512)) + + if not m: + msg = "not a XBM file" + raise SyntaxError(msg) + + xsize = int(m.group("width")) + ysize = int(m.group("height")) + + if m.group("hotspot"): + self.info["hotspot"] = (int(m.group("xhot")), int(m.group("yhot"))) + + self._mode = "1" + self._size = xsize, ysize + + self.tile = [("xbm", (0, 0) + self.size, m.end(), None)] + + +def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None: + if im.mode != "1": + msg = f"cannot write mode {im.mode} as XBM" + raise OSError(msg) + + fp.write(f"#define im_width {im.size[0]}\n".encode("ascii")) + fp.write(f"#define im_height {im.size[1]}\n".encode("ascii")) + + hotspot = im.encoderinfo.get("hotspot") + if hotspot: + fp.write(f"#define im_x_hot {hotspot[0]}\n".encode("ascii")) + fp.write(f"#define im_y_hot {hotspot[1]}\n".encode("ascii")) + + fp.write(b"static char im_bits[] = {\n") + + ImageFile._save(im, fp, [("xbm", (0, 0) + im.size, 0, None)]) + + fp.write(b"};\n") + + +Image.register_open(XbmImageFile.format, XbmImageFile, _accept) +Image.register_save(XbmImageFile.format, _save) + +Image.register_extension(XbmImageFile.format, ".xbm") + +Image.register_mime(XbmImageFile.format, "image/xbm") diff --git a/MLPY/Lib/site-packages/PIL/XpmImagePlugin.py b/MLPY/Lib/site-packages/PIL/XpmImagePlugin.py new file mode 100644 index 0000000000000000000000000000000000000000..b2e4d9065febf4c149ab0be9334741c130dcface --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/XpmImagePlugin.py @@ -0,0 +1,125 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XPM File handling +# +# History: +# 1996-12-29 fl Created +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# +from __future__ import annotations + +import re + +from . import Image, ImageFile, ImagePalette +from ._binary import o8 + +# XPM header +xpm_head = re.compile(b'"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)') + + +def _accept(prefix: bytes) -> bool: + return prefix[:9] == b"/* XPM */" + + +## +# Image plugin for X11 pixel maps. 
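Editor's note, not part of the vendored file: for orientation, the values line captured by the internal xpm_head pattern above looks like the sketch below (width, height, palette size, characters per pixel); as the parser in _open() shows, only files with at most 256 colours and one character per pixel are accepted.

from PIL.XpmImagePlugin import xpm_head

values_line = b'"16 16 2 1",'
m = xpm_head.match(values_line)
assert m is not None
width, height, ncolors, chars_per_pixel = m.groups()
print(width, height, ncolors, chars_per_pixel)   # b'16' b'16' b'2' b'1'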
+ + +class XpmImageFile(ImageFile.ImageFile): + format = "XPM" + format_description = "X11 Pixel Map" + + def _open(self) -> None: + if not _accept(self.fp.read(9)): + msg = "not an XPM file" + raise SyntaxError(msg) + + # skip forward to next string + while True: + s = self.fp.readline() + if not s: + msg = "broken XPM file" + raise SyntaxError(msg) + m = xpm_head.match(s) + if m: + break + + self._size = int(m.group(1)), int(m.group(2)) + + pal = int(m.group(3)) + bpp = int(m.group(4)) + + if pal > 256 or bpp != 1: + msg = "cannot read this XPM file" + raise ValueError(msg) + + # + # load palette description + + palette = [b"\0\0\0"] * 256 + + for _ in range(pal): + s = self.fp.readline() + if s[-2:] == b"\r\n": + s = s[:-2] + elif s[-1:] in b"\r\n": + s = s[:-1] + + c = s[1] + s = s[2:-2].split() + + for i in range(0, len(s), 2): + if s[i] == b"c": + # process colour key + rgb = s[i + 1] + if rgb == b"None": + self.info["transparency"] = c + elif rgb[:1] == b"#": + # FIXME: handle colour names (see ImagePalette.py) + rgb = int(rgb[1:], 16) + palette[c] = ( + o8((rgb >> 16) & 255) + o8((rgb >> 8) & 255) + o8(rgb & 255) + ) + else: + # unknown colour + msg = "cannot read this XPM file" + raise ValueError(msg) + break + + else: + # missing colour key + msg = "cannot read this XPM file" + raise ValueError(msg) + + self._mode = "P" + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), ("P", 0, 1))] + + def load_read(self, read_bytes: int) -> bytes: + # + # load all image data in one chunk + + xsize, ysize = self.size + + s = [self.fp.readline()[1 : xsize + 1].ljust(xsize) for i in range(ysize)] + + return b"".join(s) + + +# +# Registry + + +Image.register_open(XpmImageFile.format, XpmImageFile, _accept) + +Image.register_extension(XpmImageFile.format, ".xpm") + +Image.register_mime(XpmImageFile.format, "image/xpm") diff --git a/MLPY/Lib/site-packages/PIL/_binary.py b/MLPY/Lib/site-packages/PIL/_binary.py new file mode 100644 index 0000000000000000000000000000000000000000..ad8863074574db00ae242ce047433f048fc4e1f0 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/_binary.py @@ -0,0 +1,112 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Binary input/output support routines. +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# Copyright (c) 2012 by Brian Crowell +# +# See the README file for information on usage and redistribution. +# + + +"""Binary input/output support routines.""" +from __future__ import annotations + +from struct import pack, unpack_from + + +def i8(c: bytes) -> int: + return c[0] + + +def o8(i: int) -> bytes: + return bytes((i & 255,)) + + +# Input, le = little endian, be = big endian +def i16le(c: bytes, o: int = 0) -> int: + """ + Converts a 2-bytes (16 bits) string to an unsigned integer. + + :param c: string containing bytes to convert + :param o: offset of bytes to convert in string + """ + return unpack_from("<H", c, o)[0] + + +def si16le(c: bytes, o: int = 0) -> int: + """ + Converts a 2-bytes (16 bits) string to a signed integer. + + :param c: string containing bytes to convert + :param o: offset of bytes to convert in string + """ + return unpack_from("<h", c, o)[0] + + +def si16be(c: bytes, o: int = 0) -> int: + """ + Converts a 2-bytes (16 bits) string to a signed integer, big endian. + + :param c: string containing bytes to convert + :param o: offset of bytes to convert in string + """ + return unpack_from(">h", c, o)[0] + + +def i32le(c: bytes, o: int = 0) -> int: + """ + Converts a 4-bytes (32 bits) string to an unsigned integer.
+ + :param c: string containing bytes to convert + :param o: offset of bytes to convert in string + """ + return unpack_from("<I", c, o)[0] + + +def si32le(c: bytes, o: int = 0) -> int: + """ + Converts a 4-bytes (32 bits) string to a signed integer. + + :param c: string containing bytes to convert + :param o: offset of bytes to convert in string + """ + return unpack_from("<i", c, o)[0] + + +def si32be(c: bytes, o: int = 0) -> int: + """ + Converts a 4-bytes (32 bits) string to a signed integer, big endian. + + :param c: string containing bytes to convert + :param o: offset of bytes to convert in string + """ + return unpack_from(">i", c, o)[0] + + +def i16be(c: bytes, o: int = 0) -> int: + return unpack_from(">H", c, o)[0] + + +def i32be(c: bytes, o: int = 0) -> int: + return unpack_from(">I", c, o)[0] + + +# Output, le = little endian, be = big endian +def o16le(i: int) -> bytes: + return pack("<H", i) + + +def o32le(i: int) -> bytes: + return pack("<I", i) + + +def o16be(i: int) -> bytes: + return pack(">H", i) + + +def o32be(i: int) -> bytes: + return pack(">I", i) diff --git a/MLPY/Lib/site-packages/PIL/_deprecate.py b/MLPY/Lib/site-packages/PIL/_deprecate.py new file mode 100644 index 0000000000000000000000000000000000000000..9f7addfd47eff46522c1410660c253ffbfd239ec --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/_deprecate.py @@ -0,0 +1,71 @@ +from __future__ import annotations + +import warnings + +from . import __version__ + + +def deprecate( + deprecated: str, + when: int | None, + replacement: str | None = None, + *, + action: str | None = None, + plural: bool = False, +) -> None: + """ + Deprecations helper. + + :param deprecated: Name of thing to be deprecated. + :param when: Pillow major version to be removed in. + :param replacement: Name of replacement. + :param action: Instead of "replacement", give a custom call to action + e.g. "Upgrade to new thing". + :param plural: if the deprecated thing is plural, needing "are" instead of "is". + + Usually of the form: + + "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd). + Use [replacement] instead." + + You can leave out the replacement sentence: + + "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd)" + + Or with another call to action: + + "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd). + [action]." + """ + + is_ = "are" if plural else "is" + + if when is None: + removed = "a future version" + elif when <= int(__version__.split(".")[0]): + msg = f"{deprecated} {is_} deprecated and should be removed." + raise RuntimeError(msg) + elif when == 11: + removed = "Pillow 11 (2024-10-15)" + elif when == 12: + removed = "Pillow 12 (2025-10-15)" + else: + msg = f"Unknown removal version: {when}. Update {__name__}?" + raise ValueError(msg) + + if replacement and action: + msg = "Use only one of 'replacement' and 'action'" + raise ValueError(msg) + + if replacement: + action = f". Use {replacement} instead." + elif action: + action = f". {action.rstrip('.')}."
+ else: + action = "" + + warnings.warn( + f"{deprecated} {is_} deprecated and will be removed in {removed}{action}", + DeprecationWarning, + stacklevel=3, + ) diff --git a/MLPY/Lib/site-packages/PIL/_imaging.cp39-win_amd64.pyd b/MLPY/Lib/site-packages/PIL/_imaging.cp39-win_amd64.pyd new file mode 100644 index 0000000000000000000000000000000000000000..4f608d9bec330a64883514418d9b97dbed2a0821 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/_imaging.cp39-win_amd64.pyd @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d3ff732216cccef72316ab4e71f9d4aca2aa3a4a14b157a87caaa80369aec5d1 +size 2341888 diff --git a/MLPY/Lib/site-packages/PIL/_imaging.pyi b/MLPY/Lib/site-packages/PIL/_imaging.pyi new file mode 100644 index 0000000000000000000000000000000000000000..385d4056e2ad287284f847ccaf8f3dd35b6d466d --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/_imaging.pyi @@ -0,0 +1,30 @@ +from typing import Any + +class ImagingCore: + def __getattr__(self, name: str) -> Any: ... + +class ImagingFont: + def __getattr__(self, name: str) -> Any: ... + +class ImagingDraw: + def __getattr__(self, name: str) -> Any: ... + +class PixelAccess: + def __getitem__(self, xy: tuple[int, int]) -> float | tuple[int, ...]: ... + def __setitem__( + self, xy: tuple[int, int], color: float | tuple[int, ...] + ) -> None: ... + +class ImagingDecoder: + def __getattr__(self, name: str) -> Any: ... + +class ImagingEncoder: + def __getattr__(self, name: str) -> Any: ... + +class _Outline: + def close(self) -> None: ... + def __getattr__(self, name: str) -> Any: ... + +def font(image: ImagingCore, glyphdata: bytes) -> ImagingFont: ... +def outline() -> _Outline: ... +def __getattr__(name: str) -> Any: ... diff --git a/MLPY/Lib/site-packages/PIL/_imagingcms.cp39-win_amd64.pyd b/MLPY/Lib/site-packages/PIL/_imagingcms.cp39-win_amd64.pyd new file mode 100644 index 0000000000000000000000000000000000000000..ec6cbc60f753550d9b6323e0ae62ee5809286fa4 Binary files /dev/null and b/MLPY/Lib/site-packages/PIL/_imagingcms.cp39-win_amd64.pyd differ diff --git a/MLPY/Lib/site-packages/PIL/_imagingcms.pyi b/MLPY/Lib/site-packages/PIL/_imagingcms.pyi new file mode 100644 index 0000000000000000000000000000000000000000..80d91614c356e79c440ec3fa2a9096370d2edadc --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/_imagingcms.pyi @@ -0,0 +1,141 @@ +import datetime +import sys +from typing import Literal, SupportsFloat, TypedDict + +littlecms_version: str | None + +_Tuple3f = tuple[float, float, float] +_Tuple2x3f = tuple[_Tuple3f, _Tuple3f] +_Tuple3x3f = tuple[_Tuple3f, _Tuple3f, _Tuple3f] + +class _IccMeasurementCondition(TypedDict): + observer: int + backing: _Tuple3f + geo: str + flare: float + illuminant_type: str + +class _IccViewingCondition(TypedDict): + illuminant: _Tuple3f + surround: _Tuple3f + illuminant_type: str + +class CmsProfile: + @property + def rendering_intent(self) -> int: ... + @property + def creation_date(self) -> datetime.datetime | None: ... + @property + def copyright(self) -> str | None: ... + @property + def target(self) -> str | None: ... + @property + def manufacturer(self) -> str | None: ... + @property + def model(self) -> str | None: ... + @property + def profile_description(self) -> str | None: ... + @property + def screening_description(self) -> str | None: ... + @property + def viewing_condition(self) -> str | None: ... + @property + def version(self) -> float: ... + @property + def icc_version(self) -> int: ... + @property + def attributes(self) -> int: ... 
+ @property + def header_flags(self) -> int: ... + @property + def header_manufacturer(self) -> str: ... + @property + def header_model(self) -> str: ... + @property + def device_class(self) -> str: ... + @property + def connection_space(self) -> str: ... + @property + def xcolor_space(self) -> str: ... + @property + def profile_id(self) -> bytes: ... + @property + def is_matrix_shaper(self) -> bool: ... + @property + def technology(self) -> str | None: ... + @property + def colorimetric_intent(self) -> str | None: ... + @property + def perceptual_rendering_intent_gamut(self) -> str | None: ... + @property + def saturation_rendering_intent_gamut(self) -> str | None: ... + @property + def red_colorant(self) -> _Tuple2x3f | None: ... + @property + def green_colorant(self) -> _Tuple2x3f | None: ... + @property + def blue_colorant(self) -> _Tuple2x3f | None: ... + @property + def red_primary(self) -> _Tuple2x3f | None: ... + @property + def green_primary(self) -> _Tuple2x3f | None: ... + @property + def blue_primary(self) -> _Tuple2x3f | None: ... + @property + def media_white_point_temperature(self) -> float | None: ... + @property + def media_white_point(self) -> _Tuple2x3f | None: ... + @property + def media_black_point(self) -> _Tuple2x3f | None: ... + @property + def luminance(self) -> _Tuple2x3f | None: ... + @property + def chromatic_adaptation(self) -> tuple[_Tuple3x3f, _Tuple3x3f] | None: ... + @property + def chromaticity(self) -> _Tuple3x3f | None: ... + @property + def colorant_table(self) -> list[str] | None: ... + @property + def colorant_table_out(self) -> list[str] | None: ... + @property + def intent_supported(self) -> dict[int, tuple[bool, bool, bool]] | None: ... + @property + def clut(self) -> dict[int, tuple[bool, bool, bool]] | None: ... + @property + def icc_measurement_condition(self) -> _IccMeasurementCondition | None: ... + @property + def icc_viewing_condition(self) -> _IccViewingCondition | None: ... + def is_intent_supported(self, intent: int, direction: int, /) -> int: ... + +class CmsTransform: + def apply(self, id_in: int, id_out: int) -> int: ... + +def profile_open(profile: str, /) -> CmsProfile: ... +def profile_frombytes(profile: bytes, /) -> CmsProfile: ... +def profile_tobytes(profile: CmsProfile, /) -> bytes: ... +def buildTransform( + input_profile: CmsProfile, + output_profile: CmsProfile, + in_mode: str, + out_mode: str, + rendering_intent: int = 0, + cms_flags: int = 0, + /, +) -> CmsTransform: ... +def buildProofTransform( + input_profile: CmsProfile, + output_profile: CmsProfile, + proof_profile: CmsProfile, + in_mode: str, + out_mode: str, + rendering_intent: int = 0, + proof_intent: int = 0, + cms_flags: int = 0, + /, +) -> CmsTransform: ... +def createProfile( + color_space: Literal["LAB", "XYZ", "sRGB"], color_temp: SupportsFloat = 0.0, / +) -> CmsProfile: ... + +if sys.platform == "win32": + def get_display_profile_win32(handle: int = 0, is_dc: int = 0, /) -> str | None: ... 
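Editor's note, not part of the vendored file: the stub above describes the private _imagingcms extension that the public PIL.ImageCms module wraps; applications normally go through ImageCms rather than calling it directly. A minimal sketch under the assumption that an ICC profile exists at the hypothetical path "sRGB.icc":

from PIL import _imagingcms

profile = _imagingcms.profile_open("sRGB.icc")      # hypothetical path
print(profile.profile_description, profile.xcolor_space, profile.version)

# Build a colour transform between two profiles (signatures as in the stub above).
lab = _imagingcms.createProfile("LAB", 6500.0)
transform = _imagingcms.buildTransform(profile, lab, "RGB", "LAB")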
diff --git a/MLPY/Lib/site-packages/PIL/_imagingft.cp39-win_amd64.pyd b/MLPY/Lib/site-packages/PIL/_imagingft.cp39-win_amd64.pyd new file mode 100644 index 0000000000000000000000000000000000000000..91b18568c8110a3cc8ebff678aff7ba7d1f1da65 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/_imagingft.cp39-win_amd64.pyd @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c2d3853d4299da383a2ccd842d7d4faa82746a924d9d5272e8d51e8880bcc509 +size 1819136 diff --git a/MLPY/Lib/site-packages/PIL/_imagingft.pyi b/MLPY/Lib/site-packages/PIL/_imagingft.pyi new file mode 100644 index 0000000000000000000000000000000000000000..3c06a95169d3db6a761c40d0fa272b4aa7bf44a7 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/_imagingft.pyi @@ -0,0 +1,69 @@ +from typing import Any, TypedDict + +from . import _imaging + +class _Axis(TypedDict): + minimum: int | None + default: int | None + maximum: int | None + name: bytes | None + +class Font: + @property + def family(self) -> str | None: ... + @property + def style(self) -> str | None: ... + @property + def ascent(self) -> int: ... + @property + def descent(self) -> int: ... + @property + def height(self) -> int: ... + @property + def x_ppem(self) -> int: ... + @property + def y_ppem(self) -> int: ... + @property + def glyphs(self) -> int: ... + def render( + self, + string: str | bytes, + fill, + mode=..., + dir=..., + features=..., + lang=..., + stroke_width=..., + anchor=..., + foreground_ink_long=..., + x_start=..., + y_start=..., + /, + ) -> tuple[_imaging.ImagingCore, tuple[int, int]]: ... + def getsize( + self, + string: str | bytes | bytearray, + mode=..., + dir=..., + features=..., + lang=..., + anchor=..., + /, + ) -> tuple[tuple[int, int], tuple[int, int]]: ... + def getlength( + self, string: str | bytes, mode=..., dir=..., features=..., lang=..., / + ) -> float: ... + def getvarnames(self) -> list[bytes]: ... + def getvaraxes(self) -> list[_Axis] | None: ... + def setvarname(self, instance_index: int, /) -> None: ... + def setvaraxes(self, axes: list[float], /) -> None: ... + +def getfont( + filename: str | bytes, + size: float, + index=..., + encoding=..., + font_bytes=..., + layout_engine=..., +) -> Font: ... +def __getattr__(name: str) -> Any: ... diff --git a/MLPY/Lib/site-packages/PIL/_imagingmath.cp39-win_amd64.pyd b/MLPY/Lib/site-packages/PIL/_imagingmath.cp39-win_amd64.pyd new file mode 100644 index 0000000000000000000000000000000000000000..f1b3c5e37db23380f64ea08acd16e6a7bd0a18ea Binary files /dev/null and b/MLPY/Lib/site-packages/PIL/_imagingmath.cp39-win_amd64.pyd differ diff --git a/MLPY/Lib/site-packages/PIL/_imagingmath.pyi b/MLPY/Lib/site-packages/PIL/_imagingmath.pyi new file mode 100644 index 0000000000000000000000000000000000000000..9342f10b9c8d9c139fab885fac4c08aad29c3804 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/_imagingmath.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... 
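Editor's note, not part of the vendored files: the features module added a little further down is the usual way to interrogate what this particular binary build supports. A short usage sketch (output varies by installation):

from PIL import features

# Same report that `python -m PIL.report` prints (see report.py below).
features.pilinfo(supported_formats=False)

print(features.check("webp"))            # True, False, or None if unknown
print(features.version("littlecms2"))    # version string, or None if unavailable
print(features.get_supported_codecs())   # e.g. ["jpg", "zlib", ...] depending on build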
diff --git a/MLPY/Lib/site-packages/PIL/_imagingmorph.cp39-win_amd64.pyd b/MLPY/Lib/site-packages/PIL/_imagingmorph.cp39-win_amd64.pyd new file mode 100644 index 0000000000000000000000000000000000000000..e7424393541d084cfb1684f40bd35ca26c8fec69 Binary files /dev/null and b/MLPY/Lib/site-packages/PIL/_imagingmorph.cp39-win_amd64.pyd differ diff --git a/MLPY/Lib/site-packages/PIL/features.py b/MLPY/Lib/site-packages/PIL/features.py new file mode 100644 index 0000000000000000000000000000000000000000..56fef1015249f76b68320204cc0ac564b6ca6fcd --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/features.py @@ -0,0 +1,340 @@ +from __future__ import annotations + +import collections +import os +import sys +import warnings +from typing import IO + +import PIL + +from . import Image + +modules = { + "pil": ("PIL._imaging", "PILLOW_VERSION"), + "tkinter": ("PIL._tkinter_finder", "tk_version"), + "freetype2": ("PIL._imagingft", "freetype2_version"), + "littlecms2": ("PIL._imagingcms", "littlecms_version"), + "webp": ("PIL._webp", "webpdecoder_version"), +} + + +def check_module(feature: str) -> bool: + """ + Checks if a module is available. + + :param feature: The module to check for. + :returns: ``True`` if available, ``False`` otherwise. + :raises ValueError: If the module is not defined in this version of Pillow. + """ + if feature not in modules: + msg = f"Unknown module {feature}" + raise ValueError(msg) + + module, ver = modules[feature] + + try: + __import__(module) + return True + except ModuleNotFoundError: + return False + except ImportError as ex: + warnings.warn(str(ex)) + return False + + +def version_module(feature: str) -> str | None: + """ + :param feature: The module to check for. + :returns: + The loaded version number as a string, or ``None`` if unknown or not available. + :raises ValueError: If the module is not defined in this version of Pillow. + """ + if not check_module(feature): + return None + + module, ver = modules[feature] + + return getattr(__import__(module, fromlist=[ver]), ver) + + +def get_supported_modules() -> list[str]: + """ + :returns: A list of all supported modules. + """ + return [f for f in modules if check_module(f)] + + +codecs = { + "jpg": ("jpeg", "jpeglib"), + "jpg_2000": ("jpeg2k", "jp2klib"), + "zlib": ("zip", "zlib"), + "libtiff": ("libtiff", "libtiff"), +} + + +def check_codec(feature: str) -> bool: + """ + Checks if a codec is available. + + :param feature: The codec to check for. + :returns: ``True`` if available, ``False`` otherwise. + :raises ValueError: If the codec is not defined in this version of Pillow. + """ + if feature not in codecs: + msg = f"Unknown codec {feature}" + raise ValueError(msg) + + codec, lib = codecs[feature] + + return f"{codec}_encoder" in dir(Image.core) + + +def version_codec(feature: str) -> str | None: + """ + :param feature: The codec to check for. + :returns: + The version number as a string, or ``None`` if not available. + Checked at compile time for ``jpg``, run-time otherwise. + :raises ValueError: If the codec is not defined in this version of Pillow. + """ + if not check_codec(feature): + return None + + codec, lib = codecs[feature] + + version = getattr(Image.core, f"{lib}_version") + + if feature == "libtiff": + return version.split("\n")[0].split("Version ")[1] + + return version + + +def get_supported_codecs() -> list[str]: + """ + :returns: A list of all supported codecs. 
+ """ + return [f for f in codecs if check_codec(f)] + + +features = { + "webp_anim": ("PIL._webp", "HAVE_WEBPANIM", None), + "webp_mux": ("PIL._webp", "HAVE_WEBPMUX", None), + "transp_webp": ("PIL._webp", "HAVE_TRANSPARENCY", None), + "raqm": ("PIL._imagingft", "HAVE_RAQM", "raqm_version"), + "fribidi": ("PIL._imagingft", "HAVE_FRIBIDI", "fribidi_version"), + "harfbuzz": ("PIL._imagingft", "HAVE_HARFBUZZ", "harfbuzz_version"), + "libjpeg_turbo": ("PIL._imaging", "HAVE_LIBJPEGTURBO", "libjpeg_turbo_version"), + "libimagequant": ("PIL._imaging", "HAVE_LIBIMAGEQUANT", "imagequant_version"), + "xcb": ("PIL._imaging", "HAVE_XCB", None), +} + + +def check_feature(feature: str) -> bool | None: + """ + Checks if a feature is available. + + :param feature: The feature to check for. + :returns: ``True`` if available, ``False`` if unavailable, ``None`` if unknown. + :raises ValueError: If the feature is not defined in this version of Pillow. + """ + if feature not in features: + msg = f"Unknown feature {feature}" + raise ValueError(msg) + + module, flag, ver = features[feature] + + try: + imported_module = __import__(module, fromlist=["PIL"]) + return getattr(imported_module, flag) + except ModuleNotFoundError: + return None + except ImportError as ex: + warnings.warn(str(ex)) + return None + + +def version_feature(feature: str) -> str | None: + """ + :param feature: The feature to check for. + :returns: The version number as a string, or ``None`` if not available. + :raises ValueError: If the feature is not defined in this version of Pillow. + """ + if not check_feature(feature): + return None + + module, flag, ver = features[feature] + + if ver is None: + return None + + return getattr(__import__(module, fromlist=[ver]), ver) + + +def get_supported_features() -> list[str]: + """ + :returns: A list of all supported features. + """ + return [f for f in features if check_feature(f)] + + +def check(feature: str) -> bool | None: + """ + :param feature: A module, codec, or feature name. + :returns: + ``True`` if the module, codec, or feature is available, + ``False`` or ``None`` otherwise. + """ + + if feature in modules: + return check_module(feature) + if feature in codecs: + return check_codec(feature) + if feature in features: + return check_feature(feature) + warnings.warn(f"Unknown feature '{feature}'.", stacklevel=2) + return False + + +def version(feature: str) -> str | None: + """ + :param feature: + The module, codec, or feature to check for. + :returns: + The version number as a string, or ``None`` if unknown or not available. + """ + if feature in modules: + return version_module(feature) + if feature in codecs: + return version_codec(feature) + if feature in features: + return version_feature(feature) + return None + + +def get_supported() -> list[str]: + """ + :returns: A list of all supported modules, features, and codecs. + """ + + ret = get_supported_modules() + ret.extend(get_supported_features()) + ret.extend(get_supported_codecs()) + return ret + + +def pilinfo(out: IO[str] | None = None, supported_formats: bool = True) -> None: + """ + Prints information about this installation of Pillow. + This function can be called with ``python3 -m PIL``. + It can also be called with ``python3 -m PIL.report`` or ``python3 -m PIL --report`` + to have "supported_formats" set to ``False``, omitting the list of all supported + image file formats. + + :param out: + The output stream to print to. Defaults to ``sys.stdout`` if ``None``. 
+ :param supported_formats: + If ``True``, a list of all supported image file formats will be printed. + """ + + if out is None: + out = sys.stdout + + Image.init() + + print("-" * 68, file=out) + print(f"Pillow {PIL.__version__}", file=out) + py_version_lines = sys.version.splitlines() + print(f"Python {py_version_lines[0].strip()}", file=out) + for py_version in py_version_lines[1:]: + print(f" {py_version.strip()}", file=out) + print("-" * 68, file=out) + print(f"Python executable is {sys.executable or 'unknown'}", file=out) + if sys.prefix != sys.base_prefix: + print(f"Environment Python files loaded from {sys.prefix}", file=out) + print(f"System Python files loaded from {sys.base_prefix}", file=out) + print("-" * 68, file=out) + print( + f"Python Pillow modules loaded from {os.path.dirname(Image.__file__)}", + file=out, + ) + print( + f"Binary Pillow modules loaded from {os.path.dirname(Image.core.__file__)}", + file=out, + ) + print("-" * 68, file=out) + + for name, feature in [ + ("pil", "PIL CORE"), + ("tkinter", "TKINTER"), + ("freetype2", "FREETYPE2"), + ("littlecms2", "LITTLECMS2"), + ("webp", "WEBP"), + ("transp_webp", "WEBP Transparency"), + ("webp_mux", "WEBPMUX"), + ("webp_anim", "WEBP Animation"), + ("jpg", "JPEG"), + ("jpg_2000", "OPENJPEG (JPEG2000)"), + ("zlib", "ZLIB (PNG/ZIP)"), + ("libtiff", "LIBTIFF"), + ("raqm", "RAQM (Bidirectional Text)"), + ("libimagequant", "LIBIMAGEQUANT (Quantization method)"), + ("xcb", "XCB (X protocol)"), + ]: + if check(name): + v: str | None = None + if name == "jpg": + libjpeg_turbo_version = version_feature("libjpeg_turbo") + if libjpeg_turbo_version is not None: + v = "libjpeg-turbo " + libjpeg_turbo_version + if v is None: + v = version(name) + if v is not None: + version_static = name in ("pil", "jpg") + if name == "littlecms2": + # this check is also in src/_imagingcms.c:setup_module() + version_static = tuple(int(x) for x in v.split(".")) < (2, 7) + t = "compiled for" if version_static else "loaded" + if name == "raqm": + for f in ("fribidi", "harfbuzz"): + v2 = version_feature(f) + if v2 is not None: + v += f", {f} {v2}" + print("---", feature, "support ok,", t, v, file=out) + else: + print("---", feature, "support ok", file=out) + else: + print("***", feature, "support not installed", file=out) + print("-" * 68, file=out) + + if supported_formats: + extensions = collections.defaultdict(list) + for ext, i in Image.EXTENSION.items(): + extensions[i].append(ext) + + for i in sorted(Image.ID): + line = f"{i}" + if i in Image.MIME: + line = f"{line} {Image.MIME[i]}" + print(line, file=out) + + if i in extensions: + print( + "Extensions: {}".format(", ".join(sorted(extensions[i]))), file=out + ) + + features = [] + if i in Image.OPEN: + features.append("open") + if i in Image.SAVE: + features.append("save") + if i in Image.SAVE_ALL: + features.append("save_all") + if i in Image.DECODERS: + features.append("decode") + if i in Image.ENCODERS: + features.append("encode") + + print("Features: {}".format(", ".join(features)), file=out) + print("-" * 68, file=out) diff --git a/MLPY/Lib/site-packages/PIL/py.typed b/MLPY/Lib/site-packages/PIL/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/PIL/report.py b/MLPY/Lib/site-packages/PIL/report.py new file mode 100644 index 0000000000000000000000000000000000000000..5abafd44eaf42f2b74a5abdc41f54b53075e9db3 --- /dev/null +++ b/MLPY/Lib/site-packages/PIL/report.py @@ -0,0 +1,5 @@ +from __future__ 
import annotations + +from .features import pilinfo + +pilinfo(supported_formats=False) diff --git a/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/INSTALLER b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/LICENSE.txt b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..673d53712e20fc9d26ffd02bd14fd5af0ec39564 --- /dev/null +++ b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/LICENSE.txt @@ -0,0 +1,241 @@ +This repository is licensed as follows: +All assets in this repository are the copyright of the Farama Foundation, except +where prohibited. Contributors to the repository transfer copyright of their work +to the Farama Foundation. + +Some code in this repository has been taken from other open source projects +and was originally released under the MIT or Apache 2.0 licenses, with +copyright held by another party. We've attributed these authors and they +retain their copyright to the extent required by law. Everything else +is owned by Justin Terry. The Secret Code font was also released under +the MIT license by Matthew Welch (http://www.squaregear.net/fonts/). +The MIT and Apache 2.0 licenses are included below. + +The Farama Foundation releases the elements of this repository they copyright to +under the MIT license. + +-------------------------------------------------------------------------------- + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------------------- + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {2020} {Authors} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/METADATA b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..014341f90fb3b0f6d46c9311f37183cb524f5e3c --- /dev/null +++ b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/METADATA @@ -0,0 +1,80 @@ +Metadata-Version: 2.1 +Name: PettingZoo +Version: 1.15.0 +Summary: Gym for multi-agent reinforcement learning +Home-page: https://github.com/Farama-Foundation/PettingZoo +Author: PettingZoo Community +Author-email: jkterry@umd.edu +Keywords: Reinforcement Learning,game,RL,AI,gym +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Python: >=3.7, <3.11 +Description-Content-Type: text/markdown +License-File: LICENSE.txt +Requires-Dist: numpy >=1.18.0 +Requires-Dist: gym >=0.21.0 +Provides-Extra: all +Requires-Dist: multi-agent-ale-py ==0.1.11 ; extra == 'all' +Requires-Dist: pygame ==2.1.0 ; extra == 'all' +Requires-Dist: chess ==1.7.0 ; extra == 'all' +Requires-Dist: rlcard ==1.0.4 ; extra == 'all' +Requires-Dist: hanabi-learning-environment ==0.0.1 ; extra == 'all' +Requires-Dist: pymunk ==6.2.0 ; extra == 'all' +Requires-Dist: magent ==0.1.14 ; extra == 'all' +Requires-Dist: pyglet >=1.4.0 ; extra == 'all' +Requires-Dist: box2d-py ==2.3.5 ; extra == 'all' +Requires-Dist: scipy >=1.4.1 ; extra == 'all' +Requires-Dist: pillow >=8.0.1 ; extra == 'all' +Provides-Extra: atari +Requires-Dist: multi-agent-ale-py ==0.1.11 ; extra == 'atari' +Requires-Dist: pygame ==2.1.0 ; extra == 'atari' +Provides-Extra: butterfly +Requires-Dist: pygame ==2.1.0 ; extra == 'butterfly' +Requires-Dist: pymunk ==6.2.0 ; extra == 'butterfly' +Provides-Extra: classic +Requires-Dist: chess ==1.7.0 ; extra == 'classic' +Requires-Dist: rlcard ==1.0.4 ; extra == 'classic' +Requires-Dist: pygame ==2.1.0 ; extra == 'classic' +Requires-Dist: hanabi-learning-environment ==0.0.1 ; extra == 'classic' +Provides-Extra: magent +Requires-Dist: magent ==0.1.14 ; extra == 'magent' +Provides-Extra: mpe +Requires-Dist: pyglet >=1.4.0 ; extra == 'mpe' +Provides-Extra: other +Requires-Dist: pillow >=8.0.1 ; extra == 'other' +Provides-Extra: sisl +Requires-Dist: pygame ==2.1.0 ; extra == 'sisl' +Requires-Dist: box2d-py ==2.3.5 ; extra == 'sisl' +Requires-Dist: scipy >=1.4.1 ; extra == 'sisl' +Provides-Extra: tests +Requires-Dist: pynput ; extra == 'tests' + +
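Editorial note: the Provides-Extra / Requires-Dist entries above define PettingZoo's optional dependency groups (e.g. `atari`, `classic`, `sisl`) and the pinned packages each group pulls in. As a minimal sketch of how this metadata can be inspected at runtime, the snippet below reads the installed distribution's metadata with the standard-library `importlib.metadata` module. It is an illustrative example only, not part of the vendored files in this diff, and it assumes PettingZoo 1.15.0 is installed in the active environment.

```python
# Editorial sketch (not part of the vendored distribution): list PettingZoo's
# declared extras and the requirements gated behind each one, using only the
# standard library. Assumes PettingZoo is installed in this environment.
from importlib.metadata import metadata, requires

dist_name = "PettingZoo"

# "Provides-Extra" headers enumerate the optional dependency groups.
extras = metadata(dist_name).get_all("Provides-Extra") or []
print("extras:", ", ".join(extras))

# requires() returns requirement strings; extras carry an environment
# marker such as "; extra == 'atari'".
for req in requires(dist_name) or []:
    print(req)
```

Filtering those requirement strings on their `extra == '<name>'` markers is, in effect, what `pip install pettingzoo[atari]` does when resolving an extra.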
+ +
+ +PettingZoo is a Python library for conducting research in multi-agent reinforcement learning, akin to a multi-agent version of [Gym](https://github.com/openai/gym). + +Our website, with comprehensive documentation, is [pettingzoo.ml](https://www.pettingzoo.ml) + +## Environments and Installation + +PettingZoo includes the following families of environments: + +* [Atari](https://www.pettingzoo.ml/atari): Multi-player Atari 2600 games (cooperative, competitive and mixed sum) +* [Butterfly](https://www.pettingzoo.ml/butterfly): Cooperative graphical games developed by us, requiring a high degree of coordination +* [Classic](https://www.pettingzoo.ml/classic): Classical games including card games, board games, etc. +* [MAgent](https://www.pettingzoo.ml/magent): Configurable environments with massive numbers of particle agents, originally from https://github.com/geek-ai/MAgent +* [MPE](https://www.pettingzoo.ml/mpe): A set of simple nongraphical communication tasks, originally from https://github.com/openai/multiagent-particle-envs +* [SISL](https://www.pettingzoo.ml/sisl): 3 cooperative environments, originally from https://github.com/sisl/MADRL + +To install the pettingzoo base library, use `pip install pettingzoo` + +This does not include dependencies for all families of environments (there's a massive number, and some can be problematic to install on certain systems). You can install these dependencies for one family like `pip install pettingzoo[atari]` or use `pip install pettingzoo[all]` to install all dependencies. + +We support Python 3.7, 3.8 and 3.9 on Linux and macOS. We will accept PRs related to Windows, but do not officially support it. + diff --git a/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/RECORD b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..206fec7d85cdfd023fc9f8f60253a74723ddbc16 --- /dev/null +++ b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/RECORD @@ -0,0 +1,539 @@ +PettingZoo-1.15.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +PettingZoo-1.15.0.dist-info/LICENSE.txt,sha256=Xx8cbXfzGP6tS3tYRxe7covgt8mW1gD7da-EKAIIN7Q,13362 +PettingZoo-1.15.0.dist-info/METADATA,sha256=fBrfrGZ7LJTX7hxNiLoGh5kCIT1zg00vnev7UuZtm7c,4071 +PettingZoo-1.15.0.dist-info/RECORD,, +PettingZoo-1.15.0.dist-info/WHEEL,sha256=Wyh-_nZ0DJYolHNn1_hMa4lM7uDedD_RGVwbmTjyItk,91 +PettingZoo-1.15.0.dist-info/top_level.txt,sha256=PHx_ZYcfYEDMg8hYWRYOjCOZjfSJ02BKsmLjW7P-LEI,11 +pettingzoo/__init__.py,sha256=VUMHw36GpTknVCPbQfCCOkGunSzK7beufGkLj1ieYyQ,539 +pettingzoo/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/atari/__init__.py,sha256=EOnTLVyQDGUOm8PxOruBuZFSuSYe5GhlsORA2T_ACT0,155 +pettingzoo/atari/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/atari/__pycache__/base_atari_env.cpython-39.pyc,, +pettingzoo/atari/__pycache__/basketball_pong_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/boxing_v1.cpython-39.pyc,, +pettingzoo/atari/__pycache__/combat_plane_v1.cpython-39.pyc,, +pettingzoo/atari/__pycache__/combat_tank_v1.cpython-39.pyc,, +pettingzoo/atari/__pycache__/double_dunk_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/entombed_competitive_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/entombed_cooperative_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/flag_capture_v1.cpython-39.pyc,, +pettingzoo/atari/__pycache__/foozpong_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/ice_hockey_v1.cpython-39.pyc,, +pettingzoo/atari/__pycache__/joust_v2.cpython-39.pyc,, 
+pettingzoo/atari/__pycache__/mario_bros_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/maze_craze_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/othello_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/pong_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/quadrapong_v3.cpython-39.pyc,, +pettingzoo/atari/__pycache__/space_invaders_v1.cpython-39.pyc,, +pettingzoo/atari/__pycache__/space_war_v1.cpython-39.pyc,, +pettingzoo/atari/__pycache__/surround_v1.cpython-39.pyc,, +pettingzoo/atari/__pycache__/tennis_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/video_checkers_v3.cpython-39.pyc,, +pettingzoo/atari/__pycache__/volleyball_pong_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/warlords_v2.cpython-39.pyc,, +pettingzoo/atari/__pycache__/wizard_of_wor_v2.cpython-39.pyc,, +pettingzoo/atari/base_atari_env.py,sha256=2fftknLn16Akizn5HRdqLgtAIEgTX6ffdipvwK_8908,8999 +pettingzoo/atari/basketball_pong_v2.py,sha256=8hf7GJpjyJ9WauEkwUTItuWfBDEY2pT-PF5dGpcpXGQ,496 +pettingzoo/atari/boxing_v1.py,sha256=JhoYYp8_W6FD-KNHJF9j4RgfG6oA3hvVBP2WKKcl4BU,317 +pettingzoo/atari/combat_plane_v1.py,sha256=9BhZxdWHaqKVdzpMhesBcZdnzRtzLEgCJAQWvzu7yug,597 +pettingzoo/atari/combat_tank_v1.py,sha256=LHVcETprUMuUb-2jMlnH3FS4hvFzwpCiqnRQLh71Gz4,803 +pettingzoo/atari/double_dunk_v2.py,sha256=IB3BjsonArlPLO-KOJA2zr7-EaBjT6491StUtSMFnKk,322 +pettingzoo/atari/entombed_competitive_v2.py,sha256=ALKXPoJy2IMMP1bx2YT2Kv-Y8YrY8lkrdAoNJl5Sv3Y,316 +pettingzoo/atari/entombed_cooperative_v2.py,sha256=eMr7GzMv1veyLjrvVYCQb9lOfvhl8pbgEgvGqozepkU,316 +pettingzoo/atari/flag_capture_v1.py,sha256=zIyo8Eomw2OvQQermF9ZQoObeQWytsz2dyArVfrspIE,323 +pettingzoo/atari/foozpong_v2.py,sha256=Yo95hItM3HO28aP3sdeD9-jxJFAJx1aP79u66bacOHk,496 +pettingzoo/atari/ice_hockey_v1.py,sha256=CkVm2D_IABv_1vefuOhP8OnGck8fdnFd6PRcXghNQmw,321 +pettingzoo/atari/joust_v2.py,sha256=xocIzU7h9Ib7iLFFhIgY0lpvZqpRRJQd9IZ9oRgvJqA,316 +pettingzoo/atari/mario_bros_v2.py,sha256=l4ZczmJT-szSTyeuuk77HNqU9AaK1p_aGSUO6ylF-to,321 +pettingzoo/atari/maze_craze_v2.py,sha256=pb3SMP_csVjm-Aeif8HFxBfIRNUkfuO4cOnmIGePTck,989 +pettingzoo/atari/othello_v2.py,sha256=rH434bGIYIFKosPzjMlHIp-zHWW-rJcUQd1_TNak4t8,318 +pettingzoo/atari/pong_v2.py,sha256=YewBwZtaXFYeiKQTzAvhALaUSbcC2cJYuz5yjtq4SdE,1126 +pettingzoo/atari/quadrapong_v3.py,sha256=lINsv4tCwJLd-pyGGQ3HIZ7VVRMTgitO4m1V7aWEMVE,359 +pettingzoo/atari/space_invaders_v1.py,sha256=UfAHBzhzMbDA9L7NeGW95hYg3bHA0bxZd0aKxzkUyDc,612 +pettingzoo/atari/space_war_v1.py,sha256=5FGhzPOkugiuVbIYfh4DTGDhlDnTGxO3MRMKktN8uJE,320 +pettingzoo/atari/surround_v1.py,sha256=iqQX8NnqpZA_jKWEZp7vqpaV_JFZwy14Ja77vMJ_UNI,319 +pettingzoo/atari/tennis_v2.py,sha256=wJ7eq1cyi62_QSXMII_b6GPwcL8Z9EdLUwGfjh-ji2s,317 +pettingzoo/atari/video_checkers_v3.py,sha256=SvBrvH0NVMrTfIjLxNd_7808lmyyktDKeJZxgy36hYQ,325 +pettingzoo/atari/volleyball_pong_v2.py,sha256=UUqUmsFGeIEfWUPtfJMZbl0uGk1Luk2JVWshBTyM-54,496 +pettingzoo/atari/warlords_v2.py,sha256=SOUSm7LImFhn3754Zor5iUHmwnT73v7UgNLRfPuBQeo,319 +pettingzoo/atari/wizard_of_wor_v2.py,sha256=O3YlUcJ3RxQ4ekuKAPNAEwizRgSht8sIVqNWOvEazwY,324 +pettingzoo/butterfly/__init__.py,sha256=EOnTLVyQDGUOm8PxOruBuZFSuSYe5GhlsORA2T_ACT0,155 +pettingzoo/butterfly/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/butterfly/__pycache__/cooperative_pong_v5.cpython-39.pyc,, +pettingzoo/butterfly/__pycache__/knights_archers_zombies_v8.cpython-39.pyc,, +pettingzoo/butterfly/__pycache__/pistonball_v6.cpython-39.pyc,, +pettingzoo/butterfly/__pycache__/prison_v3.cpython-39.pyc,, +pettingzoo/butterfly/__pycache__/prospector_v4.cpython-39.pyc,, 
+pettingzoo/butterfly/cooperative_pong/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/butterfly/cooperative_pong/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/butterfly/cooperative_pong/__pycache__/ball.cpython-39.pyc,, +pettingzoo/butterfly/cooperative_pong/__pycache__/cake_paddle.cpython-39.pyc,, +pettingzoo/butterfly/cooperative_pong/__pycache__/cooperative_pong.cpython-39.pyc,, +pettingzoo/butterfly/cooperative_pong/__pycache__/manual_control.cpython-39.pyc,, +pettingzoo/butterfly/cooperative_pong/__pycache__/paddle.cpython-39.pyc,, +pettingzoo/butterfly/cooperative_pong/ball.py,sha256=whZra0kjCvaBsB0wGgLrmQqY82rJuHX1fAghny8YWK4,2309 +pettingzoo/butterfly/cooperative_pong/cake_paddle.py,sha256=byOasCS_7fMgoS7DHgYwzNZzqs7OLOFlF_XYlUEBV1w,3519 +pettingzoo/butterfly/cooperative_pong/cooperative_pong.py,sha256=1kMQQE94QCn5vHtE621lXcwnvKEw246Whfz3qKnVY6Y,10782 +pettingzoo/butterfly/cooperative_pong/manual_control.py,sha256=oSw-geuToxbGK-zxU2-i7KlOB1ntf7TQJS1luoE4_yA,1917 +pettingzoo/butterfly/cooperative_pong/paddle.py,sha256=_8v2G1t0iwQHQHzQMlYF9sUkPq6jAVo31M7j-g9O13g,2183 +pettingzoo/butterfly/cooperative_pong_v5.py,sha256=ZPpVK9dkmNtnAM3-hDyw5CUmoWRUVlky51ObdXOzwQc,119 +pettingzoo/butterfly/knights_archers_zombies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/butterfly/knights_archers_zombies/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/butterfly/knights_archers_zombies/__pycache__/knights_archers_zombies.cpython-39.pyc,, +pettingzoo/butterfly/knights_archers_zombies/__pycache__/manual_control.cpython-39.pyc,, +pettingzoo/butterfly/knights_archers_zombies/img/archer.png,sha256=6OSiMVzzcFxrs1f5rdsBhmwkM5Bxuru8T1700qskHgs,461 +pettingzoo/butterfly/knights_archers_zombies/img/arrow.png,sha256=5iBCv1sRdoQ6Q0J_dM_Q4V1TRKzbGgynL_OK-LHaiRQ,10490 +pettingzoo/butterfly/knights_archers_zombies/img/knight.png,sha256=e3ydF96M7wWifwvAi6gjWpEZHiLujMS-re9E6EKxs-g,493 +pettingzoo/butterfly/knights_archers_zombies/img/left_wall.png,sha256=5FfYvnv4O6NFp_UQKCBxaBE65xHFL24OEpZ9ZSExwAs,3398 +pettingzoo/butterfly/knights_archers_zombies/img/mace.png,sha256=nPlfjfc8YQSH_MpmPzJYNJaPQwaSs-6pSajhZ5CIz6k,483 +pettingzoo/butterfly/knights_archers_zombies/img/patch1.png,sha256=hISLu_P8m5BzEJS2d-ZnaStoavGWvWL0YzZGY-v7tHw,1368 +pettingzoo/butterfly/knights_archers_zombies/img/patch2.png,sha256=rEheXwrQ7l_72JgBFd8UEIrl7KamnazyrhzLLNSzHYY,1442 +pettingzoo/butterfly/knights_archers_zombies/img/patch3.png,sha256=E8qGi4fURd0PFcHSgVuasTpKw4nSWWjytw0ecJhsaUU,1505 +pettingzoo/butterfly/knights_archers_zombies/img/patch4.png,sha256=e6NTuyOaPAdZzWAnN9Nlb26x2qd8OWerK18RG-HHbu0,1459 +pettingzoo/butterfly/knights_archers_zombies/img/right_wall.png,sha256=0Sksymn9sUXvmJpiJGKfmS7fHT2r9HwnA1M8pNVIMRo,3313 +pettingzoo/butterfly/knights_archers_zombies/img/zombie.png,sha256=zezj-jz0DBrARiIq0fysXK9aOErK0tEj7Jua8zuqG-o,483 +pettingzoo/butterfly/knights_archers_zombies/knights_archers_zombies.py,sha256=_FLcxb0UcVn6HsevUbZahLl1lKB1YdJ_SjJFXnxKsnA,18010 +pettingzoo/butterfly/knights_archers_zombies/manual_control.py,sha256=oKv-dTzS7JRzoHez8RWphnFAzc9gZqyxK5quFmLBPe0,2068 +pettingzoo/butterfly/knights_archers_zombies/src/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/constants.cpython-39.pyc,, +pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/img.cpython-39.pyc,, 
+pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/players.cpython-39.pyc,, +pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/weapons.cpython-39.pyc,, +pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/zombie.cpython-39.pyc,, +pettingzoo/butterfly/knights_archers_zombies/src/constants.py,sha256=lGfgsWmMZEHF4_9Uc5p_IJug6a7Zqynte8E5M0jbxUE,2380 +pettingzoo/butterfly/knights_archers_zombies/src/img.py,sha256=7uOIguiTpnmFzV30Qx6K7fGN0Dsndx1n13ZK2Xgo_Qw,279 +pettingzoo/butterfly/knights_archers_zombies/src/players.py,sha256=0SZIpvdNym13Btoq9qmHxybVP6LKuafSfuaHQe7eVqQ,3474 +pettingzoo/butterfly/knights_archers_zombies/src/weapons.py,sha256=UZnxodkZHtYsfJLPua8AhRAQ_EAZQuEESaguju6cFBU,2640 +pettingzoo/butterfly/knights_archers_zombies/src/zombie.py,sha256=vG_WIpr_vhMtDoZSEpxQqhaZhttRS1G6zACVcolbJTA,1323 +pettingzoo/butterfly/knights_archers_zombies_v8.py,sha256=sQ96wVAtQkHz64SbO_J_4BbHIuGm_oNatX74b5BYaBc,168 +pettingzoo/butterfly/pistonball/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/butterfly/pistonball/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/butterfly/pistonball/__pycache__/manual_control.cpython-39.pyc,, +pettingzoo/butterfly/pistonball/__pycache__/pistonball.cpython-39.pyc,, +pettingzoo/butterfly/pistonball/background.png,sha256=oxwrbwMFqtfyYQoTwC7uxOAn6jteAtMcsfTCHo-SUzw,5856 +pettingzoo/butterfly/pistonball/manual_control.py,sha256=3HYzdThOxcUCA10LgDtIdlc_1jJ7o09SBHaAF9ZqzMo,1794 +pettingzoo/butterfly/pistonball/piston.png,sha256=goQFQkHlmfBQFsP1E7bkaTrh-y1b5_Fss3T4GT-Flp4,642 +pettingzoo/butterfly/pistonball/piston_body.png,sha256=6lM0YPmXKS3byRnCiRLvL3uN5chyjRfqVC3KQprLHfk,1197 +pettingzoo/butterfly/pistonball/pistonball.py,sha256=EyWklk-ZMhrDYH9dNWq9FBBHeHgEKDsoRHsKsrh38Ro,18596 +pettingzoo/butterfly/pistonball_v6.py,sha256=ok17fAulQ-7O4JEdASJ4IqPvj5sluk3f-s69OFBBvOs,78 +pettingzoo/butterfly/prison/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/butterfly/prison/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/butterfly/prison/__pycache__/manual_control.cpython-39.pyc,, +pettingzoo/butterfly/prison/__pycache__/prison.cpython-39.pyc,, +pettingzoo/butterfly/prison/background.png,sha256=0PMJHjtzZ5qicnJKNiSpkI5OX1wWCfc7s4dLcE0PUQ4,10831 +pettingzoo/butterfly/prison/background_append.png,sha256=7Zn_sYR_TjuTOtbVDfTET-z9wiDKxQriFpMeFrwafLI,6972 +pettingzoo/butterfly/prison/blit_background.png,sha256=qTppwOwh2cJPcm76gT0faD_hPtblPsnYRqOPjvojLI8,5237 +pettingzoo/butterfly/prison/blit_background_append.png,sha256=KItogGdKOlu-769YXazBY6vWEy0BVZGYhEJpZAbjwVc,3400 +pettingzoo/butterfly/prison/manual_control.py,sha256=KcHDFae3Fc5muyW1yxH-ePXo8BVFZlUzLpBOYCopM6k,1807 +pettingzoo/butterfly/prison/prison.py,sha256=SG0YJb3Lx2mu-t5K92ghcXdI6xeOn4hSpxjz0hcviPU,15403 +pettingzoo/butterfly/prison/prisoner.png,sha256=XB-qeEkxhSlC-dfUtde0JzdEzMwJkbwV7_ojv223Lao,2618 +pettingzoo/butterfly/prison/sprites/alien_left.png,sha256=YWfuS_qYtkKJYHpln0-Ol2IBUMI9veCFlhjTRpdBYG8,792 +pettingzoo/butterfly/prison/sprites/alien_right.png,sha256=fNi6XCfO5Ub12gkp10QJYI68H11qBs9rDcqkxDZtvjo,760 +pettingzoo/butterfly/prison/sprites/alien_still.png,sha256=TwTk3WCWqLBWY9VoAMkf7YJHi-3IsJMPlkXZDeqdmWA,938 +pettingzoo/butterfly/prison/sprites/bunny_left.png,sha256=sDatsuDpHYMkQyAhFkfN1CXL8k8hRl5nY1Wlgd1d6FM,2072 +pettingzoo/butterfly/prison/sprites/bunny_right.png,sha256=Ks3DKlxFsY2mYNunumCHVXW7OKEvkM0HxTpLurHOpFI,2027 +pettingzoo/butterfly/prison/sprites/bunny_still.png,sha256=d8WTs_KXiww8QKCakcw_SXMpWH9BaVI_aHLiG9kiMXI,2407 
+pettingzoo/butterfly/prison/sprites/drone_left.png,sha256=0pRi6XfYp4Q04jvdBVfrraCNTPZPTctma6wdl1UBFyk,1214 +pettingzoo/butterfly/prison/sprites/drone_right.png,sha256=MF_W0nfGN1rzJN9mQMFntHrGdEs-qYMnBWhDvzlNO9U,1218 +pettingzoo/butterfly/prison/sprites/drone_still.png,sha256=GxBaPbn9Y6IajPndoKbpumzyX3_gJBWnEDuA9Of_ku0,1424 +pettingzoo/butterfly/prison/sprites/glowy_left.png,sha256=xtOupTjIFUuk3MT5_XML8FTrIMU8TiRbT8j6uk7X7wc,1323 +pettingzoo/butterfly/prison/sprites/glowy_right.png,sha256=RDhIpWKQveUeaH67onoqiDkXl-BJ_g3muSZQxxczVE4,1296 +pettingzoo/butterfly/prison/sprites/glowy_still.png,sha256=up31d_iQ0aKLK-X07hgpVEQrYtTjAdMa1Gyz79rs0Ng,1407 +pettingzoo/butterfly/prison/sprites/rabbit_left.png,sha256=9UzGTOpOlsjOHAnVYH6_uorP3LDthPPk3S5rRiOXKgI,3379 +pettingzoo/butterfly/prison/sprites/rabbit_right.png,sha256=QqBi2R8R3NRUaSKkm3tSbajYpPXUUX6t1HtQoocTEdI,3404 +pettingzoo/butterfly/prison/sprites/rabbit_still.png,sha256=vyIQJ4lhtr_7itrQjggRsKtFJtAtW9QN8O5FVpELBNw,3525 +pettingzoo/butterfly/prison/sprites/reptile_left.png,sha256=OM0w1rvqLArysOi_lF8A4JdxC-xliaU1nwtSciDEHGA,1544 +pettingzoo/butterfly/prison/sprites/reptile_right.png,sha256=njPSJXOI1Rv_C2NDrX3A9ogXEc_j408p4WVwDrZzNJA,1562 +pettingzoo/butterfly/prison/sprites/reptile_still.png,sha256=tBDDc8dRWU2to3DBs6u-M9nxe7WB1IGLSfHeqrpo-wo,1847 +pettingzoo/butterfly/prison/sprites/robot_left.png,sha256=ZAWWxrzG27kheeDdgrpAYeCVc5JFBsLSEjd_W4WE0jA,2017 +pettingzoo/butterfly/prison/sprites/robot_right.png,sha256=dgKE2gnJx7ltmG2YSLrcQDzoAmHIFyNS5uoXMfnE_m8,2141 +pettingzoo/butterfly/prison/sprites/robot_still.png,sha256=4qaFBipSK52LZY0FX3YfwfMYCTCLX47BVBesK7KyJ6I,2385 +pettingzoo/butterfly/prison/sprites/tank_left.png,sha256=-oYwRQO-2VUtegegRxbAAlWtr0aUSbGMmDkMvBv8ysM,2567 +pettingzoo/butterfly/prison/sprites/tank_right.png,sha256=DOOBz2DILCAWxNq_RVqH2PnUqEaNstsBEtlX7Jb8TWA,2612 +pettingzoo/butterfly/prison/sprites/tank_still.png,sha256=u1GokjE30nstXY02dB0-ZAG4-DhPPUeWwHS5vpAA2lw,2707 +pettingzoo/butterfly/prison/sprites/ufo_left.png,sha256=iJR0VX76GS5I_WREhAqujnQ-WJRpc-Z5YOcOOPGR69s,1041 +pettingzoo/butterfly/prison/sprites/ufo_right.png,sha256=wqO4h1XRDufIC6M6sFhIctwFyNq3ygvWBxwt8_eAobI,987 +pettingzoo/butterfly/prison/sprites/ufo_still.png,sha256=IsewA1bBKa3vvd-BGNOpDaGeM_ZtFfJjq2HZRFyTM-g,1035 +pettingzoo/butterfly/prison_v3.py,sha256=BCp2W9nr9vwFG4Qp-TXx9422Unx03cuoPTixHOunZic,70 +pettingzoo/butterfly/prospector/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/butterfly/prospector/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/butterfly/prospector/__pycache__/constants.cpython-39.pyc,, +pettingzoo/butterfly/prospector/__pycache__/manual_control.cpython-39.pyc,, +pettingzoo/butterfly/prospector/__pycache__/prospector.cpython-39.pyc,, +pettingzoo/butterfly/prospector/__pycache__/utils.cpython-39.pyc,, +pettingzoo/butterfly/prospector/constants.py,sha256=Xz3OSJQPZ1kIzjLjIz84ql8jb4pa4XnH6vgY3wAgw_Y,2537 +pettingzoo/butterfly/prospector/data/bank.png,sha256=-3WVVWynpQAllAHksMy_6DtexQcvziLwgxo41df2Y7I,5857 +pettingzoo/butterfly/prospector/data/bankers/0.png,sha256=yZs5HoMRhFsKHE_1itKAlFV4fEHQwQ8y1VcQtrx5Tgg,2111 +pettingzoo/butterfly/prospector/data/bankers/1.png,sha256=dwl3_besAb4wDoT-byuqiEmQG75xEMCM8R0iVicPlRc,2112 +pettingzoo/butterfly/prospector/data/bankers/2.png,sha256=Z0-KDJUdzpuMZN_WXxjEUpUbSry-fhwQDkfajhMv2Iw,2115 +pettingzoo/butterfly/prospector/data/debris/0.png,sha256=HSEEbWDoFWew7IpbM_5LzDlMWClrhFDfIbGYzySyCBg,446 
+pettingzoo/butterfly/prospector/data/debris/1.png,sha256=px8VlPwJOh_RHy7YDLxpl3M1wWSJl_w2e7UxHP132RE,316 +pettingzoo/butterfly/prospector/data/debris/2.png,sha256=8ITnfc3JetCkYYHLuGTjDQzU1uIjXbegcKC8G3OBQms,623 +pettingzoo/butterfly/prospector/data/debris/3.png,sha256=zo63Kt2Mfz_x6inUxd1EHusejgHGVb5nfGnwkE90ga8,607 +pettingzoo/butterfly/prospector/data/debris/seaweed_water.png,sha256=l-ULVp30jqyZkT4w5T_aFDEubkntaTVr7vE_ZB5Qxgc,968 +pettingzoo/butterfly/prospector/data/fence_horiz_tile.png,sha256=EsP7IrWY-CU82CpWhgdVI-V_p5JKj5jaQi_gGNW8Fmg,564 +pettingzoo/butterfly/prospector/data/fence_vert_tile.png,sha256=dtduk2063K0KSK4RfMabbSMX3tNfWqtVV4weOHrVess,352 +pettingzoo/butterfly/prospector/data/gold.png,sha256=IRpZ6VpEaAi5wAXqbbWOVGHTOvLDZFe1ruNrEPDZHcA,289 +pettingzoo/butterfly/prospector/data/prospector.png,sha256=_IpLpq-HcM_xzBUz3v0Ip9TpB-iJtm16ZKqy-ihmMcg,2323 +pettingzoo/butterfly/prospector/data/river_tile.png,sha256=IVi92YooXygwuIAWFF5sTcA2stdQNspNORopJZady7E,5686 +pettingzoo/butterfly/prospector/data/river_to_sand_tile.png,sha256=Ed50eZLCqkO_YTtA3wahGr3f2v2hTgNMZM36cSkGvHI,5724 +pettingzoo/butterfly/prospector/data/sand_tile.png,sha256=R7Xi2UKe7iAXzHWaHQQcORQCdJO4FlHKqCC7dr6hn50,3500 +pettingzoo/butterfly/prospector/manual_control.py,sha256=r8c1hWiAjt1z0yQg1LEnUjtNxtopoWAxeDleAcTW1fU,2270 +pettingzoo/butterfly/prospector/prospector.py,sha256=SPjZ6N56UmQnqIAtiiprZjYwrFKceYIJqwn23_fXR-w,30264 +pettingzoo/butterfly/prospector/utils.py,sha256=v4U1FGh4nE6nXADYvTvEiXk8DnYs7uWqO0hAdlxiugM,1013 +pettingzoo/butterfly/prospector_v4.py,sha256=1OniICOQdRqYjuxRPeHyC5q5P59bXQRs1_s7Fou1QJo,78 +pettingzoo/classic/__init__.py,sha256=EOnTLVyQDGUOm8PxOruBuZFSuSYe5GhlsORA2T_ACT0,155 +pettingzoo/classic/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/classic/__pycache__/backgammon_v3.cpython-39.pyc,, +pettingzoo/classic/__pycache__/checkers_v3.cpython-39.pyc,, +pettingzoo/classic/__pycache__/chess_v5.cpython-39.pyc,, +pettingzoo/classic/__pycache__/connect_four_v3.cpython-39.pyc,, +pettingzoo/classic/__pycache__/dou_dizhu_v4.cpython-39.pyc,, +pettingzoo/classic/__pycache__/gin_rummy_v4.cpython-39.pyc,, +pettingzoo/classic/__pycache__/go_v5.cpython-39.pyc,, +pettingzoo/classic/__pycache__/hanabi_v4.cpython-39.pyc,, +pettingzoo/classic/__pycache__/leduc_holdem_v4.cpython-39.pyc,, +pettingzoo/classic/__pycache__/mahjong_v4.cpython-39.pyc,, +pettingzoo/classic/__pycache__/rps_v2.cpython-39.pyc,, +pettingzoo/classic/__pycache__/texas_holdem_no_limit_v6.cpython-39.pyc,, +pettingzoo/classic/__pycache__/texas_holdem_v4.cpython-39.pyc,, +pettingzoo/classic/__pycache__/tictactoe_v3.cpython-39.pyc,, +pettingzoo/classic/__pycache__/uno_v4.cpython-39.pyc,, +pettingzoo/classic/backgammon/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/classic/backgammon/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/classic/backgammon/__pycache__/backgammon.cpython-39.pyc,, +pettingzoo/classic/backgammon/__pycache__/backgammon_env.cpython-39.pyc,, +pettingzoo/classic/backgammon/__pycache__/bg_utils.cpython-39.pyc,, +pettingzoo/classic/backgammon/backgammon.py,sha256=hLQ_La4FMsSFCTYUuPDEzvslKXUV12bAg3fh3MwiqsY,75540 +pettingzoo/classic/backgammon/backgammon_env.py,sha256=YuJie9aAetG54yYj_HBv0szsAOPrQ1qne1ZqMxk3pQo,5689 +pettingzoo/classic/backgammon/bg_utils.py,sha256=dQVevW6PreRtMf18b5KX12ecunTZ8BgHSjwY28rR17E,3651 +pettingzoo/classic/backgammon_v3.py,sha256=dPKuRa8TMXyoVaHCTh5O36n8nBexoL1IIcA-IKXXX9g,52 +pettingzoo/classic/checkers/__pycache__/checkers.cpython-39.pyc,, 
+pettingzoo/classic/checkers/checkers.py,sha256=v_X0mnFhb8LR8XHA0rsc00XKP9PosGgaiRY3LlkKTDw,22066 +pettingzoo/classic/checkers_v3.py,sha256=yiMAPRTX5-gJz7bVRPgQi87wPfHYLRpjoxfl8VV8PQ4,44 +pettingzoo/classic/chess/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/classic/chess/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/classic/chess/__pycache__/chess_env.cpython-39.pyc,, +pettingzoo/classic/chess/__pycache__/chess_utils.cpython-39.pyc,, +pettingzoo/classic/chess/__pycache__/test_chess.cpython-39.pyc,, +pettingzoo/classic/chess/chess_env.py,sha256=nAHOA4F-B9OXtMdqEcjNSk2VQa6ayHSd0-XZork-uew,4354 +pettingzoo/classic/chess/chess_utils.py,sha256=mErXqbXx2mEof_ZkC2JZEkY6NSjmyH3mEpePRHAYITM,10916 +pettingzoo/classic/chess/test_chess.py,sha256=fVYiDBRZD4Dm2umVt5-FpELx1qFIbMuw-eg7olbQamw,2221 +pettingzoo/classic/chess_v5.py,sha256=qpzp243RIvDmubTatcRjIRslRkwoJeWEfUE9vnUeOX4,42 +pettingzoo/classic/connect_four/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/classic/connect_four/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/classic/connect_four/__pycache__/connect_four.cpython-39.pyc,, +pettingzoo/classic/connect_four/connect_four.py,sha256=z3QiwTmCq8y9nQkGN-ZI9e5HwNIK_0IlarIW7stfMnk,7878 +pettingzoo/classic/connect_four/img/C4BlackPiece.png,sha256=Bz-txIWTIiGVAK4tmVyfvVpwaloq-0D0Ty-AL2eA8CM,175 +pettingzoo/classic/connect_four/img/C4RedPiece.png,sha256=5Mzf8BOqEZPGLxwh55MvlnJAgE1sj0Znre9ywzjjNtY,166 +pettingzoo/classic/connect_four/img/Connect4Board.png,sha256=M4R7GrkS_OheapCAMs0TLoG5rxoDBBrCX6XDBLEwwjA,580 +pettingzoo/classic/connect_four_v3.py,sha256=DZsJnyDj1fIoQSVwyo6WEj7rArfGH-tIAcVsu-fc7tI,52 +pettingzoo/classic/dou_dizhu_v4.py,sha256=seXCM2w7tOSkELkBALBiASGQLZdJ411mnXOEsM6y70c,48 +pettingzoo/classic/gin_rummy_v4.py,sha256=cH1fajQG5EBcXPcU_kp0ogG8W1sdjN0chbM1WV-BwXA,48 +pettingzoo/classic/go/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/classic/go/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/classic/go/__pycache__/coords.cpython-39.pyc,, +pettingzoo/classic/go/__pycache__/go.cpython-39.pyc,, +pettingzoo/classic/go/__pycache__/go_env.cpython-39.pyc,, +pettingzoo/classic/go/coords.py,sha256=JdyFxGo1f6kH_0GT4fwfp8AWawHCzFHhuhsS8lvRccU,3377 +pettingzoo/classic/go/go.py,sha256=sgHhCSeamSO9yYsuWiX9CFU4bI3xfiucgyBkJuXBpkQ,19762 +pettingzoo/classic/go/go_env.py,sha256=pilN_i13Ms6PB4BotqQZJSGYmaoWT4_m2W0urGRjzkY,9606 +pettingzoo/classic/go/img/GO_Tile0.png,sha256=9xF4e7VjrDO0JBr2Ildor8RFORABRI8H9Nggq6mRjBg,146 +pettingzoo/classic/go/img/GO_Tile1.png,sha256=Af1hu4r4S6Le6pPhVUw7F62mY2q9Dcwy1uNvxgLX81E,129 +pettingzoo/classic/go/img/GO_Tile2.png,sha256=1KYuM8XwfVCw0oZikHPUfwEDfj5h3iV8Dud3V16NNJ4,130 +pettingzoo/classic/go/img/GO_Tile3.png,sha256=ZD3Q5nMZ_ceBD_TV0Q-2mqC00jydpwH5Da3fgKmmaCo,134 +pettingzoo/classic/go/img/GO_Tile4.png,sha256=bfCZj8HgMd77q8GYB_nxNzYucDkG6U1t1z6-Qk6VdP8,127 +pettingzoo/classic/go/img/GO_Tile5.png,sha256=OeYYrExZeHhqZD4xMsgGCccr3mRb-7b8K4IIU8KHkWw,117 +pettingzoo/classic/go/img/GO_Tile6.png,sha256=lYxLIIHfNtYyrNS12U_IV_afl8INbsnfM_jIyUrWXlo,120 +pettingzoo/classic/go/img/GO_Tile7.png,sha256=efFWYwCZEDIrumJNHI-sExorgUHGprfZJ20Ti1qTa00,121 +pettingzoo/classic/go/img/GO_Tile8.png,sha256=TGSaiiGl-4zM6q5XNYSLdoOO-j7opx-3g_P489uxuWw,121 +pettingzoo/classic/go/img/GoBlackPiece.png,sha256=Ni4Q8ax03wyL_gE_n4hte-Yr3EkUaAl8ejzanSN0KmQ,113 +pettingzoo/classic/go/img/GoBoard.png,sha256=BQvz3Appa3UOABboQhEXuL0whjm4bqzLF8HH10qoS6w,450 
+pettingzoo/classic/go/img/GoWhitePiece.png,sha256=6IqWDOhTTcwcnxSOtT2Xhs1A5wm4byLT7mQYgE9X5rg,108 +pettingzoo/classic/go_v5.py,sha256=Zol4VUuw4Y1oGltR3kYd3unlYg_5y8lBKkKwT-niqi4,36 +pettingzoo/classic/hanabi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/classic/hanabi/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/classic/hanabi/__pycache__/hanabi.cpython-39.pyc,, +pettingzoo/classic/hanabi/hanabi.py,sha256=tfprXcE7zLK-puXxpbmNVsoJVlhMAOWxQE9gFKUdZIs,13611 +pettingzoo/classic/hanabi_v4.py,sha256=qCGTB1MqBml8f4A8ahz45BaOw29EDnA7fI_BRS-gLAY,40 +pettingzoo/classic/leduc_holdem_v4.py,sha256=2JVv11K0E5-75-ParGupE9KlFcgsQpnygKliuJ6xaWw,51 +pettingzoo/classic/mahjong_v4.py,sha256=qs9hpIUCMrvqC-8aVbyCf844kymlBU1vX8UV5JGkUhM,46 +pettingzoo/classic/rlcard_envs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/classic/rlcard_envs/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/classic/rlcard_envs/__pycache__/dou_dizhu.cpython-39.pyc,, +pettingzoo/classic/rlcard_envs/__pycache__/gin_rummy.cpython-39.pyc,, +pettingzoo/classic/rlcard_envs/__pycache__/leduc_holdem.cpython-39.pyc,, +pettingzoo/classic/rlcard_envs/__pycache__/mahjong.cpython-39.pyc,, +pettingzoo/classic/rlcard_envs/__pycache__/rlcard_base.cpython-39.pyc,, +pettingzoo/classic/rlcard_envs/__pycache__/texas_holdem.cpython-39.pyc,, +pettingzoo/classic/rlcard_envs/__pycache__/texas_holdem_no_limit.cpython-39.pyc,, +pettingzoo/classic/rlcard_envs/__pycache__/uno.cpython-39.pyc,, +pettingzoo/classic/rlcard_envs/dou_dizhu.py,sha256=Q3vLzgwLHyKv6szRB2e9KYl8hW4Q14y5-M_YWoU48EA,2440 +pettingzoo/classic/rlcard_envs/font/Minecraft.ttf,sha256=vUcxTTAeUP9NEJv_KN_PY3y36xOUVIAlmHi4SIdazGU,14488 +pettingzoo/classic/rlcard_envs/gin_rummy.py,sha256=88OWUXpbKtWn2tLoTMN-l0HawhPwMMxm7h-ZN83aFrw,3285 +pettingzoo/classic/rlcard_envs/img/C2.png,sha256=9wyYJEGGtI5mEUCDjj5NTBWy4aNsnv9my3DQFS9BH_E,6275 +pettingzoo/classic/rlcard_envs/img/C3.png,sha256=1J_1vBm4VAyoj3rrEEGj9akKeZuiva10e074bXq7J48,6947 +pettingzoo/classic/rlcard_envs/img/C4.png,sha256=zBRpVV6lC82w0dg2Y6sE7zrbzb96sMEvZzenPMC1xGM,7115 +pettingzoo/classic/rlcard_envs/img/C5.png,sha256=R7lJ0Q5Lpt4SlP28qRTL6My3AniX6SUlLYZLZuBHsio,7948 +pettingzoo/classic/rlcard_envs/img/C6.png,sha256=C71sLmQ6vNb54LCFPeQje_2npwMLnzB7APA2hYchHe8,8488 +pettingzoo/classic/rlcard_envs/img/C7.png,sha256=b3wKICB8Lz9SV8mZ8k2sp5YJQNqLyMPja6ZxEpDeXPs,8704 +pettingzoo/classic/rlcard_envs/img/C8.png,sha256=g-lUCFiaB1nbMjJSZDdgscjl6Uy9biYBtPeOcMjqeUE,9807 +pettingzoo/classic/rlcard_envs/img/C9.png,sha256=CmMb56YlmBt6Qd5cN8oq1RXTRN4dlq0a0TTKlcRrSRw,10427 +pettingzoo/classic/rlcard_envs/img/CA.png,sha256=N1OXD5VuQkXngVqPRjHHhM2BuCiYfnqlvUeCgb6KDuk,6621 +pettingzoo/classic/rlcard_envs/img/CJ.png,sha256=skFNLPORyMEPUSnwMfNJbSg4BsyHHEKutAe-t3vtc1M,18635 +pettingzoo/classic/rlcard_envs/img/CK.png,sha256=vSJH7pzBiL4AlVuO6vGmRwt5MPImA-mKT2QkNlXlmC8,19498 +pettingzoo/classic/rlcard_envs/img/CQ.png,sha256=r62ij9z2ODsZ3HvUpCkW1cOW7CnrZuK1Q0gApOuI3Fc,18772 +pettingzoo/classic/rlcard_envs/img/CT.png,sha256=RRlaCFWc2J6iKGVpV2ATVDqFVZKnYGqQ15NwWE0L2rE,10454 +pettingzoo/classic/rlcard_envs/img/Card.png,sha256=0Vzl3zX6Xr978OiIlJlZ2PabHET-f0hts5TiGMh78wE,44172 +pettingzoo/classic/rlcard_envs/img/ChipBlack.png,sha256=tqSdndjFhPzzNRy6piz0llz47vP6vPmN8lbegc8OfP8,1014 +pettingzoo/classic/rlcard_envs/img/ChipBlue.png,sha256=7eEifpRwTMTvhbiujDEJnBkIE7r2jCECLBF1bxH_lXY,1049 +pettingzoo/classic/rlcard_envs/img/ChipGreen.png,sha256=9UehK-YY_Plja9hLaLQgj6Ns105DojJE3CRUYUsAOe0,1059 
+pettingzoo/classic/rlcard_envs/img/ChipLightBlue.png,sha256=WO9Dc11VHJTqo-wzc1n2T4Jeq-BqcUrbydSru3dfRfs,1051 +pettingzoo/classic/rlcard_envs/img/ChipOrange.png,sha256=EU604QYyr5E7LLvyV5BTWjEW6UMgMjeoILuuDoixvWo,1051 +pettingzoo/classic/rlcard_envs/img/ChipPink.png,sha256=OskJXhi0gNmXo3eY45SocrgUbbbm1uZGBvfQxhCsr8s,1055 +pettingzoo/classic/rlcard_envs/img/ChipRed.png,sha256=EewX-Z9yVhvV3hOglwTIPHm3c9sibXeRTTuRgQ08rpw,1044 +pettingzoo/classic/rlcard_envs/img/ChipWhite.png,sha256=Cmjn6rmxBX_G11klzLLb9JRvPSYPgApgwiDJqnrgg68,1012 +pettingzoo/classic/rlcard_envs/img/ChipYellow.png,sha256=NUDwaR5CH6XMPp4gNckDYaZWvl0kdo4UYldiny5VhqU,1050 +pettingzoo/classic/rlcard_envs/img/D2.png,sha256=da-t8bL5Wk8Xp1Jqo19SDbFgvsbjNyICk29UthL12M0,5905 +pettingzoo/classic/rlcard_envs/img/D3.png,sha256=VMpHKcypvOZ7m5-4QGbQB7HzQV_DzRNH08dqtuSUBro,6501 +pettingzoo/classic/rlcard_envs/img/D4.png,sha256=HYc-U5b4g7AwxQvaRaEUV6LJp5557qzasuZDOTKmjJI,6660 +pettingzoo/classic/rlcard_envs/img/D5.png,sha256=q7oTybgDKzOPr3BmacExBv1JVRcnrI9EvtGippADcns,7451 +pettingzoo/classic/rlcard_envs/img/D6.png,sha256=7F3CFwhoqUEd0ScvLMsN4CCnnmpQVR2VyDMLo4L2SCE,7931 +pettingzoo/classic/rlcard_envs/img/D7.png,sha256=-wvxMG10ahY2cMQL5tsy5DmAWPX3oUZPg2JXHoJEv2g,8138 +pettingzoo/classic/rlcard_envs/img/D8.png,sha256=FYFL9Xu1Z55tk2JdytO-_kznx7LDVktsZ7veSQ9LKRI,9027 +pettingzoo/classic/rlcard_envs/img/D9.png,sha256=2PKQFm6Kl-eIZjGP-sIxED0V6AZxc5nX1Byf3riKInE,9529 +pettingzoo/classic/rlcard_envs/img/DA.png,sha256=ya4Iv8Kiim-xl1vDDP2crGbo-cm015MNnexKo5kr1NU,6252 +pettingzoo/classic/rlcard_envs/img/DJ.png,sha256=IpNYdsTQvkugVOvPh75vvGUyG8xp8J9k24keW7gRoe4,18468 +pettingzoo/classic/rlcard_envs/img/DK.png,sha256=4_N60pVqvEwIs5vdY6AqE1aQVwIl-bTrzQTCDAK39Bo,19118 +pettingzoo/classic/rlcard_envs/img/DQ.png,sha256=nBBmACf8H9aP11-sR5QB4JhJwjLWRosYOu2fOuBk9PE,18375 +pettingzoo/classic/rlcard_envs/img/DT.png,sha256=IxIqoSXibft44nu1MUhEy3o-5DwG6UX20DvkEwEMkxw,9546 +pettingzoo/classic/rlcard_envs/img/H2.png,sha256=TFvU5-70jkxsp79hGYvJu5b0MXGtjOIIWhAZvd-iubY,5898 +pettingzoo/classic/rlcard_envs/img/H3.png,sha256=yxw4ifaB_5tGYWRt00bROCI2eSWchpMUyOmFnYwlHYI,6466 +pettingzoo/classic/rlcard_envs/img/H4.png,sha256=NSYYoorjwtXSb3MfWlUb-cNf1AZo72w1lNaO_T5Hks0,6623 +pettingzoo/classic/rlcard_envs/img/H5.png,sha256=cYjjSvPwNg2NZbKKm2jRC30j3Qg5ZMTYsFZEu3sc16I,7384 +pettingzoo/classic/rlcard_envs/img/H6.png,sha256=fVVYlm3RUg_2IMMP2kg1AG3e2pvcdGjlIXgcX-YoqrI,7884 +pettingzoo/classic/rlcard_envs/img/H7.png,sha256=vFUeTuAm4d-r7PR_8rJSY9IejPYorWtoeRUH6tPk0vQ,8041 +pettingzoo/classic/rlcard_envs/img/H8.png,sha256=sx-YQj--5cE1CqfTpux3ajT18oturlTBph5tW_e_M2I,8882 +pettingzoo/classic/rlcard_envs/img/H9.png,sha256=1CMKYuAqtecDyQBZpFT1FKRBB-UzSYFtoijsSurfonk,9402 +pettingzoo/classic/rlcard_envs/img/HA.png,sha256=QRsYoxkuQvYkNcC3EmDty6lu3id7HWjnyfhH0Z9RmoQ,6870 +pettingzoo/classic/rlcard_envs/img/HJ.png,sha256=K35x9y0sd6wDtT9gcSU7McSu5zDn1qMugwh05sa-voU,18394 +pettingzoo/classic/rlcard_envs/img/HK.png,sha256=3yxO7n3vcTQgNW5tpdoYRMnny5P6HpdZAaEUEM3VK-A,19561 +pettingzoo/classic/rlcard_envs/img/HQ.png,sha256=1YCQpN90NFgHDVQ1qjIyZLE7NcmCpIB941gkgPu44Nk,20159 +pettingzoo/classic/rlcard_envs/img/HT.png,sha256=95B-xA3mdAtOWnsG8bcFMqD9MIwV3gX_7tmpDtQmxow,9366 +pettingzoo/classic/rlcard_envs/img/S2.png,sha256=yg-eXnvu91_6bC9v6PYjQyk0P5oyAzAWWsM5G_sWaAc,6205 +pettingzoo/classic/rlcard_envs/img/S3.png,sha256=590aYJVJ7YG2xM2xO_SzISlfaAF9PjVkBRuhFW1qVUI,6812 +pettingzoo/classic/rlcard_envs/img/S4.png,sha256=V4XDRXe_P3oOPu_SCaEnblxRoJOI7n4tJJs1e75wGnE,7012 
+pettingzoo/classic/rlcard_envs/img/S5.png,sha256=XhFXC9Ip0JaZOhiHXLvAJBDbv55nUmd5xSPsU3he0xY,7809 +pettingzoo/classic/rlcard_envs/img/S6.png,sha256=uqTGuNwQZWI8TaWhbMBvOYnjh3A9TelZejUzEeD8xm4,8370 +pettingzoo/classic/rlcard_envs/img/S7.png,sha256=OTq2KFJYSxxo2-mGKckX05YZi58LYKJrhRUPysIbJoo,8585 +pettingzoo/classic/rlcard_envs/img/S8.png,sha256=MHF4V1ZGJExUJc48uEiai_JMxFTY-AF9TKNffWkx55c,9527 +pettingzoo/classic/rlcard_envs/img/S9.png,sha256=3KYaz70QNUlcFXQTFF3YWzKXMuOi7ZMndZh7zx8Z_xI,9931 +pettingzoo/classic/rlcard_envs/img/SA.png,sha256=uoiL6MbUk3XwwSZoI5dIiX_BV2iPWPhyWTF2xB0yNhY,6630 +pettingzoo/classic/rlcard_envs/img/SJ.png,sha256=TDHnSTRu46hFdCND_DsNSKg6dUyjYvbcLkYOTLly1Qo,18175 +pettingzoo/classic/rlcard_envs/img/SK.png,sha256=HzhL8Geb74zF9nHCB9jjJVqGuwhM2aUuuaEci_b7UcQ,19256 +pettingzoo/classic/rlcard_envs/img/SQ.png,sha256=Cx4M4lhNkrN32_6rGxaTEORRJQXunGHut_drFNv_16I,19809 +pettingzoo/classic/rlcard_envs/img/ST.png,sha256=WUxZ-MvkENMViermMSSZe_IL6tzt131s5IrPlKpXT6k,9816 +pettingzoo/classic/rlcard_envs/leduc_holdem.py,sha256=qCUp0MNzLNHPiLNaNWx9OmxKcEwhV8EvjxljZxeaO3s,1377 +pettingzoo/classic/rlcard_envs/mahjong.py,sha256=Ve_c12SgmYMNFWBkBtPMZ9FbDbVdO1zSALnMnd3Cx_0,1281 +pettingzoo/classic/rlcard_envs/rlcard_base.py,sha256=ewNINrfw6ABGZoGoLDbsUfDZ_d7q1emjmjlxp8a6LT8,4072 +pettingzoo/classic/rlcard_envs/texas_holdem.py,sha256=s7ysDKmaOcFayIGJDwqOPM72bH35kEi7ySUpyj3rLx0,7947 +pettingzoo/classic/rlcard_envs/texas_holdem_no_limit.py,sha256=gZjqDkbJgxCYzNv4zAnBxSExQHE_zadMYDbpYnzPwWE,8248 +pettingzoo/classic/rlcard_envs/uno.py,sha256=_RsqmdEX_vL2SWWcvzv42ULHXNpouycmPTDbKAH1Rpg,1923 +pettingzoo/classic/rps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/classic/rps/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/classic/rps/__pycache__/rps.cpython-39.pyc,, +pettingzoo/classic/rps/font/Minecraft.ttf,sha256=vUcxTTAeUP9NEJv_KN_PY3y36xOUVIAlmHi4SIdazGU,14488 +pettingzoo/classic/rps/img/Lizard.png,sha256=I9CfdsXUquGkWGGYx_EpVTClivYE3tVdbhDf5OIJShk,282 +pettingzoo/classic/rps/img/Paper.png,sha256=EatStbar9auK-0_cgFSmh4LPmm8P8srtP2mtgB6bFaI,194 +pettingzoo/classic/rps/img/Rock.png,sha256=C3b3zAFKtYOznxmHpG_KqTaVJVXbUUyYteduFKwKadY,194 +pettingzoo/classic/rps/img/Scissors.png,sha256=J6rUqz0HRtShb0ckGILijcLLyUOApav7Zn4UE-f23Io,315 +pettingzoo/classic/rps/img/Spock.png,sha256=O21KWd65SMp5tPvC_zKC1OeGrABmIblo1MCzS8gUBFQ,242 +pettingzoo/classic/rps/rps.py,sha256=iRPd-H834yA9TjILWjE84uXybgkcIedOo9XnEfUmBtc,10743 +pettingzoo/classic/rps_v2.py,sha256=ru1IFgogqLd1ORko_CbVpj8mCRCONJubDF8aG8UjbVA,48 +pettingzoo/classic/texas_holdem_no_limit_v6.py,sha256=9kXsm2NdeRTNz8WdKJqbMZHljBODMNDmDwqomgF7J5Y,60 +pettingzoo/classic/texas_holdem_v4.py,sha256=L75QQCf__w27BhQNU8ciDgB0SNSBeFQK_zbfT-zah9k,51 +pettingzoo/classic/tictactoe/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/classic/tictactoe/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/classic/tictactoe/__pycache__/board.cpython-39.pyc,, +pettingzoo/classic/tictactoe/__pycache__/tictactoe.cpython-39.pyc,, +pettingzoo/classic/tictactoe/board.py,sha256=__RVTxGTh4VqWN5noU29fil_uCjNl6FLKDV5zqLpqm0,2229 +pettingzoo/classic/tictactoe/tictactoe.py,sha256=4FFxBrsDFw4wYUK_iEbquA9erCPm5e84t7m-7Oe2A6s,5548 +pettingzoo/classic/tictactoe_v3.py,sha256=A9psj59HhsjcLLUHbIwAeQgFIlxRZCb7rdRBNvf_Kfw,46 +pettingzoo/classic/uno_v4.py,sha256=plkxe-00I9JpUiO4AIi-2BWbPA37J2mGj74TFF3CkGo,42 +pettingzoo/magent/__init__.py,sha256=EOnTLVyQDGUOm8PxOruBuZFSuSYe5GhlsORA2T_ACT0,155 
+pettingzoo/magent/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/magent/__pycache__/adversarial_pursuit_v3.cpython-39.pyc,, +pettingzoo/magent/__pycache__/battle_v3.cpython-39.pyc,, +pettingzoo/magent/__pycache__/battlefield_v3.cpython-39.pyc,, +pettingzoo/magent/__pycache__/combined_arms_v5.cpython-39.pyc,, +pettingzoo/magent/__pycache__/gather_v4.cpython-39.pyc,, +pettingzoo/magent/__pycache__/magent_env.cpython-39.pyc,, +pettingzoo/magent/__pycache__/render.cpython-39.pyc,, +pettingzoo/magent/__pycache__/tiger_deer_v3.cpython-39.pyc,, +pettingzoo/magent/adversarial_pursuit_v3.py,sha256=a_b-aVll5X3jzbH3bhL1qL-_xfmrXcSF1QXiWcrKNuk,3433 +pettingzoo/magent/battle_v3.py,sha256=ehzqFcs1OZymuEqtbUdRnuRkeCp3Hl2ao3sFelgmADc,4467 +pettingzoo/magent/battlefield_v3.py,sha256=8NQVJd8_yk8DF5h1V0IlKG5Nw-8rdYbL2R622ydS-OQ,4241 +pettingzoo/magent/combined_arms_v5.py,sha256=HOml0pptE1ipks3hTsZaRzV3KvxIb9GCBsoyYYErZEo,6587 +pettingzoo/magent/gather_v4.py,sha256=fqzwxQuc-sJiHra7d_Biec7wSe4Zba6ZpQqDtfE2_e0,5335 +pettingzoo/magent/magent_env.py,sha256=l5kgpmUL9gbE5vTgny-MTA5FPxtACcOZcUbMJSaO2G0,9190 +pettingzoo/magent/render.py,sha256=_cMYJsZRrKUQyY90JGys1e0c_t3-o5DPMQdCk3Co8LA,9935 +pettingzoo/magent/tiger_deer_v3.py,sha256=PcpFRPx4bb0j7U0OaOKtQc9rqlj2qJCRdbFTaRKx1JA,3850 +pettingzoo/mpe/__init__.py,sha256=EOnTLVyQDGUOm8PxOruBuZFSuSYe5GhlsORA2T_ACT0,155 +pettingzoo/mpe/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/mpe/__pycache__/simple_adversary_v2.cpython-39.pyc,, +pettingzoo/mpe/__pycache__/simple_crypto_v2.cpython-39.pyc,, +pettingzoo/mpe/__pycache__/simple_push_v2.cpython-39.pyc,, +pettingzoo/mpe/__pycache__/simple_reference_v2.cpython-39.pyc,, +pettingzoo/mpe/__pycache__/simple_speaker_listener_v3.cpython-39.pyc,, +pettingzoo/mpe/__pycache__/simple_spread_v2.cpython-39.pyc,, +pettingzoo/mpe/__pycache__/simple_tag_v2.cpython-39.pyc,, +pettingzoo/mpe/__pycache__/simple_v2.cpython-39.pyc,, +pettingzoo/mpe/__pycache__/simple_world_comm_v2.cpython-39.pyc,, +pettingzoo/mpe/_mpe_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/mpe/_mpe_utils/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/mpe/_mpe_utils/__pycache__/core.cpython-39.pyc,, +pettingzoo/mpe/_mpe_utils/__pycache__/rendering.cpython-39.pyc,, +pettingzoo/mpe/_mpe_utils/__pycache__/scenario.cpython-39.pyc,, +pettingzoo/mpe/_mpe_utils/__pycache__/simple_env.cpython-39.pyc,, +pettingzoo/mpe/_mpe_utils/core.py,sha256=2uHH1v_r_nTK67Cx36hSx5LZJINpgSTRKV1W5leJH6o,6927 +pettingzoo/mpe/_mpe_utils/rendering.py,sha256=Y5FP7matr0HQek5A1kspT3JEs6QAda8kfcEBc1Y27mc,12917 +pettingzoo/mpe/_mpe_utils/scenario.py,sha256=gODKexwFaUNEfEIu8K7_Em0dvF7G1mgx6hL9tPleEqs,289 +pettingzoo/mpe/_mpe_utils/secrcode.ttf,sha256=WP9mU2BmNHA2OYXRuT8oWa5AJcHIFMgdAFV-jibOJho,7780 +pettingzoo/mpe/_mpe_utils/simple_env.py,sha256=yOC5-vEaQHzavhvVT97AZrahcB0TdKfJZuHBN1SWcxE,10119 +pettingzoo/mpe/scenarios/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/mpe/scenarios/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/mpe/scenarios/__pycache__/simple.cpython-39.pyc,, +pettingzoo/mpe/scenarios/__pycache__/simple_adversary.cpython-39.pyc,, +pettingzoo/mpe/scenarios/__pycache__/simple_crypto.cpython-39.pyc,, +pettingzoo/mpe/scenarios/__pycache__/simple_push.cpython-39.pyc,, +pettingzoo/mpe/scenarios/__pycache__/simple_reference.cpython-39.pyc,, +pettingzoo/mpe/scenarios/__pycache__/simple_speaker_listener.cpython-39.pyc,, +pettingzoo/mpe/scenarios/__pycache__/simple_spread.cpython-39.pyc,, 
+pettingzoo/mpe/scenarios/__pycache__/simple_tag.cpython-39.pyc,, +pettingzoo/mpe/scenarios/__pycache__/simple_world_comm.cpython-39.pyc,, +pettingzoo/mpe/scenarios/simple.py,sha256=69k6rvG9HoBopAFFXObnEQcg4ouElJ5cZaQu5NJl0gc,2000 +pettingzoo/mpe/scenarios/simple_adversary.py,sha256=U0lwmadEOCclCwp1u1YCGCxsjXeCXYLFuxSzlSQYwQY,6071 +pettingzoo/mpe/scenarios/simple_crypto.py,sha256=NhTfo5d_yhQE_dmu8T8s1p2izAE34npCazlIxCH9KxU,5967 +pettingzoo/mpe/scenarios/simple_push.py,sha256=BG0SGNvatWomBAD7aGtpPkWlUQzhZeaDnimH9zQbdk8,4238 +pettingzoo/mpe/scenarios/simple_reference.py,sha256=r98pPfu8Y7mdbAWdkO--UaeAshkhY8L344NVF1jpvzU,3532 +pettingzoo/mpe/scenarios/simple_speaker_listener.py,sha256=SLG1eKNz52Xv4osaycCQTLPz8ThA8DOMcWwTnxl7fPs,3491 +pettingzoo/mpe/scenarios/simple_spread.py,sha256=ewytVSC_cVuPzYIeX6x-vB3ctiRkEO0uD3-TNElwGYQ,3948 +pettingzoo/mpe/scenarios/simple_tag.py,sha256=uVaaV4-T6wjdXiYRo1h10O4V25-wi8ezai-PC98ItCs,5975 +pettingzoo/mpe/scenarios/simple_world_comm.py,sha256=cRZyVIK6AdZBaKNsQI4WN0FikA8vz21EqQsI-uNI-oY,12178 +pettingzoo/mpe/simple_adversary_v2.py,sha256=xD3r7txK6JYUVQNLMAHaiwXfL7pCjIxnJbbaz1iV85k,527 +pettingzoo/mpe/simple_crypto_v2.py,sha256=E1PzAbYea4IB3s1FbhhUvLHMrTJFes8c8i8pGloHWMc,515 +pettingzoo/mpe/simple_push_v2.py,sha256=-hVz-UjHvo5q54CEn1X6OnrUJyX6v25Pr9VUptmsnys,511 +pettingzoo/mpe/simple_reference_v2.py,sha256=2Qa_Y-u8VRMlp2N_OKpLLRaBrn5lBsRxYI4PIkYZGnc,647 +pettingzoo/mpe/simple_speaker_listener_v3.py,sha256=ufdKt-geOQSTmvILh--N3LxaLdhdfPO-B54rqblgzpI,535 +pettingzoo/mpe/simple_spread_v2.py,sha256=CY7aqDRVL0q3wu_OZV_Z5jklomJBp4TqKEh2jsdKgeE,647 +pettingzoo/mpe/simple_tag_v2.py,sha256=HLO8YqrqGA3K2FDfOHMQDiZ2Hjypex4pt1orCV5e0tk,597 +pettingzoo/mpe/simple_v2.py,sha256=AwCPfssAUrUzs_5FcvjJbs9__7Tc64ykzEI7HK8x88s,501 +pettingzoo/mpe/simple_world_comm_v2.py,sha256=D1AmTqYbVjUhNsQ6XnbkSQREXmAkQ5bMLWgv0JIrH5w,661 +pettingzoo/sisl/__init__.py,sha256=EOnTLVyQDGUOm8PxOruBuZFSuSYe5GhlsORA2T_ACT0,155 +pettingzoo/sisl/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/sisl/__pycache__/_utils.cpython-39.pyc,, +pettingzoo/sisl/__pycache__/multiwalker_v8.cpython-39.pyc,, +pettingzoo/sisl/__pycache__/pursuit_v4.cpython-39.pyc,, +pettingzoo/sisl/__pycache__/waterworld_v3.cpython-39.pyc,, +pettingzoo/sisl/_utils.py,sha256=JZjaPI8jLjtzb5QufLQwrAv8m2HzmX733r5qKYqvq3E,348 +pettingzoo/sisl/multiwalker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/sisl/multiwalker/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/sisl/multiwalker/__pycache__/multiwalker.cpython-39.pyc,, +pettingzoo/sisl/multiwalker/__pycache__/multiwalker_base.cpython-39.pyc,, +pettingzoo/sisl/multiwalker/multiwalker.py,sha256=z5KdPeg1KWeGVQxz77SaDSdGp25URTZ8nc4dZuoZUfQ,4126 +pettingzoo/sisl/multiwalker/multiwalker_base.py,sha256=Lve5mQxZh7J3UnxE6whh6trcAg3LfUa7slY33vGDDKk,30201 +pettingzoo/sisl/multiwalker_v8.py,sha256=3RnuVzUR68FpAKaSTt5VlB9B0acWk9wlOGtG1--1Os8,64 +pettingzoo/sisl/pursuit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/sisl/pursuit/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/sisl/pursuit/__pycache__/manual_control.cpython-39.pyc,, +pettingzoo/sisl/pursuit/__pycache__/pursuit.cpython-39.pyc,, +pettingzoo/sisl/pursuit/__pycache__/pursuit_base.cpython-39.pyc,, +pettingzoo/sisl/pursuit/manual_control.py,sha256=Kk_CZcuKhJd_h0ykUWDAtjQzOckpB2xZ5s9n6hFCK8s,2462 +pettingzoo/sisl/pursuit/pursuit.py,sha256=IEJ0ri2sgx048fy24js9AN9rPHvABnRbBwXEM52lN8U,3296 
+pettingzoo/sisl/pursuit/pursuit_base.py,sha256=uTV8pnpDSEBPZwcZLoGTAsqftHWB1EM1SDZLgUif1y8,19886 +pettingzoo/sisl/pursuit/utils/__init__.py,sha256=cYhSho13TQ0YQT9l0Pyk_HwynH8gmGGH9ApH1OfzyL4,367 +pettingzoo/sisl/pursuit/utils/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/sisl/pursuit/utils/__pycache__/agent_layer.cpython-39.pyc,, +pettingzoo/sisl/pursuit/utils/__pycache__/agent_utils.cpython-39.pyc,, +pettingzoo/sisl/pursuit/utils/__pycache__/controllers.cpython-39.pyc,, +pettingzoo/sisl/pursuit/utils/__pycache__/discrete_agent.cpython-39.pyc,, +pettingzoo/sisl/pursuit/utils/__pycache__/two_d_maps.cpython-39.pyc,, +pettingzoo/sisl/pursuit/utils/agent_layer.py,sha256=MVUxC_gdNSH2MplnlgHVg1-K0wbJPDLNLxL7OsDk1BI,1991 +pettingzoo/sisl/pursuit/utils/agent_utils.py,sha256=kRmgbCNsIgO8lHYZAu-xWtz24fsabPVDce2arX_Zgyo,2830 +pettingzoo/sisl/pursuit/utils/controllers.py,sha256=VBLdtWFKXkqx6HV3d8NYPYVhkR214AQiAbtaLM3Ki2E,765 +pettingzoo/sisl/pursuit/utils/discrete_agent.py,sha256=d-CcH8VUVjepNjBfFDkXkXc7aSVOw4vH-12FiZ-1zwo,3129 +pettingzoo/sisl/pursuit/utils/two_d_maps.py,sha256=2_DMulSXMnljT9QwVgfwhnaYCGWqaTdsxTPxKy0CR1Y,3725 +pettingzoo/sisl/pursuit_v4.py,sha256=mxh7rA46ZF7tmoxQJ_Phzr-aAi-Jbo1V_fC96ucdnh8,72 +pettingzoo/sisl/waterworld/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/sisl/waterworld/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/sisl/waterworld/__pycache__/waterworld.cpython-39.pyc,, +pettingzoo/sisl/waterworld/__pycache__/waterworld_base.cpython-39.pyc,, +pettingzoo/sisl/waterworld/waterworld.py,sha256=9mb6_uVTGy9JEueTb9nIZEcC9ltoZzGDHjTbdI9mE9s,3261 +pettingzoo/sisl/waterworld/waterworld_base.py,sha256=5IsqczFN8gNRhvjnm7ghIDEBzsDrxcrPdi-4WWa4ISI,27630 +pettingzoo/sisl/waterworld_v3.py,sha256=q_-YjRSjvMKjIGriOr_7sgAQF1jCH8zIfDdK4A3KYXc,62 +pettingzoo/test/__init__.py,sha256=8YxhcpEyXocUs6MJ2mh4LkbWnQ7a_UcHDglev5dhWGs,448 +pettingzoo/test/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/test/__pycache__/api_test.cpython-39.pyc,, +pettingzoo/test/__pycache__/bombardment_test.cpython-39.pyc,, +pettingzoo/test/__pycache__/manual_control_test.cpython-39.pyc,, +pettingzoo/test/__pycache__/max_cycles_test.cpython-39.pyc,, +pettingzoo/test/__pycache__/parallel_test.cpython-39.pyc,, +pettingzoo/test/__pycache__/performance_benchmark.cpython-39.pyc,, +pettingzoo/test/__pycache__/render_test.cpython-39.pyc,, +pettingzoo/test/__pycache__/save_obs_test.cpython-39.pyc,, +pettingzoo/test/__pycache__/seed_test.cpython-39.pyc,, +pettingzoo/test/__pycache__/state_test.cpython-39.pyc,, +pettingzoo/test/api_test.py,sha256=nemQ0Yi1B-TQoGE3KMWDijdGYvhXoPKoQJ2ISFHajtc,15732 +pettingzoo/test/bombardment_test.py,sha256=IBGekx-RqtThwvq1NJC733GJSd0RyltT-cGVy7mT45g,1173 +pettingzoo/test/example_envs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pettingzoo/test/example_envs/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/test/example_envs/__pycache__/generated_agents_env_v0.cpython-39.pyc,, +pettingzoo/test/example_envs/__pycache__/generated_agents_parallel_v0.cpython-39.pyc,, +pettingzoo/test/example_envs/generated_agents_env_v0.py,sha256=kKSmxtscX4H_9ZpuevBcjEQLnKwDlwyy8BRPDD9b1_8,3559 +pettingzoo/test/example_envs/generated_agents_parallel_v0.py,sha256=XIn8YrsLa3TtBvNqcLYiHNaKdSCtoewFJntvEDVfFwg,3590 +pettingzoo/test/manual_control_test.py,sha256=qIhIsEPkR-3JL_EKNQDPUTD90etotfDcwGJym8FcSBw,810 +pettingzoo/test/max_cycles_test.py,sha256=wNMPSCEW5MNE-U-REcuw89DCwK-F2P83yxeQfLptBuM,1309 
+pettingzoo/test/parallel_test.py,sha256=EF45MrzdbBy2KwFZgtKhQXZcefrhqUxDZWVJhVfMR-0,3190 +pettingzoo/test/performance_benchmark.py,sha256=M7oUp9hJ2ey7dooXN1TwSwUQGfVL0hgWpQefF0Ne448,1118 +pettingzoo/test/render_test.py,sha256=C_wExQrJc4LMQhJmXwDeCHYuExnbWM95W2QWb90O2w8,1662 +pettingzoo/test/save_obs_test.py,sha256=lPqG2TTKi-xzM693oZ2TR2v4cvxcwlwxQg1c8Li47Sg,1116 +pettingzoo/test/seed_test.py,sha256=n6oyk-Fpih2j1O_chDAIudntRd7ebqRi2hqFFjezS_Y,3411 +pettingzoo/test/state_test.py,sha256=fOMwLslwr4pOo-fmkxPpXgbFCP1WgfVrGUvErnPQiHs,4206 +pettingzoo/utils/__init__.py,sha256=0tGFMtKJbZ78go-0t9gt45ZrRNKQVFG_zTpSK6Ac3xs,499 +pettingzoo/utils/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/utils/__pycache__/agent_selector.cpython-39.pyc,, +pettingzoo/utils/__pycache__/average_total_reward.cpython-39.pyc,, +pettingzoo/utils/__pycache__/capture_stdout.cpython-39.pyc,, +pettingzoo/utils/__pycache__/conversions.cpython-39.pyc,, +pettingzoo/utils/__pycache__/deprecated_module.cpython-39.pyc,, +pettingzoo/utils/__pycache__/env.cpython-39.pyc,, +pettingzoo/utils/__pycache__/env_logger.cpython-39.pyc,, +pettingzoo/utils/__pycache__/random_demo.cpython-39.pyc,, +pettingzoo/utils/__pycache__/save_observation.cpython-39.pyc,, +pettingzoo/utils/agent_selector.py,sha256=AfIiTIfcMEcSpTm6GpGapqLAinsntkw-l6sLWeaUljA,1165 +pettingzoo/utils/average_total_reward.py,sha256=et_Av2eXHkFeDDlOQt_LFaQrof7UTjhdACAF3X-D9bY,1144 +pettingzoo/utils/capture_stdout.py,sha256=eL8M15ntZIZwzhjvCicETDJfgcFB06hysS221fs8TcY,544 +pettingzoo/utils/conversions.py,sha256=gjz0o_vcUbxOqbXbjPQODK8a9O0Mt6vYt-dYHtG6t3A,9668 +pettingzoo/utils/deprecated_module.py,sha256=f4MtUzm6Dpuc0FyyTNe0jfioz3ljzBNrlHVDUGUeLdo,1293 +pettingzoo/utils/env.py,sha256=98wCg_X2bvIoFEZtdk3x2ivUzGIWLk0pJT0rKIwF4Bc,10601 +pettingzoo/utils/env_logger.py,sha256=WXrPjmWETToG9b9WTesbYJ7Dh0sQEM53zB4w6e6taew,3184 +pettingzoo/utils/random_demo.py,sha256=FeN2QkNrU1CIkSZGds4_37oz0_SgUTnl_V2FLciXZqM,894 +pettingzoo/utils/save_observation.py,sha256=il0qvAlX-dqMuPbwTRQrxHlWssM7cRoAiRG3wqAN2DU,1477 +pettingzoo/utils/wrappers/__init__.py,sha256=z731KnZMYLVJPrHpRdTU5i-bPyidu2sgFkiYV7PWll0,345 +pettingzoo/utils/wrappers/__pycache__/__init__.cpython-39.pyc,, +pettingzoo/utils/wrappers/__pycache__/assert_out_of_bounds.cpython-39.pyc,, +pettingzoo/utils/wrappers/__pycache__/base.cpython-39.pyc,, +pettingzoo/utils/wrappers/__pycache__/base_parallel.cpython-39.pyc,, +pettingzoo/utils/wrappers/__pycache__/capture_stdout.cpython-39.pyc,, +pettingzoo/utils/wrappers/__pycache__/clip_out_of_bounds.cpython-39.pyc,, +pettingzoo/utils/wrappers/__pycache__/order_enforcing.cpython-39.pyc,, +pettingzoo/utils/wrappers/__pycache__/terminate_illegal.cpython-39.pyc,, +pettingzoo/utils/wrappers/assert_out_of_bounds.py,sha256=zFgv8FZgx7kl1ylXVwg9m--dwqvWIjaXOoNFHKATgzA,729 +pettingzoo/utils/wrappers/base.py,sha256=yKapQihY_49OUqRDpQ5q5ZxZDF7OH_tBfgijLzQzOjY,3369 +pettingzoo/utils/wrappers/base_parallel.py,sha256=9KLbV_j_NmQNpohy1lpKHmmTTeGr8MRvbro9CEAUa-w,2163 +pettingzoo/utils/wrappers/capture_stdout.py,sha256=4fa0YwDVNpbuGyYHBZ6i7bTu23VRL2lzbHY_9fNSKtQ,550 +pettingzoo/utils/wrappers/clip_out_of_bounds.py,sha256=neyQKIC5B3OmPvvoX4p04RnYfr-y5ciK_zKElwXBLuY,1108 +pettingzoo/utils/wrappers/order_enforcing.py,sha256=Dgan9EsWsle-0Y5nlejV-PrT23zWyUopXY2yvffpCVI,3726 +pettingzoo/utils/wrappers/terminate_illegal.py,sha256=LWBZWp-dMrx5kvW1hSNp9TffPhVwcQUFAT20EFWihp0,1905 diff --git a/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/WHEEL b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/WHEEL new file mode 
100644 index 0000000000000000000000000000000000000000..ecaf39f3c3df8b0075a2951da9b1a27fcb08a173 --- /dev/null +++ b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (71.1.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/top_level.txt b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..9e1cd1e7ad5ce850c950f1b6c57eb6205f32ae3b --- /dev/null +++ b/MLPY/Lib/site-packages/PettingZoo-1.15.0.dist-info/top_level.txt @@ -0,0 +1 @@ +pettingzoo diff --git a/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/INSTALLER b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/LICENSE b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..75b52484ea471f882c29e02693b4f02dba175b5e --- /dev/null +++ b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/METADATA b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..c52a1dd15529d2c9de319ca68218e690ec83afd2 --- /dev/null +++ b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/METADATA @@ -0,0 +1,353 @@ +Metadata-Version: 2.1 +Name: onnx +Version: 1.16.1 +Summary: Open Neural Network Exchange +Author-email: ONNX Contributors +License: Apache License v2.0 +Project-URL: Homepage, https://onnx.ai/ +Project-URL: Repository, https://github.com/onnx/onnx +Classifier: Programming Language :: Python :: 3 +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: numpy >=1.20 +Requires-Dist: protobuf >=3.20.2 +Provides-Extra: reference +Requires-Dist: google-re2 ; extra == 'reference' +Requires-Dist: Pillow ; extra == 'reference' + + + + + +

+ +[![PyPI - Version](https://img.shields.io/pypi/v/onnx.svg)](https://pypi.org/project/onnx) +[![Build Status](https://dev.azure.com/onnx-pipelines/onnx/_apis/build/status/Windows-CI?branchName=main&label=Windows)](https://dev.azure.com/onnx-pipelines/onnx/_build/latest?definitionId=5&branchName=main) +[![Build Status](https://dev.azure.com/onnx-pipelines/onnx/_apis/build/status/Linux-CI?branchName=main&label=Linux)](https://dev.azure.com/onnx-pipelines/onnx/_build/latest?definitionId=7&branchName=main) +[![Build Status](https://dev.azure.com/onnx-pipelines/onnx/_apis/build/status/MacOS-CI?branchName=main&label=MacOS)](https://dev.azure.com/onnx-pipelines/onnx/_build/latest?definitionId=6&branchName=main) +[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/3313/badge)](https://bestpractices.coreinfrastructure.org/projects/3313) +[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/onnx/onnx/badge)](https://api.securityscorecards.dev/projects/github.com/onnx/onnx) +[![REUSE compliant](https://api.reuse.software/badge/github.com/onnx/onnx)](https://api.reuse.software/info/github.com/onnx/onnx) +[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) +[![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) + + +[Open Neural Network Exchange (ONNX)](https://onnx.ai) is an open ecosystem that empowers AI developers +to choose the right tools as their project evolves. ONNX provides an open source format for AI models, both deep learning and traditional ML. It defines an extensible computation graph model, as well as definitions of built-in operators and standard +data types. Currently we focus on the capabilities needed for inferencing (scoring). + +ONNX is [widely supported](http://onnx.ai/supported-tools) and can be found in many frameworks, tools, and hardware. Enabling interoperability between different frameworks and streamlining the path from research to production helps increase the speed of innovation in the AI community. We invite the community to join us and further evolve ONNX. + +# Use ONNX + +* [Documentation of ONNX Python Package](https://onnx.ai/onnx/) +* [Tutorials for creating ONNX models](https://github.com/onnx/tutorials) +* [Pre-trained ONNX models](https://github.com/onnx/models) + +# Learn about the ONNX spec + +* [Overview](https://github.com/onnx/onnx/blob/main/docs/Overview.md) +* [ONNX intermediate representation spec](https://github.com/onnx/onnx/blob/main/docs/IR.md) +* [Versioning principles of the spec](https://github.com/onnx/onnx/blob/main/docs/Versioning.md) +* [Operators documentation](https://github.com/onnx/onnx/blob/main/docs/Operators.md) +* [Operators documentation](https://onnx.ai/onnx/operators/index.html) (latest release) +* [Python API Overview](https://github.com/onnx/onnx/blob/main/docs/PythonAPIOverview.md) + +# Programming utilities for working with ONNX Graphs + +* [Shape and Type Inference](https://github.com/onnx/onnx/blob/main/docs/ShapeInference.md) +* [Graph Optimization](https://github.com/onnx/optimizer) +* [Opset Version Conversion](https://github.com/onnx/onnx/blob/main/docs/docsgen/source/api/version_converter.md) + +# Contribute + +ONNX is a community project and the open governance model is described [here](https://github.com/onnx/onnx/blob/main/community/readme.md). 
We encourage you to join the effort and contribute feedback, ideas, and code. You can participate in the [Special Interest Groups](https://github.com/onnx/onnx/blob/main/community/sigs.md) and [Working Groups](https://github.com/onnx/onnx/blob/main/community/working-groups.md) to shape the future of ONNX. + +Check out our [contribution guide](https://github.com/onnx/onnx/blob/main/CONTRIBUTING.md) to get started. + +If you think some operator should be added to ONNX specification, please read +[this document](https://github.com/onnx/onnx/blob/main/docs/AddNewOp.md). + +# Community meetings + +The schedules of the regular meetings of the Steering Committee, the working groups and the SIGs can be found [here](https://onnx.ai/calendar) + +Community Meetups are held at least once a year. Content from previous community meetups are at: + +* 2020.04.09 +* 2020.10.14 +* 2021.03.24 +* 2021.10.21 +* 2022.06.24 +* 2023.06.28 + +# Discuss + +We encourage you to open [Issues](https://github.com/onnx/onnx/issues), or use [Slack](https://lfaifoundation.slack.com/) (If you have not joined yet, please use this [link](https://join.slack.com/t/lfaifoundation/shared_invite/zt-o65errpw-gMTbwNr7FnNbVXNVFkmyNA) to join the group) for more real-time discussion. + +# Follow Us + +Stay up to date with the latest ONNX news. [[Facebook](https://www.facebook.com/onnxai/)] [[Twitter](https://twitter.com/onnxai)] + +# Roadmap + +A roadmap process takes place every year. More details can be found [here](https://github.com/onnx/steering-committee/tree/main/roadmap) + +# Installation + +## Official Python packages + +ONNX released packages are published in PyPi. + +```sh +pip install onnx # or pip install onnx[reference] for optional reference implementation dependencies +``` + +[ONNX weekly packages](https://pypi.org/project/onnx-weekly/) are published in PyPI to enable experimentation and early testing. + +## vcpkg packages + +onnx is in the maintenance list of [vcpkg](https://github.com/microsoft/vcpkg), you can easily use vcpkg to build and install it. + +```sh +git clone https://github.com/microsoft/vcpkg.git +cd vcpkg +./bootstrap-vcpkg.bat # For powershell +./bootstrap-vcpkg.sh # For bash +./vcpkg install onnx +``` + +## Conda packages + +A binary build of ONNX is available from [Conda](https://conda.io), in [conda-forge](https://conda-forge.org/): + +```sh +conda install -c conda-forge onnx +``` + +## Build ONNX from Source + +Before building from source uninstall any existing versions of onnx `pip uninstall onnx`. + +c++17 or higher C++ compiler version is required to build ONNX from source. Still, users can specify their own `CMAKE_CXX_STANDARD` version for building ONNX. + +If you don't have protobuf installed, ONNX will internally download and build protobuf for ONNX build. + +Or, you can manually install [protobuf C/C++ libraries and tools](https://github.com/protocolbuffers/protobuf) with specified version before proceeding forward. Then depending on how you installed protobuf, you need to set environment variable CMAKE_ARGS to "-DONNX_USE_PROTOBUF_SHARED_LIBS=ON" or "-DONNX_USE_PROTOBUF_SHARED_LIBS=OFF". For example, you may need to run the following command: + +Linux: + +```sh +export CMAKE_ARGS="-DONNX_USE_PROTOBUF_SHARED_LIBS=ON" +``` + +Windows: + +```bat +set CMAKE_ARGS="-DONNX_USE_PROTOBUF_SHARED_LIBS=ON" +``` + +The ON/OFF depends on what kind of protobuf library you have. Shared libraries are files ending with \*.dll/\*.so/\*.dylib. Static libraries are files ending with \*.a/\*.lib. 
This option depends on how you get your protobuf library and how it was built. It defaults to OFF. You don't need to run the commands above if you'd prefer to use a static protobuf library. + +### Windows + +If you are building ONNX from source, it is recommended that you also build Protobuf locally as a static library. The version distributed with conda-forge is a DLL, but ONNX expects it to be a static library. Building protobuf locally also lets you control the version of protobuf. The tested and recommended version is 3.21.12. + +The instructions in this README assume you are using Visual Studio. It is recommended that you run all the commands from a shell started from "x64 Native Tools Command Prompt for VS 2019" and keep the build system generator for cmake (e.g., cmake -G "Visual Studio 16 2019") consistent while building protobuf as well as ONNX. + +You can get protobuf by running the following commands: + +```bat +git clone https://github.com/protocolbuffers/protobuf.git +cd protobuf +git checkout v21.12 +cd cmake +cmake -G "Visual Studio 16 2019" -A x64 -DCMAKE_INSTALL_PREFIX=<protobuf_install_dir> -Dprotobuf_MSVC_STATIC_RUNTIME=OFF -Dprotobuf_BUILD_SHARED_LIBS=OFF -Dprotobuf_BUILD_TESTS=OFF -Dprotobuf_BUILD_EXAMPLES=OFF . +msbuild protobuf.sln /m /p:Configuration=Release +msbuild INSTALL.vcxproj /p:Configuration=Release +``` + +Then it will be built as a static library and installed to <protobuf_install_dir>. Please add the bin directory (which contains protoc.exe) to your PATH. + +```bat +set PATH=<protobuf_install_dir>/bin;%PATH% +``` + +Please note: if your protobuf_install_dir contains spaces, **do not** add quotation marks around it. + +Alternative: if you don't want to change your PATH, you can set ONNX_PROTOC_EXECUTABLE instead. + +```bat +set CMAKE_ARGS=-DONNX_PROTOC_EXECUTABLE=<full path to protoc.exe> +``` + +Then you can build ONNX as: + +``` +git clone https://github.com/onnx/onnx.git +cd onnx +git submodule update --init --recursive +# prefer lite proto +set CMAKE_ARGS=-DONNX_USE_LITE_PROTO=ON +pip install -e . +``` + +### Linux + +First, you need to install protobuf. The minimum Protobuf compiler (protoc) version required by ONNX is 3.6.1. Please note that old protoc versions might not work with `CMAKE_ARGS=-DONNX_USE_LITE_PROTO=ON`. + +Ubuntu 20.04 (and newer) users may choose to install protobuf via + +```sh +apt-get install python3-pip python3-dev libprotobuf-dev protobuf-compiler +``` + +In this case, it is required to add `-DONNX_USE_PROTOBUF_SHARED_LIBS=ON` to CMAKE_ARGS in the ONNX build step. + +A more general way is to build and install it from source. See the instructions below for more details. +
+ + Installing Protobuf from source + + Debian/Ubuntu: + + ```sh + git clone https://github.com/protocolbuffers/protobuf.git + cd protobuf + git checkout v21.12 + git submodule update --init --recursive + mkdir build_source && cd build_source + cmake ../cmake -Dprotobuf_BUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_INSTALL_SYSCONFDIR=/etc -DCMAKE_POSITION_INDEPENDENT_CODE=ON -Dprotobuf_BUILD_TESTS=OFF -DCMAKE_BUILD_TYPE=Release + make -j$(nproc) + make install + ``` + + CentOS/RHEL/Fedora: + + ```sh + git clone https://github.com/protocolbuffers/protobuf.git + cd protobuf + git checkout v21.12 + git submodule update --init --recursive + mkdir build_source && cd build_source + cmake ../cmake -DCMAKE_INSTALL_LIBDIR=lib64 -Dprotobuf_BUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_INSTALL_SYSCONFDIR=/etc -DCMAKE_POSITION_INDEPENDENT_CODE=ON -Dprotobuf_BUILD_TESTS=OFF -DCMAKE_BUILD_TYPE=Release + make -j$(nproc) + make install + ``` + + Here "-DCMAKE_POSITION_INDEPENDENT_CODE=ON" is crucial. By default, static libraries are built without the "-fPIC" flag, so they are not position-independent code, but shared libraries must be position-independent code. Python C/C++ extensions (like ONNX) are shared libraries, so if a static library was not built with "-fPIC", it can't be linked into such a shared library. + + Once build is successful, update PATH to include protobuf paths. +
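+As an optional sanity check before building ONNX itself, you can confirm which protoc will be picked up; this is only a minimal sketch, assuming a POSIX shell, the v21.12 checkout used above, and that the chosen install prefix's bin directory is (or will be put) on PATH:
+
+```sh
+# Show which protoc is on PATH and confirm it matches the version built above.
+which protoc
+protoc --version   # the v21.12 sources should report libprotoc 3.21.12
+# If you installed to a non-standard prefix, prepend its bin directory first, e.g.:
+# export PATH=/path/to/protobuf-install/bin:$PATH
+```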
+ +Then you can build ONNX as: + +```sh +git clone https://github.com/onnx/onnx.git +cd onnx +git submodule update --init --recursive +# Optional: prefer lite proto +export CMAKE_ARGS=-DONNX_USE_LITE_PROTO=ON +pip install -e . +``` + +### Mac + +```sh +export NUM_CORES=`sysctl -n hw.ncpu` +brew update +brew install autoconf && brew install automake +wget https://github.com/protocolbuffers/protobuf/releases/download/v21.12/protobuf-cpp-3.21.12.tar.gz +tar -xvf protobuf-cpp-3.21.12.tar.gz +cd protobuf-3.21.12 +mkdir build_source && cd build_source +cmake ../cmake -Dprotobuf_BUILD_SHARED_LIBS=OFF -DCMAKE_POSITION_INDEPENDENT_CODE=ON -Dprotobuf_BUILD_TESTS=OFF -DCMAKE_BUILD_TYPE=Release +make -j${NUM_CORES} +make install +``` + +Once build is successful, update PATH to include protobuf paths. + +Then you can build ONNX as: + +```sh +git clone --recursive https://github.com/onnx/onnx.git +cd onnx +# Optional: prefer lite proto +set CMAKE_ARGS=-DONNX_USE_LITE_PROTO=ON +pip install -e . +``` + +## Verify Installation + +After installation, run + +```sh +python -c "import onnx" +``` + +to verify it works. + +## Common Build Options + +For full list refer to CMakeLists.txt + +### Environment variables + +* `USE_MSVC_STATIC_RUNTIME` should be 1 or 0, not ON or OFF. When set to 1 onnx links statically to runtime library. +**Default**: `USE_MSVC_STATIC_RUNTIME=0` + +* `DEBUG` should be 0 or 1. When set to 1 onnx is built in debug mode. or debug versions of the dependencies, you need to open the [CMakeLists file](https://github.com/onnx/onnx/blob/main/CMakeLists.txt) and append a letter `d` at the end of the package name lines. For example, `NAMES protobuf-lite` would become `NAMES protobuf-lited`. +**Default**: `Debug=0` + +### CMake variables + +* `ONNX_USE_PROTOBUF_SHARED_LIBS` should be `ON` or `OFF`. +**Default**: `ONNX_USE_PROTOBUF_SHARED_LIBS=OFF USE_MSVC_STATIC_RUNTIME=0` +`ONNX_USE_PROTOBUF_SHARED_LIBS` determines how onnx links to protobuf libraries. + * When set to `ON` - onnx will dynamically link to protobuf shared libs, PROTOBUF_USE_DLLS will be defined as described [here](https://github.com/protocolbuffers/protobuf/blob/main/cmake/README.md#dlls-vs-static-linking), Protobuf_USE_STATIC_LIBS will be set to `OFF` and `USE_MSVC_STATIC_RUNTIME` must be 0. + * When set to `OFF` - onnx will link statically to protobuf, and Protobuf_USE_STATIC_LIBS will be set to `ON` (to force the use of the static libraries) and `USE_MSVC_STATIC_RUNTIME` can be `0` or `1`. + +* `ONNX_USE_LITE_PROTO` should be `ON` or `OFF`. When set to `ON` onnx uses lite protobuf instead of full protobuf. +**Default**: `ONNX_USE_LITE_PROTO=OFF` + +* `ONNX_WERROR` should be `ON` or `OFF`. When set to `ON` warnings are treated as errors. +**Default**: `ONNX_WERROR=OFF` in local builds, `ON` in CI and release pipelines. + +## Common Errors + +* Note: the `import onnx` command does not work from the source checkout directory; in this case you'll see `ModuleNotFoundError: No module named 'onnx.onnx_cpp2py_export'`. Change into another directory to fix this error. + +* If you run into any issues while building Protobuf as a static library, please ensure that shared Protobuf libraries, like libprotobuf, are not installed on your device or in the conda environment. If these shared libraries exist, either remove them to build Protobuf from source as a static library, or skip the Protobuf build from source to use the shared version directly. 
+ +* If you run into any issues while building ONNX from source, and your error message reads, `Could not find pythonXX.lib`, ensure that you have consistent Python versions for common commands, such as `python` and `pip`. Clean all existing build files and rebuild ONNX again. + +# Testing + +ONNX uses [pytest](https://docs.pytest.org) as test driver. In order to run tests, you will first need to install `pytest`: + +```sh +pip install pytest nbval +``` + +After installing pytest, use the following command to run tests. + +```sh +pytest +``` + +# Development + +Check out the [contributor guide](https://github.com/onnx/onnx/blob/main/CONTRIBUTING.md) for instructions. + +# License + +[Apache License v2.0](LICENSE) + +# Code of Conduct + +[ONNX Open Source Code of Conduct](https://onnx.ai/codeofconduct.html) diff --git a/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/RECORD b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..7cddea011d783cc3640394bffb74ff525e63bfac --- /dev/null +++ b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/RECORD @@ -0,0 +1,7008 @@ +../../Scripts/backend-test-tools.exe,sha256=TVu07mfLokWXhMvZq4S5PE6rup-ch1Jh1LWx7VrEXKo,108398 +../../Scripts/check-model.exe,sha256=mie1Hbfc2fRvrqZG6MeW_sBRV6L1vGG-qWc6scMow60,108401 +../../Scripts/check-node.exe,sha256=3etYu7RswX-nWBPm2j881sFZB2NvgbDFA7TgY01RKsI,108399 +onnx-1.16.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +onnx-1.16.1.dist-info/LICENSE,sha256=Pd-b5cKP4n2tFDpdx27qJSIq0d1ok0oEcGTlbtL6QMU,11560 +onnx-1.16.1.dist-info/METADATA,sha256=ReMoSuBe_QT4EaYpzuASqACxSxbfZKZshjYgaJMLQK0,16842 +onnx-1.16.1.dist-info/RECORD,, +onnx-1.16.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +onnx-1.16.1.dist-info/WHEEL,sha256=Z6c-bE0pUM47a70GvqO_SvH_XXU0lm62gEAKtoNJ08A,100 +onnx-1.16.1.dist-info/entry_points.txt,sha256=kI2A5Kl3HXkb7WkSIlcynOx46XYhwXcFVJlmwL58mPk,156 +onnx-1.16.1.dist-info/top_level.txt,sha256=fok5iu7rojicZZye7lCMdLme_jvte9jjDqYyhL0Kg6E,5 +onnx/__init__.py,sha256=hxBZrVb_NDdAwGGAHWVoeFKZzJlZhFGBrO9xLC6Kk48,12059 +onnx/__pycache__/__init__.cpython-39.pyc,, +onnx/__pycache__/checker.cpython-39.pyc,, +onnx/__pycache__/compose.cpython-39.pyc,, +onnx/__pycache__/external_data_helper.cpython-39.pyc,, +onnx/__pycache__/gen_proto.cpython-39.pyc,, +onnx/__pycache__/helper.cpython-39.pyc,, +onnx/__pycache__/hub.cpython-39.pyc,, +onnx/__pycache__/inliner.cpython-39.pyc,, +onnx/__pycache__/mapping.cpython-39.pyc,, +onnx/__pycache__/model_container.cpython-39.pyc,, +onnx/__pycache__/numpy_helper.cpython-39.pyc,, +onnx/__pycache__/onnx_data_pb.cpython-39.pyc,, +onnx/__pycache__/onnx_data_pb2.cpython-39.pyc,, +onnx/__pycache__/onnx_ml_pb2.cpython-39.pyc,, +onnx/__pycache__/onnx_operators_ml_pb2.cpython-39.pyc,, +onnx/__pycache__/onnx_operators_pb.cpython-39.pyc,, +onnx/__pycache__/onnx_pb.cpython-39.pyc,, +onnx/__pycache__/parser.cpython-39.pyc,, +onnx/__pycache__/printer.cpython-39.pyc,, +onnx/__pycache__/serialization.cpython-39.pyc,, +onnx/__pycache__/shape_inference.cpython-39.pyc,, +onnx/__pycache__/subbyte.cpython-39.pyc,, +onnx/__pycache__/utils.cpython-39.pyc,, +onnx/__pycache__/version.cpython-39.pyc,, +onnx/__pycache__/version_converter.cpython-39.pyc,, +onnx/backend/__init__.py,sha256=HiuD-U6YHJwWikkemKJqBq-35U8KJ9LDdAwdisi7OlQ,85 +onnx/backend/__pycache__/__init__.cpython-39.pyc,, +onnx/backend/__pycache__/base.cpython-39.pyc,, +onnx/backend/base.py,sha256=myXeAXXVI31c_lcqxeAJNK3dRFncuJox2N55rTGANZ4,4680 
+onnx/backend/sample/__init__.py,sha256=MjnSJkhE5WRkgjDth11gAVMfwxSlx0ovv7aIX2e6L8g,84 +onnx/backend/sample/__pycache__/__init__.cpython-39.pyc,, +onnx/backend/sample/ops/__init__.py,sha256=zf3CEzbcJnTanRw-mDRnMwFibu2fvUflOzD0xJ-z2MU,854 +onnx/backend/sample/ops/__pycache__/__init__.cpython-39.pyc,, +onnx/backend/sample/ops/__pycache__/abs.cpython-39.pyc,, +onnx/backend/sample/ops/abs.py,sha256=OZP6vJNpixx6-U6_vfptxngZHrcckFKbhWUWNLMuZ9E,179 +onnx/backend/test/__init__.py,sha256=5S8xMcgTHXxEh_GVF5EHUppM7Qhar-8zjnzrpsbmz9Q,204 +onnx/backend/test/__pycache__/__init__.cpython-39.pyc,, +onnx/backend/test/__pycache__/cmd_tools.cpython-39.pyc,, +onnx/backend/test/__pycache__/stat_coverage.cpython-39.pyc,, +onnx/backend/test/case/__init__.py,sha256=pyZJLaPE5b2baJuwZPMNVleuIHhPPAyy9GJDO-eQjOs,378 +onnx/backend/test/case/__pycache__/__init__.cpython-39.pyc,, +onnx/backend/test/case/__pycache__/base.cpython-39.pyc,, +onnx/backend/test/case/__pycache__/test_case.cpython-39.pyc,, +onnx/backend/test/case/__pycache__/utils.cpython-39.pyc,, +onnx/backend/test/case/base.py,sha256=gVDFdClaB1Pd_X6-S5NrCeIycT7GC9_ZGLxX1qfdHu8,1540 +onnx/backend/test/case/model/__init__.py,sha256=P5LG59TqKSUc2Nx98k3LbVMUUJJAxS88RYg5tHFV6Q4,2237 +onnx/backend/test/case/model/__pycache__/__init__.cpython-39.pyc,, +onnx/backend/test/case/model/__pycache__/expand.cpython-39.pyc,, +onnx/backend/test/case/model/__pycache__/gradient.cpython-39.pyc,, +onnx/backend/test/case/model/__pycache__/sequence.cpython-39.pyc,, +onnx/backend/test/case/model/__pycache__/shrink.cpython-39.pyc,, +onnx/backend/test/case/model/__pycache__/sign.cpython-39.pyc,, +onnx/backend/test/case/model/__pycache__/single-relu.cpython-39.pyc,, +onnx/backend/test/case/model/__pycache__/stringnormalizer.cpython-39.pyc,, +onnx/backend/test/case/model/expand.py,sha256=JUj0XXNYUmfRBEPkWUll0MrR14vsMBfw6z5tP8j1i1U,3314 +onnx/backend/test/case/model/gradient.py,sha256=9S--mmzK5Hb9A8uKFue3uraYScAY-C4Wx0GTP688AjM,4084 +onnx/backend/test/case/model/sequence.py,sha256=ClyD2tzvhZmCAKD_yDtkU-kJv_tIOgbx0bXJpI72bT4,16886 +onnx/backend/test/case/model/shrink.py,sha256=Rs5afaaTAznFYgUlsxk-dEILG7dq0bdYnSc1FHtzrAU,1206 +onnx/backend/test/case/model/sign.py,sha256=ua6dvQozKLCwzlLrcxykoFR3P3g0nh7z3RLnOW_ljDk,1151 +onnx/backend/test/case/model/single-relu.py,sha256=zFSV3yVV02dn4UT8oL_hPXAt1IUHU5qpVb7nxjKEKMc,1086 +onnx/backend/test/case/model/stringnormalizer.py,sha256=p5dapxdh4aGCETHNXQh87wB3Mf8hlL7HDxwm12iyNds,6349 +onnx/backend/test/case/node/__init__.py,sha256=h1iZnB5_TIEH0WGb2HFQkCYO8dZSDlD-jSq2I5Kl4UE,17035 +onnx/backend/test/case/node/__pycache__/__init__.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/_image_decoder_data.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/abs.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/acos.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/acosh.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/adagrad.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/adam.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/add.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/affinegrid.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/and.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/argmax.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/argmin.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/asin.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/asinh.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/atan.cpython-39.pyc,, 
+onnx/backend/test/case/node/__pycache__/atanh.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/averagepool.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/batchnorm.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/bernoulli.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/bitshift.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/bitwiseand.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/bitwisenot.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/bitwiseor.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/bitwisexor.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/blackmanwindow.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/cast.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/castlike.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/ceil.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/celu.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/center_crop_pad.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/clip.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/col2im.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/compress.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/concat.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/constant.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/constantofshape.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/conv.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/convinteger.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/convtranspose.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/cos.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/cosh.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/cumsum.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/deformconv.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/depthtospace.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/dequantizelinear.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/det.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/dft.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/div.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/dropout.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/dynamicquantizelinear.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/einsum.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/elu.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/equal.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/erf.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/exp.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/expand.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/eyelike.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/flatten.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/floor.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/gather.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/gatherelements.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/gathernd.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/gelu.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/gemm.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/globalaveragepool.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/globalmaxpool.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/greater.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/greater_equal.cpython-39.pyc,, 
+onnx/backend/test/case/node/__pycache__/gridsample.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/groupnormalization.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/gru.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/hammingwindow.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/hannwindow.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/hardmax.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/hardsigmoid.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/hardswish.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/identity.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/if.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/image_decoder.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/instancenorm.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/isinf.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/isnan.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/layernormalization.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/leakyrelu.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/less.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/less_equal.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/log.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/logsoftmax.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/loop.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/lppool.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/lrn.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/lstm.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/matmul.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/matmulinteger.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/max.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/maxpool.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/maxunpool.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/mean.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/meanvariancenormalization.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/melweightmatrix.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/min.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/mish.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/mod.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/momentum.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/mul.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/neg.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/negativeloglikelihoodloss.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/nonmaxsuppression.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/nonzero.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/not.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/onehot.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/optionalgetelement.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/optionalhaselement.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/or.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/pad.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/pow.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/prelu.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/qlinearconv.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/qlinearmatmul.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/quantizelinear.cpython-39.pyc,, 
+onnx/backend/test/case/node/__pycache__/rangeop.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reciprocal.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reduce_log_sum.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reduce_log_sum_exp.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reducel1.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reducel2.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reducemax.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reducemean.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reducemin.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reduceprod.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reducesum.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reducesumsquare.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/regex_full_match.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/relu.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reshape.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/resize.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/reversesequence.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/rnn.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/roialign.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/round.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/scan.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/scatter.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/scatterelements.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/scatternd.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/selu.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/sequence_map.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/sequenceinsert.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/shape.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/shrink.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/sigmoid.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/sign.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/sin.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/sinh.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/size.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/slice.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/softmax.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/softmaxcrossentropy.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/softplus.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/softsign.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/spacetodepth.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/split.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/splittosequence.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/sqrt.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/squeeze.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/stft.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/string_concat.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/string_split.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/stringnormalizer.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/sub.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/sum.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/tan.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/tanh.cpython-39.pyc,, 
+onnx/backend/test/case/node/__pycache__/tfidfvectorizer.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/thresholdedrelu.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/tile.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/topk.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/transpose.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/trilu.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/unique.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/unsqueeze.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/upsample.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/where.cpython-39.pyc,, +onnx/backend/test/case/node/__pycache__/xor.cpython-39.pyc,, +onnx/backend/test/case/node/_image_decoder_data.py,sha256=11zZPBH-wZTfk1ZZ5QXN3dd6lLA6CjxBuoHghOixRAw,216765 +onnx/backend/test/case/node/abs.py,sha256=RBfzepQNsr-IjM03bvCm865oYD6B-IpX2J2AJQ4GkFA,597 +onnx/backend/test/case/node/acos.py,sha256=AKANeRyLZk1z4XtXVe2EvLe9d_paOXGK0r518zPsh1M,718 +onnx/backend/test/case/node/acosh.py,sha256=gB-VP6wMvYlu7MhoV6TqNBKIA58rJus1Re3PFo7g_zc,789 +onnx/backend/test/case/node/adagrad.py,sha256=8TRECQRjfusWnI0_R4DmQBLdx9M6LYkXaR2seqvimp8,3811 +onnx/backend/test/case/node/adam.py,sha256=mf7bOZJLCEXddiCbpEwNZep9FS02B2sidUrLAhSI858,4494 +onnx/backend/test/case/node/add.py,sha256=jimqUMeTfX2S4pPqOQmi_QqsBgyrcgNtlHVXgR7XbHY,1387 +onnx/backend/test/case/node/affinegrid.py,sha256=Q2LREd1JIJvGrtI12ukSCzCwCzE46WD83DLcRvlyphM,6306 +onnx/backend/test/case/node/ai_onnx_ml/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +onnx/backend/test/case/node/ai_onnx_ml/__pycache__/__init__.cpython-39.pyc,, +onnx/backend/test/case/node/ai_onnx_ml/__pycache__/array_feature_extractor.cpython-39.pyc,, +onnx/backend/test/case/node/ai_onnx_ml/__pycache__/binarizer.cpython-39.pyc,, +onnx/backend/test/case/node/ai_onnx_ml/__pycache__/label_encoder.cpython-39.pyc,, +onnx/backend/test/case/node/ai_onnx_ml/__pycache__/tree_ensemble.cpython-39.pyc,, +onnx/backend/test/case/node/ai_onnx_ml/array_feature_extractor.py,sha256=t5EKOtgQNy_5R-fzYvE_qZNRAEHs2LmVmKugyZkgFPk,821 +onnx/backend/test/case/node/ai_onnx_ml/binarizer.py,sha256=Gh466mWWMEZRwJAYrGcX3120PnhxArb77GQCEOc6arg,773 +onnx/backend/test/case/node/ai_onnx_ml/label_encoder.py,sha256=DQy2pAL75j9TIeVOci0_ngl2PMbDRo9wHNtiQ06yZDo,3110 +onnx/backend/test/case/node/ai_onnx_ml/tree_ensemble.py,sha256=k5nPMNMcSdaKdJy3eT0d2ZjCp6bGnwskRILGDyINGUE,3994 +onnx/backend/test/case/node/and.py,sha256=8CB1KZFAgryfLmPVzvuzNt38VeYpyXuozJPeh7-tNXE,2544 +onnx/backend/test/case/node/argmax.py,sha256=tOW1Shz852_niwhyGASasb6rBk6cb9pdkEyoOaWJ-JM,8580 +onnx/backend/test/case/node/argmin.py,sha256=TjNXPcbc6hDAcoNSTJXlJ9SO1MLdTpufTZpXXonoa3U,8662 +onnx/backend/test/case/node/asin.py,sha256=6t160N-u_Fo58qEDRuUcucumoY8wybFKg6ovWlGIbOI,718 +onnx/backend/test/case/node/asinh.py,sha256=yV0NjgaQKQC0F4shc3fIhgz7mrkDITxDPwIrLTnLK1o,772 +onnx/backend/test/case/node/atan.py,sha256=dhsMycFILHxUAwrKnfxdhrwb09PpJ1YHk98gqO9reXo,715 +onnx/backend/test/case/node/atanh.py,sha256=f4_imuKc6cq0YXtah6pGWAScIJw4A0oqFlqbSEID1R0,790 +onnx/backend/test/case/node/averagepool.py,sha256=6dUUgpGoBnvolTwuKpzlhJWszuJgK_ZaSf-57YBq-L0,19878 +onnx/backend/test/case/node/batchnorm.py,sha256=uOTmYD6vyh4dI_Y_pcvdvERMIdlK0O8HiJrSSsf0DFo,4865 +onnx/backend/test/case/node/bernoulli.py,sha256=gNc357ZsJArquIvGi6ZhGmv2G9xkgiZfRLwJL3hfww0,1909 +onnx/backend/test/case/node/bitshift.py,sha256=_u1KTijrr84NREDhTPXPcjeBfWCDWJlPcvDbehzTMBk,3645 
+onnx/backend/test/case/node/bitwiseand.py,sha256=oJmtyHDWe4i-9hwsX2VM66_iRVffHmzpZfwROBj2D_s,1708 +onnx/backend/test/case/node/bitwisenot.py,sha256=XRW57NfcO7LoycCA4dk7h-1k93ORdD2HuYmAED1CA5A,1021 +onnx/backend/test/case/node/bitwiseor.py,sha256=MVhAOweUPXhQQR-iWu1aKZvgRUvpyDgvHaPoPl2neoo,1672 +onnx/backend/test/case/node/bitwisexor.py,sha256=jd3H089qlL1jKkCIB3tYGT2NQr5IGtBkUVJadOuMh0I,1707 +onnx/backend/test/case/node/blackmanwindow.py,sha256=kmZwfpZhg0dr_rAtaiGkOI8RlqVDIefglF3LGL-2YuU,1395 +onnx/backend/test/case/node/cast.py,sha256=NgM9QwR3g_VAuRAl_U72p0Y1gIDK-p8rWT8WFgeN3Xc,17854 +onnx/backend/test/case/node/castlike.py,sha256=5E-y3Cbfvx9pUzOhxFZNl-9cPZN7tbhE2i-53k3JNOc,9959 +onnx/backend/test/case/node/ceil.py,sha256=JJC00exuaBbmMK-ktb7ec_yqpX7PlyxTvmS824003vU,741 +onnx/backend/test/case/node/celu.py,sha256=_Prleafe_PZKATvg-mj5LYjlofgpvZeQ0a0iogCWONI,1584 +onnx/backend/test/case/node/center_crop_pad.py,sha256=8XgDSdMOzKLLezf4CoY60zfJGn__Ql46oNDyHWMuE5k,4033 +onnx/backend/test/case/node/clip.py,sha256=FJGc-mka84_ZxgeA9lSRg6F12HmDpnAofG5xOO62ar8,4349 +onnx/backend/test/case/node/col2im.py,sha256=t3Q08c8Zn2O7HtkzKvhgzRZE_wmfNIbphdmX6TfSba4,11384 +onnx/backend/test/case/node/compress.py,sha256=kTJNkFlzx_LutIb-TdKTHnyDHsWpFcBEaCuWDw1rlXU,2809 +onnx/backend/test/case/node/concat.py,sha256=rbIHRe-Tv2yTseK0Adir65GPEHY6NfGTc0O9W2b2PEs,1886 +onnx/backend/test/case/node/constant.py,sha256=f2d2dbQ1P3hZivRXcxdFVsb9TeViWnOApZn8OoeqI3k,794 +onnx/backend/test/case/node/constantofshape.py,sha256=I6pxOMu-hbldJ3E_JFUL4ckp6drJLBiskXOvkBvjAY0,1900 +onnx/backend/test/case/node/conv.py,sha256=gsfmgQm1B3jzl-0fETTaQMwED9W9EPVRv43izwr4F58,8394 +onnx/backend/test/case/node/convinteger.py,sha256=6BfHtCkF2s8y4kdbq3iCQFX-Q0QDMUcqsJmvvQeRm2k,1940 +onnx/backend/test/case/node/convtranspose.py,sha256=t6ixDwQbQ3Jg-2PNj5dwax-dFBInKgHJauE-ndCtR08,16822 +onnx/backend/test/case/node/cos.py,sha256=Iqu8yEAI4NQu1b5vVXal45vrJwyrS-YsR4CL-J1M5p4,705 +onnx/backend/test/case/node/cosh.py,sha256=paohiM1-bYV3Uq5AeXM1MCXW_kPPabXA6vG7htVotfY,761 +onnx/backend/test/case/node/cumsum.py,sha256=dtoV_Rg1opPwMpx-jW6ItihP8roPkn5Ymp0brSt5jOQ,3482 +onnx/backend/test/case/node/deformconv.py,sha256=0kKFJlA45N3OE4YuSr_abaYqIXyDYEAnoEmiO1SyTKw,5647 +onnx/backend/test/case/node/depthtospace.py,sha256=UEryt7YdGewSq0dD5yzN5EVul5yOBkjaUFna9bt2h2Q,3632 +onnx/backend/test/case/node/dequantizelinear.py,sha256=0J7b5SaFatZoc6GaNVtLg1pIMhAm1Ly1KPiU7Vad68k,9408 +onnx/backend/test/case/node/det.py,sha256=aYZlgjZu0qmYUuuv6-6lZ4tW9CHr2PS6VU3wAL9KJ-U,1012 +onnx/backend/test/case/node/dft.py,sha256=tGoOa57NJWSJk8i6BFs1tQk-1YPp5xFVha-TNqYR0iE,3407 +onnx/backend/test/case/node/div.py,sha256=ae1QwxAnnhDkISGSWNDVNsScveoTTYDkqSRSYLWuro0,1475 +onnx/backend/test/case/node/dropout.py,sha256=KKH4WdQ3eY_Z0hjWa1EDXqoSvEkCV5Ri64ISRq7SurM,6549 +onnx/backend/test/case/node/dynamicquantizelinear.py,sha256=skAc3Jtdsf17l5kj3QCj4wA0qNvsE1Q2yKmez9-jPA4,2511 +onnx/backend/test/case/node/einsum.py,sha256=RU6eRYkPccaryXt6KTmdF-_8Nd5cs4kYJ9r8GQ80R58,2414 +onnx/backend/test/case/node/elu.py,sha256=TyDE8lxzVqh5KvioCR9_NRw202pYJnEPLbzxRHdujh8,1260 +onnx/backend/test/case/node/equal.py,sha256=kuNB7U2DbO2Af_5ybKzWxmRYjl9VP9UuQC0vXfg61Fo,1943 +onnx/backend/test/case/node/erf.py,sha256=-ycgPKYQRHkGjkmfVxTmOHqFsaM1IrcJPQNetlRvo0A,610 +onnx/backend/test/case/node/exp.py,sha256=A-IhToYyLv1EC-h7NMp8oyXEI6s3RRd8lcDtnJNG24Q,753 +onnx/backend/test/case/node/expand.py,sha256=bRy5JQzBTihoI3UmfHxPWgHzK2kKTQSPCNLbLXK1yok,1977 
+onnx/backend/test/case/node/eyelike.py,sha256=eSwgqOd7BS_8wO5PLks9yaW20_smRu_t8NLiib0smk8,1773 +onnx/backend/test/case/node/flatten.py,sha256=WuVnLB7ilSUoAxzLbYGZj9Ai_7tZgAD61_55-70PnRQ,1961 +onnx/backend/test/case/node/floor.py,sha256=rFm2LtaR4HaKJZKK6phUBddxoS99M9BLVdQakq3YY-s,754 +onnx/backend/test/case/node/gather.py,sha256=FOlQm6jfEQTB7kO5lM9zHhjc_LZVvnYsvC66AbQVo3c,2433 +onnx/backend/test/case/node/gatherelements.py,sha256=lxe8-vzwORGVxpI6rMfxlHlKiiRbEZOG5rpf8wj7Mws,2748 +onnx/backend/test/case/node/gathernd.py,sha256=LOg8hzJOZPgbrUoqj4GuobJS_sjdVROaCAPpZoumeqE,4128 +onnx/backend/test/case/node/gelu.py,sha256=SA_xUMHKmGTfDZyMQId0Z1p2G7VsAG5Tax2JxESaEL4,1822 +onnx/backend/test/case/node/gemm.py,sha256=tLBS24QYvh4m1t3vVHBo4jO1S5LSmxWwWqMa3hIXthg,6077 +onnx/backend/test/case/node/globalaveragepool.py,sha256=lm-cNtoM1GxpJ3qhg36vOMeZZ4nPLs5r0Ds4kmSNCvw,1282 +onnx/backend/test/case/node/globalmaxpool.py,sha256=mJCSOreG3leR0zgrR_eY11M_bZDZlf_R5wv8W_5x4Qc,1257 +onnx/backend/test/case/node/greater.py,sha256=CblNiI3LVv5CTVn2_SsA6AtcE9F2hwMIn0V96R878cM,1068 +onnx/backend/test/case/node/greater_equal.py,sha256=VjKtEMOp323V_Ns_4vuTccGXsuBmf2L3D_J7kJfFZQs,1118 +onnx/backend/test/case/node/gridsample.py,sha256=USzunzK1cJY_J_blAvI7Mh4LuV5PMjadR1GYl7x6DxQ,20323 +onnx/backend/test/case/node/groupnormalization.py,sha256=C17ykjUPrQ_dus1EPqvYBZ5zhEaZIvwfC8rdZE6wfpg,2540 +onnx/backend/test/case/node/gru.py,sha256=lzzWzcsl8yeosfvsXcbOozfyjOSrSC68hFoPL77-5KI,8087 +onnx/backend/test/case/node/hammingwindow.py,sha256=brMVX1rXjt0M01JOgDxCWJKvGV4G1wVHpqZLrXKIXlw,1134 +onnx/backend/test/case/node/hannwindow.py,sha256=egQ95WSqw1Q3x7ruDuQuxRZTlu1ibvn7DbWnl4YUdug,1105 +onnx/backend/test/case/node/hardmax.py,sha256=8e_vVto8LPaOCS8GiHVihEpBFZatr4Zb_aBxdtMoREE,2715 +onnx/backend/test/case/node/hardsigmoid.py,sha256=BqWfQ-n8Tj2vnDyHoezt_COkqkCiR3ZVFFHxrRHsBAs,1259 +onnx/backend/test/case/node/hardswish.py,sha256=IN4WzGQqxfI0YN9SxYQtcxwGAokuQEnLiZG8aIVkYb4,728 +onnx/backend/test/case/node/identity.py,sha256=oYx37jq_a-8hpni8pn7fktF9GL_EOcDWkTRQWsJiiio,2426 +onnx/backend/test/case/node/if.py,sha256=VeJutW2yiYCxDwjyewkI4evI-HU3UIEVBos_MfIJOBE,6680 +onnx/backend/test/case/node/image_decoder.py,sha256=Q3vnoez4WrDYdmz8fTdeBIau_2tdyQ-GcWHBwvXOVGQ,7613 +onnx/backend/test/case/node/instancenorm.py,sha256=OzS3A-kW8vJF5M8hsX28hA3I5lOteBkB6u7MBX5aoak,1965 +onnx/backend/test/case/node/isinf.py,sha256=bsLrKkLrlwQWfTOtlZFxdY5qy0v8H9tdp6P4GzoKT9g,1743 +onnx/backend/test/case/node/isnan.py,sha256=xIHclNhVxEhK6iY84yvzJ74vsNEkdr44tuOUOF-3ulw,962 +onnx/backend/test/case/node/layernormalization.py,sha256=qMrsw5l3fjgll-noebNtXqG4NppvufxDjuQflPxKcXI,6491 +onnx/backend/test/case/node/leakyrelu.py,sha256=HM1yPN9NB2oVWINulZPr1WrcN13etBOg-hqB0c5ISXc,1279 +onnx/backend/test/case/node/less.py,sha256=j-hcre56nDUVlp-uml66g5ZPaRJy81baUOm5s1EXbHM,1038 +onnx/backend/test/case/node/less_equal.py,sha256=fwXjoVSCC_D30I7n_Y7Fcm-kpMtLyfyQc_KRJsfNeIQ,1088 +onnx/backend/test/case/node/log.py,sha256=7XI8ZBxtu6eqPoBOZS-KisBMvg_Iq5vpNvGq0g2GcAw,746 +onnx/backend/test/case/node/logsoftmax.py,sha256=4t0Fn7Vm_wFLAhynuMXzUN5JzMpz0L7KIT7zYH_wk6s,2829 +onnx/backend/test/case/node/loop.py,sha256=ywCTLrnAzG-o466POT_Ww_EHOWmMkQdqCl226-ISKZI,15176 +onnx/backend/test/case/node/lppool.py,sha256=xQC3VhlCbGlq6AjejYdjFK4YNHeYzuVxMM3KPrw8pic,8941 +onnx/backend/test/case/node/lrn.py,sha256=g3z8BZwIBhjH0XbCXb_HQLpjlluV1JPgK-oj-MyXV94,2150 +onnx/backend/test/case/node/lstm.py,sha256=wutmz-BQYUVG1gTP1QCCV57CN-23frvZLQ5jplHaxcE,8517 
+onnx/backend/test/case/node/matmul.py,sha256=Imyh6ACatzG4OSAzFdYFD8A9HbY_AvslZBPb_Gx6jVY,1116 +onnx/backend/test/case/node/matmulinteger.py,sha256=c2CBzRt-Kil2GBR9hyP997cAuayuufjuVKsTp8Ja_po,1355 +onnx/backend/test/case/node/max.py,sha256=pRC6brP6A6P7buf7hUe5uv9_QiA-_mvA1hMnoVHdmFs,2074 +onnx/backend/test/case/node/maxpool.py,sha256=oTtdjzy07vDPq9j3YZ9E1S8T1hY7Eq4cb3dekzicLrk,22559 +onnx/backend/test/case/node/maxunpool.py,sha256=FIW7ih5PJrAbZgaMoHUAQ8JpBC1-TOK6vmIJK23Ylck,1985 +onnx/backend/test/case/node/mean.py,sha256=SNwYH0o7-4nybtk1JLgNW8Twp2tXpEuVgZ6qTmhniD4,1378 +onnx/backend/test/case/node/meanvariancenormalization.py,sha256=15ub97LVsB61jakyPzYqQMu2kq-GbJmMj3eR_sm83yY,1735 +onnx/backend/test/case/node/melweightmatrix.py,sha256=dLNBX9h56t09-qa_zEhnWD7HALFo_IalEDdO_YYai54,3754 +onnx/backend/test/case/node/min.py,sha256=TWsKC5EJ0T2iGO1Tzkahk-T9G0LjZQdyT3Lh2pym7uE,2074 +onnx/backend/test/case/node/mish.py,sha256=lCh2qsME-ttOCdR0-G0GDoSOwW-apZ4Td8tu5NU-0A4,642 +onnx/backend/test/case/node/mod.py,sha256=kopByyPkJGMNf4eHjY7v2fQUPtnNRQPbKL0FlLwCHNs,6392 +onnx/backend/test/case/node/momentum.py,sha256=eKludRu0L63hBT_7ENEvbOkGs5nXhJwy1pAfSgT23vQ,5451 +onnx/backend/test/case/node/mul.py,sha256=5gAaj1wkZFIhrHG-jkuK813dQ2rUVK_kJH4YeT51P74,1471 +onnx/backend/test/case/node/neg.py,sha256=6eCnBhgh0neipZJwOACgXUHMT6bIdtFQehlB79r80sE,742 +onnx/backend/test/case/node/negativeloglikelihoodloss.py,sha256=erC7RL7OG3uj4_iyd_cIzRbIcxHjv33hLU5x4843dlU,20200 +onnx/backend/test/case/node/nonmaxsuppression.py,sha256=S7hus38R1hHXkEi_RDcbcyqkTGvMqtJsDa8U5UbBATs,14277 +onnx/backend/test/case/node/nonzero.py,sha256=uvkfhBB6d5Ol6SXxdNn_Cp4Qc7GPiMQSsaRIMG6QkRo,713 +onnx/backend/test/case/node/not.py,sha256=R7wk1Z_J3JK3x_YFmAE_F-AsPNVaPZtJS5h3KprYdPA,879 +onnx/backend/test/case/node/onehot.py,sha256=0vvPfgAh0Qv-eShtxL2yAZmarE-UKUDr_teCRGpx_9I,4070 +onnx/backend/test/case/node/optionalgetelement.py,sha256=kgHkVB5iXH66GNi8nbQgkUk0korRjeh6iSk4gT-QPSg,2603 +onnx/backend/test/case/node/optionalhaselement.py,sha256=NlDQ-pS2DB7eQXdndyMyBLR3EpACAsJGg5BRvQVK4KA,3487 +onnx/backend/test/case/node/or.py,sha256=sgNQ4B2d1HYyhnqg6r-sdPaMf-suPn8ZltLAKKRdt9s,2522 +onnx/backend/test/case/node/pad.py,sha256=YeMeDefWIpalW0e8yBH8vsLitzugN8eEBifJbq-dl8Q,4107 +onnx/backend/test/case/node/pow.py,sha256=mqZTB3aExWI77NHZMD5pa0fb0iTsSrVuVoYTZtfEsGg,3959 +onnx/backend/test/case/node/prelu.py,sha256=2mc_P13n4GYWE50wcvIhNoyIxmppmrBqq0et5O5ZEck,1160 +onnx/backend/test/case/node/qlinearconv.py,sha256=NcqkNkS24rCnTDHpLTePunkIwSdHt6mAlNLGF800oEY,2341 +onnx/backend/test/case/node/qlinearmatmul.py,sha256=liNk0Jzth3BWDJVp1PAOFWqHToB-KsvqN0CQVqwKo7U,5676 +onnx/backend/test/case/node/quantizelinear.py,sha256=xWfKAQmAmYePK_vkbShRm7D4gJ3QzD1o4BUlv9hMPhI,11292 +onnx/backend/test/case/node/rangeop.py,sha256=p7Lz1xygHPHRUrfInf5FS2RBMhFL67kdLbyQxvxE2Ls,1491 +onnx/backend/test/case/node/reciprocal.py,sha256=34LwMciBx7lM5-KpzaDSCa5dL3VXblfLVpwoF8N1_48,782 +onnx/backend/test/case/node/reduce_log_sum.py,sha256=ons-ZqpqnpNaCkk2bUwVu-Di7i0MTeBfKL_fWMn1n8M,3159 +onnx/backend/test/case/node/reduce_log_sum_exp.py,sha256=t-uopw6uFPOflG3ffVVqT7O--4XhoQ099k35nTFHVpA,5935 +onnx/backend/test/case/node/reducel1.py,sha256=UsbNmcGFLK0Q_39KvJh5x7UQbMN8iG0sBtOiFaODXJk,5757 +onnx/backend/test/case/node/reducel2.py,sha256=ajR7QzBkMHSiushzoMnYd51MMgLNwMsah5y9HRwcnp0,6183 +onnx/backend/test/case/node/reducemax.py,sha256=K7PIIg2oHPj-dppxdaPk5afRr9eUor2MVjd8vEZtoxU,6916 +onnx/backend/test/case/node/reducemean.py,sha256=NGi1BwNn32FsVFSAd7g72F3x4LAY6RrgVGN-NQ_8PMA,4944 
+onnx/backend/test/case/node/reducemin.py,sha256=abrDUoJvRdTpAsztkwxAYCo79IzpNDuPP5-_f3OHJYY,6961 +onnx/backend/test/case/node/reduceprod.py,sha256=0g5HR3tyYS3PgLEpRa_NkDqQqK3I5AHk0IihQBGchUk,5454 +onnx/backend/test/case/node/reducesum.py,sha256=Elf59emiH7YQUY2AXG5gCotv0BKCv_dcW2JX4m1kK3Q,7374 +onnx/backend/test/case/node/reducesumsquare.py,sha256=_3ICjHMXaf-87PFFnPID6M3lYJ8kHW4JK13zq6D7xW8,5716 +onnx/backend/test/case/node/regex_full_match.py,sha256=MMC5PwBuxXwub0s7E89kcHKN6nMYTw768hl_tmOQ08Q,1946 +onnx/backend/test/case/node/relu.py,sha256=WTdC50jBMZR2rctVtNrQU-UvQ5DuROMPSB1EwKZjodQ,570 +onnx/backend/test/case/node/reshape.py,sha256=Dwks9zBBuGz4kHBXOiZx5iTSKiXcArj8T3ltUhvIjsQ,2960 +onnx/backend/test/case/node/resize.py,sha256=kyJAM49qHisRr7O7_3vny9btEgeeg6AT9z_qEZURU_E,53329 +onnx/backend/test/case/node/reversesequence.py,sha256=mjWpMXMukWlw4UZmImkpABcDSbs8_ig06HLbowF-Pkk,2290 +onnx/backend/test/case/node/rnn.py,sha256=Ryr7wiNG7kYEZZ2TP9Drj_ndi8xa6AO29IqbfT8UgKA,6615 +onnx/backend/test/case/node/roialign.py,sha256=3B9VJcY1jN3oCOO7BQ9TVUzN70LTuIztLw_b527TN3s,15330 +onnx/backend/test/case/node/round.py,sha256=-ElfKi8Z5sCrSkt5cPifbVLmALuGZJb8Tv07eZtaLGY,1362 +onnx/backend/test/case/node/scan.py,sha256=ez7_Uf85TPvAUOLNlFXGygbJACOzR9zQG-2ooMdznt4,4461 +onnx/backend/test/case/node/scatter.py,sha256=8CFOuXyhb-ZjrSbbZzbrhas9eEA357VcRhftimLIwlE,3354 +onnx/backend/test/case/node/scatterelements.py,sha256=dFWxTxPCgW0lcniUo3hxCvxM-OLGbfJ9-gzeDyxJRCk,7639 +onnx/backend/test/case/node/scatternd.py,sha256=KMfDiler2GVEE4K0QDDnAuGMeeQx04wxRfw4Uwjfo5w,8865 +onnx/backend/test/case/node/selu.py,sha256=3Jqr74W85o-XzvKkMPf4Q5dv1f_4AMtJBMfdxEx8vxA,1549 +onnx/backend/test/case/node/sequence_map.py,sha256=q5mGriN83qKB93Lw1HF64k6J89-z9LLyVaCrce3EoLc,11220 +onnx/backend/test/case/node/sequenceinsert.py,sha256=CuE0HchFa3b5AmhHlQHKdFyuKS4QxrBgyGllRq476qc,2634 +onnx/backend/test/case/node/shape.py,sha256=99FbM0VPVGgfv9pSBx3cpjV0WX17-7USf-5-RQ0Evts,1557 +onnx/backend/test/case/node/shrink.py,sha256=YYJIhSU8RaZf6AyZRqiHhdp0G7mTFDTSJClisqSViSA,1054 +onnx/backend/test/case/node/sigmoid.py,sha256=7Yl5VwkxtxW1IpzaI9WDUtRMrmjQaWB7567Kc8uwHpo,847 +onnx/backend/test/case/node/sign.py,sha256=Td-mRwd33PRHTHK3FpNEXXolVE7xRIJXSlVUGqNioo4,557 +onnx/backend/test/case/node/sin.py,sha256=4YBsdp7tQw_636LgJHBycStCzDw9KgIxyr6ZraCn0co,705 +onnx/backend/test/case/node/sinh.py,sha256=hr6lU5muwqnhZBZrm1Pa9Dd0up6zV0xBre76yFzsl4g,762 +onnx/backend/test/case/node/size.py,sha256=mTXvM9xhQZFjDWRaXDiqC4nSybPbCmR9__x4od1uda0,842 +onnx/backend/test/case/node/slice.py,sha256=5Y8-3T5ADfEZ21XkJ4sOWhmsDXg4r3TFJHPjKairFcE,5412 +onnx/backend/test/case/node/softmax.py,sha256=Awhlvai7B3i2cS9bPC1-T0Ag5r3I4HbVckhJ5GT88PM,2741 +onnx/backend/test/case/node/softmaxcrossentropy.py,sha256=WsrrAHlUp-uG-cSrnXJ6K0whNsNOz4kIgyuw1t7E3s4,36666 +onnx/backend/test/case/node/softplus.py,sha256=uRBDt7bRF2TsmA92FMOxoOCukLkENetIThYpzetTolk,829 +onnx/backend/test/case/node/softsign.py,sha256=aR55X3_0Vy0HXAZ5UDl_E2reKMrsmtS8kBsJP8Acwog,769 +onnx/backend/test/case/node/spacetodepth.py,sha256=_7WL1knRWXeoR9I4h5ForniwQ8dbclDo8BlD1loVWIk,1969 +onnx/backend/test/case/node/split.py,sha256=OM5svlonceyKpb7m4Qs56iYaBORbknEwYfh8WHHKiy8,12187 +onnx/backend/test/case/node/splittosequence.py,sha256=UwgsUeIocV1V3edzwfNbicELElCcqnSd56XJEkZPsmQ,2173 +onnx/backend/test/case/node/sqrt.py,sha256=USQsvqWOrWJ_0kGiHosKcJeYQFn0NH9qTsF0rJMCr1k,750 +onnx/backend/test/case/node/squeeze.py,sha256=cdzQUDY2rwGFSBl-0sawK7o82BmsBcwmijtwrcLnAPQ,1088 
+onnx/backend/test/case/node/stft.py,sha256=Qt6t8M37UizNB_FCmgtJhi_l2vmO3EMDr1VC_-DrQrc,2298 +onnx/backend/test/case/node/string_concat.py,sha256=l-1oWvoF2p2a5QJSZWFQmBlkZRvucvlzC3Jdll0P2nw,2013 +onnx/backend/test/case/node/string_split.py,sha256=V3YUOgTj3CIfAP297r_XQ0v8M-pzs9GqdtlDFdY5S9g,4243 +onnx/backend/test/case/node/stringnormalizer.py,sha256=qq4ppjNSBm07HqyFzRR3Dkv0u5F2geZ4bvEDneVwm4c,4540 +onnx/backend/test/case/node/sub.py,sha256=iYvsLgREsRNAMyTjFLu_X3Tlu--_NoxEUuhoDGTzS0U,1475 +onnx/backend/test/case/node/sum.py,sha256=QB8OfhM0Q27U2PdJh8UE7yAmMaA90iHhVyzkQC_uTjk,1356 +onnx/backend/test/case/node/tan.py,sha256=_lMQ27GbS52rF_DSxZ0FcFfu64XI5aInnhoFi-5GWw4,705 +onnx/backend/test/case/node/tanh.py,sha256=JaxgXGGY7QjCqUqVBE88Ca1grqeEH88XR0RBAa2VunA,760 +onnx/backend/test/case/node/tfidfvectorizer.py,sha256=hAadMDUc8CrtFhR4ESdWo8i3Q3UYztViSHmCPF5rbRE,9096 +onnx/backend/test/case/node/thresholdedrelu.py,sha256=NlOKehvtFipnANZmtW1LMa6XKEYPgfmHf0Q2TcxKFoo,1303 +onnx/backend/test/case/node/tile.py,sha256=A2gMadWRxL7ZIW2hgxUrL04U0eOhAFsz6rh6l_RvcW4,1105 +onnx/backend/test/case/node/topk.py,sha256=FETeh_yLg80BbmOc9MVyDKE7mauqw2oex8o0nREUnMk,3643 +onnx/backend/test/case/node/transpose.py,sha256=yVh6-RJFDtO5eDPVQkS8GR3LX0hM83qYCD-97ZI9UCk,1402 +onnx/backend/test/case/node/trilu.py,sha256=5_VgNWUHX4rPliD6_U1aLmMoxr2Jqfyww0F-8uPw4CY,12963 +onnx/backend/test/case/node/unique.py,sha256=FJxhqcKPEAviAt2oFh6STCb7vfj8bioi8IGWh0cFXuw,5975 +onnx/backend/test/case/node/unsqueeze.py,sha256=CjIhyAhKgId1LszNOgzk9VOE2E-h6cgVeSZ-h1OFoVM,2822 +onnx/backend/test/case/node/upsample.py,sha256=PX-Kr8Vwr1xWGQmyHjWelXTHrMjT7IgnlXnOkMJth6c,1394 +onnx/backend/test/case/node/where.py,sha256=ewcNZvx-2-flKzIsVIbJLBP_Mdi0YGdQfWxULwBdZPQ,1336 +onnx/backend/test/case/node/xor.py,sha256=lhiOpaPeGItL3foXB5eyura2LMFRkNxLF8Y-KgHhcBM,2544 +onnx/backend/test/case/test_case.py,sha256=Guj0PorwgST9Nu-e8e3f-1XZCpEgICy2t7tROwpnUz0,571 +onnx/backend/test/case/utils.py,sha256=6eDhFIlmONRT9KiyDdQUeUet5Xl1rVv4xeor641ed80,1082 +onnx/backend/test/cmd_tools.py,sha256=gP85amSu3Pi5ZENuY48v1-1100TFfWsKQZt4LFAlafY,7892 +onnx/backend/test/data/light/README.md,sha256=eYTWkgQOw8GI6ZYxtZDwQy4ry0VynJnjGmA8GCZ5nOU,482 +onnx/backend/test/data/light/light_bvlc_alexnet.onnx,sha256=Kvp4zvWoiu2dbj1j-5K9MwyRd6wVDRkYnGs-cgS6AhI,3968 +onnx/backend/test/data/light/light_bvlc_alexnet_output_0.pb,sha256=l9a8wotq1zG8MoGosDBo0V-p1Th2m1skylRI6hQ9sQA,4010 +onnx/backend/test/data/light/light_densenet121.onnx,sha256=Sd21cSeX1hZPHYZL7arZJ95POQmtG0ujkKksL4FQ6fY,214344 +onnx/backend/test/data/light/light_densenet121_output_0.pb,sha256=9sxhMPy_kDFFImeGA7s9t-2POnl1SzfTrJXJouP03gw,4014 +onnx/backend/test/data/light/light_inception_v1.onnx,sha256=u3oObDcMcJ9WFe7vlhtDYo3hPQAJrk1vS_sNWupdgnA,36869 +onnx/backend/test/data/light/light_inception_v1_output_0.pb,sha256=l9a8wotq1zG8MoGosDBo0V-p1Th2m1skylRI6hQ9sQA,4010 +onnx/backend/test/data/light/light_inception_v2.onnx,sha256=Ik131VsmVZqVnbYnw_QXpiP787MADSXwk5MnqpNdkz8,159024 +onnx/backend/test/data/light/light_inception_v2_output_0.pb,sha256=l9a8wotq1zG8MoGosDBo0V-p1Th2m1skylRI6hQ9sQA,4010 +onnx/backend/test/data/light/light_resnet50.onnx,sha256=Bed6XJyc4JE_VJpQ1uus7V4P9oF7YeCbribkxb2QVeQ,79770 +onnx/backend/test/data/light/light_resnet50_output_0.pb,sha256=l9a8wotq1zG8MoGosDBo0V-p1Th2m1skylRI6hQ9sQA,4010 +onnx/backend/test/data/light/light_shufflenet.onnx,sha256=xvQG1ivjbWtFclQsCVCiq9WfViNwaHkykGgLuon7r-U,67666 +onnx/backend/test/data/light/light_shufflenet_output_0.pb,sha256=l9a8wotq1zG8MoGosDBo0V-p1Th2m1skylRI6hQ9sQA,4010 
+onnx/backend/test/data/light/light_squeezenet.onnx,sha256=dwsPPIYj4Yv1i1N1TXEAUbTCaCSEIhQpgKEyu-bf6Qg,15618 +onnx/backend/test/data/light/light_squeezenet_output_0.pb,sha256=Mu7nS35YlymoBpJn3mW6aroogdDyQEGq6OUPaFMDwTY,4014 +onnx/backend/test/data/light/light_vgg19.onnx,sha256=jlR9cys6PWbuuPpkoCatuZTT21UvC71S5DbQYwDYmv4,9311 +onnx/backend/test/data/light/light_vgg19_output_0.pb,sha256=l9a8wotq1zG8MoGosDBo0V-p1Th2m1skylRI6hQ9sQA,4010 +onnx/backend/test/data/light/light_zfnet512.onnx,sha256=ZES7WLmMPRT1UaO9uD7qnl234Ud5DbMRXER-nJqDOLA,4506 +onnx/backend/test/data/light/light_zfnet512_output_0.pb,sha256=l9a8wotq1zG8MoGosDBo0V-p1Th2m1skylRI6hQ9sQA,4010 +onnx/backend/test/data/node/test_abs/model.onnx,sha256=h-jIWjxawEAXgf0TJ6BKjux7j6yN9-gTsr7VE0QDkFo,97 +onnx/backend/test/data/node/test_abs/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_abs/test_data_set_0/output_0.pb,sha256=2BNZUVKdYf3atI3XSL8OIxRDqBjEfFBVuhwUGkp0ndA,254 +onnx/backend/test/data/node/test_acos/model.onnx,sha256=K4WDFhjPMxh8fwNbzIiJux_VbyQjjMhtigoNghdRKEA,99 +onnx/backend/test/data/node/test_acos/test_data_set_0/input_0.pb,sha256=BcAjkpobMrS9vXiC_15TubU3Yu4pKcZr520PC6fKOT0,254 +onnx/backend/test/data/node/test_acos/test_data_set_0/output_0.pb,sha256=lRYNLwlHN1hfudhkE1gfGndrnWqvb6bqZ3YAu6Tf82Q,254 +onnx/backend/test/data/node/test_acos_example/model.onnx,sha256=BXaGG4ZgoPcLt19dp4JpRGCrBr9yeHfr0sDHckNlzH4,91 +onnx/backend/test/data/node/test_acos_example/test_data_set_0/input_0.pb,sha256=cD3gw75QWnhZYjV4loK8Tm8OhSPzT7cxIHtMB2cBq90,21 +onnx/backend/test/data/node/test_acos_example/test_data_set_0/output_0.pb,sha256=K2s964qJdp4TDGxtMjeynV0dAchEJfcNnxWvaSD8j_k,21 +onnx/backend/test/data/node/test_acosh/model.onnx,sha256=V7mDqrVOnldEoeSsikvbhVek3m_X4YXS5vCQWPsBIdU,101 +onnx/backend/test/data/node/test_acosh/test_data_set_0/input_0.pb,sha256=MKsiayDTOWEgFMmPTwo5lylyk7-MtQrhW849Ff1C4-4,254 +onnx/backend/test/data/node/test_acosh/test_data_set_0/output_0.pb,sha256=Yrx90XQ3dzgx40XOm9v3vb5LQrD9rEhLfylCJBZFPpU,254 +onnx/backend/test/data/node/test_acosh_example/model.onnx,sha256=EdYmQO55mm3ioMis7Ib6-1kQWLeCMdPl2N3K2f230zY,93 +onnx/backend/test/data/node/test_acosh_example/test_data_set_0/input_0.pb,sha256=NXIAFi1g76Ag1nr5wiDIiE6ilson7_t0kC_Qf5Ln6vg,21 +onnx/backend/test/data/node/test_acosh_example/test_data_set_0/output_0.pb,sha256=Gz3pLNpKQQzxib-93ikXb5WzkZogQuo8gOMoAOPLPH8,21 +onnx/backend/test/data/node/test_adagrad/model.onnx,sha256=W_0zemXPwyE0IPoGhB64im07jPvvYjUJn8SVDyyUv-g,320 +onnx/backend/test/data/node/test_adagrad/test_data_set_0/input_0.pb,sha256=CxMIfRjPZEo5uxAaZt0Go3_5W6wA6Dv71f9iG1FoqiI,11 +onnx/backend/test/data/node/test_adagrad/test_data_set_0/input_1.pb,sha256=xEpBjafU-CYCl1V5uhzdqP2rUfBgaj7glskxTqDGi-k,15 +onnx/backend/test/data/node/test_adagrad/test_data_set_0/input_2.pb,sha256=turuCuY1a0B6_bhynwjX2Kq-OrJi3HnwRSCOt5BZBLw,13 +onnx/backend/test/data/node/test_adagrad/test_data_set_0/input_3.pb,sha256=hrgodO3LVW_LxTbUiK9tBvLA2Q7WZtXUW9itP9oqfQE,13 +onnx/backend/test/data/node/test_adagrad/test_data_set_0/input_4.pb,sha256=kfqSznBP6jN2IJsiqQ1IeGKqLx8m6zYDnfqkjX-hUmk,13 +onnx/backend/test/data/node/test_adagrad/test_data_set_0/output_0.pb,sha256=oCKisBmEjpP3XB3VhImrZp-lrCfEuSFzdJEk7DIOAw8,17 +onnx/backend/test/data/node/test_adagrad/test_data_set_0/output_1.pb,sha256=-92hjiJXMpo3i8AkkMBKfoU_6I8-YQetue7lzslQneg,17 +onnx/backend/test/data/node/test_adagrad_multiple/model.onnx,sha256=0GfKCSbRa93cMC5F1SMr2KKHV-IIViABzwaCNRfA-Zk,465 
+onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/input_0.pb,sha256=CxMIfRjPZEo5uxAaZt0Go3_5W6wA6Dv71f9iG1FoqiI,11 +onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/input_1.pb,sha256=xEpBjafU-CYCl1V5uhzdqP2rUfBgaj7glskxTqDGi-k,15 +onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/input_2.pb,sha256=zSCQaDJuSfjV89lOKl_e8FAvaYdDg6UrH2n4eykC9bI,14 +onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/input_3.pb,sha256=A-40FgJNucPNFb_WksbMy0Z-E_EIV7RF0DP90kqbbC4,18 +onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/input_4.pb,sha256=CxeZGajewrwPha-MBoCdOi3nXJYzF9DgbGhOIwIqx3A,14 +onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/input_5.pb,sha256=stCGvEbBW8itgJbkRX1pnKZir080kytSIuTmZAkh41Y,18 +onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/input_6.pb,sha256=1-flVrzPmKolD0GSh-ld1wYZxSEIUx0TUpi_0ywedqA,14 +onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/input_7.pb,sha256=elUimbVps7sroUEVgzsr7m7OElV1YF29zVkzYA0YS-s,18 +onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/output_0.pb,sha256=UKTIQ4M58_6bp-LBagOe_d4VRwris7IFZ3ni0bWiVl8,18 +onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/output_1.pb,sha256=ipVVDyNyv3SavMDXeF1dVfWDMwjlgqbW33nRydOBFSI,22 +onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/output_2.pb,sha256=HonHMpF_AzJUt8IAnJmpNSnf38ARw54VShRtI15DTLQ,18 +onnx/backend/test/data/node/test_adagrad_multiple/test_data_set_0/output_3.pb,sha256=0tufwJtRaRjRTHCQZDr7_arrlGvjPhOTqvCZJT9X2yM,22 +onnx/backend/test/data/node/test_adam/model.onnx,sha256=w9Ku8dNLby8i4QDTbajJOUXBeRMdrmvFW9GX1MGla3A,371 +onnx/backend/test/data/node/test_adam/test_data_set_0/input_0.pb,sha256=CxMIfRjPZEo5uxAaZt0Go3_5W6wA6Dv71f9iG1FoqiI,11 +onnx/backend/test/data/node/test_adam/test_data_set_0/input_1.pb,sha256=xEpBjafU-CYCl1V5uhzdqP2rUfBgaj7glskxTqDGi-k,15 +onnx/backend/test/data/node/test_adam/test_data_set_0/input_2.pb,sha256=q6Wsydvsci5ZJNiqDej1Kq17QzS3xOPWEruuLIgoorQ,17 +onnx/backend/test/data/node/test_adam/test_data_set_0/input_3.pb,sha256=NQkKr3rClZp31gGf1Oyh_ECrr2_5AMVYn_fNdDazT8U,17 +onnx/backend/test/data/node/test_adam/test_data_set_0/input_4.pb,sha256=6upGRVU5-O-5zKXoDu6cRsc2F036k6N02w_CNPHBGqA,17 +onnx/backend/test/data/node/test_adam/test_data_set_0/input_5.pb,sha256=_sQAc0yH7uQipbdJe_N9EV_eMVJNNTb0bYSrlveTHSA,17 +onnx/backend/test/data/node/test_adam/test_data_set_0/output_0.pb,sha256=TVRVeKeatp47K62yu5zNe7emQcyoqRGVp2TET-REmdw,21 +onnx/backend/test/data/node/test_adam/test_data_set_0/output_1.pb,sha256=rK6Zk9eax3tUns3S7Oa55LN4GA_v8Udzv6uiSt35hMs,21 +onnx/backend/test/data/node/test_adam/test_data_set_0/output_2.pb,sha256=jn5qRVW1d5fDCKW6NTd6zhlOLzJWjh6PDwF_GotDvU0,21 +onnx/backend/test/data/node/test_adam_multiple/model.onnx,sha256=xOWuY-R-HxA0t0rV_ZL-H5CNzeqAWtWKKmeZO3mQJw4,553 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/input_0.pb,sha256=CxMIfRjPZEo5uxAaZt0Go3_5W6wA6Dv71f9iG1FoqiI,11 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/input_1.pb,sha256=xEpBjafU-CYCl1V5uhzdqP2rUfBgaj7glskxTqDGi-k,15 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/input_2.pb,sha256=zSCQaDJuSfjV89lOKl_e8FAvaYdDg6UrH2n4eykC9bI,14 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/input_3.pb,sha256=A-40FgJNucPNFb_WksbMy0Z-E_EIV7RF0DP90kqbbC4,18 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/input_4.pb,sha256=CxeZGajewrwPha-MBoCdOi3nXJYzF9DgbGhOIwIqx3A,14 
+onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/input_5.pb,sha256=stCGvEbBW8itgJbkRX1pnKZir080kytSIuTmZAkh41Y,18 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/input_6.pb,sha256=Rc90FrQBRUlC-TzO9j8dMroSVATSdH027FaQzEtBeUA,14 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/input_7.pb,sha256=NPcgj_9FyMIJ3Ll5ChXoHxbUNPE7QquI-gnfMq4v000,18 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/input_8.pb,sha256=NP1McfMEN1DUKgTtTrLFqX-Q6nolS8o0IdLzi8r_YrA,14 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/input_9.pb,sha256=I1axqZFkXox9eUbvVASQWpaGtnfeL7UNt4kOQSINWY8,18 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/output_0.pb,sha256=gn-lzXgI1hDCYZ2_9ZQMcws-c0IlriCgT4tPfdZeMn8,18 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/output_1.pb,sha256=3poCofD7Nyg9KsIYTG-pJUfEtEz-G7JNKMvVq4fAIdc,22 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/output_2.pb,sha256=PhEvop64Uiv1n8ikOYjsoB_Wa2-5qXCpCJteJyKtgsM,18 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/output_3.pb,sha256=OFoiZH4lHPqV4YM4sO9JAUSITmDTLcFPGsDo_8g3nWk,22 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/output_4.pb,sha256=93pyGZX6OvmF1axeH3eJ18jCG9lt1X_Ne8aEOp2tcMs,18 +onnx/backend/test/data/node/test_adam_multiple/test_data_set_0/output_5.pb,sha256=Gxk4rHZZEsF0zHPKYjeLAOb2bgc8Ty5pp4ow68emvvg,22 +onnx/backend/test/data/node/test_add/model.onnx,sha256=k88EOHBs3av2g63IsTyKF8S4sS2LzLGwQSaOH03_Ci0,129 +onnx/backend/test/data/node/test_add/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_add/test_data_set_0/input_1.pb,sha256=qlmuuQNl3eLtxabKfNE6cbZHjYcj-DzNBr1M4rz7GSo,254 +onnx/backend/test/data/node/test_add/test_data_set_0/output_0.pb,sha256=n_TAJ8n2D7D_tdElxet4CQ6etKybHAJmkYM7_MSHuy8,256 +onnx/backend/test/data/node/test_add_bcast/model.onnx,sha256=1WLvYrfpVB4rm_K1rmKXv-qn0UoZxok4gxiy3Np6FVA,127 +onnx/backend/test/data/node/test_add_bcast/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_add_bcast/test_data_set_0/input_1.pb,sha256=jYn9FLUzE9ETJcxFW48XqSehRlyePPjUoI-gtNyozBs,29 +onnx/backend/test/data/node/test_add_bcast/test_data_set_0/output_0.pb,sha256=rG38BI9ZZ1bIw-G-vQNaVea2-iEFgzlnGrSk4JRQ6_s,256 +onnx/backend/test/data/node/test_add_uint8/model.onnx,sha256=paL-5wnP160jtuJlWbv2GFpqDVJLSBw9E89HQMr8HjM,135 +onnx/backend/test/data/node/test_add_uint8/test_data_set_0/input_0.pb,sha256=wY9Jbz2HD0FM7JM6zwbBbRhGHss3cDgO6eFTgp5L7W8,73 +onnx/backend/test/data/node/test_add_uint8/test_data_set_0/input_1.pb,sha256=BhGgo5N13MdBE3iNJgN6gWCP4GSFzJZUcoAIT5jIuok,73 +onnx/backend/test/data/node/test_add_uint8/test_data_set_0/output_0.pb,sha256=zpBdHPu2qBXHaUcgo8a7xdxlrS3qOLJA6Wip2m_7OsM,75 +onnx/backend/test/data/node/test_affine_grid_2d/model.onnx,sha256=N1MYGhJOky81XKF5BJDmzv7JDq5WCJvO34xAh0We7Nw,182 +onnx/backend/test/data/node/test_affine_grid_2d/test_data_set_0/input_0.pb,sha256=xNhlkk6TeMP0F2xjmLog6lBJCi2vqV0TLmDYgwOxYoY,65 +onnx/backend/test/data/node/test_affine_grid_2d/test_data_set_0/input_1.pb,sha256=9IexbIORnsaGrx67T-L98-YYA-KmtPDfGzZD6aerfLI,44 +onnx/backend/test/data/node/test_affine_grid_2d/test_data_set_0/output_0.pb,sha256=uRZQ1rXSePRYTb_ZVr4BLj02Aqz2t_o9_YQHAxrQmEU,499 +onnx/backend/test/data/node/test_affine_grid_2d_align_corners/model.onnx,sha256=gFQKW2xjdb1ehNLYBw1-qqZ3RzjeeZDxmTU1DWXZLBo,196 
+onnx/backend/test/data/node/test_affine_grid_2d_align_corners/test_data_set_0/input_0.pb,sha256=xNhlkk6TeMP0F2xjmLog6lBJCi2vqV0TLmDYgwOxYoY,65 +onnx/backend/test/data/node/test_affine_grid_2d_align_corners/test_data_set_0/input_1.pb,sha256=9IexbIORnsaGrx67T-L98-YYA-KmtPDfGzZD6aerfLI,44 +onnx/backend/test/data/node/test_affine_grid_2d_align_corners/test_data_set_0/output_0.pb,sha256=CTfv2pPSSm_ufn5BP4gHTE0QxzdD3eUIvgjGL1FfUxA,499 +onnx/backend/test/data/node/test_affine_grid_2d_align_corners_expanded/model.onnx,sha256=Q-aiHyb8qkjxG8BntVqgr-R8UVFf3ONrvE-tvJyntRc,27753 +onnx/backend/test/data/node/test_affine_grid_2d_align_corners_expanded/test_data_set_0/input_0.pb,sha256=xNhlkk6TeMP0F2xjmLog6lBJCi2vqV0TLmDYgwOxYoY,65 +onnx/backend/test/data/node/test_affine_grid_2d_align_corners_expanded/test_data_set_0/input_1.pb,sha256=9IexbIORnsaGrx67T-L98-YYA-KmtPDfGzZD6aerfLI,44 +onnx/backend/test/data/node/test_affine_grid_2d_align_corners_expanded/test_data_set_0/output_0.pb,sha256=CTfv2pPSSm_ufn5BP4gHTE0QxzdD3eUIvgjGL1FfUxA,499 +onnx/backend/test/data/node/test_affine_grid_2d_expanded/model.onnx,sha256=uVLXuP4GRcKkbqFRMDq-d63jkG16eywVtohJBMsgoxM,22975 +onnx/backend/test/data/node/test_affine_grid_2d_expanded/test_data_set_0/input_0.pb,sha256=xNhlkk6TeMP0F2xjmLog6lBJCi2vqV0TLmDYgwOxYoY,65 +onnx/backend/test/data/node/test_affine_grid_2d_expanded/test_data_set_0/input_1.pb,sha256=9IexbIORnsaGrx67T-L98-YYA-KmtPDfGzZD6aerfLI,44 +onnx/backend/test/data/node/test_affine_grid_2d_expanded/test_data_set_0/output_0.pb,sha256=uRZQ1rXSePRYTb_ZVr4BLj02Aqz2t_o9_YQHAxrQmEU,499 +onnx/backend/test/data/node/test_affine_grid_3d/model.onnx,sha256=vvTG1-5wXO-hfGRSMIm1JbO65u2SlgM_hOfgpciBbC4,186 +onnx/backend/test/data/node/test_affine_grid_3d/test_data_set_0/input_0.pb,sha256=GKKhJUWROTNgqMF0BBN6u1jWxngTUkvfmq91xSw8ujE,113 +onnx/backend/test/data/node/test_affine_grid_3d/test_data_set_0/input_1.pb,sha256=8ofzgCT9bt76McJqLRbG6-RVLDn_Fu3lLM5fvIQvlXk,52 +onnx/backend/test/data/node/test_affine_grid_3d/test_data_set_0/output_0.pb,sha256=pUVBR8X7xyXEys7lztl4CGmzrN8w8qqcNraifjAZyjs,2901 +onnx/backend/test/data/node/test_affine_grid_3d_align_corners/model.onnx,sha256=6zPUSy7XbYX3riFyjKfV_3hK_PWis0vNtPFCePPSyQc,200 +onnx/backend/test/data/node/test_affine_grid_3d_align_corners/test_data_set_0/input_0.pb,sha256=GKKhJUWROTNgqMF0BBN6u1jWxngTUkvfmq91xSw8ujE,113 +onnx/backend/test/data/node/test_affine_grid_3d_align_corners/test_data_set_0/input_1.pb,sha256=8ofzgCT9bt76McJqLRbG6-RVLDn_Fu3lLM5fvIQvlXk,52 +onnx/backend/test/data/node/test_affine_grid_3d_align_corners/test_data_set_0/output_0.pb,sha256=ys1XMwqKQBT8wg-nNQiTLn0bqQ8cNSIVcIb-RIIZ1b0,2901 +onnx/backend/test/data/node/test_affine_grid_3d_align_corners_expanded/model.onnx,sha256=efWc8OfBgAOzmYmXcnVBfFBam6y9S23SMDkDEpTR79Y,27757 +onnx/backend/test/data/node/test_affine_grid_3d_align_corners_expanded/test_data_set_0/input_0.pb,sha256=GKKhJUWROTNgqMF0BBN6u1jWxngTUkvfmq91xSw8ujE,113 +onnx/backend/test/data/node/test_affine_grid_3d_align_corners_expanded/test_data_set_0/input_1.pb,sha256=8ofzgCT9bt76McJqLRbG6-RVLDn_Fu3lLM5fvIQvlXk,52 +onnx/backend/test/data/node/test_affine_grid_3d_align_corners_expanded/test_data_set_0/output_0.pb,sha256=ys1XMwqKQBT8wg-nNQiTLn0bqQ8cNSIVcIb-RIIZ1b0,2901 +onnx/backend/test/data/node/test_affine_grid_3d_expanded/model.onnx,sha256=mgQgiEhCSz4B5iN7ACEa7XUoUVanr7JXePPoMft5nqI,22979 +onnx/backend/test/data/node/test_affine_grid_3d_expanded/test_data_set_0/input_0.pb,sha256=GKKhJUWROTNgqMF0BBN6u1jWxngTUkvfmq91xSw8ujE,113 
+onnx/backend/test/data/node/test_affine_grid_3d_expanded/test_data_set_0/input_1.pb,sha256=8ofzgCT9bt76McJqLRbG6-RVLDn_Fu3lLM5fvIQvlXk,52 +onnx/backend/test/data/node/test_affine_grid_3d_expanded/test_data_set_0/output_0.pb,sha256=pUVBR8X7xyXEys7lztl4CGmzrN8w8qqcNraifjAZyjs,2901 +onnx/backend/test/data/node/test_ai_onnx_ml_array_feature_extractor/model.onnx,sha256=-KqAy2YBDRwyqfinmfj87kNajZdOLQ5-h_HZqnrca1c,181 +onnx/backend/test/data/node/test_ai_onnx_ml_array_feature_extractor/test_data_set_0/input_0.pb,sha256=XI00qhavYkKl_aHKei8a3IH_dsLyhI_Y57J38wFhkU0,59 +onnx/backend/test/data/node/test_ai_onnx_ml_array_feature_extractor/test_data_set_0/input_1.pb,sha256=6bV-LPoUKmxMZqYVh7trsIYnlXnapYVYirqTx2WlSUs,25 +onnx/backend/test/data/node/test_ai_onnx_ml_array_feature_extractor/test_data_set_0/output_0.pb,sha256=cT7VjDLALeCy6aKg9Vg7K9bxY5CnvFGrq3e2QRifANk,35 +onnx/backend/test/data/node/test_ai_onnx_ml_binarizer/model.onnx,sha256=MGIz0rt69DiEaqhSFKzli15l4VhFz99Q_ysGnynGgv8,164 +onnx/backend/test/data/node/test_ai_onnx_ml_binarizer/test_data_set_0/input_0.pb,sha256=IHiHIyablYLzOeKB32-oDw2gZhG5BoXro6hrposdegs,254 +onnx/backend/test/data/node/test_ai_onnx_ml_binarizer/test_data_set_0/output_0.pb,sha256=WCPxC6xa9osg4LxN0y90PYcSuudpgzxjcx9gHKqwIxQ,254 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_string_int/model.onnx,sha256=BopT_IWMzV8xGG-EOoxKO-sCQHGxIoWvDwxZVsr8uwk,221 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_string_int/test_data_set_0/input_0.pb,sha256=EIRRN8kLHaDUGvwylx1GTclfNnQxZlqLbfifjMXq2nI,22 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_string_int/test_data_set_0/output_0.pb,sha256=oh_Q0pMP0Qv2ifITMtFSA-EiwxFl7OIKYlL75syu1SI,49 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_string_int_no_default/model.onnx,sha256=tEOvEj-q-C5bDPp-TRTczGefWLOPMSaI9OstQIoVeug,210 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_string_int_no_default/test_data_set_0/input_0.pb,sha256=EIRRN8kLHaDUGvwylx1GTclfNnQxZlqLbfifjMXq2nI,22 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_string_int_no_default/test_data_set_0/output_0.pb,sha256=h9Y93K7BmSLyWlaA97ZQq7wv4bwL9sGXYJwVOMcm7gA,49 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_tensor_mapping/model.onnx,sha256=_XiC_T8C4_tYJVfu6_cYYJ3yMjrmQMtEwcKJbbeyIXU,288 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_tensor_mapping/test_data_set_0/input_0.pb,sha256=EIRRN8kLHaDUGvwylx1GTclfNnQxZlqLbfifjMXq2nI,22 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_tensor_mapping/test_data_set_0/output_0.pb,sha256=xyaUKkcPIYhr44NLrN2_hh3bGKn8MXrJWypRsnY7wl8,19 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_tensor_value_only_mapping/model.onnx,sha256=2D1AghjlgsysNYKYNo-z9_2X_Aasq7OzyumqY-nFG0o,281 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_tensor_value_only_mapping/test_data_set_0/input_0.pb,sha256=EIRRN8kLHaDUGvwylx1GTclfNnQxZlqLbfifjMXq2nI,22 +onnx/backend/test/data/node/test_ai_onnx_ml_label_encoder_tensor_value_only_mapping/test_data_set_0/output_0.pb,sha256=xyaUKkcPIYhr44NLrN2_hh3bGKn8MXrJWypRsnY7wl8,19 +onnx/backend/test/data/node/test_ai_onnx_ml_tree_ensemble_set_membership/model.onnx,sha256=9uNdicaEdAQjF_c6MPpDd6VgUBjtuMAz-exnLVLbxdI,656 +onnx/backend/test/data/node/test_ai_onnx_ml_tree_ensemble_set_membership/test_data_set_0/input_0.pb,sha256=FA5pjARxPbin_jvJjIdpgk-hNLiGq24kabPh-rHs_Rg,35 
+onnx/backend/test/data/node/test_ai_onnx_ml_tree_ensemble_set_membership/test_data_set_0/output_0.pb,sha256=LG1b6rUx1h2kpOBLxnDF_P2oSn1fMgMlM6RXelI41P4,107 +onnx/backend/test/data/node/test_ai_onnx_ml_tree_ensemble_single_tree/model.onnx,sha256=l7hQsQQZxkVR2dt5_eqYyo4pSaD7f4mW3CqhradvhTQ,598 +onnx/backend/test/data/node/test_ai_onnx_ml_tree_ensemble_single_tree/test_data_set_0/input_0.pb,sha256=btAUcGyHALiwspzQrTF8GIeb1pflj31PZzD2dkxYU3A,59 +onnx/backend/test/data/node/test_ai_onnx_ml_tree_ensemble_single_tree/test_data_set_0/output_0.pb,sha256=fciZjMVod9L3Q3eLk6i4Ttl-CgcNZxRtLU0jTKlQjEY,59 +onnx/backend/test/data/node/test_and2d/model.onnx,sha256=jvhZTvomeDvtnZRjIBB5C8xGTWc-N4woaVD1pjk_yfU,119 +onnx/backend/test/data/node/test_and2d/test_data_set_0/input_0.pb,sha256=LjNp_gf5HfToURj-fOV-TL_10b7cTqoOj8RfPTNPYLQ,23 +onnx/backend/test/data/node/test_and2d/test_data_set_0/input_1.pb,sha256=goZmxegiEZPjRTXScUiQU9fpxPndF4EqAZy8oeToa8k,23 +onnx/backend/test/data/node/test_and2d/test_data_set_0/output_0.pb,sha256=GcjmcM13wdXARvgFEuZBPW92lqso2-5sQMmJiIZovWI,25 +onnx/backend/test/data/node/test_and3d/model.onnx,sha256=jssi2o-pTtK6Si7NKEBEaxn9P5U4lmdUn0XF-YAJvnY,131 +onnx/backend/test/data/node/test_and3d/test_data_set_0/input_0.pb,sha256=Dr4k5dppNqEaWmFsMUXw_Pbp7QoEpqkXv4Bodqt7zMQ,73 +onnx/backend/test/data/node/test_and3d/test_data_set_0/input_1.pb,sha256=v5SpTzEGQijgxpt3GQdqFdpD-LCqlDgrPace75EL5pQ,73 +onnx/backend/test/data/node/test_and3d/test_data_set_0/output_0.pb,sha256=uCkMKeu3A5Fl999w3OJ0Fu3JjZpNFWJwCW-yupWBY78,75 +onnx/backend/test/data/node/test_and4d/model.onnx,sha256=fJ2a5gN50iVWK9QnSR_kCD16NE5gqeimgsReLfdAEgE,143 +onnx/backend/test/data/node/test_and4d/test_data_set_0/input_0.pb,sha256=sMKxU5gNQx6mv89ZOkmV0QCdEu9f8EhE-MZuoWNbcek,376 +onnx/backend/test/data/node/test_and4d/test_data_set_0/input_1.pb,sha256=9JaUvXVRf_NYTRwYWtPYsbSoKJDwOXPZuRBgFY7pYPc,376 +onnx/backend/test/data/node/test_and4d/test_data_set_0/output_0.pb,sha256=7z6DcqU4PYD4NBG0hBjBoQtn9AAU1HXoGQp_P6HX5ZY,378 +onnx/backend/test/data/node/test_and_bcast3v1d/model.onnx,sha256=V-L6MLM5UCLYeSkG6m8eMOy6OxDYPy3XvcGgesbt76E,131 +onnx/backend/test/data/node/test_and_bcast3v1d/test_data_set_0/input_0.pb,sha256=wF52hvLEXCfryTN0FvgWPKXSx3PPlpiaC_aAZ5IXCMc,73 +onnx/backend/test/data/node/test_and_bcast3v1d/test_data_set_0/input_1.pb,sha256=0cP0kHJ95VbmGsyJW3rlozPO38YQocSctc5buLas0ZI,14 +onnx/backend/test/data/node/test_and_bcast3v1d/test_data_set_0/output_0.pb,sha256=2zb5cHudGHSOWOFcjgMBQtTRJe48hbld128aQ2hbCy8,75 +onnx/backend/test/data/node/test_and_bcast3v2d/model.onnx,sha256=rkO8gZJ6USKFYpUWgKZDOVLVKrGaXIuHU-8heVXwwHo,135 +onnx/backend/test/data/node/test_and_bcast3v2d/test_data_set_0/input_0.pb,sha256=2Hi5gZzCyk3SWtB8dhtS5tIMipISnPJGI1SIjx08Fe0,73 +onnx/backend/test/data/node/test_and_bcast3v2d/test_data_set_0/input_1.pb,sha256=J1neLsOqZHTvGdagBF8_3O6sQk1byuUIMABR0wJxASA,31 +onnx/backend/test/data/node/test_and_bcast3v2d/test_data_set_0/output_0.pb,sha256=mdG-32ANjRiRrubGWEMBugaAkJKthCw74hqclFeUg0c,75 +onnx/backend/test/data/node/test_and_bcast4v2d/model.onnx,sha256=nQ71EhbbQRvGvdUrcCWMtA8gDvPy3CA3wyCmREtQu-4,143 +onnx/backend/test/data/node/test_and_bcast4v2d/test_data_set_0/input_0.pb,sha256=TG9PpNJBW-rhSdwYDpmwCMotWc8SOe2e2cLAoXaNQ8o,376 +onnx/backend/test/data/node/test_and_bcast4v2d/test_data_set_0/input_1.pb,sha256=D4JIlfVUzijC8rk152pkPsDWa33YJRlmutjMvkT-1FI,41 +onnx/backend/test/data/node/test_and_bcast4v2d/test_data_set_0/output_0.pb,sha256=BXDZEzEx30KZGRlX-QRAu_TfrftChYGi4jHGv64S9NI,378 
+onnx/backend/test/data/node/test_and_bcast4v3d/model.onnx,sha256=lpW4vLVgx9F7FIKrqQ2KuHAfgS3hyDrL7HNjhnP7G40,147 +onnx/backend/test/data/node/test_and_bcast4v3d/test_data_set_0/input_0.pb,sha256=1gaunFCw_HO0ZAf_sAp9bfcwVRJW_aTJIwkv-2WYWMw,376 +onnx/backend/test/data/node/test_and_bcast4v3d/test_data_set_0/input_1.pb,sha256=26gLYNI9eQESqKzWzx1zrE8YDDCNIvfAE_0m_FoeT3E,133 +onnx/backend/test/data/node/test_and_bcast4v3d/test_data_set_0/output_0.pb,sha256=S4ncrzynkHE-cPqlleaq8UutgOSmUv4roECEsfI8u7o,378 +onnx/backend/test/data/node/test_and_bcast4v4d/model.onnx,sha256=TfEv0B9Q-1sIKlKXuKWAwVVrdJjWUkK6frFTvOHzfpM,151 +onnx/backend/test/data/node/test_and_bcast4v4d/test_data_set_0/input_0.pb,sha256=f63fGhPch5eTkAvMXCrJTC0FOIdhKnaSfJ8Ox9hnol0,39 +onnx/backend/test/data/node/test_and_bcast4v4d/test_data_set_0/input_1.pb,sha256=iCGDAU6H1-WHvqKKRCD44UWeluKsnFCWMpevbsJ5ASU,105 +onnx/backend/test/data/node/test_and_bcast4v4d/test_data_set_0/output_0.pb,sha256=WAS1y88UC7m0RqEGzxSzw4NBXEUt8_2JlykNVAzyVVM,378 +onnx/backend/test/data/node/test_argmax_default_axis_example/model.onnx,sha256=5gOsN-W4GWrgix75Y31K_ghoeSHrVbkLZ5U27gTbyvU,149 +onnx/backend/test/data/node/test_argmax_default_axis_example/test_data_set_0/input_0.pb,sha256=P7LmjrG5EozUQ8-DBtDjai78FFBzBbcHWYMPWxKW6mI,30 +onnx/backend/test/data/node/test_argmax_default_axis_example/test_data_set_0/output_0.pb,sha256=NoozXBTfuGBGESaqTAVgvBaY_2_1KnEXaCBKowhZYgc,32 +onnx/backend/test/data/node/test_argmax_default_axis_example_select_last_index/model.onnx,sha256=u0lNlP1UgUDRYDOHkWuB1E_PdfT8K2RLKi9q1vfJZgo,194 +onnx/backend/test/data/node/test_argmax_default_axis_example_select_last_index/test_data_set_0/input_0.pb,sha256=oiK8EV6rRGZdVJIrCJ0v9IVyWbWtqgZpw-MbWYg2ofA,30 +onnx/backend/test/data/node/test_argmax_default_axis_example_select_last_index/test_data_set_0/output_0.pb,sha256=NoozXBTfuGBGESaqTAVgvBaY_2_1KnEXaCBKowhZYgc,32 +onnx/backend/test/data/node/test_argmax_default_axis_random/model.onnx,sha256=-ioQatsDKz2cdDFAFf4kVlt_rTVhEEST2Iooz_Zo_Lc,157 +onnx/backend/test/data/node/test_argmax_default_axis_random/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmax_default_axis_random/test_data_set_0/output_0.pb,sha256=DhGaHjs8SV8OufGMy1I7s0-ROoSwvKKmES2wj44RaVU,114 +onnx/backend/test/data/node/test_argmax_default_axis_random_select_last_index/model.onnx,sha256=8DJmQWRw4NmhbgZNFemfZDS2J1glGNrvaUFAejj1Mtk,201 +onnx/backend/test/data/node/test_argmax_default_axis_random_select_last_index/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmax_default_axis_random_select_last_index/test_data_set_0/output_0.pb,sha256=DhGaHjs8SV8OufGMy1I7s0-ROoSwvKKmES2wj44RaVU,114 +onnx/backend/test/data/node/test_argmax_keepdims_example/model.onnx,sha256=9r9MwZu1aFQ7Tuu-ALVKqHWJmPLJPAlEqr98a_PkcFg,159 +onnx/backend/test/data/node/test_argmax_keepdims_example/test_data_set_0/input_0.pb,sha256=P7LmjrG5EozUQ8-DBtDjai78FFBzBbcHWYMPWxKW6mI,30 +onnx/backend/test/data/node/test_argmax_keepdims_example/test_data_set_0/output_0.pb,sha256=4F0-PRVXRwzEKW10dABm9QzkLNzVaRoMkYLi7CvSpnA,32 +onnx/backend/test/data/node/test_argmax_keepdims_example_select_last_index/model.onnx,sha256=N5h3EbEADdYKk5cC1XlMHsxpoWPXDYCbsITbAdw0m8w,203 +onnx/backend/test/data/node/test_argmax_keepdims_example_select_last_index/test_data_set_0/input_0.pb,sha256=oiK8EV6rRGZdVJIrCJ0v9IVyWbWtqgZpw-MbWYg2ofA,30 
+onnx/backend/test/data/node/test_argmax_keepdims_example_select_last_index/test_data_set_0/output_0.pb,sha256=yjB4LORCAlt1ELQfYeptdZNSWozEnXkHE_Yw0FOF4Lk,32 +onnx/backend/test/data/node/test_argmax_keepdims_random/model.onnx,sha256=etLTCThXENf6WD3LX2dOaZ2FAddcKBgIggT4QA1788M,166 +onnx/backend/test/data/node/test_argmax_keepdims_random/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmax_keepdims_random/test_data_set_0/output_0.pb,sha256=xxKUj1nPly6oZ31Db1ZlT77n2WMOc93PhGls_r46NyI,82 +onnx/backend/test/data/node/test_argmax_keepdims_random_select_last_index/model.onnx,sha256=xk8Wg2EFTAVwfXy_DALKhdY-MCHrdQW-9fEjHDNToC0,210 +onnx/backend/test/data/node/test_argmax_keepdims_random_select_last_index/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmax_keepdims_random_select_last_index/test_data_set_0/output_0.pb,sha256=xxKUj1nPly6oZ31Db1ZlT77n2WMOc93PhGls_r46NyI,82 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_example/model.onnx,sha256=4BvTuA4BDbWrJS-mfVHt2bFqnhLgWfn8HRJvck064co,182 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_example/test_data_set_0/input_0.pb,sha256=P7LmjrG5EozUQ8-DBtDjai78FFBzBbcHWYMPWxKW6mI,30 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_example/test_data_set_0/output_0.pb,sha256=4F0-PRVXRwzEKW10dABm9QzkLNzVaRoMkYLi7CvSpnA,32 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_example_select_last_index/model.onnx,sha256=oak-YBL_HjQVmKcqwkeld5P7mSMFHVVjk1kHxt25lkw,226 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_example_select_last_index/test_data_set_0/input_0.pb,sha256=oiK8EV6rRGZdVJIrCJ0v9IVyWbWtqgZpw-MbWYg2ofA,30 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_example_select_last_index/test_data_set_0/output_0.pb,sha256=yjB4LORCAlt1ELQfYeptdZNSWozEnXkHE_Yw0FOF4Lk,32 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_random/model.onnx,sha256=jNr6hd_05wJ10QiWMMLcKORD2Hk-vUqJgX3J0Vavt_Q,189 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_random/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_random/test_data_set_0/output_0.pb,sha256=rr10tTLghcLkMi30lMGX-yxUk_YBi6rKL8m3VnjtsiI,66 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_random_select_last_index/model.onnx,sha256=CTLMBC-WEWztkyuVbzNLJGT2QJ-pB-pevX2SsHozLEY,233 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_random_select_last_index/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmax_negative_axis_keepdims_random_select_last_index/test_data_set_0/output_0.pb,sha256=rr10tTLghcLkMi30lMGX-yxUk_YBi6rKL8m3VnjtsiI,66 +onnx/backend/test/data/node/test_argmax_no_keepdims_example/model.onnx,sha256=rHuO3OTD5eRTyGpiTrFsGGyzDC8iDwVYuLDRbMAfRpU,158 +onnx/backend/test/data/node/test_argmax_no_keepdims_example/test_data_set_0/input_0.pb,sha256=P7LmjrG5EozUQ8-DBtDjai78FFBzBbcHWYMPWxKW6mI,30 +onnx/backend/test/data/node/test_argmax_no_keepdims_example/test_data_set_0/output_0.pb,sha256=U570gmgD-FKNrX-L5G6HOIWOStTE8-Fpciy-B646kUU,30 +onnx/backend/test/data/node/test_argmax_no_keepdims_example_select_last_index/model.onnx,sha256=O0jhRm19HyzVNZKWKApMpgR0TJ6aJux5d7YHF9Htfw4,202 
+onnx/backend/test/data/node/test_argmax_no_keepdims_example_select_last_index/test_data_set_0/input_0.pb,sha256=oiK8EV6rRGZdVJIrCJ0v9IVyWbWtqgZpw-MbWYg2ofA,30 +onnx/backend/test/data/node/test_argmax_no_keepdims_example_select_last_index/test_data_set_0/output_0.pb,sha256=q_pzvm7B9F06AGaDCm-ypSJ7iOO4k08SqKZnIWxn5YU,30 +onnx/backend/test/data/node/test_argmax_no_keepdims_random/model.onnx,sha256=aM8dVMPJE1DI3Up-JVkHJvVPRj_7UW7uNTxly9j-aVE,165 +onnx/backend/test/data/node/test_argmax_no_keepdims_random/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmax_no_keepdims_random/test_data_set_0/output_0.pb,sha256=Y4g-JZ7vtzt484_YeaxF0sGDj0IUC3Ow9h7-3AMEOwM,80 +onnx/backend/test/data/node/test_argmax_no_keepdims_random_select_last_index/model.onnx,sha256=6mhwUs8s7PzTMl4Zm22UNCwAQjRhMF93a7u-BhqUVoU,209 +onnx/backend/test/data/node/test_argmax_no_keepdims_random_select_last_index/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmax_no_keepdims_random_select_last_index/test_data_set_0/output_0.pb,sha256=Y4g-JZ7vtzt484_YeaxF0sGDj0IUC3Ow9h7-3AMEOwM,80 +onnx/backend/test/data/node/test_argmin_default_axis_example/model.onnx,sha256=iebLNVL1fxxh-j3DH5boBJGFNt8r2YAI_7_rP3LADu8,149 +onnx/backend/test/data/node/test_argmin_default_axis_example/test_data_set_0/input_0.pb,sha256=P7LmjrG5EozUQ8-DBtDjai78FFBzBbcHWYMPWxKW6mI,30 +onnx/backend/test/data/node/test_argmin_default_axis_example/test_data_set_0/output_0.pb,sha256=Bj_kN290f3RUlkABfWUh5o5MQuxnM-jXtBc-onOuETk,32 +onnx/backend/test/data/node/test_argmin_default_axis_example_select_last_index/model.onnx,sha256=qHYIVhWQ_8CbsMYeyYUWyg5wRPK25HQfuHsz_NjSr9E,194 +onnx/backend/test/data/node/test_argmin_default_axis_example_select_last_index/test_data_set_0/input_0.pb,sha256=oiK8EV6rRGZdVJIrCJ0v9IVyWbWtqgZpw-MbWYg2ofA,30 +onnx/backend/test/data/node/test_argmin_default_axis_example_select_last_index/test_data_set_0/output_0.pb,sha256=Bj_kN290f3RUlkABfWUh5o5MQuxnM-jXtBc-onOuETk,32 +onnx/backend/test/data/node/test_argmin_default_axis_random/model.onnx,sha256=_spSgaLvVIIaJFONNMGKGi5lR4DHEAcd-ZA0bUdo2Pg,157 +onnx/backend/test/data/node/test_argmin_default_axis_random/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmin_default_axis_random/test_data_set_0/output_0.pb,sha256=19x-g4YHmp9zWbUdC3zmRnrC_YGZiCXOH6ZYat-OKos,114 +onnx/backend/test/data/node/test_argmin_default_axis_random_select_last_index/model.onnx,sha256=ONXMwWYnms6w9bXmYZQgtnyYEtnMbIa8Zg5mQb3rWl0,201 +onnx/backend/test/data/node/test_argmin_default_axis_random_select_last_index/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmin_default_axis_random_select_last_index/test_data_set_0/output_0.pb,sha256=19x-g4YHmp9zWbUdC3zmRnrC_YGZiCXOH6ZYat-OKos,114 +onnx/backend/test/data/node/test_argmin_keepdims_example/model.onnx,sha256=jG3yLmfOLrln-g0UAq1T54CRlzEEcIHedSy2su1NvvM,159 +onnx/backend/test/data/node/test_argmin_keepdims_example/test_data_set_0/input_0.pb,sha256=P7LmjrG5EozUQ8-DBtDjai78FFBzBbcHWYMPWxKW6mI,30 +onnx/backend/test/data/node/test_argmin_keepdims_example/test_data_set_0/output_0.pb,sha256=Q0yB3rE94zUcdgoZieunw0TZyHYIKv7PGv-_zINGDwM,32 +onnx/backend/test/data/node/test_argmin_keepdims_example_select_last_index/model.onnx,sha256=3VpWljrpe_M0zxcrpMYgTIxCNT4_Wy_cXHs3xP5EiXA,203 
+onnx/backend/test/data/node/test_argmin_keepdims_example_select_last_index/test_data_set_0/input_0.pb,sha256=oiK8EV6rRGZdVJIrCJ0v9IVyWbWtqgZpw-MbWYg2ofA,30 +onnx/backend/test/data/node/test_argmin_keepdims_example_select_last_index/test_data_set_0/output_0.pb,sha256=Q0yB3rE94zUcdgoZieunw0TZyHYIKv7PGv-_zINGDwM,32 +onnx/backend/test/data/node/test_argmin_keepdims_random/model.onnx,sha256=a8TjWBkgVRdOTdgVV47nuGKlqjkylM4H_7SVVa21Gxg,166 +onnx/backend/test/data/node/test_argmin_keepdims_random/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmin_keepdims_random/test_data_set_0/output_0.pb,sha256=n6D0HfXyUX61D0a6ma7Iu4yTDIR_AixA4NxI2eQ0xzo,82 +onnx/backend/test/data/node/test_argmin_keepdims_random_select_last_index/model.onnx,sha256=yStGGRrrd8ILfWWnSVSQh-nB9jvwj0blZHRJHbpQdVo,210 +onnx/backend/test/data/node/test_argmin_keepdims_random_select_last_index/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmin_keepdims_random_select_last_index/test_data_set_0/output_0.pb,sha256=n6D0HfXyUX61D0a6ma7Iu4yTDIR_AixA4NxI2eQ0xzo,82 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_example/model.onnx,sha256=Qdcw6P4ShcjduyyWa2edCGBhuC27PN_UAhDQCmDkqck,182 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_example/test_data_set_0/input_0.pb,sha256=P7LmjrG5EozUQ8-DBtDjai78FFBzBbcHWYMPWxKW6mI,30 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_example/test_data_set_0/output_0.pb,sha256=Q0yB3rE94zUcdgoZieunw0TZyHYIKv7PGv-_zINGDwM,32 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_example_select_last_index/model.onnx,sha256=nu8X7vFhj97yEguRh_mA5BiMbyiUMuSwRGR07TklqeQ,226 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_example_select_last_index/test_data_set_0/input_0.pb,sha256=oiK8EV6rRGZdVJIrCJ0v9IVyWbWtqgZpw-MbWYg2ofA,30 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_example_select_last_index/test_data_set_0/output_0.pb,sha256=Q0yB3rE94zUcdgoZieunw0TZyHYIKv7PGv-_zINGDwM,32 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_random/model.onnx,sha256=6J7PgD_k3Fi2wUdzP7doBFOHxEd4w4En7b9JIZ7rNuQ,189 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_random/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_random/test_data_set_0/output_0.pb,sha256=QiaUgxoImJVzGqsKXpeRoj5YIhs4TSmZNujE2jRpNYE,66 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_random_select_last_index/model.onnx,sha256=LqozWLlzhVSjSmkkRy5qE4bfDHr2zjeEhz2c0eLQmSs,233 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_random_select_last_index/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmin_negative_axis_keepdims_random_select_last_index/test_data_set_0/output_0.pb,sha256=QiaUgxoImJVzGqsKXpeRoj5YIhs4TSmZNujE2jRpNYE,66 +onnx/backend/test/data/node/test_argmin_no_keepdims_example/model.onnx,sha256=xk1tQnhxenLqqiNWasT9qPvWf03sxIHmWRWpIVL8zyQ,158 +onnx/backend/test/data/node/test_argmin_no_keepdims_example/test_data_set_0/input_0.pb,sha256=P7LmjrG5EozUQ8-DBtDjai78FFBzBbcHWYMPWxKW6mI,30 +onnx/backend/test/data/node/test_argmin_no_keepdims_example/test_data_set_0/output_0.pb,sha256=orTottlTXJm72VAuuI4Z4Wm5MdszRna3fvV3HAflc64,30 
+onnx/backend/test/data/node/test_argmin_no_keepdims_example_select_last_index/model.onnx,sha256=M9mq_Nks8i-0ic_sUhv6nJt4wTL0hdfKLYQd8COtiJs,202 +onnx/backend/test/data/node/test_argmin_no_keepdims_example_select_last_index/test_data_set_0/input_0.pb,sha256=oiK8EV6rRGZdVJIrCJ0v9IVyWbWtqgZpw-MbWYg2ofA,30 +onnx/backend/test/data/node/test_argmin_no_keepdims_example_select_last_index/test_data_set_0/output_0.pb,sha256=orTottlTXJm72VAuuI4Z4Wm5MdszRna3fvV3HAflc64,30 +onnx/backend/test/data/node/test_argmin_no_keepdims_random/model.onnx,sha256=BPo7t0D-DdFCNfgyDYp6CELjFD1Y7GOC1_b5jnpu_GQ,165 +onnx/backend/test/data/node/test_argmin_no_keepdims_random/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmin_no_keepdims_random/test_data_set_0/output_0.pb,sha256=1k5tX4X36O2Fo9-48qFsRj3eAH2qLSUtS2alMh32hKM,80 +onnx/backend/test/data/node/test_argmin_no_keepdims_random_select_last_index/model.onnx,sha256=G24mWmUuUdRohHXsk32EnB_fkE3bIDiSeetlrHg5ysY,209 +onnx/backend/test/data/node/test_argmin_no_keepdims_random_select_last_index/test_data_set_0/input_0.pb,sha256=v_AV3pvz2MDGCOnzjvfj-FvbarLucYe3JAE3R_azXjU,112 +onnx/backend/test/data/node/test_argmin_no_keepdims_random_select_last_index/test_data_set_0/output_0.pb,sha256=1k5tX4X36O2Fo9-48qFsRj3eAH2qLSUtS2alMh32hKM,80 +onnx/backend/test/data/node/test_asin/model.onnx,sha256=c-iJf0kSCwEzVgddUlzRKq6gFALlkPDD2UBEnCdW3-s,99 +onnx/backend/test/data/node/test_asin/test_data_set_0/input_0.pb,sha256=BcAjkpobMrS9vXiC_15TubU3Yu4pKcZr520PC6fKOT0,254 +onnx/backend/test/data/node/test_asin/test_data_set_0/output_0.pb,sha256=u9s_EapOLdfpDm7_h7j6kMIc1Xc73Byi-SkQpYwk5is,254 +onnx/backend/test/data/node/test_asin_example/model.onnx,sha256=4sClXtGRSQevJ6EHxwlNJ22fmFivvXzn9L39rtKp0Wg,91 +onnx/backend/test/data/node/test_asin_example/test_data_set_0/input_0.pb,sha256=cD3gw75QWnhZYjV4loK8Tm8OhSPzT7cxIHtMB2cBq90,21 +onnx/backend/test/data/node/test_asin_example/test_data_set_0/output_0.pb,sha256=FllD0s9HRvharNPBoXUEszaIk91MR79Tz0s1OwzXoi8,21 +onnx/backend/test/data/node/test_asinh/model.onnx,sha256=OFzITC7NYQXYt6211GMI-DzOSTdDq0Ylv66M2peAfaA,101 +onnx/backend/test/data/node/test_asinh/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_asinh/test_data_set_0/output_0.pb,sha256=OSyeQFYbXqnXu9FHx2wqEGpOugaeCVOSoBT-ELGWS4U,254 +onnx/backend/test/data/node/test_asinh_example/model.onnx,sha256=FcVkpsyH1Ngm1zwnDk0hYwl1x4tMHM0ctBT_9AxMe2Y,93 +onnx/backend/test/data/node/test_asinh_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_asinh_example/test_data_set_0/output_0.pb,sha256=8Fz60rR3DIxDgwmw7EMak08D9yt0M6Bb9psXbH1p8kw,21 +onnx/backend/test/data/node/test_atan/model.onnx,sha256=o_6XdZbEa_nAheUoFfn7ZSTtsvkSjpqCNi_e9eNd-uE,99 +onnx/backend/test/data/node/test_atan/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_atan/test_data_set_0/output_0.pb,sha256=kHD-XwUIE1V_kMPmW2vHkG1j1ghWwx0k1B2AxQ2p9W8,254 +onnx/backend/test/data/node/test_atan_example/model.onnx,sha256=rPAzURgzZhhsUYg-IOLsKTgIAV_UgaQUr7SAvGS9HDg,91 +onnx/backend/test/data/node/test_atan_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_atan_example/test_data_set_0/output_0.pb,sha256=kWWl9gDeCxk2l09E1LE-fw9VMRIYYxO2XDgPuZz1Ff8,21 
+onnx/backend/test/data/node/test_atanh/model.onnx,sha256=l-2bku-epRnHOcAo5KWMy8ydA9tGlHxEz8_Bds1LIWM,101 +onnx/backend/test/data/node/test_atanh/test_data_set_0/input_0.pb,sha256=BcAjkpobMrS9vXiC_15TubU3Yu4pKcZr520PC6fKOT0,254 +onnx/backend/test/data/node/test_atanh/test_data_set_0/output_0.pb,sha256=r-fX1_Rxm17m9hCcqYIvuLdAs9mKxAt73siRxDdnWbA,254 +onnx/backend/test/data/node/test_atanh_example/model.onnx,sha256=TDrKdNv68FFl87fte-CBxIgmoNj9DTh1PBAJIF72av0,93 +onnx/backend/test/data/node/test_atanh_example/test_data_set_0/input_0.pb,sha256=cD3gw75QWnhZYjV4loK8Tm8OhSPzT7cxIHtMB2cBq90,21 +onnx/backend/test/data/node/test_atanh_example/test_data_set_0/output_0.pb,sha256=lnjpP4k_gQIoWj-p9POCPLQsNe7mT3U9ArJG73iZBro,21 +onnx/backend/test/data/node/test_averagepool_1d_default/model.onnx,sha256=XGxVXuUZJaDYaJlBHJ3tNjYYAKCas_F7zBAipWVexoI,145 +onnx/backend/test/data/node/test_averagepool_1d_default/test_data_set_0/input_0.pb,sha256=Za3StahBL7BbhQGm5tBIbmHL40mPVKMeoJms01DCbkU,398 +onnx/backend/test/data/node/test_averagepool_1d_default/test_data_set_0/output_0.pb,sha256=TZiHbd4tAQX4YEoKfpv4FAfDU8Wfj18Luu5gvR5syQs,386 +onnx/backend/test/data/node/test_averagepool_2d_ceil/model.onnx,sha256=Fw_0lzhJ45nf9cUdAsoXxHBOe-C29F7OTQOwfJAwmpU,189 +onnx/backend/test/data/node/test_averagepool_2d_ceil/test_data_set_0/input_0.pb,sha256=wWnR5nCmHbKNWucpc2qwGWA_byviZxAN4EWpmBWxWjA,79 +onnx/backend/test/data/node/test_averagepool_2d_ceil/test_data_set_0/output_0.pb,sha256=J_NsHn9iEsmJAHuv2HzlQWz7V96t50EsGijdfWrqmeA,31 +onnx/backend/test/data/node/test_averagepool_2d_default/model.onnx,sha256=GTRDJ2h6QLt2eXHnUv9PKpgFJ0mlNd7CeJHSo5MRPGU,156 +onnx/backend/test/data/node/test_averagepool_2d_default/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_averagepool_2d_default/test_data_set_0/output_0.pb,sha256=WqDv_FV3cMJET2YYrTwD1QNaFS1HWX3l_wgNWu5fv8Y,11548 +onnx/backend/test/data/node/test_averagepool_2d_dilations/model.onnx,sha256=3Rw-BH5vKXyn0M1G114FCw6ywEWius1a4rdogrt6SeA,214 +onnx/backend/test/data/node/test_averagepool_2d_dilations/test_data_set_0/input_0.pb,sha256=wWnR5nCmHbKNWucpc2qwGWA_byviZxAN4EWpmBWxWjA,79 +onnx/backend/test/data/node/test_averagepool_2d_dilations/test_data_set_0/output_0.pb,sha256=W0xv7zagmB-A0lWKOzPp6y4w5v8iBNre0k8VvAb23es,31 +onnx/backend/test/data/node/test_averagepool_2d_pads/model.onnx,sha256=8rXDpuSWjpGXP4R_-P9DstYcmuTfa_To0QXbkmBsZP0,172 +onnx/backend/test/data/node/test_averagepool_2d_pads/test_data_set_0/input_0.pb,sha256=HFGpOb8k-_eSox3wcj8SOjH7WcbqDYrAaqiceZ_ZzdI,9424 +onnx/backend/test/data/node/test_averagepool_2d_pads/test_data_set_0/output_0.pb,sha256=zFviiXMYWWVgabusMEGvLMEybZSq7C296tqaNQL6kf8,10816 +onnx/backend/test/data/node/test_averagepool_2d_pads_count_include_pad/model.onnx,sha256=Tcar-PxrN3gQO2ghXmUTUd5HT1jq71G3wduvkHY7pMI,216 +onnx/backend/test/data/node/test_averagepool_2d_pads_count_include_pad/test_data_set_0/input_0.pb,sha256=HFGpOb8k-_eSox3wcj8SOjH7WcbqDYrAaqiceZ_ZzdI,9424 +onnx/backend/test/data/node/test_averagepool_2d_pads_count_include_pad/test_data_set_0/output_0.pb,sha256=5U46RW5XBKUVs3UYsbuPPOqxyVl-Bs5cqdlmg3zBVjs,10816 +onnx/backend/test/data/node/test_averagepool_2d_precomputed_pads/model.onnx,sha256=-vOeN6LHMAqcozCy_ijYCpUTqg9GC42TKRxVjL1BAXs,184 +onnx/backend/test/data/node/test_averagepool_2d_precomputed_pads/test_data_set_0/input_0.pb,sha256=OTlNmprtJOUuLYiDg3HHSwDSMZ9I07l2M5FcxdQLysw,115 
+onnx/backend/test/data/node/test_averagepool_2d_precomputed_pads/test_data_set_0/output_0.pb,sha256=Ecxo2QxKQku1MYTr16hK8pervsU5PyL_fGNkROknnP8,115 +onnx/backend/test/data/node/test_averagepool_2d_precomputed_pads_count_include_pad/model.onnx,sha256=ocJT1xMaMEte6DJIZDi-YNn8H3gvvUJoqR6ytoH2rdo,228 +onnx/backend/test/data/node/test_averagepool_2d_precomputed_pads_count_include_pad/test_data_set_0/input_0.pb,sha256=OTlNmprtJOUuLYiDg3HHSwDSMZ9I07l2M5FcxdQLysw,115 +onnx/backend/test/data/node/test_averagepool_2d_precomputed_pads_count_include_pad/test_data_set_0/output_0.pb,sha256=2xNtCtCrswd372GDRTeANsSwrjEvCEBD52M4I77kG8U,115 +onnx/backend/test/data/node/test_averagepool_2d_precomputed_same_upper/model.onnx,sha256=NzQZV7uUhp4gx2s0smMDWIHMt7s7BhfkGJJZaaosV9w,216 +onnx/backend/test/data/node/test_averagepool_2d_precomputed_same_upper/test_data_set_0/input_0.pb,sha256=OTlNmprtJOUuLYiDg3HHSwDSMZ9I07l2M5FcxdQLysw,115 +onnx/backend/test/data/node/test_averagepool_2d_precomputed_same_upper/test_data_set_0/output_0.pb,sha256=PXPhQwNqHdL1LZx5KD3oGFGzjLnNpPSRRUEzLfDeflM,51 +onnx/backend/test/data/node/test_averagepool_2d_precomputed_strides/model.onnx,sha256=ZXyTepIY0qtR7sJGjF5KRupQ-WsxG1CibtDsGXBA4Q4,186 +onnx/backend/test/data/node/test_averagepool_2d_precomputed_strides/test_data_set_0/input_0.pb,sha256=OTlNmprtJOUuLYiDg3HHSwDSMZ9I07l2M5FcxdQLysw,115 +onnx/backend/test/data/node/test_averagepool_2d_precomputed_strides/test_data_set_0/output_0.pb,sha256=6kurBL2grQAt9ZKHrnJkfWS3ZP-ToeFCUf4U4ZcVcJ8,31 +onnx/backend/test/data/node/test_averagepool_2d_same_lower/model.onnx,sha256=FIOQrCExRc7_Ridv7KQZBnRSOqBu5uILR8xRh0GbvHs,186 +onnx/backend/test/data/node/test_averagepool_2d_same_lower/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_averagepool_2d_same_lower/test_data_set_0/output_0.pb,sha256=-5ZZi0sQTaBI0T6CQ1Sv1ifyPAcHKzpYLJfVbGyk_qw,12304 +onnx/backend/test/data/node/test_averagepool_2d_same_upper/model.onnx,sha256=zlG3ufx_f3ntHSpYg3AbjwwzbSFA8nqk5JXbMpHfU7s,186 +onnx/backend/test/data/node/test_averagepool_2d_same_upper/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_averagepool_2d_same_upper/test_data_set_0/output_0.pb,sha256=0dyD3w375fMoeUr4L5i396MTxAKFnGEc0E4FPhFOhwM,12304 +onnx/backend/test/data/node/test_averagepool_2d_strides/model.onnx,sha256=KtGE2-WFqNeokJtW-cKCQwk2qDPcYByzQWGamf-LlAg,174 +onnx/backend/test/data/node/test_averagepool_2d_strides/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_averagepool_2d_strides/test_data_set_0/output_0.pb,sha256=IR_Q7lYkCMSUqv7Rvjth91C6d2O0alfXQxjoPdDuMhA,1216 +onnx/backend/test/data/node/test_averagepool_3d_default/model.onnx,sha256=vXnFse9St86WS8KljU4-LeGfSqr9H9q_-Z-2GPaidGA,166 +onnx/backend/test/data/node/test_averagepool_3d_default/test_data_set_0/input_0.pb,sha256=L_c9P56CmA7WzPjy1TlkWrKgYm0YkeXEYxiPZ6VV1wY,393235 +onnx/backend/test/data/node/test_averagepool_3d_default/test_data_set_0/output_0.pb,sha256=bWn81UGs4Eu7uQ-sAsgNLoy5M4lo5LZ8qLYoC87yC6g,357511 +onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_0_ceil_mode_is_False/model.onnx,sha256=Cqn5O3OED27dgc23dNc0c2sEQRUrlUawuA4F6-NBnhA,303 +onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_0_ceil_mode_is_False/test_data_set_0/input_0.pb,sha256=G0ZmaRtAJ5ywCVv8RZdiQQtdSsDDMkBOq129UCl5duM,131091 
+onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_0_ceil_mode_is_False/test_data_set_0/output_0.pb,sha256=Yungt3DB4TIX35zBpAvr34-YKrcXpE9pQNNO6NtV02Y,2066 +onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_0_ceil_mode_is_True/model.onnx,sha256=6m3f6xbgXlpGRPCYmTKo-9dQOsznbArYR5HuYJtgZa8,302 +onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_0_ceil_mode_is_True/test_data_set_0/input_0.pb,sha256=VKR6T99kXsypZCXWOcGPYobnNiPdA95uKzDtur6-Wps,131091 +onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_0_ceil_mode_is_True/test_data_set_0/output_0.pb,sha256=h30I9AcAYxUV2k5fQjbh3KXNEB6pyvpzd2zxlMlFrl4,2934 +onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_1_ceil_mode_is_False/model.onnx,sha256=Q6-i1kHz4mKBbzKq6j59XEpaoA1B4HiE2JJHOmxkxjQ,303 +onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_1_ceil_mode_is_False/test_data_set_0/input_0.pb,sha256=NxZlzYncM3n7oZ55CPGRMSrsZYfKvhZL0vtBW0siD8M,131091 +onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_1_ceil_mode_is_False/test_data_set_0/output_0.pb,sha256=X8NWa80Dr242-z3E8xYObU6geX8KZyhPxfonC_PxJ1E,2066 +onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_1_ceil_mode_is_True/model.onnx,sha256=Njyn7VkHgpkSwVNV4rzVlWbhwO97NxZB6JDbhdpUmJQ,302 +onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_1_ceil_mode_is_True/test_data_set_0/input_0.pb,sha256=COVvyW4FhX4PqauStmT3UkrJpe8KUoQXpqHjD8N_MqE,131091 +onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_1_ceil_mode_is_True/test_data_set_0/output_0.pb,sha256=leZSs29PsGH-urb0TWXV06-lpEcstKVAAhBvP1iy5uQ,2934 +onnx/backend/test/data/node/test_averagepool_3d_dilations_small/model.onnx,sha256=w5ced538ReEPBCJddqkpQRxNYXDCxYKxIrfkIiBL9wE,234 +onnx/backend/test/data/node/test_averagepool_3d_dilations_small/test_data_set_0/input_0.pb,sha256=eUBZRSXolhPmPqfzFXG-IJegZfzGOQpfdDoeK2OEkrA,274 +onnx/backend/test/data/node/test_averagepool_3d_dilations_small/test_data_set_0/output_0.pb,sha256=Rtr3ZjMwQn0VuZWLIFhhFlRtrdRznLumebIVumuWPDk,49 +onnx/backend/test/data/node/test_basic_conv_with_padding/model.onnx,sha256=cD_1tCjoZkjAcpIi0QNzqmmbpV_AGw_2r03lgcxQFZY,201 +onnx/backend/test/data/node/test_basic_conv_with_padding/test_data_set_0/input_0.pb,sha256=RnJVWHKql-4c51A3JiCTvFe-ry8o5bavOnIotOzZFK8,115 +onnx/backend/test/data/node/test_basic_conv_with_padding/test_data_set_0/input_1.pb,sha256=i8uYkxFsjmh4jNDwmgCtV7jQtF11nwCwISLTKf1COgg,51 +onnx/backend/test/data/node/test_basic_conv_with_padding/test_data_set_0/output_0.pb,sha256=_LQ6Gcbvv-Twct7wPuCO5Hk3vIzVJ6zAJjqsCUTiESs,115 +onnx/backend/test/data/node/test_basic_conv_without_padding/model.onnx,sha256=-9Rdgv4ilXQLar82r8TBkda5LM3noubb0QGZBVJ9-mA,204 +onnx/backend/test/data/node/test_basic_conv_without_padding/test_data_set_0/input_0.pb,sha256=RnJVWHKql-4c51A3JiCTvFe-ry8o5bavOnIotOzZFK8,115 +onnx/backend/test/data/node/test_basic_conv_without_padding/test_data_set_0/input_1.pb,sha256=i8uYkxFsjmh4jNDwmgCtV7jQtF11nwCwISLTKf1COgg,51 +onnx/backend/test/data/node/test_basic_conv_without_padding/test_data_set_0/output_0.pb,sha256=K4v6OtrT-N3QKnkBVUq6hzq-cwsWe0UFSldl-Ue2gBo,51 +onnx/backend/test/data/node/test_basic_deform_conv_with_padding/model.onnx,sha256=fs-2bZMq7JxyOAnLmk1E7EYxGkgDl5yFjQvK-HhQHOs,308 
+onnx/backend/test/data/node/test_basic_deform_conv_with_padding/test_data_set_0/input_0.pb,sha256=dBjaBXzMfFIovF45VFTSKa1SzvxiDU2cnrAzM3tOY7M,51 +onnx/backend/test/data/node/test_basic_deform_conv_with_padding/test_data_set_0/input_1.pb,sha256=4Ki_u3n_9JzedrVVClMoOCDVY7aP6BJSo_YgoX_n3EM,31 +onnx/backend/test/data/node/test_basic_deform_conv_with_padding/test_data_set_0/input_2.pb,sha256=ubxlpp8Ok84plEXefjcKBNLmtjYWHyW8lTBySGpXiVY,546 +onnx/backend/test/data/node/test_basic_deform_conv_with_padding/test_data_set_0/output_0.pb,sha256=Irv1r-kaSImnwtIZkxyVkxsEt5PsqthjzKxqarimwTk,92 +onnx/backend/test/data/node/test_basic_deform_conv_without_padding/model.onnx,sha256=-2wpGLXM87GIGDsKgf2iiKgF8E2FR37BBA1YsFMkC3w,323 +onnx/backend/test/data/node/test_basic_deform_conv_without_padding/test_data_set_0/input_0.pb,sha256=dBjaBXzMfFIovF45VFTSKa1SzvxiDU2cnrAzM3tOY7M,51 +onnx/backend/test/data/node/test_basic_deform_conv_without_padding/test_data_set_0/input_1.pb,sha256=4Ki_u3n_9JzedrVVClMoOCDVY7aP6BJSo_YgoX_n3EM,31 +onnx/backend/test/data/node/test_basic_deform_conv_without_padding/test_data_set_0/input_2.pb,sha256=X6j9HPA1XkWmfq_jLD2L8xChvOSYaeHC866XeRYcjYI,165 +onnx/backend/test/data/node/test_basic_deform_conv_without_padding/test_data_set_0/output_0.pb,sha256=peJJza0T-DL7-QEaZxSV3xP-Dw7ZUQF7DAoh54hXozo,47 +onnx/backend/test/data/node/test_batchnorm_epsilon/model.onnx,sha256=NXo3bdeikoeDCYmkO46QlVY4bBGEZfKsFlRE7Pw5RtI,250 +onnx/backend/test/data/node/test_batchnorm_epsilon/test_data_set_0/input_0.pb,sha256=8uEeu7nwOZUkfBuA5bw-ePBY0x7IS3e44oNXGMyGbkQ,496 +onnx/backend/test/data/node/test_batchnorm_epsilon/test_data_set_0/input_1.pb,sha256=OBGvvDW2vXlYrjwA7mwO12Os-DSXirqdY2jD0Oy6CpY,21 +onnx/backend/test/data/node/test_batchnorm_epsilon/test_data_set_0/input_2.pb,sha256=3wE1u1KcTYpoMTF-wBk003DsXnUc9oEGTFycIU4jCXM,24 +onnx/backend/test/data/node/test_batchnorm_epsilon/test_data_set_0/input_3.pb,sha256=T8_L--XgZOdbDGUSHIWGNqYTLTv1yFq2oH487fAI0tk,24 +onnx/backend/test/data/node/test_batchnorm_epsilon/test_data_set_0/input_4.pb,sha256=Ti6CdJotPj-3-v8kHqk-YWnNLFRA5W3Ggebr1lG_QiI,23 +onnx/backend/test/data/node/test_batchnorm_epsilon/test_data_set_0/output_0.pb,sha256=n6XFIIVyNJbcQt5Mj2SKHyj8_vjkASjh5KIlzVwFbi4,496 +onnx/backend/test/data/node/test_batchnorm_epsilon_training_mode/model.onnx,sha256=SAramxkbImr0vmaRPxJA-1RLrQaTx8rud89R0WuZ3JI,364 +onnx/backend/test/data/node/test_batchnorm_epsilon_training_mode/test_data_set_0/input_0.pb,sha256=8uEeu7nwOZUkfBuA5bw-ePBY0x7IS3e44oNXGMyGbkQ,496 +onnx/backend/test/data/node/test_batchnorm_epsilon_training_mode/test_data_set_0/input_1.pb,sha256=OBGvvDW2vXlYrjwA7mwO12Os-DSXirqdY2jD0Oy6CpY,21 +onnx/backend/test/data/node/test_batchnorm_epsilon_training_mode/test_data_set_0/input_2.pb,sha256=3wE1u1KcTYpoMTF-wBk003DsXnUc9oEGTFycIU4jCXM,24 +onnx/backend/test/data/node/test_batchnorm_epsilon_training_mode/test_data_set_0/input_3.pb,sha256=T8_L--XgZOdbDGUSHIWGNqYTLTv1yFq2oH487fAI0tk,24 +onnx/backend/test/data/node/test_batchnorm_epsilon_training_mode/test_data_set_0/input_4.pb,sha256=Ti6CdJotPj-3-v8kHqk-YWnNLFRA5W3Ggebr1lG_QiI,23 +onnx/backend/test/data/node/test_batchnorm_epsilon_training_mode/test_data_set_0/output_0.pb,sha256=WTc4mm4mdiyNSmlgxnwB17sNnDNlw_i_mHnXabAsE9o,496 +onnx/backend/test/data/node/test_batchnorm_epsilon_training_mode/test_data_set_0/output_1.pb,sha256=jHTM3xrd557eg6TzhgmP5mMHNrMCIlsBuK6OZqnnKkc,31 +onnx/backend/test/data/node/test_batchnorm_epsilon_training_mode/test_data_set_0/output_2.pb,sha256=N5551G0V8glw6AyG4jMpqsOL3dLoLlP8UUWNEoz_VZ4,30 
+onnx/backend/test/data/node/test_batchnorm_example/model.onnx,sha256=ub690uwc1r-R7fxX6EtJDAMIF4aI4YfzhMCkkhGH7yQ,231 +onnx/backend/test/data/node/test_batchnorm_example/test_data_set_0/input_0.pb,sha256=SYKd5PF5vM-bKNrV8oTKPGHvYE0twyvHWdFt3T67zDY,496 +onnx/backend/test/data/node/test_batchnorm_example/test_data_set_0/input_1.pb,sha256=AI-3RnrtRdqYWsuL7R4_zbZ0Vb7zfnaDEBYsm0i2kvY,21 +onnx/backend/test/data/node/test_batchnorm_example/test_data_set_0/input_2.pb,sha256=2oNlXxvO0WnQ0oLXr0SuQwOm4ErOca8R9dkNLwewDK4,24 +onnx/backend/test/data/node/test_batchnorm_example/test_data_set_0/input_3.pb,sha256=d60mjqff9zbsLGnRD8GG9bbEk2qyV8UaYde7LaB_myc,24 +onnx/backend/test/data/node/test_batchnorm_example/test_data_set_0/input_4.pb,sha256=EyHoNru0GPIaJLApYVvbQ1CqR67ReuX4wdywoIWBV5o,23 +onnx/backend/test/data/node/test_batchnorm_example/test_data_set_0/output_0.pb,sha256=oeWjuEbXwDN3lB6trNUoMhzjqR-2WCk-Rt9KbKy_lJs,496 +onnx/backend/test/data/node/test_batchnorm_example_training_mode/model.onnx,sha256=DAaPW_k2UOPYfvNEk00CEgz2yHtNbpttW9kpp5z49cE,345 +onnx/backend/test/data/node/test_batchnorm_example_training_mode/test_data_set_0/input_0.pb,sha256=SYKd5PF5vM-bKNrV8oTKPGHvYE0twyvHWdFt3T67zDY,496 +onnx/backend/test/data/node/test_batchnorm_example_training_mode/test_data_set_0/input_1.pb,sha256=AI-3RnrtRdqYWsuL7R4_zbZ0Vb7zfnaDEBYsm0i2kvY,21 +onnx/backend/test/data/node/test_batchnorm_example_training_mode/test_data_set_0/input_2.pb,sha256=2oNlXxvO0WnQ0oLXr0SuQwOm4ErOca8R9dkNLwewDK4,24 +onnx/backend/test/data/node/test_batchnorm_example_training_mode/test_data_set_0/input_3.pb,sha256=d60mjqff9zbsLGnRD8GG9bbEk2qyV8UaYde7LaB_myc,24 +onnx/backend/test/data/node/test_batchnorm_example_training_mode/test_data_set_0/input_4.pb,sha256=EyHoNru0GPIaJLApYVvbQ1CqR67ReuX4wdywoIWBV5o,23 +onnx/backend/test/data/node/test_batchnorm_example_training_mode/test_data_set_0/output_0.pb,sha256=e-lIEIZD_v28N7-cbLdGgQKaTqsjUA7tmsc3OGNwz8M,496 +onnx/backend/test/data/node/test_batchnorm_example_training_mode/test_data_set_0/output_1.pb,sha256=B0lT9yeJL76Z8VJvMM29lFzBBqzZ7ifQi0nP8Rrih8Q,31 +onnx/backend/test/data/node/test_batchnorm_example_training_mode/test_data_set_0/output_2.pb,sha256=Hb31AnNOCJ7SIMDkm-IuS8WJaRwxB1qvuSJedInfXUY,30 +onnx/backend/test/data/node/test_bernoulli/model.onnx,sha256=DfSgPcWcc0wpNRXOvnl4eId8A_Q7yaHI63nS__C1tus,93 +onnx/backend/test/data/node/test_bernoulli/test_data_set_0/input_0.pb,sha256=hyJJsFqjdZ5XnAjaaBhB0YNRxb9AuGsGsBdSS1te2iw,89 +onnx/backend/test/data/node/test_bernoulli/test_data_set_0/output_0.pb,sha256=xs9aCuRMT-QHKEFQ0SiknUjFyecsDJLJJ1TQ9BQh2os,89 +onnx/backend/test/data/node/test_bernoulli_double/model.onnx,sha256=x0PDwvY_R5GtvN2pM9bGlcU5g2g_EklNdv5zTpIOhF0,114 +onnx/backend/test/data/node/test_bernoulli_double/test_data_set_0/input_0.pb,sha256=cuPUsQEt8rLSUYiu2PhyXLudD1nqEYaXqoG180s6eiw,49 +onnx/backend/test/data/node/test_bernoulli_double/test_data_set_0/output_0.pb,sha256=xs9aCuRMT-QHKEFQ0SiknUjFyecsDJLJJ1TQ9BQh2os,89 +onnx/backend/test/data/node/test_bernoulli_double_expanded/model.onnx,sha256=VrJk8c6NU8o0Or2sqjvIVkqY65pqaHpmcVgjkfecCJY,446 +onnx/backend/test/data/node/test_bernoulli_double_expanded/test_data_set_0/input_0.pb,sha256=cuPUsQEt8rLSUYiu2PhyXLudD1nqEYaXqoG180s6eiw,49 +onnx/backend/test/data/node/test_bernoulli_double_expanded/test_data_set_0/output_0.pb,sha256=xs9aCuRMT-QHKEFQ0SiknUjFyecsDJLJJ1TQ9BQh2os,89 +onnx/backend/test/data/node/test_bernoulli_expanded/model.onnx,sha256=yLUsgRq72OZR2yugRd3N4WyH3a9ko47PpEj35crnCP0,409 
+onnx/backend/test/data/node/test_bernoulli_expanded/test_data_set_0/input_0.pb,sha256=hyJJsFqjdZ5XnAjaaBhB0YNRxb9AuGsGsBdSS1te2iw,89 +onnx/backend/test/data/node/test_bernoulli_expanded/test_data_set_0/output_0.pb,sha256=xs9aCuRMT-QHKEFQ0SiknUjFyecsDJLJJ1TQ9BQh2os,89 +onnx/backend/test/data/node/test_bernoulli_seed/model.onnx,sha256=nKQzzOSvENBJFqRa8O5jzVgFSS8BnQs9b1oz_uoiGZg,114 +onnx/backend/test/data/node/test_bernoulli_seed/test_data_set_0/input_0.pb,sha256=cuPUsQEt8rLSUYiu2PhyXLudD1nqEYaXqoG180s6eiw,49 +onnx/backend/test/data/node/test_bernoulli_seed/test_data_set_0/output_0.pb,sha256=wDTjyEkwQ92j1RiXjUtYJjS14OwkKkkTih0mdFRpGGc,49 +onnx/backend/test/data/node/test_bernoulli_seed_expanded/model.onnx,sha256=1hhf3qKyziZ8eVwUIHh_tX9Zzb2ETpZ8GwSG8yqmpfo,452 +onnx/backend/test/data/node/test_bernoulli_seed_expanded/test_data_set_0/input_0.pb,sha256=cuPUsQEt8rLSUYiu2PhyXLudD1nqEYaXqoG180s6eiw,49 +onnx/backend/test/data/node/test_bernoulli_seed_expanded/test_data_set_0/output_0.pb,sha256=wDTjyEkwQ92j1RiXjUtYJjS14OwkKkkTih0mdFRpGGc,49 +onnx/backend/test/data/node/test_bitshift_left_uint16/model.onnx,sha256=LnZd_SO9L1dOzaN11XKMPNWZCPRE96L0NT-xnJTOBtY,145 +onnx/backend/test/data/node/test_bitshift_left_uint16/test_data_set_0/input_0.pb,sha256=jQ176jxBAIMO5ulz_2R2t_RMNCYxBhWBKtEQvp3VPnA,15 +onnx/backend/test/data/node/test_bitshift_left_uint16/test_data_set_0/input_1.pb,sha256=d_wFROmBvDAar5EVjiFwX-5HWMz7-3X15eaoWQv8BAQ,15 +onnx/backend/test/data/node/test_bitshift_left_uint16/test_data_set_0/output_0.pb,sha256=FIj-kC6QX69gcjY2ZLPbJAeYeTxYhDoTphdZzBHyxr4,15 +onnx/backend/test/data/node/test_bitshift_left_uint32/model.onnx,sha256=Xbshde1wAgne2pBGkwDnLbZJFsZB-nH8INUp_fyKsH8,145 +onnx/backend/test/data/node/test_bitshift_left_uint32/test_data_set_0/input_0.pb,sha256=n0jmfIFAkvhmvSsxFmIhij1NKAC2rZ0GZjPUvl0OvpM,21 +onnx/backend/test/data/node/test_bitshift_left_uint32/test_data_set_0/input_1.pb,sha256=LPGgfy0kWPfzwbePS63zlBnHRcjnKVmTNhl56DQT_LQ,21 +onnx/backend/test/data/node/test_bitshift_left_uint32/test_data_set_0/output_0.pb,sha256=UHau1j361dwNuOuv9GVClDPrEvAsEMDnU7kZ9Ka9XEM,21 +onnx/backend/test/data/node/test_bitshift_left_uint64/model.onnx,sha256=uitk2ZXsCBnTzjWPhsF5PNoiwI5ECCfQvp9EGR19MOo,145 +onnx/backend/test/data/node/test_bitshift_left_uint64/test_data_set_0/input_0.pb,sha256=gdItplXa-gah3kzYq0cb4Ny7SwnrQpNjCBu9BwvRWs4,33 +onnx/backend/test/data/node/test_bitshift_left_uint64/test_data_set_0/input_1.pb,sha256=srzREYqcjuSETV_il_0QENxQV8EbW-IHk_e_wnZX_s0,33 +onnx/backend/test/data/node/test_bitshift_left_uint64/test_data_set_0/output_0.pb,sha256=uv291G2Nz0ctLLM_ZjvaFsSQLkB6Ywj1QJ2am6jrAOI,33 +onnx/backend/test/data/node/test_bitshift_left_uint8/model.onnx,sha256=2hKErCnyvnmUegUBOX3i_-OWYfrGyErXp3n4NlviXww,144 +onnx/backend/test/data/node/test_bitshift_left_uint8/test_data_set_0/input_0.pb,sha256=Npre_kHciTwjklfqHpxJJP6BJTCF638zHABwLLSJLVc,12 +onnx/backend/test/data/node/test_bitshift_left_uint8/test_data_set_0/input_1.pb,sha256=OmXyE8_Mj-VdKRY0dsUACEnjcieoln-kaSNRW1ulqnU,12 +onnx/backend/test/data/node/test_bitshift_left_uint8/test_data_set_0/output_0.pb,sha256=htgT0sbAbcKHka93qygml6m7QOyV_RNBGZ-7Bfh8VWg,12 +onnx/backend/test/data/node/test_bitshift_right_uint16/model.onnx,sha256=dMuztKV5VcxFmhxYQ_w9wEofyRNitDf6dXlT8OpOh4g,147 +onnx/backend/test/data/node/test_bitshift_right_uint16/test_data_set_0/input_0.pb,sha256=jQ176jxBAIMO5ulz_2R2t_RMNCYxBhWBKtEQvp3VPnA,15 
+onnx/backend/test/data/node/test_bitshift_right_uint16/test_data_set_0/input_1.pb,sha256=d_wFROmBvDAar5EVjiFwX-5HWMz7-3X15eaoWQv8BAQ,15 +onnx/backend/test/data/node/test_bitshift_right_uint16/test_data_set_0/output_0.pb,sha256=OWgso0Bd4H7b02Z0t-9C6BIe0JqxZPl4x5pLDCuH-6g,15 +onnx/backend/test/data/node/test_bitshift_right_uint32/model.onnx,sha256=ZSUngGFGhJm4frc_N6LC_nOTHfOmgcEpVBloBXbfhY0,147 +onnx/backend/test/data/node/test_bitshift_right_uint32/test_data_set_0/input_0.pb,sha256=n0jmfIFAkvhmvSsxFmIhij1NKAC2rZ0GZjPUvl0OvpM,21 +onnx/backend/test/data/node/test_bitshift_right_uint32/test_data_set_0/input_1.pb,sha256=LPGgfy0kWPfzwbePS63zlBnHRcjnKVmTNhl56DQT_LQ,21 +onnx/backend/test/data/node/test_bitshift_right_uint32/test_data_set_0/output_0.pb,sha256=9HW8Yn_MW2Ply6yspxBbl2QGTJwZ6HTfx7F2ZWCB8ks,21 +onnx/backend/test/data/node/test_bitshift_right_uint64/model.onnx,sha256=3TJliEGizBkKgJoqU47jiojDGC7a1JJyt1OGJ9f_zjY,147 +onnx/backend/test/data/node/test_bitshift_right_uint64/test_data_set_0/input_0.pb,sha256=gdItplXa-gah3kzYq0cb4Ny7SwnrQpNjCBu9BwvRWs4,33 +onnx/backend/test/data/node/test_bitshift_right_uint64/test_data_set_0/input_1.pb,sha256=srzREYqcjuSETV_il_0QENxQV8EbW-IHk_e_wnZX_s0,33 +onnx/backend/test/data/node/test_bitshift_right_uint64/test_data_set_0/output_0.pb,sha256=cIXEma3tYpCdRIYdSiF8UH4PveH4LSu3mBe9eKtaxRY,33 +onnx/backend/test/data/node/test_bitshift_right_uint8/model.onnx,sha256=FiyhL_aDh3eJggmM0yqczmC9_K4z4r-NVC3So1zoX_I,146 +onnx/backend/test/data/node/test_bitshift_right_uint8/test_data_set_0/input_0.pb,sha256=Npre_kHciTwjklfqHpxJJP6BJTCF638zHABwLLSJLVc,12 +onnx/backend/test/data/node/test_bitshift_right_uint8/test_data_set_0/input_1.pb,sha256=OmXyE8_Mj-VdKRY0dsUACEnjcieoln-kaSNRW1ulqnU,12 +onnx/backend/test/data/node/test_bitshift_right_uint8/test_data_set_0/output_0.pb,sha256=xQwf8_nlX7DpOs61hkVxH1caC4G9PqWOvYdUgQCFFs4,12 +onnx/backend/test/data/node/test_bitwise_and_i16_3d/model.onnx,sha256=yc-YBEB-A5u1Ji2mSkoNgGF87Q901mX8Wzh_CxOZecQ,166 +onnx/backend/test/data/node/test_bitwise_and_i16_3d/test_data_set_0/input_0.pb,sha256=51_g10nhl3pGMHzIZPb9uJiIkLoqLtARgYxBZRXFJ8A,133 +onnx/backend/test/data/node/test_bitwise_and_i16_3d/test_data_set_0/input_1.pb,sha256=cHjBuAqlCncTYhGLbDmLR1oHQbjNdpHVt6Mo7Cg31To,133 +onnx/backend/test/data/node/test_bitwise_and_i16_3d/test_data_set_0/output_0.pb,sha256=HnblxbS-jMUk0JpLSVezpf9X_qP2I_E57Vv_TXxNubY,142 +onnx/backend/test/data/node/test_bitwise_and_i32_2d/model.onnx,sha256=VI5E4eNL7PdENxiR7BWHTpJQH4J22TwDIyYo3i6x2f4,154 +onnx/backend/test/data/node/test_bitwise_and_i32_2d/test_data_set_0/input_0.pb,sha256=vBRtRagPhbZGk_pp35bJnnqw8va3HLQEeA-gRpGr_cg,59 +onnx/backend/test/data/node/test_bitwise_and_i32_2d/test_data_set_0/input_1.pb,sha256=GLkOWs-6gJxZOVVgeCGpbbxY4U3jqjvRYYSunDQHxjQ,59 +onnx/backend/test/data/node/test_bitwise_and_i32_2d/test_data_set_0/output_0.pb,sha256=zSY0bjB6kO8gOdTaQqGVJstGswRtTnQ9hofYX2DVZII,68 +onnx/backend/test/data/node/test_bitwise_and_ui64_bcast_3v1d/model.onnx,sha256=0ilxxehkgXk3mDGpu7uXqdCoc65XmidwPzqMi0gwbAw,167 +onnx/backend/test/data/node/test_bitwise_and_ui64_bcast_3v1d/test_data_set_0/input_0.pb,sha256=yyF0q6vMUHrYoNxDxHkS1M1J57ABoWCxVPikiSbgt9I,494 +onnx/backend/test/data/node/test_bitwise_and_ui64_bcast_3v1d/test_data_set_0/input_1.pb,sha256=yhmmpWW5p4InqcZXi6vWNboAzILeHxIStUce2hV1TeQ,49 +onnx/backend/test/data/node/test_bitwise_and_ui64_bcast_3v1d/test_data_set_0/output_0.pb,sha256=YjQjN5VK40lEVH99dAAz4EeIEq9g8MBODn-ksUV8UUE,503 
+onnx/backend/test/data/node/test_bitwise_and_ui8_bcast_4v3d/model.onnx,sha256=NAzIiuPcp2ykmd0Ylcqs6qC1itlr9FNG2Fct6GAxXAI,182 +onnx/backend/test/data/node/test_bitwise_and_ui8_bcast_4v3d/test_data_set_0/input_0.pb,sha256=M0-FUiXusNZj8HLlUaJlCacWI_VvmB7CXwEQm7W8-_o,376 +onnx/backend/test/data/node/test_bitwise_and_ui8_bcast_4v3d/test_data_set_0/input_1.pb,sha256=cmiv6qNYN8mNf9Mbmi3IZSAgZ1arXHV1abF_FizB1vk,133 +onnx/backend/test/data/node/test_bitwise_and_ui8_bcast_4v3d/test_data_set_0/output_0.pb,sha256=k58qWsuM9aJahBM4EBl-PRJW_vtQ2_MJe4DIdHgKzcE,385 +onnx/backend/test/data/node/test_bitwise_not_2d/model.onnx,sha256=Hi968CmDbvXJ6F-QnnsvEsuBm1PJG5mOddx40P3JTEA,127 +onnx/backend/test/data/node/test_bitwise_not_2d/test_data_set_0/input_0.pb,sha256=vBRtRagPhbZGk_pp35bJnnqw8va3HLQEeA-gRpGr_cg,59 +onnx/backend/test/data/node/test_bitwise_not_2d/test_data_set_0/output_0.pb,sha256=Igvz3l6da-yIIbb1dHK45BFXJl8FWckYz7UCN80_p84,69 +onnx/backend/test/data/node/test_bitwise_not_3d/model.onnx,sha256=li1vHDxnTXPyVVg0BX3sL0009QiN_vXxIjz0j-zbPnw,135 +onnx/backend/test/data/node/test_bitwise_not_3d/test_data_set_0/input_0.pb,sha256=-GvODn13fY7XtP0uHpumPLmhhvPiYGG-4gplBNAZrt0,133 +onnx/backend/test/data/node/test_bitwise_not_3d/test_data_set_0/output_0.pb,sha256=979pI-QykNeUo-B8su5J9nc-VBzqJoNMUOGhs1B88xw,143 +onnx/backend/test/data/node/test_bitwise_not_4d/model.onnx,sha256=InYj3ToYFWonqMUxkfgDCVZDYHtSr1QC72QebAd_4CI,143 +onnx/backend/test/data/node/test_bitwise_not_4d/test_data_set_0/input_0.pb,sha256=M0-FUiXusNZj8HLlUaJlCacWI_VvmB7CXwEQm7W8-_o,376 +onnx/backend/test/data/node/test_bitwise_not_4d/test_data_set_0/output_0.pb,sha256=cCdeum9xyuJG02YEHo7w2P15mUmeFWkAUufvJbL-wuA,386 +onnx/backend/test/data/node/test_bitwise_or_i16_4d/model.onnx,sha256=enje0I-pI3bLXqGYCBGKlQWK0PFdl59_PwDjwafql3o,174 +onnx/backend/test/data/node/test_bitwise_or_i16_4d/test_data_set_0/input_0.pb,sha256=PnxxwiX86m3d_QkLdl3Asalt7yqxjGmgck55zPjA2tM,376 +onnx/backend/test/data/node/test_bitwise_or_i16_4d/test_data_set_0/input_1.pb,sha256=dESyvCmUTodtbWS3T_t5OMVsH0XWXdlE-pNDsdco8AU,376 +onnx/backend/test/data/node/test_bitwise_or_i16_4d/test_data_set_0/output_0.pb,sha256=QeGUOBMBMRUWqrTa7bHM36t2Emg8OQwtVE0wLVO3Cpg,384 +onnx/backend/test/data/node/test_bitwise_or_i32_2d/model.onnx,sha256=McwgQLlT_EdkiUFBxBM3MASoBLHnDGhilM4N0nONxOk,149 +onnx/backend/test/data/node/test_bitwise_or_i32_2d/test_data_set_0/input_0.pb,sha256=vBRtRagPhbZGk_pp35bJnnqw8va3HLQEeA-gRpGr_cg,59 +onnx/backend/test/data/node/test_bitwise_or_i32_2d/test_data_set_0/input_1.pb,sha256=GLkOWs-6gJxZOVVgeCGpbbxY4U3jqjvRYYSunDQHxjQ,59 +onnx/backend/test/data/node/test_bitwise_or_i32_2d/test_data_set_0/output_0.pb,sha256=PWwPYI7MIgvc8DurWpiRPeZbG0vMzQ4m4jGGmC2OZT8,67 +onnx/backend/test/data/node/test_bitwise_or_ui64_bcast_3v1d/model.onnx,sha256=L_BirEZ6MunAtXPvm2htIlr8oVaFerC3x3TRibbuB1g,163 +onnx/backend/test/data/node/test_bitwise_or_ui64_bcast_3v1d/test_data_set_0/input_0.pb,sha256=yyF0q6vMUHrYoNxDxHkS1M1J57ABoWCxVPikiSbgt9I,494 +onnx/backend/test/data/node/test_bitwise_or_ui64_bcast_3v1d/test_data_set_0/input_1.pb,sha256=yhmmpWW5p4InqcZXi6vWNboAzILeHxIStUce2hV1TeQ,49 +onnx/backend/test/data/node/test_bitwise_or_ui64_bcast_3v1d/test_data_set_0/output_0.pb,sha256=gnw5KbwNGy3mtEYVA0VpILmEYsuxCZe1wGgEKHyNT5o,502 +onnx/backend/test/data/node/test_bitwise_or_ui8_bcast_4v3d/model.onnx,sha256=VF0_AxeWLUOiz_0T0ghcsMW6_ePD9xlS9XERYlKQoqc,178 +onnx/backend/test/data/node/test_bitwise_or_ui8_bcast_4v3d/test_data_set_0/input_0.pb,sha256=M0-FUiXusNZj8HLlUaJlCacWI_VvmB7CXwEQm7W8-_o,376 
+onnx/backend/test/data/node/test_bitwise_or_ui8_bcast_4v3d/test_data_set_0/input_1.pb,sha256=cmiv6qNYN8mNf9Mbmi3IZSAgZ1arXHV1abF_FizB1vk,133 +onnx/backend/test/data/node/test_bitwise_or_ui8_bcast_4v3d/test_data_set_0/output_0.pb,sha256=Xb3ol4eZSuioqDtjcIDOyOCendI-ptK9UGFpi5tIWLc,384 +onnx/backend/test/data/node/test_bitwise_xor_i16_3d/model.onnx,sha256=NsWTaACGKoFPWQFATJhi26d7Ad7E1awRbgc3CnH7I8w,166 +onnx/backend/test/data/node/test_bitwise_xor_i16_3d/test_data_set_0/input_0.pb,sha256=51_g10nhl3pGMHzIZPb9uJiIkLoqLtARgYxBZRXFJ8A,133 +onnx/backend/test/data/node/test_bitwise_xor_i16_3d/test_data_set_0/input_1.pb,sha256=cHjBuAqlCncTYhGLbDmLR1oHQbjNdpHVt6Mo7Cg31To,133 +onnx/backend/test/data/node/test_bitwise_xor_i16_3d/test_data_set_0/output_0.pb,sha256=25Ra6ldrJ6PLqEEDgZYtL1vFlIf0_C9SO0UPBHeRks0,142 +onnx/backend/test/data/node/test_bitwise_xor_i32_2d/model.onnx,sha256=0QLOP7dIxQdVWZMwH3iA41FMg3nSJgH_KlpdF50dqS0,154 +onnx/backend/test/data/node/test_bitwise_xor_i32_2d/test_data_set_0/input_0.pb,sha256=vBRtRagPhbZGk_pp35bJnnqw8va3HLQEeA-gRpGr_cg,59 +onnx/backend/test/data/node/test_bitwise_xor_i32_2d/test_data_set_0/input_1.pb,sha256=GLkOWs-6gJxZOVVgeCGpbbxY4U3jqjvRYYSunDQHxjQ,59 +onnx/backend/test/data/node/test_bitwise_xor_i32_2d/test_data_set_0/output_0.pb,sha256=e61ZBx9CxyY306l5aTy6WcFQLoN6X93lXGQ6bjJl1NU,68 +onnx/backend/test/data/node/test_bitwise_xor_ui64_bcast_3v1d/model.onnx,sha256=LG4PKAQcvKETW_3kJrSXGFi1WrP_osg47XdIotLXITc,167 +onnx/backend/test/data/node/test_bitwise_xor_ui64_bcast_3v1d/test_data_set_0/input_0.pb,sha256=yyF0q6vMUHrYoNxDxHkS1M1J57ABoWCxVPikiSbgt9I,494 +onnx/backend/test/data/node/test_bitwise_xor_ui64_bcast_3v1d/test_data_set_0/input_1.pb,sha256=yhmmpWW5p4InqcZXi6vWNboAzILeHxIStUce2hV1TeQ,49 +onnx/backend/test/data/node/test_bitwise_xor_ui64_bcast_3v1d/test_data_set_0/output_0.pb,sha256=qBxhB5FE97cuhUy9PLy4vufyWq3WslKpVW9AszsMxoQ,503 +onnx/backend/test/data/node/test_bitwise_xor_ui8_bcast_4v3d/model.onnx,sha256=HELtLeN0P__CPF4SuXLa8Qkam3xmJp-Wt4S0DpQmfrk,182 +onnx/backend/test/data/node/test_bitwise_xor_ui8_bcast_4v3d/test_data_set_0/input_0.pb,sha256=M0-FUiXusNZj8HLlUaJlCacWI_VvmB7CXwEQm7W8-_o,376 +onnx/backend/test/data/node/test_bitwise_xor_ui8_bcast_4v3d/test_data_set_0/input_1.pb,sha256=cmiv6qNYN8mNf9Mbmi3IZSAgZ1arXHV1abF_FizB1vk,133 +onnx/backend/test/data/node/test_bitwise_xor_ui8_bcast_4v3d/test_data_set_0/output_0.pb,sha256=WbhBsgrQao6_fBtCLQYp6UGrGmVRcnMk1H7xyusFeG8,385 +onnx/backend/test/data/node/test_blackmanwindow/model.onnx,sha256=Nx4NbJvhAjOy2ul7cNMiqQFvHzyHF7xBvyni8rB0c8E,99 +onnx/backend/test/data/node/test_blackmanwindow/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 +onnx/backend/test/data/node/test_blackmanwindow/test_data_set_0/output_0.pb,sha256=Kd21pKhpQEYlmWzcixw14dNevd3xULdC18vFPGLVN40,49 +onnx/backend/test/data/node/test_blackmanwindow_expanded/model.onnx,sha256=05pJhIQZB2um1j-TNW7okCAE3V_VDP6y3so0xkUc_Y4,4244 +onnx/backend/test/data/node/test_blackmanwindow_expanded/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 +onnx/backend/test/data/node/test_blackmanwindow_expanded/test_data_set_0/output_0.pb,sha256=Kd21pKhpQEYlmWzcixw14dNevd3xULdC18vFPGLVN40,49 +onnx/backend/test/data/node/test_blackmanwindow_symmetric/model.onnx,sha256=X26VKulL-8l4oQJJbri3-rauH9EmYfdOt95QNjx5sIk,126 +onnx/backend/test/data/node/test_blackmanwindow_symmetric/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 
+onnx/backend/test/data/node/test_blackmanwindow_symmetric/test_data_set_0/output_0.pb,sha256=abF6aDOiROdgOi7d7c72vTOoApvpGnK82qx1JO3NkqY,49 +onnx/backend/test/data/node/test_blackmanwindow_symmetric_expanded/model.onnx,sha256=PcLK0s5KHCgn2quKjikZV8V1ouRwP7Y06MbWwXStF9w,4814 +onnx/backend/test/data/node/test_blackmanwindow_symmetric_expanded/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 +onnx/backend/test/data/node/test_blackmanwindow_symmetric_expanded/test_data_set_0/output_0.pb,sha256=abF6aDOiROdgOi7d7c72vTOoApvpGnK82qx1JO3NkqY,49 +onnx/backend/test/data/node/test_cast_BFLOAT16_to_FLOAT/model.onnx,sha256=aRCxHtrF7xFz0b7CdnAy3Iz0_oBcNJ1zeUkwGGDewg8,138 +onnx/backend/test/data/node/test_cast_BFLOAT16_to_FLOAT/test_data_set_0/input_0.pb,sha256=7PtSEfkVZD2hMxs2mjj8K-ie6Rcy_-janaLIcnczEFs,39 +onnx/backend/test/data/node/test_cast_BFLOAT16_to_FLOAT/test_data_set_0/output_0.pb,sha256=n02cVMmSsmO-C3IqgA3VI65Qt0Mb4nA7308NU_hKx_k,64 +onnx/backend/test/data/node/test_cast_DOUBLE_to_FLOAT/model.onnx,sha256=LyMSsZvQjPWPSiK5NNxdUr3mtlTfTgqZNFqDTeytpyQ,136 +onnx/backend/test/data/node/test_cast_DOUBLE_to_FLOAT/test_data_set_0/input_0.pb,sha256=6HkDTeUmWLg8PEFbpSpZLMj7vMfJhi7QuVjK6KzI06Y,111 +onnx/backend/test/data/node/test_cast_DOUBLE_to_FLOAT/test_data_set_0/output_0.pb,sha256=C9cN3dfDqwak6remmnIK_aV-FQtJ7ySCfkuCIExKauQ,64 +onnx/backend/test/data/node/test_cast_DOUBLE_to_FLOAT16/model.onnx,sha256=Ai-InR6Ql4oScMiLWayavxeyHt3T-ZCIYBwtj7ZUuDA,138 +onnx/backend/test/data/node/test_cast_DOUBLE_to_FLOAT16/test_data_set_0/input_0.pb,sha256=8mxoy6mEAhl8zQHvPiK2JLR4boRN-SMAUSPCrKsiD9o,111 +onnx/backend/test/data/node/test_cast_DOUBLE_to_FLOAT16/test_data_set_0/output_0.pb,sha256=OcElOz0iQh2Eia2AnUOusERUmJcH7mFl2nFWCgGdc-w,40 +onnx/backend/test/data/node/test_cast_FLOAT16_to_DOUBLE/model.onnx,sha256=1CDTf9lRYu6JI_XP-SzvnHvNTO14CnynpmF7Tp9C1OQ,138 +onnx/backend/test/data/node/test_cast_FLOAT16_to_DOUBLE/test_data_set_0/input_0.pb,sha256=I2V7DvRr6upYRyk34-P96yMvuJWhF4Sz_7_XZyVkwsI,39 +onnx/backend/test/data/node/test_cast_FLOAT16_to_DOUBLE/test_data_set_0/output_0.pb,sha256=n9sfW336MaQ4ijNNykK-UBxM36StBFmAcPedaSQJDsw,112 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT/model.onnx,sha256=narmFpioZ9nCa0LA9kN5NblPhZ9_bf_7FKZ3xTbyOh8,137 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT/test_data_set_0/input_0.pb,sha256=ParyS-aBVa8GjFhnqm1i1f4rEDEdtTz-ynPUUMXKqDs,39 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT/test_data_set_0/output_0.pb,sha256=qiswSIR8nu6bG-QLS9Qaw9QiA8_EejF0tgaSLGkh0lY,64 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E4M3FN/model.onnx,sha256=iexYjokt0Zs_JxN5wsD6NUWyPqmQ_dcd7Gs46Rb4G8M,144 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E4M3FN/test_data_set_0/input_0.pb,sha256=nEr5NWxtFFQYRqOKX5XGP4eyqAQW-ylgATItQC6bVkU,46 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E4M3FN/test_data_set_0/output_0.pb,sha256=Q1oh3F7SlI7IbMKChzYs7aLXAtUv7l-I3ZHjxqiOw9g,29 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E4M3FNUZ/model.onnx,sha256=Sljtvic-Qj36BesaWivttm_Qd6jOFox_xLVpjlGPK4Y,146 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E4M3FNUZ/test_data_set_0/input_0.pb,sha256=nEr5NWxtFFQYRqOKX5XGP4eyqAQW-ylgATItQC6bVkU,46 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E4M3FNUZ/test_data_set_0/output_0.pb,sha256=Ae71rTpzJh8hS1FS1CDTmmvRPOzT4Vfa85ZMoxfhFkg,29 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E5M2/model.onnx,sha256=BmQI7_N7rd5HZZYJR73qyaf0ZIx_c4Y2RBJJRErZlvk,142 
+onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E5M2/test_data_set_0/input_0.pb,sha256=nEr5NWxtFFQYRqOKX5XGP4eyqAQW-ylgATItQC6bVkU,46 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E5M2/test_data_set_0/output_0.pb,sha256=RimN3sh4v8xvWBBs9WXm31FklM63g_onnKVYeatOFKc,29 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E5M2FNUZ/model.onnx,sha256=iwDk4S80HFMmJbRIufSSLKI_H9f5GaWcrCaQ6Tx8Sjg,146 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E5M2FNUZ/test_data_set_0/input_0.pb,sha256=nEr5NWxtFFQYRqOKX5XGP4eyqAQW-ylgATItQC6bVkU,46 +onnx/backend/test/data/node/test_cast_FLOAT16_to_FLOAT8E5M2FNUZ/test_data_set_0/output_0.pb,sha256=f3_-TiUHKmojs41jJLGOtBfUHowWyUAvTJLQwKk77b4,29 +onnx/backend/test/data/node/test_cast_FLOAT16_to_INT4/model.onnx,sha256=x3LtAenIiqxFxzSObvV-Z6jmLHxRK2aptAHwl7kyCT4,136 +onnx/backend/test/data/node/test_cast_FLOAT16_to_INT4/test_data_set_0/input_0.pb,sha256=cBIuFz8h3Mo_kwhpdxaNsUk_sKScv3FWBN_kY6OF8VI,83 +onnx/backend/test/data/node/test_cast_FLOAT16_to_INT4/test_data_set_0/output_0.pb,sha256=T_rpPoCaZ4eqW7booZoc-KIDG4tsDopl6bOyjJiYSow,60 +onnx/backend/test/data/node/test_cast_FLOAT16_to_UINT4/model.onnx,sha256=hEVksdOaM4Ps2XP-x58Ati3S6J1iSCQWTHZWjCNfhY0,137 +onnx/backend/test/data/node/test_cast_FLOAT16_to_UINT4/test_data_set_0/input_0.pb,sha256=cBIuFz8h3Mo_kwhpdxaNsUk_sKScv3FWBN_kY6OF8VI,83 +onnx/backend/test/data/node/test_cast_FLOAT16_to_UINT4/test_data_set_0/output_0.pb,sha256=jhZ2SUtJqB8NsmXq31VaecI2yS5gRMzE2n7rnJrHLUw,28 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FNUZ_to_FLOAT/model.onnx,sha256=90q5_6yBXRYzQRxO5tPa6YTywtfQLnerv4wmNv7ZYAg,144 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FNUZ_to_FLOAT/test_data_set_0/input_0.pb,sha256=Ae71rTpzJh8hS1FS1CDTmmvRPOzT4Vfa85ZMoxfhFkg,29 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FNUZ_to_FLOAT/test_data_set_0/output_0.pb,sha256=1J-csxBqDSFfgrvJGin5GqGw4u0bhb6hIbsCD0AiGew,71 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FNUZ_to_FLOAT16/model.onnx,sha256=PTrLM-4YxEHcqcv-IVY_PbRKadOrPEJtvVJRfO-BkMQ,146 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FNUZ_to_FLOAT16/test_data_set_0/input_0.pb,sha256=Ae71rTpzJh8hS1FS1CDTmmvRPOzT4Vfa85ZMoxfhFkg,29 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FNUZ_to_FLOAT16/test_data_set_0/output_0.pb,sha256=67ZTf0zQR1Ougqil8hru7HESXDqVriYpKuyXcHQZVqU,44 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FN_to_FLOAT/model.onnx,sha256=yidk5WpXyO-jcuYpuchGrB4nUHtvBfM-Zt6EmZx-zLo,142 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FN_to_FLOAT/test_data_set_0/input_0.pb,sha256=Q1oh3F7SlI7IbMKChzYs7aLXAtUv7l-I3ZHjxqiOw9g,29 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FN_to_FLOAT/test_data_set_0/output_0.pb,sha256=Xzzc2IYdWujsRkPbDsHkjK00n622ZstNVozqLx4W7To,71 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FN_to_FLOAT16/model.onnx,sha256=3YuekqAZC7hRV9PBZvUQ4-h6z8-YE8J5yZ1DTUoZFok,144 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FN_to_FLOAT16/test_data_set_0/input_0.pb,sha256=Q1oh3F7SlI7IbMKChzYs7aLXAtUv7l-I3ZHjxqiOw9g,29 +onnx/backend/test/data/node/test_cast_FLOAT8E4M3FN_to_FLOAT16/test_data_set_0/output_0.pb,sha256=m-GA-fkn5xwtOphoj-J6RxKe44Uh6V3hd8J4zuPkQOM,46 +onnx/backend/test/data/node/test_cast_FLOAT8E5M2FNUZ_to_FLOAT/model.onnx,sha256=ip0AiPcXA8C-YQcKFtR9ujQiCQExGQnFCozYsNOstO0,144 +onnx/backend/test/data/node/test_cast_FLOAT8E5M2FNUZ_to_FLOAT/test_data_set_0/input_0.pb,sha256=f3_-TiUHKmojs41jJLGOtBfUHowWyUAvTJLQwKk77b4,29 
+onnx/backend/test/data/node/test_cast_FLOAT8E5M2FNUZ_to_FLOAT/test_data_set_0/output_0.pb,sha256=tDWu9tBrVG3_FOBPvVYZv7xOfsazch1XxD-FFAcdQk8,71 +onnx/backend/test/data/node/test_cast_FLOAT8E5M2FNUZ_to_FLOAT16/model.onnx,sha256=fpfiqvIRxc4D3HwD5RqIySiS5dDkUkVJV1tWpx3S1_A,146 +onnx/backend/test/data/node/test_cast_FLOAT8E5M2FNUZ_to_FLOAT16/test_data_set_0/input_0.pb,sha256=f3_-TiUHKmojs41jJLGOtBfUHowWyUAvTJLQwKk77b4,29 +onnx/backend/test/data/node/test_cast_FLOAT8E5M2FNUZ_to_FLOAT16/test_data_set_0/output_0.pb,sha256=twl7BWviIU6UPRQSzGftqBAc2FCi22mqJYSNEjFwNYM,44 +onnx/backend/test/data/node/test_cast_FLOAT8E5M2_to_FLOAT/model.onnx,sha256=7YM3ckMuaYED7SrqftSD4ZKhyQqiR4ZkG0d-KbNGKp0,140 +onnx/backend/test/data/node/test_cast_FLOAT8E5M2_to_FLOAT/test_data_set_0/input_0.pb,sha256=RimN3sh4v8xvWBBs9WXm31FklM63g_onnKVYeatOFKc,29 +onnx/backend/test/data/node/test_cast_FLOAT8E5M2_to_FLOAT/test_data_set_0/output_0.pb,sha256=CZ_Zg2NLkIdERni5SYuPr4pIQJRy227B6TuPbaE79ZM,71 +onnx/backend/test/data/node/test_cast_FLOAT8E5M2_to_FLOAT16/model.onnx,sha256=D5Naazbairby8C3Szhua2qDU6vrRFXpPuJvye6uwtiY,142 +onnx/backend/test/data/node/test_cast_FLOAT8E5M2_to_FLOAT16/test_data_set_0/input_0.pb,sha256=RimN3sh4v8xvWBBs9WXm31FklM63g_onnKVYeatOFKc,29 +onnx/backend/test/data/node/test_cast_FLOAT8E5M2_to_FLOAT16/test_data_set_0/output_0.pb,sha256=siJb0acflN0wStRKNXSNIe4w7At828Rqi1uw6G-V_k0,46 +onnx/backend/test/data/node/test_cast_FLOAT_to_BFLOAT16/model.onnx,sha256=Xft0Ty7gt2jUTgnHu-ohcW8VGKhCHQ3MBsXYoSwr0qw,138 +onnx/backend/test/data/node/test_cast_FLOAT_to_BFLOAT16/test_data_set_0/input_0.pb,sha256=h-Mw8kpqdER-gQV9wnNRomY7p72FnbzKkvgjriulnMI,63 +onnx/backend/test/data/node/test_cast_FLOAT_to_BFLOAT16/test_data_set_0/output_0.pb,sha256=hwIS-rCpVNRgrx9XXtrf4dmhBQ1PbmRzGsSyuzuG7WI,40 +onnx/backend/test/data/node/test_cast_FLOAT_to_DOUBLE/model.onnx,sha256=WIYFRaFBBF-YBhBGuTZJMb1gexe9FuBNYOoIYjzaETU,136 +onnx/backend/test/data/node/test_cast_FLOAT_to_DOUBLE/test_data_set_0/input_0.pb,sha256=8t0QG5JpWVp5SwuBXkzB4NV12BBiHg7Fl58ELPZ0wcU,63 +onnx/backend/test/data/node/test_cast_FLOAT_to_DOUBLE/test_data_set_0/output_0.pb,sha256=2Rax3-J6ALt-LF7sbmKz6c5PZ0ubZP67DWTBd-E6B_Q,112 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT16/model.onnx,sha256=Yo917XR_WSLMc6sgbDHfPnLp9fvaIFT_zuK1l4UbaXA,137 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT16/test_data_set_0/input_0.pb,sha256=ftxQiq2nN6IAr8JHRIQXxG3vMB8Ra957fhAnBRn6HAA,63 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT16/test_data_set_0/output_0.pb,sha256=i1o3756jog8U36GzZ8lCj2V6TpOV9DiGMDsWZgVC_58,40 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E4M3FN/model.onnx,sha256=tkkUHINaG57UT0VG9kM5U2Ni7XGMZGQSDRFmos6CZf4,142 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E4M3FN/test_data_set_0/input_0.pb,sha256=7HvyFzAVT1ckyggxa83syY9R8tXpxopmWwZ4JU6n_5s,71 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E4M3FN/test_data_set_0/output_0.pb,sha256=Q1oh3F7SlI7IbMKChzYs7aLXAtUv7l-I3ZHjxqiOw9g,29 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E4M3FNUZ/model.onnx,sha256=0T8xd5pdIOYFe06HwEifFs7O4uMZRaEoXjjQa60OjPc,144 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E4M3FNUZ/test_data_set_0/input_0.pb,sha256=7HvyFzAVT1ckyggxa83syY9R8tXpxopmWwZ4JU6n_5s,71 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E4M3FNUZ/test_data_set_0/output_0.pb,sha256=Ae71rTpzJh8hS1FS1CDTmmvRPOzT4Vfa85ZMoxfhFkg,29 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E5M2/model.onnx,sha256=Z2XnULtBTj_fY2rQbltaNJJ0Pw0xErJ1Bg6R0a2gE_s,140 
+onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E5M2/test_data_set_0/input_0.pb,sha256=7HvyFzAVT1ckyggxa83syY9R8tXpxopmWwZ4JU6n_5s,71 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E5M2/test_data_set_0/output_0.pb,sha256=RimN3sh4v8xvWBBs9WXm31FklM63g_onnKVYeatOFKc,29 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E5M2FNUZ/model.onnx,sha256=v4qQgkokjJLKNd_bTYIJKjgbkeIBsOPcaPI5cI2BmJQ,144 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E5M2FNUZ/test_data_set_0/input_0.pb,sha256=7HvyFzAVT1ckyggxa83syY9R8tXpxopmWwZ4JU6n_5s,71 +onnx/backend/test/data/node/test_cast_FLOAT_to_FLOAT8E5M2FNUZ/test_data_set_0/output_0.pb,sha256=f3_-TiUHKmojs41jJLGOtBfUHowWyUAvTJLQwKk77b4,29 +onnx/backend/test/data/node/test_cast_FLOAT_to_INT4/model.onnx,sha256=h8bEqzSwy5vUc4fvJtgwXxXnpNxXRuvxntmXmyV3ZU8,134 +onnx/backend/test/data/node/test_cast_FLOAT_to_INT4/test_data_set_0/input_0.pb,sha256=V72bNgWBPGMddD9IfNcn7Q2Lyb3R-4KfSBiLvFinh0s,111 +onnx/backend/test/data/node/test_cast_FLOAT_to_INT4/test_data_set_0/output_0.pb,sha256=T_rpPoCaZ4eqW7booZoc-KIDG4tsDopl6bOyjJiYSow,60 +onnx/backend/test/data/node/test_cast_FLOAT_to_STRING/model.onnx,sha256=sNROdnYt55xo4rk2WB-Xlx65Zif0LgU1w8fcx5SZ6ew,136 +onnx/backend/test/data/node/test_cast_FLOAT_to_STRING/test_data_set_0/input_0.pb,sha256=JnNpBHNwQmCts1dqNwxlHQe57ohJ0IXxvibJD_HNEPM,63 +onnx/backend/test/data/node/test_cast_FLOAT_to_STRING/test_data_set_0/output_0.pb,sha256=v40vkBihrsiWNAdrfrIp7q4NL0jW69jAVuhJPFMDUW8,155 +onnx/backend/test/data/node/test_cast_FLOAT_to_UINT4/model.onnx,sha256=-dPCDBqBjyx1yK7v4xDLXE7__poxoW2kHuM_SJ-z3bY,135 +onnx/backend/test/data/node/test_cast_FLOAT_to_UINT4/test_data_set_0/input_0.pb,sha256=V72bNgWBPGMddD9IfNcn7Q2Lyb3R-4KfSBiLvFinh0s,111 +onnx/backend/test/data/node/test_cast_FLOAT_to_UINT4/test_data_set_0/output_0.pb,sha256=jhZ2SUtJqB8NsmXq31VaecI2yS5gRMzE2n7rnJrHLUw,28 +onnx/backend/test/data/node/test_cast_INT4_to_FLOAT/model.onnx,sha256=z-qf99cvGaBMAlXlmmqwky9q7cwdjG08jMBrVZvJLww,134 +onnx/backend/test/data/node/test_cast_INT4_to_FLOAT/test_data_set_0/input_0.pb,sha256=lhDUUlQ_YKcM2HXEGyn5xm7_hLExBTR2Qxt9PFwAKF4,60 +onnx/backend/test/data/node/test_cast_INT4_to_FLOAT/test_data_set_0/output_0.pb,sha256=NvRqw9h1KUwSeuWNv44q7C5lMIJ-Jd1srZ-YtanW7g8,111 +onnx/backend/test/data/node/test_cast_INT4_to_FLOAT16/model.onnx,sha256=6DTsN8BLBOpmDHmaEzhtYo6pZ8uqrNSkYoFhb6KPic0,136 +onnx/backend/test/data/node/test_cast_INT4_to_FLOAT16/test_data_set_0/input_0.pb,sha256=lhDUUlQ_YKcM2HXEGyn5xm7_hLExBTR2Qxt9PFwAKF4,60 +onnx/backend/test/data/node/test_cast_INT4_to_FLOAT16/test_data_set_0/output_0.pb,sha256=O_4TtknuHnjZUh6zo4wFuIdzdcldFiV8kn3usbDe9WY,83 +onnx/backend/test/data/node/test_cast_INT4_to_INT8/model.onnx,sha256=LvLwZ6tQiYjIygdRVw67c57DDRom_nBWlYgtDdJo7KM,133 +onnx/backend/test/data/node/test_cast_INT4_to_INT8/test_data_set_0/input_0.pb,sha256=lhDUUlQ_YKcM2HXEGyn5xm7_hLExBTR2Qxt9PFwAKF4,60 +onnx/backend/test/data/node/test_cast_INT4_to_INT8/test_data_set_0/output_0.pb,sha256=h6acor1VaTpkb4fsScOk79Y93aM6kmg8FGBTLIQ_6SM,117 +onnx/backend/test/data/node/test_cast_STRING_to_FLOAT/model.onnx,sha256=Eusl1Jfrm9V4vXAgw5nmbYoXwPiYtiFLPMF8JwL9mQ0,136 +onnx/backend/test/data/node/test_cast_STRING_to_FLOAT/test_data_set_0/input_0.pb,sha256=Lk6xhMerJC15fOneCl4LUJFMKPdWwhs9XpoU2KT9Dbk,128 +onnx/backend/test/data/node/test_cast_STRING_to_FLOAT/test_data_set_0/output_0.pb,sha256=0jolK7AVdqKeYqxXZl_bMAlnbkHhPocU-fidVgDsG9g,64 
+onnx/backend/test/data/node/test_cast_UINT4_to_FLOAT/model.onnx,sha256=HDTYMsYUZbkSSgPQSn43o1Te0hwZn63UHajSPDdSNJQ,135 +onnx/backend/test/data/node/test_cast_UINT4_to_FLOAT/test_data_set_0/input_0.pb,sha256=rzxT3FAfxe6B-5wPoZ1T_i-G0MvzfQHFAZsBMwB0T8c,28 +onnx/backend/test/data/node/test_cast_UINT4_to_FLOAT/test_data_set_0/output_0.pb,sha256=yPQV7lQtQX2Hb160l14xe8MYi6bXF9HTBUekqSKdUjA,111 +onnx/backend/test/data/node/test_cast_UINT4_to_FLOAT16/model.onnx,sha256=QiBq4_gpd9aHDB4OZHc69dNMdMiSXCTyzUYuoDkpWuw,137 +onnx/backend/test/data/node/test_cast_UINT4_to_FLOAT16/test_data_set_0/input_0.pb,sha256=rzxT3FAfxe6B-5wPoZ1T_i-G0MvzfQHFAZsBMwB0T8c,28 +onnx/backend/test/data/node/test_cast_UINT4_to_FLOAT16/test_data_set_0/output_0.pb,sha256=XiYyYvFGy1nbrX4a5ZjvbWFsq4yif53aC6OOOqnfZxk,65 +onnx/backend/test/data/node/test_cast_UINT4_to_UINT8/model.onnx,sha256=jsVrFZXWwfVZpxVXGE5ojsikcWwHPsGJzgAfg3L9vmI,135 +onnx/backend/test/data/node/test_cast_UINT4_to_UINT8/test_data_set_0/input_0.pb,sha256=rzxT3FAfxe6B-5wPoZ1T_i-G0MvzfQHFAZsBMwB0T8c,28 +onnx/backend/test/data/node/test_cast_UINT4_to_UINT8/test_data_set_0/output_0.pb,sha256=xpyw7PkD2CxMzDqLj0cOLxl0VS7yuNeig2WGFA0aSpE,36 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E4M3FN/model.onnx,sha256=R165CpFlzlnQL0x3th8oPbSdLJZPgLeXTeViDGsTQKE,174 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E4M3FN/test_data_set_0/input_0.pb,sha256=nEr5NWxtFFQYRqOKX5XGP4eyqAQW-ylgATItQC6bVkU,46 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E4M3FN/test_data_set_0/output_0.pb,sha256=Xv05LKfX5PPNXznU352U3pE6zq0h-T3KA1BhnvUDrvE,29 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E4M3FNUZ/model.onnx,sha256=eSFIYp3n8x3Z08AJjG4QEXLHWVWj-vQKfjFICsAVbrA,176 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E4M3FNUZ/test_data_set_0/input_0.pb,sha256=nEr5NWxtFFQYRqOKX5XGP4eyqAQW-ylgATItQC6bVkU,46 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E4M3FNUZ/test_data_set_0/output_0.pb,sha256=S2DGfiL4gHEoG2wB9Aa0mWpONUWB86RnSR3tznTM28Q,32 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E5M2/model.onnx,sha256=q60CQtDUA8F8jVKv_SeDJGu7bk9-5CRqxWfHKQAZbaw,172 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E5M2/test_data_set_0/input_0.pb,sha256=nEr5NWxtFFQYRqOKX5XGP4eyqAQW-ylgATItQC6bVkU,46 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E5M2/test_data_set_0/output_0.pb,sha256=PbBsVqC-JoDBrYFEyL5k3N6L6ccmjjHrf2MuTX-wX8U,29 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E5M2FNUZ/model.onnx,sha256=4U0_JLJo_pbj-txsz6f6oeyyxYPDPUpMwLBa8eE54hI,176 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E5M2FNUZ/test_data_set_0/input_0.pb,sha256=nEr5NWxtFFQYRqOKX5XGP4eyqAQW-ylgATItQC6bVkU,46 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT16_to_FLOAT8E5M2FNUZ/test_data_set_0/output_0.pb,sha256=S4mEvW23ZssQV0yH3P7XM2NiD3wMDImWG5FH47ohzWc,32 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E4M3FN/model.onnx,sha256=QmQ0ewG9kbRCdA8RLuGkeYdnCtpXzXaK-6_amskTNZo,172 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E4M3FN/test_data_set_0/input_0.pb,sha256=7HvyFzAVT1ckyggxa83syY9R8tXpxopmWwZ4JU6n_5s,71 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E4M3FN/test_data_set_0/output_0.pb,sha256=Xv05LKfX5PPNXznU352U3pE6zq0h-T3KA1BhnvUDrvE,29 
+onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E4M3FNUZ/model.onnx,sha256=36i0igx6WS8OKlRIJXWCCzMwF0G-SEiGkZCxNvFDA2w,174 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E4M3FNUZ/test_data_set_0/input_0.pb,sha256=7HvyFzAVT1ckyggxa83syY9R8tXpxopmWwZ4JU6n_5s,71 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E4M3FNUZ/test_data_set_0/output_0.pb,sha256=S2DGfiL4gHEoG2wB9Aa0mWpONUWB86RnSR3tznTM28Q,32 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E5M2/model.onnx,sha256=QMOq4Ka_UOEMPV7QvNMh8q8QW5Y53xbgNvEyk4BTqfs,170 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E5M2/test_data_set_0/input_0.pb,sha256=7HvyFzAVT1ckyggxa83syY9R8tXpxopmWwZ4JU6n_5s,71 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E5M2/test_data_set_0/output_0.pb,sha256=PbBsVqC-JoDBrYFEyL5k3N6L6ccmjjHrf2MuTX-wX8U,29 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E5M2FNUZ/model.onnx,sha256=qBJoNZW2IjTJaiNlhRXQeXVXiABnvff8x1Mg5g1WwhE,174 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E5M2FNUZ/test_data_set_0/input_0.pb,sha256=7HvyFzAVT1ckyggxa83syY9R8tXpxopmWwZ4JU6n_5s,71 +onnx/backend/test/data/node/test_cast_no_saturate_FLOAT_to_FLOAT8E5M2FNUZ/test_data_set_0/output_0.pb,sha256=S4mEvW23ZssQV0yH3P7XM2NiD3wMDImWG5FH47ohzWc,32 +onnx/backend/test/data/node/test_castlike_BFLOAT16_to_FLOAT/model.onnx,sha256=TF-b60JZ9onZKxxJtQfykeQb8zqLdxf-48v8W9U5YBA,162 +onnx/backend/test/data/node/test_castlike_BFLOAT16_to_FLOAT/test_data_set_0/input_0.pb,sha256=7PtSEfkVZD2hMxs2mjj8K-ie6Rcy_-janaLIcnczEFs,39 +onnx/backend/test/data/node/test_castlike_BFLOAT16_to_FLOAT/test_data_set_0/input_1.pb,sha256=eVvVIPFvWexMatIK3osv-P9GpyKJVe2oZOQcMAqBIUc,16 +onnx/backend/test/data/node/test_castlike_BFLOAT16_to_FLOAT/test_data_set_0/output_0.pb,sha256=n02cVMmSsmO-C3IqgA3VI65Qt0Mb4nA7308NU_hKx_k,64 +onnx/backend/test/data/node/test_castlike_BFLOAT16_to_FLOAT_expanded/model.onnx,sha256=A3CsWF2BjaHmlhC5EogdxAEDeSE1soPbHBKKceZHiRE,191 +onnx/backend/test/data/node/test_castlike_BFLOAT16_to_FLOAT_expanded/test_data_set_0/input_0.pb,sha256=7PtSEfkVZD2hMxs2mjj8K-ie6Rcy_-janaLIcnczEFs,39 +onnx/backend/test/data/node/test_castlike_BFLOAT16_to_FLOAT_expanded/test_data_set_0/input_1.pb,sha256=eVvVIPFvWexMatIK3osv-P9GpyKJVe2oZOQcMAqBIUc,16 +onnx/backend/test/data/node/test_castlike_BFLOAT16_to_FLOAT_expanded/test_data_set_0/output_0.pb,sha256=n02cVMmSsmO-C3IqgA3VI65Qt0Mb4nA7308NU_hKx_k,64 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT/model.onnx,sha256=LD5-Q2ncqS-vmdkvKzOoLWuDns5pF2P_GenRndduQI0,160 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT/test_data_set_0/input_0.pb,sha256=6HkDTeUmWLg8PEFbpSpZLMj7vMfJhi7QuVjK6KzI06Y,111 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT/test_data_set_0/input_1.pb,sha256=MYlnUln6bu-TntGNfFLCBihv1cs-8cz1xCxWjfka80Q,16 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT/test_data_set_0/output_0.pb,sha256=C9cN3dfDqwak6remmnIK_aV-FQtJ7ySCfkuCIExKauQ,64 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT16/model.onnx,sha256=JbEDixEDDUf3ej1PHcCebyiWbxSHmhSjtvbj_N3OQ50,162 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT16/test_data_set_0/input_0.pb,sha256=8mxoy6mEAhl8zQHvPiK2JLR4boRN-SMAUSPCrKsiD9o,111 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT16/test_data_set_0/input_1.pb,sha256=N-V-03btMN0Lrpbsfmgs7wWH-4OWb2xxPeGE0rKB3b4,14 
+onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT16/test_data_set_0/output_0.pb,sha256=OcElOz0iQh2Eia2AnUOusERUmJcH7mFl2nFWCgGdc-w,40 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT16_expanded/model.onnx,sha256=kKgLrZYKrws_Wlb5KadyKIvTfm38WikHcnG_nHw1BOw,191 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT16_expanded/test_data_set_0/input_0.pb,sha256=8mxoy6mEAhl8zQHvPiK2JLR4boRN-SMAUSPCrKsiD9o,111 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT16_expanded/test_data_set_0/input_1.pb,sha256=N-V-03btMN0Lrpbsfmgs7wWH-4OWb2xxPeGE0rKB3b4,14 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT16_expanded/test_data_set_0/output_0.pb,sha256=OcElOz0iQh2Eia2AnUOusERUmJcH7mFl2nFWCgGdc-w,40 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT_expanded/model.onnx,sha256=s09F-JJWMF8v5bjbI0EvcfyOq8GZOz7GlIzpE_LV-DI,189 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT_expanded/test_data_set_0/input_0.pb,sha256=6HkDTeUmWLg8PEFbpSpZLMj7vMfJhi7QuVjK6KzI06Y,111 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT_expanded/test_data_set_0/input_1.pb,sha256=MYlnUln6bu-TntGNfFLCBihv1cs-8cz1xCxWjfka80Q,16 +onnx/backend/test/data/node/test_castlike_DOUBLE_to_FLOAT_expanded/test_data_set_0/output_0.pb,sha256=C9cN3dfDqwak6remmnIK_aV-FQtJ7ySCfkuCIExKauQ,64 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_DOUBLE/model.onnx,sha256=mF-ADlZBBEVc3FnsDkx4T9R5A7eQGDVBMg2cBWPI-Ic,162 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_DOUBLE/test_data_set_0/input_0.pb,sha256=I2V7DvRr6upYRyk34-P96yMvuJWhF4Sz_7_XZyVkwsI,39 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_DOUBLE/test_data_set_0/input_1.pb,sha256=vhTwWTCgkF298o_bZZUOH-ibrd0YoXZ01obPMsL4Oms,20 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_DOUBLE/test_data_set_0/output_0.pb,sha256=n9sfW336MaQ4ijNNykK-UBxM36StBFmAcPedaSQJDsw,112 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_DOUBLE_expanded/model.onnx,sha256=feGRK8O-myM7cgXciclZvhOl_6hKbpDK8IQsvN4Vclg,191 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_DOUBLE_expanded/test_data_set_0/input_0.pb,sha256=I2V7DvRr6upYRyk34-P96yMvuJWhF4Sz_7_XZyVkwsI,39 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_DOUBLE_expanded/test_data_set_0/input_1.pb,sha256=vhTwWTCgkF298o_bZZUOH-ibrd0YoXZ01obPMsL4Oms,20 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_DOUBLE_expanded/test_data_set_0/output_0.pb,sha256=n9sfW336MaQ4ijNNykK-UBxM36StBFmAcPedaSQJDsw,112 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_FLOAT/model.onnx,sha256=MKeOiyggRaO1zsAvn62oqQEPTKl8M4ajIWed2N6sD2U,161 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_FLOAT/test_data_set_0/input_0.pb,sha256=ParyS-aBVa8GjFhnqm1i1f4rEDEdtTz-ynPUUMXKqDs,39 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_FLOAT/test_data_set_0/input_1.pb,sha256=KIwhetRSPHhtBbjTD_bTqAHja__yW558rw5Wnw71d2w,16 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_FLOAT/test_data_set_0/output_0.pb,sha256=qiswSIR8nu6bG-QLS9Qaw9QiA8_EejF0tgaSLGkh0lY,64 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_FLOAT_expanded/model.onnx,sha256=aZ13Pru7VyUA2QLNBkvsV00RoUfIzwyUHA2-xFDwFiA,190 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_FLOAT_expanded/test_data_set_0/input_0.pb,sha256=ParyS-aBVa8GjFhnqm1i1f4rEDEdtTz-ynPUUMXKqDs,39 +onnx/backend/test/data/node/test_castlike_FLOAT16_to_FLOAT_expanded/test_data_set_0/input_1.pb,sha256=KIwhetRSPHhtBbjTD_bTqAHja__yW558rw5Wnw71d2w,16 
+onnx/backend/test/data/node/test_castlike_FLOAT16_to_FLOAT_expanded/test_data_set_0/output_0.pb,sha256=qiswSIR8nu6bG-QLS9Qaw9QiA8_EejF0tgaSLGkh0lY,64 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FNUZ_to_FLOAT/model.onnx,sha256=ZUhbPLZ6AQrnYzx18y56IMHoilhc8av190bkoUPt2-I,168 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FNUZ_to_FLOAT/test_data_set_0/input_0.pb,sha256=-uKNW4mpQwdgDLta2kcp4pDg80rP_hPfNQjxU0CKXsM,25 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FNUZ_to_FLOAT/test_data_set_0/input_1.pb,sha256=KkaD7OPzRa7CZ0KlWn_HP0vSR0gnNEauMENHnDPLD0A,16 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FNUZ_to_FLOAT/test_data_set_0/output_0.pb,sha256=01vlJpEb85X8qTFoKW7K1G5MAeh2DTJWVL4w8YuhWEk,64 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FNUZ_to_FLOAT_expanded/model.onnx,sha256=jdqO_BiGh676kBte10J2-z905qluzvdc1H8_pJgA9Bw,197 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FNUZ_to_FLOAT_expanded/test_data_set_0/input_0.pb,sha256=-uKNW4mpQwdgDLta2kcp4pDg80rP_hPfNQjxU0CKXsM,25 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FNUZ_to_FLOAT_expanded/test_data_set_0/input_1.pb,sha256=KkaD7OPzRa7CZ0KlWn_HP0vSR0gnNEauMENHnDPLD0A,16 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FNUZ_to_FLOAT_expanded/test_data_set_0/output_0.pb,sha256=01vlJpEb85X8qTFoKW7K1G5MAeh2DTJWVL4w8YuhWEk,64 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FN_to_FLOAT/model.onnx,sha256=TKbGR5PeO6vKIyOf_AiR0y25fwCS5XYiMnR3Jt736Jg,166 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FN_to_FLOAT/test_data_set_0/input_0.pb,sha256=-uKNW4mpQwdgDLta2kcp4pDg80rP_hPfNQjxU0CKXsM,25 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FN_to_FLOAT/test_data_set_0/input_1.pb,sha256=KkaD7OPzRa7CZ0KlWn_HP0vSR0gnNEauMENHnDPLD0A,16 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FN_to_FLOAT/test_data_set_0/output_0.pb,sha256=01vlJpEb85X8qTFoKW7K1G5MAeh2DTJWVL4w8YuhWEk,64 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FN_to_FLOAT_expanded/model.onnx,sha256=1WBLhPleSy_qlV6efHDYMJI_ejmoU0uqNV3wKs0ih14,195 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FN_to_FLOAT_expanded/test_data_set_0/input_0.pb,sha256=-uKNW4mpQwdgDLta2kcp4pDg80rP_hPfNQjxU0CKXsM,25 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FN_to_FLOAT_expanded/test_data_set_0/input_1.pb,sha256=KkaD7OPzRa7CZ0KlWn_HP0vSR0gnNEauMENHnDPLD0A,16 +onnx/backend/test/data/node/test_castlike_FLOAT8E4M3FN_to_FLOAT_expanded/test_data_set_0/output_0.pb,sha256=01vlJpEb85X8qTFoKW7K1G5MAeh2DTJWVL4w8YuhWEk,64 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2FNUZ_to_FLOAT/model.onnx,sha256=5yVjbx-9aEgO8u5Jq1C9wrdWA_j-7J1vLaR3gAVD1Jk,168 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2FNUZ_to_FLOAT/test_data_set_0/input_0.pb,sha256=GbmXPGmbF_NpMG0S3l3HT0svUuj6oSl7XNEP8dZjJ2c,25 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2FNUZ_to_FLOAT/test_data_set_0/input_1.pb,sha256=JUazImFbYsitER0t-H_pod69ENTey8wUpl7mqRCY6ow,16 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2FNUZ_to_FLOAT/test_data_set_0/output_0.pb,sha256=MVDRtjmyPqffFU5o1xbqz9xN8QDJU90w6EDVObeUpgA,64 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2FNUZ_to_FLOAT_expanded/model.onnx,sha256=wsDl4BHwfP0Rk7hdxKBLhGeJwS5ygEOoh_LZLFI7PnA,197 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2FNUZ_to_FLOAT_expanded/test_data_set_0/input_0.pb,sha256=GbmXPGmbF_NpMG0S3l3HT0svUuj6oSl7XNEP8dZjJ2c,25 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2FNUZ_to_FLOAT_expanded/test_data_set_0/input_1.pb,sha256=JUazImFbYsitER0t-H_pod69ENTey8wUpl7mqRCY6ow,16 
+onnx/backend/test/data/node/test_castlike_FLOAT8E5M2FNUZ_to_FLOAT_expanded/test_data_set_0/output_0.pb,sha256=MVDRtjmyPqffFU5o1xbqz9xN8QDJU90w6EDVObeUpgA,64 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2_to_FLOAT/model.onnx,sha256=qm2rQekk8qr056zARTfd5727cpOJ66wCBBUp_cAMDo0,164 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2_to_FLOAT/test_data_set_0/input_0.pb,sha256=GbmXPGmbF_NpMG0S3l3HT0svUuj6oSl7XNEP8dZjJ2c,25 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2_to_FLOAT/test_data_set_0/input_1.pb,sha256=JUazImFbYsitER0t-H_pod69ENTey8wUpl7mqRCY6ow,16 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2_to_FLOAT/test_data_set_0/output_0.pb,sha256=MVDRtjmyPqffFU5o1xbqz9xN8QDJU90w6EDVObeUpgA,64 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2_to_FLOAT_expanded/model.onnx,sha256=hSJz6MCTXlHfvxtuBB3rkGvrA0xiZJ8F3FlvB4v_Dys,193 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2_to_FLOAT_expanded/test_data_set_0/input_0.pb,sha256=GbmXPGmbF_NpMG0S3l3HT0svUuj6oSl7XNEP8dZjJ2c,25 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2_to_FLOAT_expanded/test_data_set_0/input_1.pb,sha256=JUazImFbYsitER0t-H_pod69ENTey8wUpl7mqRCY6ow,16 +onnx/backend/test/data/node/test_castlike_FLOAT8E5M2_to_FLOAT_expanded/test_data_set_0/output_0.pb,sha256=MVDRtjmyPqffFU5o1xbqz9xN8QDJU90w6EDVObeUpgA,64 +onnx/backend/test/data/node/test_castlike_FLOAT_to_BFLOAT16/model.onnx,sha256=65-YKRTQsRtmHhwFBwBafNzD4b26upby4E5vdm3onIc,162 +onnx/backend/test/data/node/test_castlike_FLOAT_to_BFLOAT16/test_data_set_0/input_0.pb,sha256=h-Mw8kpqdER-gQV9wnNRomY7p72FnbzKkvgjriulnMI,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_BFLOAT16/test_data_set_0/input_1.pb,sha256=kSiWwieTCrJq7AJKpR9ekqOqJBTrdY-bCM2yOCaW4iU,14 +onnx/backend/test/data/node/test_castlike_FLOAT_to_BFLOAT16/test_data_set_0/output_0.pb,sha256=hwIS-rCpVNRgrx9XXtrf4dmhBQ1PbmRzGsSyuzuG7WI,40 +onnx/backend/test/data/node/test_castlike_FLOAT_to_BFLOAT16_expanded/model.onnx,sha256=bqoIvAtdMBjVP97LP28q5q6e-ucxs4ge_ohK3G2TTFw,191 +onnx/backend/test/data/node/test_castlike_FLOAT_to_BFLOAT16_expanded/test_data_set_0/input_0.pb,sha256=h-Mw8kpqdER-gQV9wnNRomY7p72FnbzKkvgjriulnMI,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_BFLOAT16_expanded/test_data_set_0/input_1.pb,sha256=kSiWwieTCrJq7AJKpR9ekqOqJBTrdY-bCM2yOCaW4iU,14 +onnx/backend/test/data/node/test_castlike_FLOAT_to_BFLOAT16_expanded/test_data_set_0/output_0.pb,sha256=hwIS-rCpVNRgrx9XXtrf4dmhBQ1PbmRzGsSyuzuG7WI,40 +onnx/backend/test/data/node/test_castlike_FLOAT_to_DOUBLE/model.onnx,sha256=7mTpN-l_y_d7IB8wL4s714_AHJiRLzUk0MqFdkIeOXQ,160 +onnx/backend/test/data/node/test_castlike_FLOAT_to_DOUBLE/test_data_set_0/input_0.pb,sha256=8t0QG5JpWVp5SwuBXkzB4NV12BBiHg7Fl58ELPZ0wcU,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_DOUBLE/test_data_set_0/input_1.pb,sha256=7HlEm9eEzvEsmzdE9hUccjLKqLBR_MWT1p_wtmFSK6M,20 +onnx/backend/test/data/node/test_castlike_FLOAT_to_DOUBLE/test_data_set_0/output_0.pb,sha256=2Rax3-J6ALt-LF7sbmKz6c5PZ0ubZP67DWTBd-E6B_Q,112 +onnx/backend/test/data/node/test_castlike_FLOAT_to_DOUBLE_expanded/model.onnx,sha256=-2ukJoc3Jrb8L6aquNLpwiahQHgwUjjPJox2EV2OWnY,189 +onnx/backend/test/data/node/test_castlike_FLOAT_to_DOUBLE_expanded/test_data_set_0/input_0.pb,sha256=8t0QG5JpWVp5SwuBXkzB4NV12BBiHg7Fl58ELPZ0wcU,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_DOUBLE_expanded/test_data_set_0/input_1.pb,sha256=7HlEm9eEzvEsmzdE9hUccjLKqLBR_MWT1p_wtmFSK6M,20 
+onnx/backend/test/data/node/test_castlike_FLOAT_to_DOUBLE_expanded/test_data_set_0/output_0.pb,sha256=2Rax3-J6ALt-LF7sbmKz6c5PZ0ubZP67DWTBd-E6B_Q,112 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT16/model.onnx,sha256=6Jl4NsfuLL3ofUeVtfXuPLN6i4goYq1ALsbjqMHJCIU,161 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT16/test_data_set_0/input_0.pb,sha256=ftxQiq2nN6IAr8JHRIQXxG3vMB8Ra957fhAnBRn6HAA,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT16/test_data_set_0/input_1.pb,sha256=Pqb0Ny8b8X1hdAv4KOUYnn0WdFc9nvx-PivzwOErd7I,14 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT16/test_data_set_0/output_0.pb,sha256=i1o3756jog8U36GzZ8lCj2V6TpOV9DiGMDsWZgVC_58,40 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT16_expanded/model.onnx,sha256=rlzOxSpnVrcm2Hz48vBwXQroQlbsgt1Cu7iUfCqm9-4,190 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT16_expanded/test_data_set_0/input_0.pb,sha256=ftxQiq2nN6IAr8JHRIQXxG3vMB8Ra957fhAnBRn6HAA,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT16_expanded/test_data_set_0/input_1.pb,sha256=Pqb0Ny8b8X1hdAv4KOUYnn0WdFc9nvx-PivzwOErd7I,14 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT16_expanded/test_data_set_0/output_0.pb,sha256=i1o3756jog8U36GzZ8lCj2V6TpOV9DiGMDsWZgVC_58,40 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FN/model.onnx,sha256=h5Bq_T-jJagQNALjgnN7xPdS6-EzF7sLEtva0373xBE,166 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FN/test_data_set_0/input_0.pb,sha256=h-Mw8kpqdER-gQV9wnNRomY7p72FnbzKkvgjriulnMI,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FN/test_data_set_0/input_1.pb,sha256=NFS3HAOUuoWCepp_Zv1q6mjU1O4_wD8KDsZ2QeMWHU8,10 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FN/test_data_set_0/output_0.pb,sha256=7TfQhmVJAE-kiPL5eMn9NFlaqawzV9JHI5Us8Pt3sgU,24 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FNUZ/model.onnx,sha256=4OlIGhledse7iIQYffOjVj9lAW7AWJzByqUfG36Eh4Y,168 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FNUZ/test_data_set_0/input_0.pb,sha256=h-Mw8kpqdER-gQV9wnNRomY7p72FnbzKkvgjriulnMI,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FNUZ/test_data_set_0/input_1.pb,sha256=LBeVefhklG4MWxt46AVYXNURCljJYQrXKIm3Abk_TwQ,10 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FNUZ/test_data_set_0/output_0.pb,sha256=-uKNW4mpQwdgDLta2kcp4pDg80rP_hPfNQjxU0CKXsM,25 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FNUZ_expanded/model.onnx,sha256=_0l5PjfaBMLt1nDU5VrYhWiEnqWuMbEslwSmWD5bPRw,197 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FNUZ_expanded/test_data_set_0/input_0.pb,sha256=h-Mw8kpqdER-gQV9wnNRomY7p72FnbzKkvgjriulnMI,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FNUZ_expanded/test_data_set_0/input_1.pb,sha256=LBeVefhklG4MWxt46AVYXNURCljJYQrXKIm3Abk_TwQ,10 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FNUZ_expanded/test_data_set_0/output_0.pb,sha256=-uKNW4mpQwdgDLta2kcp4pDg80rP_hPfNQjxU0CKXsM,25 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FN_expanded/model.onnx,sha256=5w3NwQ1DrCe8_QpgN9WiUVZNgtXyBNOOIM6Yx0Drlb0,195 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FN_expanded/test_data_set_0/input_0.pb,sha256=h-Mw8kpqdER-gQV9wnNRomY7p72FnbzKkvgjriulnMI,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FN_expanded/test_data_set_0/input_1.pb,sha256=NFS3HAOUuoWCepp_Zv1q6mjU1O4_wD8KDsZ2QeMWHU8,10 
+onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E4M3FN_expanded/test_data_set_0/output_0.pb,sha256=7TfQhmVJAE-kiPL5eMn9NFlaqawzV9JHI5Us8Pt3sgU,24 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2/model.onnx,sha256=bzXysuOcw2rKLVim18STKSZV-RxMtwmb8kn_eMtGRHw,164 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2/test_data_set_0/input_0.pb,sha256=h-Mw8kpqdER-gQV9wnNRomY7p72FnbzKkvgjriulnMI,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2/test_data_set_0/input_1.pb,sha256=KQoS32qw37lSLEaIjNMT7vpg8pcVkqdPbOY7jR6LzFQ,10 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2/test_data_set_0/output_0.pb,sha256=fFXiNax37-Vw_Hj5HoaUS1DHonjd4lUuJEJUj4WA_zo,24 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2FNUZ/model.onnx,sha256=pfWs8FuHoptIvYst1QYsghSN7YzI-4DMTQlQVftTiXQ,168 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2FNUZ/test_data_set_0/input_0.pb,sha256=h-Mw8kpqdER-gQV9wnNRomY7p72FnbzKkvgjriulnMI,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2FNUZ/test_data_set_0/input_1.pb,sha256=CJtjieFVi77kv2L3Uyg0HJOWnnF10wHYN6XnQ6CYrkU,10 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2FNUZ/test_data_set_0/output_0.pb,sha256=GbmXPGmbF_NpMG0S3l3HT0svUuj6oSl7XNEP8dZjJ2c,25 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2FNUZ_expanded/model.onnx,sha256=TTa3BKkUG3oGGxwBlv7pS1egw1TlnT4evsibrWc-9U8,197 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2FNUZ_expanded/test_data_set_0/input_0.pb,sha256=h-Mw8kpqdER-gQV9wnNRomY7p72FnbzKkvgjriulnMI,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2FNUZ_expanded/test_data_set_0/input_1.pb,sha256=CJtjieFVi77kv2L3Uyg0HJOWnnF10wHYN6XnQ6CYrkU,10 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2FNUZ_expanded/test_data_set_0/output_0.pb,sha256=GbmXPGmbF_NpMG0S3l3HT0svUuj6oSl7XNEP8dZjJ2c,25 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2_expanded/model.onnx,sha256=crfTPpUHDMwXeNl1RS8_7SwdMPW8miu7hCnjevKz4O4,193 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2_expanded/test_data_set_0/input_0.pb,sha256=h-Mw8kpqdER-gQV9wnNRomY7p72FnbzKkvgjriulnMI,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2_expanded/test_data_set_0/input_1.pb,sha256=KQoS32qw37lSLEaIjNMT7vpg8pcVkqdPbOY7jR6LzFQ,10 +onnx/backend/test/data/node/test_castlike_FLOAT_to_FLOAT8E5M2_expanded/test_data_set_0/output_0.pb,sha256=fFXiNax37-Vw_Hj5HoaUS1DHonjd4lUuJEJUj4WA_zo,24 +onnx/backend/test/data/node/test_castlike_FLOAT_to_STRING/model.onnx,sha256=QN1wh9jN5XmpklxQga8N1Bf91ZrSP0h-eBeO28GqbYE,160 +onnx/backend/test/data/node/test_castlike_FLOAT_to_STRING/test_data_set_0/input_0.pb,sha256=JnNpBHNwQmCts1dqNwxlHQe57ohJ0IXxvibJD_HNEPM,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_STRING/test_data_set_0/input_1.pb,sha256=jfaZUJmaBdOG733Ji57TEocDjiMfW1mvNCBdAwAEWgo,21 +onnx/backend/test/data/node/test_castlike_FLOAT_to_STRING/test_data_set_0/output_0.pb,sha256=v40vkBihrsiWNAdrfrIp7q4NL0jW69jAVuhJPFMDUW8,155 +onnx/backend/test/data/node/test_castlike_FLOAT_to_STRING_expanded/model.onnx,sha256=-3v-f8eX8xPRZ-QLURZWFL3S7YO2JvqsSV2e2KE151Y,189 +onnx/backend/test/data/node/test_castlike_FLOAT_to_STRING_expanded/test_data_set_0/input_0.pb,sha256=JnNpBHNwQmCts1dqNwxlHQe57ohJ0IXxvibJD_HNEPM,63 +onnx/backend/test/data/node/test_castlike_FLOAT_to_STRING_expanded/test_data_set_0/input_1.pb,sha256=jfaZUJmaBdOG733Ji57TEocDjiMfW1mvNCBdAwAEWgo,21 
+onnx/backend/test/data/node/test_castlike_FLOAT_to_STRING_expanded/test_data_set_0/output_0.pb,sha256=v40vkBihrsiWNAdrfrIp7q4NL0jW69jAVuhJPFMDUW8,155 +onnx/backend/test/data/node/test_castlike_STRING_to_FLOAT/model.onnx,sha256=yxV3DaQt9h2YfUW8WJrpIwDEAR9KPb28JNIhudDqBAQ,160 +onnx/backend/test/data/node/test_castlike_STRING_to_FLOAT/test_data_set_0/input_0.pb,sha256=Lk6xhMerJC15fOneCl4LUJFMKPdWwhs9XpoU2KT9Dbk,128 +onnx/backend/test/data/node/test_castlike_STRING_to_FLOAT/test_data_set_0/input_1.pb,sha256=SQtYejRQrhm-hC4PVdWO2parwQBWBNcBOGwdX55zpdg,16 +onnx/backend/test/data/node/test_castlike_STRING_to_FLOAT/test_data_set_0/output_0.pb,sha256=0jolK7AVdqKeYqxXZl_bMAlnbkHhPocU-fidVgDsG9g,64 +onnx/backend/test/data/node/test_castlike_STRING_to_FLOAT_expanded/model.onnx,sha256=PCugtk8Q53Wx-bRf7XMjIhhs7etG05AyR0cccBnTnXU,189 +onnx/backend/test/data/node/test_castlike_STRING_to_FLOAT_expanded/test_data_set_0/input_0.pb,sha256=Lk6xhMerJC15fOneCl4LUJFMKPdWwhs9XpoU2KT9Dbk,128 +onnx/backend/test/data/node/test_castlike_STRING_to_FLOAT_expanded/test_data_set_0/input_1.pb,sha256=SQtYejRQrhm-hC4PVdWO2parwQBWBNcBOGwdX55zpdg,16 +onnx/backend/test/data/node/test_castlike_STRING_to_FLOAT_expanded/test_data_set_0/output_0.pb,sha256=0jolK7AVdqKeYqxXZl_bMAlnbkHhPocU-fidVgDsG9g,64 +onnx/backend/test/data/node/test_ceil/model.onnx,sha256=zYQ7FyzP6UWksThCpmDNZVL7ACGAVk45wCUk7MSpMPw,99 +onnx/backend/test/data/node/test_ceil/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_ceil/test_data_set_0/output_0.pb,sha256=zFFujcTZbJ_iUdvnj1WgKihPqud_iXTK61jQn2s3fkU,254 +onnx/backend/test/data/node/test_ceil_example/model.onnx,sha256=8MRvY_buZ3kwGWbrH3N9AtIKqYnHsubJWQBe-Uk8wDg,91 +onnx/backend/test/data/node/test_ceil_example/test_data_set_0/input_0.pb,sha256=oA0la8_u-_HaZWXeP39QnRf3LzBFm_z-OT5ubTiMa_U,17 +onnx/backend/test/data/node/test_ceil_example/test_data_set_0/output_0.pb,sha256=B7VTcXhgHNoWwox6O0nLSYo-xZkobaTvtkPnXWM8T8w,17 +onnx/backend/test/data/node/test_celu/model.onnx,sha256=cAr7drqDNV5RDqzClNfkslVJ4K89Qlj0ywgg_uwHiiw,124 +onnx/backend/test/data/node/test_celu/test_data_set_0/input_0.pb,sha256=JgQmvqlXl4zwhOoqE3bqOj-0uikQHKd2j0Gt1aMhG3Y,123 +onnx/backend/test/data/node/test_celu/test_data_set_0/output_0.pb,sha256=6P8qt55BKmIbeRKrDDUb_9L_ZZgD2nf7uJFA69OD8Os,123 +onnx/backend/test/data/node/test_celu_expanded/model.onnx,sha256=HfTUwa-nDKvHFYB7ANwkEKpTexAJAuNrhXUSPIPIX8Y,485 +onnx/backend/test/data/node/test_celu_expanded/test_data_set_0/input_0.pb,sha256=JgQmvqlXl4zwhOoqE3bqOj-0uikQHKd2j0Gt1aMhG3Y,123 +onnx/backend/test/data/node/test_celu_expanded/test_data_set_0/output_0.pb,sha256=6P8qt55BKmIbeRKrDDUb_9L_ZZgD2nf7uJFA69OD8Os,123 +onnx/backend/test/data/node/test_center_crop_pad_crop/model.onnx,sha256=_UO9zWEcYILyGcbCnjQg7KHg2LFJw7PxdNFcUlmO68g,153 +onnx/backend/test/data/node/test_center_crop_pad_crop/test_data_set_0/input_0.pb,sha256=gf54Qr4guDZleIe2sau8YNEx-dDwW55nJJ5RqhWYCHk,2414 +onnx/backend/test/data/node/test_center_crop_pad_crop/test_data_set_0/input_1.pb,sha256=azvnzc647fD3maqvDtZTEwrGWfugxBxPHJAPz7424Qk,37 +onnx/backend/test/data/node/test_center_crop_pad_crop/test_data_set_0/output_0.pb,sha256=SNJXACP7f4r8OFKE3X8vI4Bfu4kXNPTkemOqeuQifTc,854 +onnx/backend/test/data/node/test_center_crop_pad_crop_and_pad/model.onnx,sha256=2yQn_WyWM7819hVHdgPRMNyxAI_6pTMS5dH_ahRzZrA,161 +onnx/backend/test/data/node/test_center_crop_pad_crop_and_pad/test_data_set_0/input_0.pb,sha256=SIiEQh9DWWergN3i0O3OXxsIpYNH0cA1hQCksZCA6EA,1934 
+onnx/backend/test/data/node/test_center_crop_pad_crop_and_pad/test_data_set_0/input_1.pb,sha256=XE-LFxtx5KK6KyhPjIx-Hp-i_db8kg7viI8PN8SDXnc,37 +onnx/backend/test/data/node/test_center_crop_pad_crop_and_pad/test_data_set_0/output_0.pb,sha256=NwJcXJ-I3-LwyTwk8F6WaLvoITXD4jynu8BnWdMfxdY,1214 +onnx/backend/test/data/node/test_center_crop_pad_crop_and_pad_expanded/model.onnx,sha256=3SXsErt7mt9BpECaXiK4kW66R7xeU072PARgYd_fQmc,2658 +onnx/backend/test/data/node/test_center_crop_pad_crop_and_pad_expanded/test_data_set_0/input_0.pb,sha256=SIiEQh9DWWergN3i0O3OXxsIpYNH0cA1hQCksZCA6EA,1934 +onnx/backend/test/data/node/test_center_crop_pad_crop_and_pad_expanded/test_data_set_0/input_1.pb,sha256=XE-LFxtx5KK6KyhPjIx-Hp-i_db8kg7viI8PN8SDXnc,37 +onnx/backend/test/data/node/test_center_crop_pad_crop_and_pad_expanded/test_data_set_0/output_0.pb,sha256=NwJcXJ-I3-LwyTwk8F6WaLvoITXD4jynu8BnWdMfxdY,1214 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_chw/model.onnx,sha256=NxTo_Ni7c8YpJj640cd9qoXA_GdxapFiCCSeLbH_7Xw,177 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_chw/test_data_set_0/input_0.pb,sha256=5bJVCfNa9wvtQv3XL4MKOfrM0RylCo4e3iKMenFQojw,1934 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_chw/test_data_set_0/input_1.pb,sha256=4yIx4EW1uo-c7xiAkhnpKeIm-RWjL8oFEHUqTc2ieMM,29 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_chw/test_data_set_0/output_0.pb,sha256=golyE5sd9CtwPUzy7oEWi2ovFIug8BohD5b2N7o2_Xg,1094 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_chw_expanded/model.onnx,sha256=WyXAtrdbAMR_xZYhE9W_iHz8P2V3DbLLhVeY2lFeSlI,3485 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_chw_expanded/test_data_set_0/input_0.pb,sha256=5bJVCfNa9wvtQv3XL4MKOfrM0RylCo4e3iKMenFQojw,1934 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_chw_expanded/test_data_set_0/input_1.pb,sha256=4yIx4EW1uo-c7xiAkhnpKeIm-RWjL8oFEHUqTc2ieMM,29 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_chw_expanded/test_data_set_0/output_0.pb,sha256=golyE5sd9CtwPUzy7oEWi2ovFIug8BohD5b2N7o2_Xg,1094 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_hwc/model.onnx,sha256=ViKPMOcZcmDs3W72cKydVNfdXiz7BzXrzuTJD2BWa-M,177 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_hwc/test_data_set_0/input_0.pb,sha256=SIiEQh9DWWergN3i0O3OXxsIpYNH0cA1hQCksZCA6EA,1934 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_hwc/test_data_set_0/input_1.pb,sha256=4yIx4EW1uo-c7xiAkhnpKeIm-RWjL8oFEHUqTc2ieMM,29 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_hwc/test_data_set_0/output_0.pb,sha256=ywUWnOKgTUDpebF1B3HI5cEuEAvmvdsA13IZEnfncZY,1094 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_hwc_expanded/model.onnx,sha256=Na51-QkGvR8-oGz-1V5aWseNqcPL6KwnfSRCrGe91gg,3485 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_hwc_expanded/test_data_set_0/input_0.pb,sha256=SIiEQh9DWWergN3i0O3OXxsIpYNH0cA1hQCksZCA6EA,1934 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_hwc_expanded/test_data_set_0/input_1.pb,sha256=4yIx4EW1uo-c7xiAkhnpKeIm-RWjL8oFEHUqTc2ieMM,29 +onnx/backend/test/data/node/test_center_crop_pad_crop_axes_hwc_expanded/test_data_set_0/output_0.pb,sha256=ywUWnOKgTUDpebF1B3HI5cEuEAvmvdsA13IZEnfncZY,1094 +onnx/backend/test/data/node/test_center_crop_pad_crop_expanded/model.onnx,sha256=i3QMXxhLCRMRCXHiDBj9X5IwPjkexZK_mCyKeXgBO00,2410 +onnx/backend/test/data/node/test_center_crop_pad_crop_expanded/test_data_set_0/input_0.pb,sha256=gf54Qr4guDZleIe2sau8YNEx-dDwW55nJJ5RqhWYCHk,2414 
+onnx/backend/test/data/node/test_center_crop_pad_crop_expanded/test_data_set_0/input_1.pb,sha256=azvnzc647fD3maqvDtZTEwrGWfugxBxPHJAPz7424Qk,37 +onnx/backend/test/data/node/test_center_crop_pad_crop_expanded/test_data_set_0/output_0.pb,sha256=SNJXACP7f4r8OFKE3X8vI4Bfu4kXNPTkemOqeuQifTc,854 +onnx/backend/test/data/node/test_center_crop_pad_crop_negative_axes_hwc/model.onnx,sha256=SG0akIxqpCM29L2aTKW_NVPJr9KxJWBGJuENM2EH4o8,204 +onnx/backend/test/data/node/test_center_crop_pad_crop_negative_axes_hwc/test_data_set_0/input_0.pb,sha256=SIiEQh9DWWergN3i0O3OXxsIpYNH0cA1hQCksZCA6EA,1934 +onnx/backend/test/data/node/test_center_crop_pad_crop_negative_axes_hwc/test_data_set_0/input_1.pb,sha256=4yIx4EW1uo-c7xiAkhnpKeIm-RWjL8oFEHUqTc2ieMM,29 +onnx/backend/test/data/node/test_center_crop_pad_crop_negative_axes_hwc/test_data_set_0/output_0.pb,sha256=ywUWnOKgTUDpebF1B3HI5cEuEAvmvdsA13IZEnfncZY,1094 +onnx/backend/test/data/node/test_center_crop_pad_crop_negative_axes_hwc_expanded/model.onnx,sha256=bH3jKyXyTbQO1v4cdRYuri9vJ78cWi8tR9co_Ihg-ho,3864 +onnx/backend/test/data/node/test_center_crop_pad_crop_negative_axes_hwc_expanded/test_data_set_0/input_0.pb,sha256=SIiEQh9DWWergN3i0O3OXxsIpYNH0cA1hQCksZCA6EA,1934 +onnx/backend/test/data/node/test_center_crop_pad_crop_negative_axes_hwc_expanded/test_data_set_0/input_1.pb,sha256=4yIx4EW1uo-c7xiAkhnpKeIm-RWjL8oFEHUqTc2ieMM,29 +onnx/backend/test/data/node/test_center_crop_pad_crop_negative_axes_hwc_expanded/test_data_set_0/output_0.pb,sha256=ywUWnOKgTUDpebF1B3HI5cEuEAvmvdsA13IZEnfncZY,1094 +onnx/backend/test/data/node/test_center_crop_pad_pad/model.onnx,sha256=ZsLLWWeEugG7qfPtUUsbt7iBfXtdL1Qoq3zFJ-gyM-A,151 +onnx/backend/test/data/node/test_center_crop_pad_pad/test_data_set_0/input_0.pb,sha256=Brb9NNYy61mIqV0k6WJ2uoox7QOgy-TI61uJhMg0QZw,854 +onnx/backend/test/data/node/test_center_crop_pad_pad/test_data_set_0/input_1.pb,sha256=MicQ1KFx-S6_RlEPgAe1cyjiu2xloE-vov752d7v394,37 +onnx/backend/test/data/node/test_center_crop_pad_pad/test_data_set_0/output_0.pb,sha256=TqD45G9p_DkrIF5wtvJXaOW0ls_XSgER9EsfKT66no4,2414 +onnx/backend/test/data/node/test_center_crop_pad_pad_expanded/model.onnx,sha256=rIrnvjLpY-MpX1WgOkSabprL4n8fLeTTlPfJc9ZCCHc,2379 +onnx/backend/test/data/node/test_center_crop_pad_pad_expanded/test_data_set_0/input_0.pb,sha256=Brb9NNYy61mIqV0k6WJ2uoox7QOgy-TI61uJhMg0QZw,854 +onnx/backend/test/data/node/test_center_crop_pad_pad_expanded/test_data_set_0/input_1.pb,sha256=MicQ1KFx-S6_RlEPgAe1cyjiu2xloE-vov752d7v394,37 +onnx/backend/test/data/node/test_center_crop_pad_pad_expanded/test_data_set_0/output_0.pb,sha256=TqD45G9p_DkrIF5wtvJXaOW0ls_XSgER9EsfKT66no4,2414 +onnx/backend/test/data/node/test_clip/model.onnx,sha256=LeRDoahciHypJ9D7ArLrfQwYmGTzb2Mco1syI1Dpp4w,139 +onnx/backend/test/data/node/test_clip/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_clip/test_data_set_0/input_1.pb,sha256=BRepSTg3YLUzFaeEDm7kjV41FnB7WdzkW7ynoPYkPGs,13 +onnx/backend/test/data/node/test_clip/test_data_set_0/input_2.pb,sha256=qLMCCKsNYs42OTq0_Gwqq2h8q5iUz8-GZ2CCZo1GGT4,13 +onnx/backend/test/data/node/test_clip/test_data_set_0/output_0.pb,sha256=0Mj3n7O3-ghXihrxNll_6tjrgxFo27H4atMMfljBuEg,254 +onnx/backend/test/data/node/test_clip_default_inbounds/model.onnx,sha256=K4rP6tKhvLWvZ25uFXfcgzpjmeBe4fodMVdEp6TWncw,104 +onnx/backend/test/data/node/test_clip_default_inbounds/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 
+onnx/backend/test/data/node/test_clip_default_inbounds/test_data_set_0/output_0.pb,sha256=lezr2GRMo0JWUSPIQ-nlrz1YNhYT_XpoHnO0oq6y-ic,21 +onnx/backend/test/data/node/test_clip_default_inbounds_expanded/model.onnx,sha256=dtxZlZ7xu_1pfCXBFLU8V_bS-YBZeWCxB9mmT1rXhMU,115 +onnx/backend/test/data/node/test_clip_default_inbounds_expanded/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_clip_default_inbounds_expanded/test_data_set_0/output_0.pb,sha256=lezr2GRMo0JWUSPIQ-nlrz1YNhYT_XpoHnO0oq6y-ic,21 +onnx/backend/test/data/node/test_clip_default_int8_inbounds/model.onnx,sha256=gUowpb23puqbhCvxUPiduQpf5-uoFRVsf_28cLYT99E,109 +onnx/backend/test/data/node/test_clip_default_int8_inbounds/test_data_set_0/input_0.pb,sha256=vG4b7OylZbXs1oNvJh6N2DD10wdfMSnVxkKOz7zbvwA,12 +onnx/backend/test/data/node/test_clip_default_int8_inbounds/test_data_set_0/output_0.pb,sha256=p1SpO2Oa2tZ9dMA6FVWw3xl2fU-OVpuE21zP9E4rquQ,12 +onnx/backend/test/data/node/test_clip_default_int8_inbounds_expanded/model.onnx,sha256=GFvqxihYvSo_9PG2IQbf-cvnA6E56Hkv_VzP1lYBuYM,120 +onnx/backend/test/data/node/test_clip_default_int8_inbounds_expanded/test_data_set_0/input_0.pb,sha256=vG4b7OylZbXs1oNvJh6N2DD10wdfMSnVxkKOz7zbvwA,12 +onnx/backend/test/data/node/test_clip_default_int8_inbounds_expanded/test_data_set_0/output_0.pb,sha256=p1SpO2Oa2tZ9dMA6FVWw3xl2fU-OVpuE21zP9E4rquQ,12 +onnx/backend/test/data/node/test_clip_default_int8_max/model.onnx,sha256=EtzpiOh8w6lp9tqy_kO-yRryxqohJAFuZskjyH-ctgQ,138 +onnx/backend/test/data/node/test_clip_default_int8_max/test_data_set_0/input_0.pb,sha256=rzR9S2WkPtQ_eTqlg48tVCWUU-HSMIndc3aNczLLc8I,73 +onnx/backend/test/data/node/test_clip_default_int8_max/test_data_set_0/input_1.pb,sha256=CEHeigf36q5XZtDez_5a1gT5ZJRMYNVEBF9z_OHEBEU,10 +onnx/backend/test/data/node/test_clip_default_int8_max/test_data_set_0/output_0.pb,sha256=tO69iQwwhb-FI6NNsgjnmVvfGzuTybog0CfmVRQ3lfY,73 +onnx/backend/test/data/node/test_clip_default_int8_max_expanded/model.onnx,sha256=h1njraDyzwOWbA8QLswnmldaGPRfHrV2lzjSi-3FFyw,311 +onnx/backend/test/data/node/test_clip_default_int8_max_expanded/test_data_set_0/input_0.pb,sha256=rzR9S2WkPtQ_eTqlg48tVCWUU-HSMIndc3aNczLLc8I,73 +onnx/backend/test/data/node/test_clip_default_int8_max_expanded/test_data_set_0/input_1.pb,sha256=CEHeigf36q5XZtDez_5a1gT5ZJRMYNVEBF9z_OHEBEU,10 +onnx/backend/test/data/node/test_clip_default_int8_max_expanded/test_data_set_0/output_0.pb,sha256=tO69iQwwhb-FI6NNsgjnmVvfGzuTybog0CfmVRQ3lfY,73 +onnx/backend/test/data/node/test_clip_default_int8_min/model.onnx,sha256=StI7Sylxxi_7VU7dPaKQ3MFSOkXIOuFMgIbcmZ7SVYE,136 +onnx/backend/test/data/node/test_clip_default_int8_min/test_data_set_0/input_0.pb,sha256=jlYHFzRpo9mRew7WTiCIRuEiMgtorH1Bs1r4lKBBdz8,73 +onnx/backend/test/data/node/test_clip_default_int8_min/test_data_set_0/input_1.pb,sha256=6kLq4kB4db3nFcoSmyNZk_unkHDDP5rztDpt8kwhzD4,10 +onnx/backend/test/data/node/test_clip_default_int8_min/test_data_set_0/output_0.pb,sha256=cxOphxSnH_muVv845xL_eqeIP1yVmxRCqmu92YISig4,73 +onnx/backend/test/data/node/test_clip_default_int8_min_expanded/model.onnx,sha256=CTaB_ZhST7UKYki3k9KcHg_d7Ov6pv2sZIo_A239SBo,309 +onnx/backend/test/data/node/test_clip_default_int8_min_expanded/test_data_set_0/input_0.pb,sha256=jlYHFzRpo9mRew7WTiCIRuEiMgtorH1Bs1r4lKBBdz8,73 +onnx/backend/test/data/node/test_clip_default_int8_min_expanded/test_data_set_0/input_1.pb,sha256=6kLq4kB4db3nFcoSmyNZk_unkHDDP5rztDpt8kwhzD4,10 
+onnx/backend/test/data/node/test_clip_default_int8_min_expanded/test_data_set_0/output_0.pb,sha256=cxOphxSnH_muVv845xL_eqeIP1yVmxRCqmu92YISig4,73 +onnx/backend/test/data/node/test_clip_default_max/model.onnx,sha256=FL6R-HHUIBFafkhbS6htUeEGGIEAZ-1TdQJKD3gPGMI,133 +onnx/backend/test/data/node/test_clip_default_max/test_data_set_0/input_0.pb,sha256=bLIkw3E_ZyNzOrq7tsNCLynmVOluwxDhZiJUPP_menI,254 +onnx/backend/test/data/node/test_clip_default_max/test_data_set_0/input_1.pb,sha256=-8abKXDhQDgDtXksv1K3VkLu2XJbj3RVgko0p7nK-8o,13 +onnx/backend/test/data/node/test_clip_default_max/test_data_set_0/output_0.pb,sha256=I04WldqbkTeeL0KpKpuo4Aw_r1J6E5FHlnYOTQjMQxU,254 +onnx/backend/test/data/node/test_clip_default_max_expanded/model.onnx,sha256=swEbjGDk9FNUHy4gDxWQSCJ5BNna0Co1PGXv9lyKzrY,296 +onnx/backend/test/data/node/test_clip_default_max_expanded/test_data_set_0/input_0.pb,sha256=bLIkw3E_ZyNzOrq7tsNCLynmVOluwxDhZiJUPP_menI,254 +onnx/backend/test/data/node/test_clip_default_max_expanded/test_data_set_0/input_1.pb,sha256=-8abKXDhQDgDtXksv1K3VkLu2XJbj3RVgko0p7nK-8o,13 +onnx/backend/test/data/node/test_clip_default_max_expanded/test_data_set_0/output_0.pb,sha256=I04WldqbkTeeL0KpKpuo4Aw_r1J6E5FHlnYOTQjMQxU,254 +onnx/backend/test/data/node/test_clip_default_min/model.onnx,sha256=HXEnek_-WWBF-8Qh73uGU59Os6hZDH2x71kE2X35ZnA,131 +onnx/backend/test/data/node/test_clip_default_min/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_clip_default_min/test_data_set_0/input_1.pb,sha256=YN1rp1oEc2SLKQVP1qKZlBVrrUYAUCXA0Ids1IC8cVo,13 +onnx/backend/test/data/node/test_clip_default_min/test_data_set_0/output_0.pb,sha256=SqNj4MYf1_XozMtqbVv31LmL5bj5_lZV-tOJ9tLs8Ik,254 +onnx/backend/test/data/node/test_clip_default_min_expanded/model.onnx,sha256=dqAs6KUlnP5pfOaHxVCo5kEycyWSywvoqVANWd4waEo,294 +onnx/backend/test/data/node/test_clip_default_min_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_clip_default_min_expanded/test_data_set_0/input_1.pb,sha256=YN1rp1oEc2SLKQVP1qKZlBVrrUYAUCXA0Ids1IC8cVo,13 +onnx/backend/test/data/node/test_clip_default_min_expanded/test_data_set_0/output_0.pb,sha256=SqNj4MYf1_XozMtqbVv31LmL5bj5_lZV-tOJ9tLs8Ik,254 +onnx/backend/test/data/node/test_clip_example/model.onnx,sha256=jmMzre4t9LogRagPrYJD0Qk_VHljfUufDPpUJWTFj04,131 +onnx/backend/test/data/node/test_clip_example/test_data_set_0/input_0.pb,sha256=9k-PJmwQ91koTiiJe8X4rULQA1y9P3aSpPPuUdAwT9E,21 +onnx/backend/test/data/node/test_clip_example/test_data_set_0/input_1.pb,sha256=BRepSTg3YLUzFaeEDm7kjV41FnB7WdzkW7ynoPYkPGs,13 +onnx/backend/test/data/node/test_clip_example/test_data_set_0/input_2.pb,sha256=qLMCCKsNYs42OTq0_Gwqq2h8q5iUz8-GZ2CCZo1GGT4,13 +onnx/backend/test/data/node/test_clip_example/test_data_set_0/output_0.pb,sha256=lezr2GRMo0JWUSPIQ-nlrz1YNhYT_XpoHnO0oq6y-ic,21 +onnx/backend/test/data/node/test_clip_example_expanded/model.onnx,sha256=eM62MADuYthk8xV0LVHcf8-wSqROjU_IgJXQlg9RGyU,578 +onnx/backend/test/data/node/test_clip_example_expanded/test_data_set_0/input_0.pb,sha256=9k-PJmwQ91koTiiJe8X4rULQA1y9P3aSpPPuUdAwT9E,21 +onnx/backend/test/data/node/test_clip_example_expanded/test_data_set_0/input_1.pb,sha256=BRepSTg3YLUzFaeEDm7kjV41FnB7WdzkW7ynoPYkPGs,13 +onnx/backend/test/data/node/test_clip_example_expanded/test_data_set_0/input_2.pb,sha256=qLMCCKsNYs42OTq0_Gwqq2h8q5iUz8-GZ2CCZo1GGT4,13 
+onnx/backend/test/data/node/test_clip_example_expanded/test_data_set_0/output_0.pb,sha256=lezr2GRMo0JWUSPIQ-nlrz1YNhYT_XpoHnO0oq6y-ic,21 +onnx/backend/test/data/node/test_clip_expanded/model.onnx,sha256=bTXSndV3iagfSKE9Aw3psPNzl6a0l7_M_iLpujEm7n4,530 +onnx/backend/test/data/node/test_clip_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_clip_expanded/test_data_set_0/input_1.pb,sha256=BRepSTg3YLUzFaeEDm7kjV41FnB7WdzkW7ynoPYkPGs,13 +onnx/backend/test/data/node/test_clip_expanded/test_data_set_0/input_2.pb,sha256=qLMCCKsNYs42OTq0_Gwqq2h8q5iUz8-GZ2CCZo1GGT4,13 +onnx/backend/test/data/node/test_clip_expanded/test_data_set_0/output_0.pb,sha256=0Mj3n7O3-ghXihrxNll_6tjrgxFo27H4atMMfljBuEg,254 +onnx/backend/test/data/node/test_clip_inbounds/model.onnx,sha256=idZxS7uYmOrM7b4yKftomS-sd83XIBs8zBIZAEEWXf8,132 +onnx/backend/test/data/node/test_clip_inbounds/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_clip_inbounds/test_data_set_0/input_1.pb,sha256=DyQyzAiPdgNHN3peR3o7xZVckmF3lQneq_S8M2AMI9w,13 +onnx/backend/test/data/node/test_clip_inbounds/test_data_set_0/input_2.pb,sha256=3sy_5aJR44lfPhtuIRBCqv9TlSqgBMK4u192l4On_5I,13 +onnx/backend/test/data/node/test_clip_inbounds/test_data_set_0/output_0.pb,sha256=lezr2GRMo0JWUSPIQ-nlrz1YNhYT_XpoHnO0oq6y-ic,21 +onnx/backend/test/data/node/test_clip_inbounds_expanded/model.onnx,sha256=OvcxAGIGUWSIMmNoGIUt6XJRdTRgGIvhveBRAaoM3sc,587 +onnx/backend/test/data/node/test_clip_inbounds_expanded/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_clip_inbounds_expanded/test_data_set_0/input_1.pb,sha256=DyQyzAiPdgNHN3peR3o7xZVckmF3lQneq_S8M2AMI9w,13 +onnx/backend/test/data/node/test_clip_inbounds_expanded/test_data_set_0/input_2.pb,sha256=3sy_5aJR44lfPhtuIRBCqv9TlSqgBMK4u192l4On_5I,13 +onnx/backend/test/data/node/test_clip_inbounds_expanded/test_data_set_0/output_0.pb,sha256=lezr2GRMo0JWUSPIQ-nlrz1YNhYT_XpoHnO0oq6y-ic,21 +onnx/backend/test/data/node/test_clip_outbounds/model.onnx,sha256=DKK18noAKi9Btc-47_kGK2uuLsHlfmuRs-bXv1P6wh4,133 +onnx/backend/test/data/node/test_clip_outbounds/test_data_set_0/input_0.pb,sha256=V0F1iOI7iAm2K90hqNNItzlFwxXH3rKXm9a5wXB-qYQ,21 +onnx/backend/test/data/node/test_clip_outbounds/test_data_set_0/input_1.pb,sha256=DyQyzAiPdgNHN3peR3o7xZVckmF3lQneq_S8M2AMI9w,13 +onnx/backend/test/data/node/test_clip_outbounds/test_data_set_0/input_2.pb,sha256=3sy_5aJR44lfPhtuIRBCqv9TlSqgBMK4u192l4On_5I,13 +onnx/backend/test/data/node/test_clip_outbounds/test_data_set_0/output_0.pb,sha256=vvqmqBIu_2d9LT3R85W2RtMis_GI5BLVrhWdaGNRMB0,21 +onnx/backend/test/data/node/test_clip_outbounds_expanded/model.onnx,sha256=lGBTvlb3SGg-7LQFxUUhPvDYorM4NudbNqk3khkv8HI,596 +onnx/backend/test/data/node/test_clip_outbounds_expanded/test_data_set_0/input_0.pb,sha256=V0F1iOI7iAm2K90hqNNItzlFwxXH3rKXm9a5wXB-qYQ,21 +onnx/backend/test/data/node/test_clip_outbounds_expanded/test_data_set_0/input_1.pb,sha256=DyQyzAiPdgNHN3peR3o7xZVckmF3lQneq_S8M2AMI9w,13 +onnx/backend/test/data/node/test_clip_outbounds_expanded/test_data_set_0/input_2.pb,sha256=3sy_5aJR44lfPhtuIRBCqv9TlSqgBMK4u192l4On_5I,13 +onnx/backend/test/data/node/test_clip_outbounds_expanded/test_data_set_0/output_0.pb,sha256=vvqmqBIu_2d9LT3R85W2RtMis_GI5BLVrhWdaGNRMB0,21 +onnx/backend/test/data/node/test_clip_splitbounds/model.onnx,sha256=57xFzbLRYD7xPA_1rSX225JFx7FpBqe-TxAh0yRGutA,135 
+onnx/backend/test/data/node/test_clip_splitbounds/test_data_set_0/input_0.pb,sha256=leQohPrMDQG6Z0CCWG_wiYb4jpwYZA2BG_LvQcmbl9o,21 +onnx/backend/test/data/node/test_clip_splitbounds/test_data_set_0/input_1.pb,sha256=DyQyzAiPdgNHN3peR3o7xZVckmF3lQneq_S8M2AMI9w,13 +onnx/backend/test/data/node/test_clip_splitbounds/test_data_set_0/input_2.pb,sha256=3sy_5aJR44lfPhtuIRBCqv9TlSqgBMK4u192l4On_5I,13 +onnx/backend/test/data/node/test_clip_splitbounds/test_data_set_0/output_0.pb,sha256=15BzcxfrfHAuKkv7YEqCqM0o_yR1xECA5GJqzFGwo0Y,21 +onnx/backend/test/data/node/test_clip_splitbounds_expanded/model.onnx,sha256=EfcHHaSmBKZEG32NdMfGA8XQmcxTQrCEHz1Xznl9zC0,613 +onnx/backend/test/data/node/test_clip_splitbounds_expanded/test_data_set_0/input_0.pb,sha256=leQohPrMDQG6Z0CCWG_wiYb4jpwYZA2BG_LvQcmbl9o,21 +onnx/backend/test/data/node/test_clip_splitbounds_expanded/test_data_set_0/input_1.pb,sha256=DyQyzAiPdgNHN3peR3o7xZVckmF3lQneq_S8M2AMI9w,13 +onnx/backend/test/data/node/test_clip_splitbounds_expanded/test_data_set_0/input_2.pb,sha256=3sy_5aJR44lfPhtuIRBCqv9TlSqgBMK4u192l4On_5I,13 +onnx/backend/test/data/node/test_clip_splitbounds_expanded/test_data_set_0/output_0.pb,sha256=15BzcxfrfHAuKkv7YEqCqM0o_yR1xECA5GJqzFGwo0Y,21 +onnx/backend/test/data/node/test_col2im/model.onnx,sha256=IOf8vTDwhQGOtllNTnICmKiz07hm1rxWD-Nc7NNADfk,206 +onnx/backend/test/data/node/test_col2im/test_data_set_0/input_0.pb,sha256=5A3VwvCazC3A3QMIcJp8G1vvMKrc67-pH8D3QiYJxrw,117 +onnx/backend/test/data/node/test_col2im/test_data_set_0/input_1.pb,sha256=sC33YEdMBoSDhAdtQ124hhrmtKVAxoV-pDz8e6zRvRk,35 +onnx/backend/test/data/node/test_col2im/test_data_set_0/input_2.pb,sha256=dDzuXwdlcFBeNNxzJK0GQtcMTbb_H6Y32BCWnvDooDU,35 +onnx/backend/test/data/node/test_col2im/test_data_set_0/output_0.pb,sha256=xEQ2Kj9FDYM_Uu6NEhi2oPAvBijU8rklQamYJqr4AYs,120 +onnx/backend/test/data/node/test_col2im_5d/model.onnx,sha256=k-5N3_FUpbwT1QXtMqOufg0uQsJxnV4rqswqe8D-NXg,213 +onnx/backend/test/data/node/test_col2im_5d/test_data_set_0/input_0.pb,sha256=10NWZ13V9CcUU08fb3zJSdT9Wn1ykGAMSmEV63jwgSc,498 +onnx/backend/test/data/node/test_col2im_5d/test_data_set_0/input_1.pb,sha256=mSuKXNsMN1uBSUQ6osPvFSCLieHyPCHcY348CC0BAZ8,43 +onnx/backend/test/data/node/test_col2im_5d/test_data_set_0/input_2.pb,sha256=KWWYXJvC1NdLgGh8u0n8JUmPG2N9Ykdsp9YVT_8huiE,43 +onnx/backend/test/data/node/test_col2im_5d/test_data_set_0/output_0.pb,sha256=iEagmo2f_5o9u1f8VbWM0axVO6jVi8PqZ0Kwqew7g8M,503 +onnx/backend/test/data/node/test_col2im_dilations/model.onnx,sha256=V12TeDt8MRMS_EvbIKGg8G1SwMdC75kU_h5nk862Nww,236 +onnx/backend/test/data/node/test_col2im_dilations/test_data_set_0/input_0.pb,sha256=d9wmnv3b9dr2kDj10v5X5wkY0lIAaxqSoVm_FGKz2TY,97 +onnx/backend/test/data/node/test_col2im_dilations/test_data_set_0/input_1.pb,sha256=4GVa_GmgAg6wnfkkTNNgGTYtMDQuUT19vWfeDA_9GVw,35 +onnx/backend/test/data/node/test_col2im_dilations/test_data_set_0/input_2.pb,sha256=bLrs_xcBqV1BYkqqtdhN8gk07hFw_rfW0NpC3S8enAw,35 +onnx/backend/test/data/node/test_col2im_dilations/test_data_set_0/output_0.pb,sha256=jZt6LpawiR5-xTQ4emYrD7LTWpTpe3GbIbSV5oKlPo0,165 +onnx/backend/test/data/node/test_col2im_pads/model.onnx,sha256=FszV1KU3PR-9pZE7t4lltctCRUPjhT5FFIUdhGh41mo,230 +onnx/backend/test/data/node/test_col2im_pads/test_data_set_0/input_0.pb,sha256=FJT7adhospTzI4VyMN70Qk2Ijori7V0uLua2eLO3wGI,318 +onnx/backend/test/data/node/test_col2im_pads/test_data_set_0/input_1.pb,sha256=sC33YEdMBoSDhAdtQ124hhrmtKVAxoV-pDz8e6zRvRk,35 
+onnx/backend/test/data/node/test_col2im_pads/test_data_set_0/input_2.pb,sha256=dDzuXwdlcFBeNNxzJK0GQtcMTbb_H6Y32BCWnvDooDU,35 +onnx/backend/test/data/node/test_col2im_pads/test_data_set_0/output_0.pb,sha256=Wi5m8n4bgmc9Fvl68grDeS8VTj3pkvBlVCr9wp9QjT4,120 +onnx/backend/test/data/node/test_col2im_strides/model.onnx,sha256=wdgnYE0Qeoy3xRKLMUpR4NOymjxDg9SNtvTbvEkAK3M,232 +onnx/backend/test/data/node/test_col2im_strides/test_data_set_0/input_0.pb,sha256=bATQTJp-m3z-CD8kdkzPCHqEkGQCZWzeYH1luHYPsLs,162 +onnx/backend/test/data/node/test_col2im_strides/test_data_set_0/input_1.pb,sha256=sC33YEdMBoSDhAdtQ124hhrmtKVAxoV-pDz8e6zRvRk,35 +onnx/backend/test/data/node/test_col2im_strides/test_data_set_0/input_2.pb,sha256=zrcpb5IFLjcr6h-Ze3T0j8tMq-I7sAJZD1TULUur5Ew,35 +onnx/backend/test/data/node/test_col2im_strides/test_data_set_0/output_0.pb,sha256=3SveaOMPddbY4IP1dOqylTuVoz7a_KyOLxB45bgvhIo,120 +onnx/backend/test/data/node/test_compress_0/model.onnx,sha256=nodKtdil0L0jbH4L5wO-v7yOu-BP0BkZlQEJDVbvvf8,169 +onnx/backend/test/data/node/test_compress_0/test_data_set_0/input_0.pb,sha256=46laMzhSwTujxNC5wdf57VFKvfNtw0oXjGKcn85m99c,39 +onnx/backend/test/data/node/test_compress_0/test_data_set_0/input_1.pb,sha256=jw39JH8-z2Ji3LtSHV2AWRWenNBHag9p9CIi0kH0WHs,20 +onnx/backend/test/data/node/test_compress_0/test_data_set_0/output_0.pb,sha256=REWTt5gfMJMzFJqlXfEgkPunKMIa4ztIMdbazCALGAY,32 +onnx/backend/test/data/node/test_compress_1/model.onnx,sha256=QhUJpXVhlcZ5jAmQFQY0LAvCYLZKqCpUxk7bLn-_WCM,169 +onnx/backend/test/data/node/test_compress_1/test_data_set_0/input_0.pb,sha256=46laMzhSwTujxNC5wdf57VFKvfNtw0oXjGKcn85m99c,39 +onnx/backend/test/data/node/test_compress_1/test_data_set_0/input_1.pb,sha256=cU3M65jPa3QTEGyHULlR-gk02IKzUg28hWixar4xdQ4,19 +onnx/backend/test/data/node/test_compress_1/test_data_set_0/output_0.pb,sha256=P-YBRUIo2dEPuIakb5wO4LXMR_mhYPCtGflcP-3B-qk,28 +onnx/backend/test/data/node/test_compress_default_axis/model.onnx,sha256=Tia1aeKj5DAQqe23y8WKMYY9LzJHn_rWX4hYA189-N4,163 +onnx/backend/test/data/node/test_compress_default_axis/test_data_set_0/input_0.pb,sha256=46laMzhSwTujxNC5wdf57VFKvfNtw0oXjGKcn85m99c,39 +onnx/backend/test/data/node/test_compress_default_axis/test_data_set_0/input_1.pb,sha256=As8vdbdiBHE5yy7e9p1d0ztqpDC8js6Dnzj3Sz7uWMU,22 +onnx/backend/test/data/node/test_compress_default_axis/test_data_set_0/output_0.pb,sha256=5m4bYUDPwIjGY_qUyTFXAxuUzstD-Vi3TTFlwkdjp7w,22 +onnx/backend/test/data/node/test_compress_negative_axis/model.onnx,sha256=qSUsJJPWf-fDfmbGlU3sG-j968n09FJ95FgTeFUhx3U,190 +onnx/backend/test/data/node/test_compress_negative_axis/test_data_set_0/input_0.pb,sha256=46laMzhSwTujxNC5wdf57VFKvfNtw0oXjGKcn85m99c,39 +onnx/backend/test/data/node/test_compress_negative_axis/test_data_set_0/input_1.pb,sha256=cU3M65jPa3QTEGyHULlR-gk02IKzUg28hWixar4xdQ4,19 +onnx/backend/test/data/node/test_compress_negative_axis/test_data_set_0/output_0.pb,sha256=P-YBRUIo2dEPuIakb5wO4LXMR_mhYPCtGflcP-3B-qk,28 +onnx/backend/test/data/node/test_concat_1d_axis_0/model.onnx,sha256=UD4hvQQPsO5mdTW7kHiWrpisTxd-QLk_Jdn854AlV0I,161 +onnx/backend/test/data/node/test_concat_1d_axis_0/test_data_set_0/input_0.pb,sha256=M_zmlmaj7q-xos1LgfEPC1kNNKTsWO4Fvr1h-Ep1zaM,22 +onnx/backend/test/data/node/test_concat_1d_axis_0/test_data_set_0/input_1.pb,sha256=ALDN6Sc9t1SBGFf3S6VjvzHPLi6FM7bqSzORcQ78E3I,22 +onnx/backend/test/data/node/test_concat_1d_axis_0/test_data_set_0/output_0.pb,sha256=vSCHzMFSvur8t7ADW1BDQbERwPltOiCd-JuPcQ6-XcQ,30 
+onnx/backend/test/data/node/test_concat_1d_axis_negative_1/model.onnx,sha256=YoSMpWext44eCDoh2QV4N8J2chRPhYI2t4bjYD2n1L8,179 +onnx/backend/test/data/node/test_concat_1d_axis_negative_1/test_data_set_0/input_0.pb,sha256=M_zmlmaj7q-xos1LgfEPC1kNNKTsWO4Fvr1h-Ep1zaM,22 +onnx/backend/test/data/node/test_concat_1d_axis_negative_1/test_data_set_0/input_1.pb,sha256=ALDN6Sc9t1SBGFf3S6VjvzHPLi6FM7bqSzORcQ78E3I,22 +onnx/backend/test/data/node/test_concat_1d_axis_negative_1/test_data_set_0/output_0.pb,sha256=vSCHzMFSvur8t7ADW1BDQbERwPltOiCd-JuPcQ6-XcQ,30 +onnx/backend/test/data/node/test_concat_2d_axis_0/model.onnx,sha256=e2A3L9ryrB2kVlNKX4dcF1wAIQuk6eLWJgJ35yvurSM,173 +onnx/backend/test/data/node/test_concat_2d_axis_0/test_data_set_0/input_0.pb,sha256=FF2sDSeoQuwLOa3kNb3bHbtcVNAifeP-yk4AM9YE9A4,32 +onnx/backend/test/data/node/test_concat_2d_axis_0/test_data_set_0/input_1.pb,sha256=x9-oIRV3uL9uNyxiTOG7Q5zHYeT9DketTiY7dySqaKc,32 +onnx/backend/test/data/node/test_concat_2d_axis_0/test_data_set_0/output_0.pb,sha256=Ta3eaIezq0mRJM1A9OsJDF7hyiipBPfqZHsahXM5SNo,48 +onnx/backend/test/data/node/test_concat_2d_axis_1/model.onnx,sha256=rFTQnBxGEZCLdelAGH1xXRLwPrJ4bgeZUFcNAtGEctM,173 +onnx/backend/test/data/node/test_concat_2d_axis_1/test_data_set_0/input_0.pb,sha256=FF2sDSeoQuwLOa3kNb3bHbtcVNAifeP-yk4AM9YE9A4,32 +onnx/backend/test/data/node/test_concat_2d_axis_1/test_data_set_0/input_1.pb,sha256=x9-oIRV3uL9uNyxiTOG7Q5zHYeT9DketTiY7dySqaKc,32 +onnx/backend/test/data/node/test_concat_2d_axis_1/test_data_set_0/output_0.pb,sha256=9BvkeNloIrO5LNPMggPhy2lcCqOOU5HHPdQPj7sEfvw,48 +onnx/backend/test/data/node/test_concat_2d_axis_negative_1/model.onnx,sha256=zzVnaH5YLO-T0lWGv-Bexq56fxLAtm9JkkFyQ-0HU4U,191 +onnx/backend/test/data/node/test_concat_2d_axis_negative_1/test_data_set_0/input_0.pb,sha256=FF2sDSeoQuwLOa3kNb3bHbtcVNAifeP-yk4AM9YE9A4,32 +onnx/backend/test/data/node/test_concat_2d_axis_negative_1/test_data_set_0/input_1.pb,sha256=x9-oIRV3uL9uNyxiTOG7Q5zHYeT9DketTiY7dySqaKc,32 +onnx/backend/test/data/node/test_concat_2d_axis_negative_1/test_data_set_0/output_0.pb,sha256=9BvkeNloIrO5LNPMggPhy2lcCqOOU5HHPdQPj7sEfvw,48 +onnx/backend/test/data/node/test_concat_2d_axis_negative_2/model.onnx,sha256=nR004XJROokQER1gioPgrJx4lNbMRKr53nGnBj8L9EA,191 +onnx/backend/test/data/node/test_concat_2d_axis_negative_2/test_data_set_0/input_0.pb,sha256=FF2sDSeoQuwLOa3kNb3bHbtcVNAifeP-yk4AM9YE9A4,32 +onnx/backend/test/data/node/test_concat_2d_axis_negative_2/test_data_set_0/input_1.pb,sha256=x9-oIRV3uL9uNyxiTOG7Q5zHYeT9DketTiY7dySqaKc,32 +onnx/backend/test/data/node/test_concat_2d_axis_negative_2/test_data_set_0/output_0.pb,sha256=Ta3eaIezq0mRJM1A9OsJDF7hyiipBPfqZHsahXM5SNo,48 +onnx/backend/test/data/node/test_concat_3d_axis_0/model.onnx,sha256=LN-VZp2V3JT2FAMnwWn97NDFexgJj7xpOwyztM1GPBA,185 +onnx/backend/test/data/node/test_concat_3d_axis_0/test_data_set_0/input_0.pb,sha256=81NAfcLNqQRpHPhx5OmEJSm6F4NEGzhRgBLOLjv9Kx4,50 +onnx/backend/test/data/node/test_concat_3d_axis_0/test_data_set_0/input_1.pb,sha256=msjjoWLAIXh4e0fgz6zQH_f98YwZNfdBOfU870P_dug,50 +onnx/backend/test/data/node/test_concat_3d_axis_0/test_data_set_0/output_0.pb,sha256=dUvwM04_dv9m8UaqWmWl0QjkurUJSN_oRSXorZ97h2Q,82 +onnx/backend/test/data/node/test_concat_3d_axis_1/model.onnx,sha256=QLwkJrU-3hhnPEFv5-9DAukR_kvj1CGb0CtqmImkYRc,185 +onnx/backend/test/data/node/test_concat_3d_axis_1/test_data_set_0/input_0.pb,sha256=81NAfcLNqQRpHPhx5OmEJSm6F4NEGzhRgBLOLjv9Kx4,50 
+onnx/backend/test/data/node/test_concat_3d_axis_1/test_data_set_0/input_1.pb,sha256=msjjoWLAIXh4e0fgz6zQH_f98YwZNfdBOfU870P_dug,50 +onnx/backend/test/data/node/test_concat_3d_axis_1/test_data_set_0/output_0.pb,sha256=2aOEqB0HzGWyPK-b-x4GY4_rB9VAb3Xk195r28cSNMQ,82 +onnx/backend/test/data/node/test_concat_3d_axis_2/model.onnx,sha256=WBBOjo3hw1d_0I7ErVWfYps6q03HwpcoUSGKewUFlhg,185 +onnx/backend/test/data/node/test_concat_3d_axis_2/test_data_set_0/input_0.pb,sha256=81NAfcLNqQRpHPhx5OmEJSm6F4NEGzhRgBLOLjv9Kx4,50 +onnx/backend/test/data/node/test_concat_3d_axis_2/test_data_set_0/input_1.pb,sha256=msjjoWLAIXh4e0fgz6zQH_f98YwZNfdBOfU870P_dug,50 +onnx/backend/test/data/node/test_concat_3d_axis_2/test_data_set_0/output_0.pb,sha256=83yvsYA4HAppNb6e1qyXLKZZOFJwpFvQkNmBQ6VXvgg,82 +onnx/backend/test/data/node/test_concat_3d_axis_negative_1/model.onnx,sha256=aywEPDxFVmIGoIi8rFzzhSfsIK5gZ8vk9CLymObB-LY,203 +onnx/backend/test/data/node/test_concat_3d_axis_negative_1/test_data_set_0/input_0.pb,sha256=81NAfcLNqQRpHPhx5OmEJSm6F4NEGzhRgBLOLjv9Kx4,50 +onnx/backend/test/data/node/test_concat_3d_axis_negative_1/test_data_set_0/input_1.pb,sha256=msjjoWLAIXh4e0fgz6zQH_f98YwZNfdBOfU870P_dug,50 +onnx/backend/test/data/node/test_concat_3d_axis_negative_1/test_data_set_0/output_0.pb,sha256=83yvsYA4HAppNb6e1qyXLKZZOFJwpFvQkNmBQ6VXvgg,82 +onnx/backend/test/data/node/test_concat_3d_axis_negative_2/model.onnx,sha256=DzSa3Qr5LYMmo9J8lcvgq86FUEg5Wj9t5zJ31-96l4s,203 +onnx/backend/test/data/node/test_concat_3d_axis_negative_2/test_data_set_0/input_0.pb,sha256=81NAfcLNqQRpHPhx5OmEJSm6F4NEGzhRgBLOLjv9Kx4,50 +onnx/backend/test/data/node/test_concat_3d_axis_negative_2/test_data_set_0/input_1.pb,sha256=msjjoWLAIXh4e0fgz6zQH_f98YwZNfdBOfU870P_dug,50 +onnx/backend/test/data/node/test_concat_3d_axis_negative_2/test_data_set_0/output_0.pb,sha256=2aOEqB0HzGWyPK-b-x4GY4_rB9VAb3Xk195r28cSNMQ,82 +onnx/backend/test/data/node/test_concat_3d_axis_negative_3/model.onnx,sha256=kN_yeyuEFOOXREufwa55NK5mRhj64blkAd1Zcixp3B0,203 +onnx/backend/test/data/node/test_concat_3d_axis_negative_3/test_data_set_0/input_0.pb,sha256=81NAfcLNqQRpHPhx5OmEJSm6F4NEGzhRgBLOLjv9Kx4,50 +onnx/backend/test/data/node/test_concat_3d_axis_negative_3/test_data_set_0/input_1.pb,sha256=msjjoWLAIXh4e0fgz6zQH_f98YwZNfdBOfU870P_dug,50 +onnx/backend/test/data/node/test_concat_3d_axis_negative_3/test_data_set_0/output_0.pb,sha256=dUvwM04_dv9m8UaqWmWl0QjkurUJSN_oRSXorZ97h2Q,82 +onnx/backend/test/data/node/test_constant/model.onnx,sha256=NgvNIWLo-KqwndNEnkx1berULJK4kHUe0nq0w-y8O0U,224 +onnx/backend/test/data/node/test_constant/test_data_set_0/output_0.pb,sha256=ZtZq7biLE0YlYjO3sb2Ior_w2EOqzpNpNMCGHbGmiVQ,116 +onnx/backend/test/data/node/test_constant_pad/model.onnx,sha256=RzIW1QdhSL880Nw37OuToq9LUCOXqxa5IaE3YGSvr_w,186 +onnx/backend/test/data/node/test_constant_pad/test_data_set_0/input_0.pb,sha256=hFcc65NzF2jKbEvO_e8J60kUbf0yiM7kqrgc4fH_zcw,256 +onnx/backend/test/data/node/test_constant_pad/test_data_set_0/input_1.pb,sha256=ATAqLmSfBqkY7YZcsD06zkF3azvPnL7hkoPMm3R__Tc,76 +onnx/backend/test/data/node/test_constant_pad/test_data_set_0/input_2.pb,sha256=-iWKiqLhApsL8FEiLSrlvrdAGtuBlTxHvG7p-_FNqzk,15 +onnx/backend/test/data/node/test_constant_pad/test_data_set_0/output_0.pb,sha256=0e78ZdrtxNO0GEW3dObwOBqTpBycxQUoInBNVhnLm7w,1024 +onnx/backend/test/data/node/test_constant_pad_axes/model.onnx,sha256=4qCCp3kfU1HHopgHPThhe8B15lvFWirMCQTUD3a9LZU,217 +onnx/backend/test/data/node/test_constant_pad_axes/test_data_set_0/input_0.pb,sha256=hFcc65NzF2jKbEvO_e8J60kUbf0yiM7kqrgc4fH_zcw,256 
+onnx/backend/test/data/node/test_constant_pad_axes/test_data_set_0/input_1.pb,sha256=tY8h_wDfpSXDPKmu9zOpH_G0C_JlFYFRLs84BZy2vw4,44 +onnx/backend/test/data/node/test_constant_pad_axes/test_data_set_0/input_2.pb,sha256=-iWKiqLhApsL8FEiLSrlvrdAGtuBlTxHvG7p-_FNqzk,15 +onnx/backend/test/data/node/test_constant_pad_axes/test_data_set_0/input_3.pb,sha256=pX9n5VloYbf2naQVDVOgUQjBFu_mcvPOWkP1mH0Fwp8,28 +onnx/backend/test/data/node/test_constant_pad_axes/test_data_set_0/output_0.pb,sha256=32CHhoVQ_jOuUuNIFNIYKynezo6BafmF2Eyrkr2uNjE,592 +onnx/backend/test/data/node/test_constant_pad_negative_axes/model.onnx,sha256=LTZJujjMR7KukYL5rTqaD6V-cjE7zU3kcQfQ018MkUk,226 +onnx/backend/test/data/node/test_constant_pad_negative_axes/test_data_set_0/input_0.pb,sha256=hFcc65NzF2jKbEvO_e8J60kUbf0yiM7kqrgc4fH_zcw,256 +onnx/backend/test/data/node/test_constant_pad_negative_axes/test_data_set_0/input_1.pb,sha256=tY8h_wDfpSXDPKmu9zOpH_G0C_JlFYFRLs84BZy2vw4,44 +onnx/backend/test/data/node/test_constant_pad_negative_axes/test_data_set_0/input_2.pb,sha256=-iWKiqLhApsL8FEiLSrlvrdAGtuBlTxHvG7p-_FNqzk,15 +onnx/backend/test/data/node/test_constant_pad_negative_axes/test_data_set_0/input_3.pb,sha256=jOLO9c9UDkiTUsrm1piTLJODPkSHC2uDBWr6sbTixtY,28 +onnx/backend/test/data/node/test_constant_pad_negative_axes/test_data_set_0/output_0.pb,sha256=32CHhoVQ_jOuUuNIFNIYKynezo6BafmF2Eyrkr2uNjE,592 +onnx/backend/test/data/node/test_constantofshape_float_ones/model.onnx,sha256=Z8DqV7v0OumJY1RRjtJ3ZcWHZ16j05R4EDP3VkhPksU,156 +onnx/backend/test/data/node/test_constantofshape_float_ones/test_data_set_0/input_0.pb,sha256=SF8xOIzH7O17I29ISO9iIIogd75Kt4opGiw4UN5z9Ig,33 +onnx/backend/test/data/node/test_constantofshape_float_ones/test_data_set_0/output_0.pb,sha256=fM51l16xt2ADixD3tubDFSZJhvp44ltpkzuAZo1D9tg,109 +onnx/backend/test/data/node/test_constantofshape_int_shape_zero/model.onnx,sha256=7X0bKVxuxMQBQXjs6GG11r5UNJg2JZiMSr7kuYDx4LI,148 +onnx/backend/test/data/node/test_constantofshape_int_shape_zero/test_data_set_0/input_0.pb,sha256=49_f6waZPNPplh_reb2omwHhFxvAn7iTR__LuEBk59Y,17 +onnx/backend/test/data/node/test_constantofshape_int_shape_zero/test_data_set_0/output_0.pb,sha256=0v9IGvSgdFHNbGSEn5hgWuc58i5GJAQPlXlTW3A7nQQ,9 +onnx/backend/test/data/node/test_constantofshape_int_zeros/model.onnx,sha256=vX3zxk7KfDE899J44-saGhsXG-ZRs8qiakUVoMoe2ew,147 +onnx/backend/test/data/node/test_constantofshape_int_zeros/test_data_set_0/input_0.pb,sha256=_VQY3wcZhcCAEZwbELNR-ceaJwmhW-uKL6DcRBVX2Ks,25 +onnx/backend/test/data/node/test_constantofshape_int_zeros/test_data_set_0/output_0.pb,sha256=lKYhgMCsEKm6_R6aX9y86EzA3ZggFUKai7DekgAn9rY,252 +onnx/backend/test/data/node/test_conv_with_autopad_same/model.onnx,sha256=JRAdcTUyYemWrJ1WfSemMdVe0o6-oKUQsgowkKNyQ3k,226 +onnx/backend/test/data/node/test_conv_with_autopad_same/test_data_set_0/input_0.pb,sha256=RnJVWHKql-4c51A3JiCTvFe-ry8o5bavOnIotOzZFK8,115 +onnx/backend/test/data/node/test_conv_with_autopad_same/test_data_set_0/input_1.pb,sha256=i8uYkxFsjmh4jNDwmgCtV7jQtF11nwCwISLTKf1COgg,51 +onnx/backend/test/data/node/test_conv_with_autopad_same/test_data_set_0/output_0.pb,sha256=fBuFid7X2GAEdW-WKUKklPs4CDHcf2SMq98fX9QICIQ,51 +onnx/backend/test/data/node/test_conv_with_strides_and_asymmetric_padding/model.onnx,sha256=y8V7Oi4cpuZKy-L9YuLkUGLyUKiGv3XIwv0SOKpsVks,236 +onnx/backend/test/data/node/test_conv_with_strides_and_asymmetric_padding/test_data_set_0/input_0.pb,sha256=EKni8PMyz3jKiEUekWtHFQJ_0vQqMDykmd7ijlyvmUk,156 
+onnx/backend/test/data/node/test_conv_with_strides_and_asymmetric_padding/test_data_set_0/input_1.pb,sha256=i8uYkxFsjmh4jNDwmgCtV7jQtF11nwCwISLTKf1COgg,51 +onnx/backend/test/data/node/test_conv_with_strides_and_asymmetric_padding/test_data_set_0/output_0.pb,sha256=zWxG_I4hksIoNyVDyHd3focjwFyOzpjzxRY0lKt8hLc,47 +onnx/backend/test/data/node/test_conv_with_strides_no_padding/model.onnx,sha256=YtFVp-6n7Dz2vct6Evkp8n7i76Ml7k6T_GA7MMf6-jk,224 +onnx/backend/test/data/node/test_conv_with_strides_no_padding/test_data_set_0/input_0.pb,sha256=EKni8PMyz3jKiEUekWtHFQJ_0vQqMDykmd7ijlyvmUk,156 +onnx/backend/test/data/node/test_conv_with_strides_no_padding/test_data_set_0/input_1.pb,sha256=i8uYkxFsjmh4jNDwmgCtV7jQtF11nwCwISLTKf1COgg,51 +onnx/backend/test/data/node/test_conv_with_strides_no_padding/test_data_set_0/output_0.pb,sha256=jpWXQcHC97bNhULIDaC1cTAxCzLIRdQDp6Uujd7Vt2M,39 +onnx/backend/test/data/node/test_conv_with_strides_padding/model.onnx,sha256=AlxPB95Rk740RmbKoBn3b_TL_3olp6__FCrXvDTITNQ,221 +onnx/backend/test/data/node/test_conv_with_strides_padding/test_data_set_0/input_0.pb,sha256=EKni8PMyz3jKiEUekWtHFQJ_0vQqMDykmd7ijlyvmUk,156 +onnx/backend/test/data/node/test_conv_with_strides_padding/test_data_set_0/input_1.pb,sha256=i8uYkxFsjmh4jNDwmgCtV7jQtF11nwCwISLTKf1COgg,51 +onnx/backend/test/data/node/test_conv_with_strides_padding/test_data_set_0/output_0.pb,sha256=KJnfa4Bs2wm6SwFgzBZZWQuQ3rrG5KOYY9bZH4G2toQ,63 +onnx/backend/test/data/node/test_convinteger_with_padding/model.onnx,sha256=rZ9_v1mn3tlT0c4QYmCDv_CsMuzobV9qzFVsA-YiXT8,224 +onnx/backend/test/data/node/test_convinteger_with_padding/test_data_set_0/input_0.pb,sha256=Blk9OpAbYayI22rinL6HBBpGT7VxJcAbgYpPMNlfDLs,24 +onnx/backend/test/data/node/test_convinteger_with_padding/test_data_set_0/input_1.pb,sha256=6DoOKAFFqEvwzMlQqPnfz-Y99DBZgq3zfgSBJrrG-1c,19 +onnx/backend/test/data/node/test_convinteger_with_padding/test_data_set_0/input_2.pb,sha256=ZeQpztgb7wGWXMWNAGDKbvP-wyc-Toe88voMPU1aWKI,19 +onnx/backend/test/data/node/test_convinteger_with_padding/test_data_set_0/output_0.pb,sha256=LvjQOQRGMuP3X76zTRoANF8n3mcJfiY9CEdCsLOXnJs,79 +onnx/backend/test/data/node/test_convinteger_without_padding/model.onnx,sha256=3f-BSu8df7zOqub8da6dejeGah2S4HCeiwgKcb_4W6s,208 +onnx/backend/test/data/node/test_convinteger_without_padding/test_data_set_0/input_0.pb,sha256=Blk9OpAbYayI22rinL6HBBpGT7VxJcAbgYpPMNlfDLs,24 +onnx/backend/test/data/node/test_convinteger_without_padding/test_data_set_0/input_1.pb,sha256=6DoOKAFFqEvwzMlQqPnfz-Y99DBZgq3zfgSBJrrG-1c,19 +onnx/backend/test/data/node/test_convinteger_without_padding/test_data_set_0/input_2.pb,sha256=ZeQpztgb7wGWXMWNAGDKbvP-wyc-Toe88voMPU1aWKI,19 +onnx/backend/test/data/node/test_convinteger_without_padding/test_data_set_0/output_0.pb,sha256=yXCRQ-429TJ61mZfgaxJtnt0Zp5_EEQzBLY_Dmf0on4,31 +onnx/backend/test/data/node/test_convtranspose/model.onnx,sha256=CDTHThq7eKmq4zxFCSKmpFGAv8V8wxb6w9dGOY0p-Tk,158 +onnx/backend/test/data/node/test_convtranspose/test_data_set_0/input_0.pb,sha256=dBjaBXzMfFIovF45VFTSKa1SzvxiDU2cnrAzM3tOY7M,51 +onnx/backend/test/data/node/test_convtranspose/test_data_set_0/input_1.pb,sha256=QtNDpr_tblKea9uJw7Ndo2VwZsE_YR0zGs3GxkA2F1M,87 +onnx/backend/test/data/node/test_convtranspose/test_data_set_0/output_0.pb,sha256=HneG-T3MmfaylD4KMxnOwosFJxoRtZraGOg0lU_Hsc8,216 +onnx/backend/test/data/node/test_convtranspose_1d/model.onnx,sha256=0BBNTQlSdqJxCX5iAsFBCDgidprv_1JRFEpBj3u_Di8,148 
+onnx/backend/test/data/node/test_convtranspose_1d/test_data_set_0/input_0.pb,sha256=DugdB8mDd8zLjys9Gz6guoROF5WniAbCMOYxTsRL2yY,25 +onnx/backend/test/data/node/test_convtranspose_1d/test_data_set_0/input_1.pb,sha256=vqReEteH3D9QNgAwAcubV7ohKew3T-tgB6LOqwzNZYQ,37 +onnx/backend/test/data/node/test_convtranspose_1d/test_data_set_0/output_0.pb,sha256=m82EQ2cpDznkgM3kvHq8Zg7BXP7GHlUqqHAeLFknzNA,53 +onnx/backend/test/data/node/test_convtranspose_3d/model.onnx,sha256=RLn0FqHMYwJpDmmvMRwN75HfXW5H6OqY5K21aJNiDKQ,173 +onnx/backend/test/data/node/test_convtranspose_3d/test_data_set_0/input_0.pb,sha256=yzJg-6Mo6S_Ii0J_O6ARrRuJRKm2GYYzSfoKJBTl1Tw,258 +onnx/backend/test/data/node/test_convtranspose_3d/test_data_set_0/input_1.pb,sha256=bJ7b_MaU60hfPdSePVI_fuAX-u2abWHoX3JjQuEshOw,234 +onnx/backend/test/data/node/test_convtranspose_3d/test_data_set_0/output_0.pb,sha256=Ko5haeAPyeETXCurwHh_K-3n6hM4JTBkT8074b05hGc,1698 +onnx/backend/test/data/node/test_convtranspose_autopad_same/model.onnx,sha256=qrFgpAHwcIcDD9fIeoAmBxlx94VE1YoUizBdZShRVlA,216 +onnx/backend/test/data/node/test_convtranspose_autopad_same/test_data_set_0/input_0.pb,sha256=dBjaBXzMfFIovF45VFTSKa1SzvxiDU2cnrAzM3tOY7M,51 +onnx/backend/test/data/node/test_convtranspose_autopad_same/test_data_set_0/input_1.pb,sha256=QtNDpr_tblKea9uJw7Ndo2VwZsE_YR0zGs3GxkA2F1M,87 +onnx/backend/test/data/node/test_convtranspose_autopad_same/test_data_set_0/output_0.pb,sha256=mz9atUy2Vn7Nr6d5zIB8psRUkJh1hDQTt__CSBYXiA4,304 +onnx/backend/test/data/node/test_convtranspose_dilations/model.onnx,sha256=jArWIBpWosKKV7zBNP0AwM4KuouRt4M4oLn5FDiF78Q,188 +onnx/backend/test/data/node/test_convtranspose_dilations/test_data_set_0/input_0.pb,sha256=bzWXnz5LvVN9kq3fKc3EYBxkwZ6KmN1FMR5y-0duD9E,51 +onnx/backend/test/data/node/test_convtranspose_dilations/test_data_set_0/input_1.pb,sha256=4MwrDO_hTzR2PV0ke-Q-ESpzROSmpcoPTLu20GNZ2oc,31 +onnx/backend/test/data/node/test_convtranspose_dilations/test_data_set_0/output_0.pb,sha256=SU_6_ceIvxcyEkY5cIaxfpxFOO7o08pElgWQJXtUQao,115 +onnx/backend/test/data/node/test_convtranspose_kernel_shape/model.onnx,sha256=I8QXD3tFjk3p1FCCnIQjCgknxaZOVLkoECrSI-qFmlU,266 +onnx/backend/test/data/node/test_convtranspose_kernel_shape/test_data_set_0/input_0.pb,sha256=dBjaBXzMfFIovF45VFTSKa1SzvxiDU2cnrAzM3tOY7M,51 +onnx/backend/test/data/node/test_convtranspose_kernel_shape/test_data_set_0/input_1.pb,sha256=QtNDpr_tblKea9uJw7Ndo2VwZsE_YR0zGs3GxkA2F1M,87 +onnx/backend/test/data/node/test_convtranspose_kernel_shape/test_data_set_0/output_0.pb,sha256=cNpj1cm-VSGhzBRQ9cssIsBnwZsZIiYwXeSfSRj7hM4,656 +onnx/backend/test/data/node/test_convtranspose_output_shape/model.onnx,sha256=7BkUxaL84IIZrqG2MChkLp_TWBjcPpbhlVhYFH3ireE,212 +onnx/backend/test/data/node/test_convtranspose_output_shape/test_data_set_0/input_0.pb,sha256=dBjaBXzMfFIovF45VFTSKa1SzvxiDU2cnrAzM3tOY7M,51 +onnx/backend/test/data/node/test_convtranspose_output_shape/test_data_set_0/input_1.pb,sha256=QtNDpr_tblKea9uJw7Ndo2VwZsE_YR0zGs3GxkA2F1M,87 +onnx/backend/test/data/node/test_convtranspose_output_shape/test_data_set_0/output_0.pb,sha256=cNpj1cm-VSGhzBRQ9cssIsBnwZsZIiYwXeSfSRj7hM4,656 +onnx/backend/test/data/node/test_convtranspose_pad/model.onnx,sha256=t_OVRUoCr-fcAkOOL7GCwp6v5Jx4XXVDmcrw0vlxFmU,205 +onnx/backend/test/data/node/test_convtranspose_pad/test_data_set_0/input_0.pb,sha256=dBjaBXzMfFIovF45VFTSKa1SzvxiDU2cnrAzM3tOY7M,51 +onnx/backend/test/data/node/test_convtranspose_pad/test_data_set_0/input_1.pb,sha256=QtNDpr_tblKea9uJw7Ndo2VwZsE_YR0zGs3GxkA2F1M,87 
+onnx/backend/test/data/node/test_convtranspose_pad/test_data_set_0/output_0.pb,sha256=cNpj1cm-VSGhzBRQ9cssIsBnwZsZIiYwXeSfSRj7hM4,656 +onnx/backend/test/data/node/test_convtranspose_pads/model.onnx,sha256=O9N7MfV1Cqc-i1y3jrpHzhQ4z4ybm7xhvaYk_3kHaoE,200 +onnx/backend/test/data/node/test_convtranspose_pads/test_data_set_0/input_0.pb,sha256=dBjaBXzMfFIovF45VFTSKa1SzvxiDU2cnrAzM3tOY7M,51 +onnx/backend/test/data/node/test_convtranspose_pads/test_data_set_0/input_1.pb,sha256=QtNDpr_tblKea9uJw7Ndo2VwZsE_YR0zGs3GxkA2F1M,87 +onnx/backend/test/data/node/test_convtranspose_pads/test_data_set_0/output_0.pb,sha256=shm1OKRrwwW7T6skwkeDpmN7KA1v3qUgB75cH9Dl8qc,184 +onnx/backend/test/data/node/test_cos/model.onnx,sha256=JqQPABFwNetJ8k3TM1FfhaFBZFKxBM9-dMU185AcfDg,97 +onnx/backend/test/data/node/test_cos/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_cos/test_data_set_0/output_0.pb,sha256=xwExE7n32GQjc8I4MyAWZBOXoLeSnXllZfK9b67amvY,254 +onnx/backend/test/data/node/test_cos_example/model.onnx,sha256=FUqkvGLPtpq9uZdbTqecqfuJY4FWt3uEs_Ta_qIiZ58,89 +onnx/backend/test/data/node/test_cos_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_cos_example/test_data_set_0/output_0.pb,sha256=F4c54AgTxALn0fDkECFlprJnqpteSyq28Lwua7w37OY,21 +onnx/backend/test/data/node/test_cosh/model.onnx,sha256=N1IXHy8bV58Y7jmdMESPpE5GfVdMyKU2_VGN1KEEAJk,99 +onnx/backend/test/data/node/test_cosh/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_cosh/test_data_set_0/output_0.pb,sha256=8h80ytvhxyChC29TOj0-9v_A_nrl7dVcziTok0F2Jj4,254 +onnx/backend/test/data/node/test_cosh_example/model.onnx,sha256=vt7YBSVolYtenXFTRSneZJFXVn1BAGRfYBT6h7--R1g,91 +onnx/backend/test/data/node/test_cosh_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_cosh_example/test_data_set_0/output_0.pb,sha256=TV08IsvyBPUn6ebOi3s-f3qrKERxV3-4zstkXypQRKQ,21 +onnx/backend/test/data/node/test_cumsum_1d/model.onnx,sha256=a0-USQspl-Y7FCLaopAcOcXl8YJMSHZdoJVuiabnwxc,112 +onnx/backend/test/data/node/test_cumsum_1d/test_data_set_0/input_0.pb,sha256=i6b0KGwNGodlhQS6I1bvaD0TN1xGWnLEfwEfYQp_T7o,49 +onnx/backend/test/data/node/test_cumsum_1d/test_data_set_0/input_1.pb,sha256=c4GeiTmXkiID1JY-ipXQzRl59ZLUi_imaNYP8eLM1d4,14 +onnx/backend/test/data/node/test_cumsum_1d/test_data_set_0/output_0.pb,sha256=8u6a-CH_s8CSSeDu2Xkey9IkVrCxfGeK1zruGN-GBZ0,49 +onnx/backend/test/data/node/test_cumsum_1d_exclusive/model.onnx,sha256=8roorRSX_UmgltqXKhlqq700efdCxQvgbtUQp_pXdBk,140 +onnx/backend/test/data/node/test_cumsum_1d_exclusive/test_data_set_0/input_0.pb,sha256=i6b0KGwNGodlhQS6I1bvaD0TN1xGWnLEfwEfYQp_T7o,49 +onnx/backend/test/data/node/test_cumsum_1d_exclusive/test_data_set_0/input_1.pb,sha256=c4GeiTmXkiID1JY-ipXQzRl59ZLUi_imaNYP8eLM1d4,14 +onnx/backend/test/data/node/test_cumsum_1d_exclusive/test_data_set_0/output_0.pb,sha256=KxZYZON3hTdtJo13FySOB4-9VcLllUPVPRiYl6J5jKc,49 +onnx/backend/test/data/node/test_cumsum_1d_reverse/model.onnx,sha256=GJVic393uVQwWZIITwyUiXJEdeE31PCfaREaa0V47rs,136 +onnx/backend/test/data/node/test_cumsum_1d_reverse/test_data_set_0/input_0.pb,sha256=i6b0KGwNGodlhQS6I1bvaD0TN1xGWnLEfwEfYQp_T7o,49 +onnx/backend/test/data/node/test_cumsum_1d_reverse/test_data_set_0/input_1.pb,sha256=c4GeiTmXkiID1JY-ipXQzRl59ZLUi_imaNYP8eLM1d4,14 
+onnx/backend/test/data/node/test_cumsum_1d_reverse/test_data_set_0/output_0.pb,sha256=Qx9JC5x68xbCSGlmcVAfxcPWBfRq4XWTKX9vcU4v7lM,49 +onnx/backend/test/data/node/test_cumsum_1d_reverse_exclusive/model.onnx,sha256=Hx4GBa3gVcU6OeG4waFSxEAa7r_VvPZDa4C4HLv6Bno,165 +onnx/backend/test/data/node/test_cumsum_1d_reverse_exclusive/test_data_set_0/input_0.pb,sha256=i6b0KGwNGodlhQS6I1bvaD0TN1xGWnLEfwEfYQp_T7o,49 +onnx/backend/test/data/node/test_cumsum_1d_reverse_exclusive/test_data_set_0/input_1.pb,sha256=c4GeiTmXkiID1JY-ipXQzRl59ZLUi_imaNYP8eLM1d4,14 +onnx/backend/test/data/node/test_cumsum_1d_reverse_exclusive/test_data_set_0/output_0.pb,sha256=WQ4WtDbRG6kYBvqYrMr2Tqz1nyUugHKzXw6_kIKreGU,49 +onnx/backend/test/data/node/test_cumsum_2d_axis_0/model.onnx,sha256=RKckuBZhpqglL2dviFPjshMDeazp7lc4RovX29mqtpw,127 +onnx/backend/test/data/node/test_cumsum_2d_axis_0/test_data_set_0/input_0.pb,sha256=HYbXouTLeBCL2-7g5q5Mc7Fbd-enOZs4KSp57C0c_X4,59 +onnx/backend/test/data/node/test_cumsum_2d_axis_0/test_data_set_0/input_1.pb,sha256=c4GeiTmXkiID1JY-ipXQzRl59ZLUi_imaNYP8eLM1d4,14 +onnx/backend/test/data/node/test_cumsum_2d_axis_0/test_data_set_0/output_0.pb,sha256=uIQ9aM_YHV83ShrV9fZREjUB_1AzsmlyFmksuyGpvQo,59 +onnx/backend/test/data/node/test_cumsum_2d_axis_1/model.onnx,sha256=kLauNIYFM7TmL27su7h41eon9EoCo_hIDgFo2prXd3Y,127 +onnx/backend/test/data/node/test_cumsum_2d_axis_1/test_data_set_0/input_0.pb,sha256=HYbXouTLeBCL2-7g5q5Mc7Fbd-enOZs4KSp57C0c_X4,59 +onnx/backend/test/data/node/test_cumsum_2d_axis_1/test_data_set_0/input_1.pb,sha256=NMmvV9Gd72jAqm4CVKvPfg5tsoTU35TpnJO8I3QgmOM,14 +onnx/backend/test/data/node/test_cumsum_2d_axis_1/test_data_set_0/output_0.pb,sha256=fjOhNqyyw_q9RM3Q_tHH-zo5LyuiIIPlnwg_ikn-G1o,59 +onnx/backend/test/data/node/test_cumsum_2d_negative_axis/model.onnx,sha256=PzI6naK8X5kO2jZohlXfYB8Y8Iu7veFMLxVIGThBJ0Q,134 +onnx/backend/test/data/node/test_cumsum_2d_negative_axis/test_data_set_0/input_0.pb,sha256=HYbXouTLeBCL2-7g5q5Mc7Fbd-enOZs4KSp57C0c_X4,59 +onnx/backend/test/data/node/test_cumsum_2d_negative_axis/test_data_set_0/input_1.pb,sha256=VFp0AgVnZO0DGSm4jbAffIBQgezMSIif_MjDWojRRFQ,14 +onnx/backend/test/data/node/test_cumsum_2d_negative_axis/test_data_set_0/output_0.pb,sha256=fjOhNqyyw_q9RM3Q_tHH-zo5LyuiIIPlnwg_ikn-G1o,59 +onnx/backend/test/data/node/test_deform_conv_with_mask_bias/model.onnx,sha256=mk3rYBKtm10EG-6qGMY8LaQCBQWRUOe5ez7KSuakdQg,310 +onnx/backend/test/data/node/test_deform_conv_with_mask_bias/test_data_set_0/input_0.pb,sha256=dBjaBXzMfFIovF45VFTSKa1SzvxiDU2cnrAzM3tOY7M,51 +onnx/backend/test/data/node/test_deform_conv_with_mask_bias/test_data_set_0/input_1.pb,sha256=4Ki_u3n_9JzedrVVClMoOCDVY7aP6BJSo_YgoX_n3EM,31 +onnx/backend/test/data/node/test_deform_conv_with_mask_bias/test_data_set_0/input_2.pb,sha256=EhURFxsdfyOamsaS1dDwOw3rNpUzQFR_rV_PiCUXGgQ,149 +onnx/backend/test/data/node/test_deform_conv_with_mask_bias/test_data_set_0/input_3.pb,sha256=bWxbblK-zibXhuqJnlYY76Q-CMwp2JIBxYU-OSpIJlg,13 +onnx/backend/test/data/node/test_deform_conv_with_mask_bias/test_data_set_0/input_4.pb,sha256=aAzeB402zdR1RgnF4bwrRmCAJqeefsffEwh6adi3hFE,82 +onnx/backend/test/data/node/test_deform_conv_with_mask_bias/test_data_set_0/output_0.pb,sha256=QDXvxD5as96MNzcX4VyrYjfyUVh70ERSgZWsbhUYAXc,31 +onnx/backend/test/data/node/test_deform_conv_with_multiple_offset_groups/model.onnx,sha256=1L6a9siQ63csRt6V8CzYe_5WziCUFIkzsYmv04O6xzA,286 +onnx/backend/test/data/node/test_deform_conv_with_multiple_offset_groups/test_data_set_0/input_0.pb,sha256=mz70lzdE3dV9eCVSeVw1IPD0yxzuvQww2K-IygMsQ8U,87 
+onnx/backend/test/data/node/test_deform_conv_with_multiple_offset_groups/test_data_set_0/input_1.pb,sha256=owMofin1vCKYHIHkslh4bPVinD0BZTgNyltN7xiGjjY,47 +onnx/backend/test/data/node/test_deform_conv_with_multiple_offset_groups/test_data_set_0/input_2.pb,sha256=GZrP3XBGQvMlDDHBU10iOG6UcGOI8RsMTWFJPM1XXhc,277 +onnx/backend/test/data/node/test_deform_conv_with_multiple_offset_groups/test_data_set_0/output_0.pb,sha256=1GKwquiq34PG-gdNzkcEMTVkLLXkM1fDY3jlkYLS1do,31 +onnx/backend/test/data/node/test_depthtospace_crd_mode_example/model.onnx,sha256=NKPEJg_MCvJ1JpqkMeaVOv8i6mDopHCUnypY29SNBJc,175 +onnx/backend/test/data/node/test_depthtospace_crd_mode_example/test_data_set_0/input_0.pb,sha256=RgEVxMZP6lyZL5iCrvplZ59lKVjVOqOK6HbeXaJvtBg,208 +onnx/backend/test/data/node/test_depthtospace_crd_mode_example/test_data_set_0/output_0.pb,sha256=jcnCB1p-6U2LnnzjM5JZTSv-wLkadP8R8T7xDtBZf2M,208 +onnx/backend/test/data/node/test_depthtospace_example/model.onnx,sha256=aMvte-8S8rfH8edeajpPdRSDpxRnhjv9Pvf_KOwbFdk,166 +onnx/backend/test/data/node/test_depthtospace_example/test_data_set_0/input_0.pb,sha256=RgEVxMZP6lyZL5iCrvplZ59lKVjVOqOK6HbeXaJvtBg,208 +onnx/backend/test/data/node/test_depthtospace_example/test_data_set_0/output_0.pb,sha256=XZsnoNSiBEpeUw5duLl02ij6y-CLa75VFwa6fK9CW-U,208 +onnx/backend/test/data/node/test_dequantizelinear/model.onnx,sha256=L96RsRIO-YksGoGaJkWp16FPC6zHwrr2zqrlrTSIOn0,174 +onnx/backend/test/data/node/test_dequantizelinear/test_data_set_0/input_0.pb,sha256=EmysV2w6e9A4gkikFxjf-h5DEDE-xLzeoN7oYeCQE04,13 +onnx/backend/test/data/node/test_dequantizelinear/test_data_set_0/input_1.pb,sha256=qN7egCoW3Tix8QKnMxT243blKL-WqkjKisULKpcdBdY,17 +onnx/backend/test/data/node/test_dequantizelinear/test_data_set_0/input_2.pb,sha256=lFnABk3b12PAd-w6LAhH1aQBJ4jE7GVjHeT6rBlLPHQ,19 +onnx/backend/test/data/node/test_dequantizelinear/test_data_set_0/output_0.pb,sha256=PY_Zk5COHTEuddMpyFjKHFk5E6u4xQQEXMiAU2kHfd8,25 +onnx/backend/test/data/node/test_dequantizelinear_axis/model.onnx,sha256=CFG3KuEIctiDEJJOQNQViBDDZ2bKrxMeT-aH9cohjTU,211 +onnx/backend/test/data/node/test_dequantizelinear_axis/test_data_set_0/input_0.pb,sha256=F1clPA2jDiRJvIkJOrVW4-uWrMCe3ci36ERppwWeFr0,33 +onnx/backend/test/data/node/test_dequantizelinear_axis/test_data_set_0/input_1.pb,sha256=8153V3qlOWYhYmsbcUqVSI4UZKUMFAC9iONtVWruuDI,27 +onnx/backend/test/data/node/test_dequantizelinear_axis/test_data_set_0/input_2.pb,sha256=AX9gtY0YGg4meweG0-gBYu8fbZQMaFj-4ePD0AVE23U,23 +onnx/backend/test/data/node/test_dequantizelinear_axis/test_data_set_0/output_0.pb,sha256=clTyPPD3CizDRWpK3EKw1PsOobgue1JHzKmFMlVf8XA,87 +onnx/backend/test/data/node/test_dequantizelinear_blocked/model.onnx,sha256=XbM9Bd1emBJZsDUa3sF9I3fMYUJ-ZjOthiVR-v0epHk,270 +onnx/backend/test/data/node/test_dequantizelinear_blocked/test_data_set_0/input_0.pb,sha256=ZxzPl8EUGTeLzCv52dlK4HFE-3oOw00s84dsg2InQYk,39 +onnx/backend/test/data/node/test_dequantizelinear_blocked/test_data_set_0/input_1.pb,sha256=NYzPvsHJckD9ggSfHu3zCfSuydIK4yWlJ4G9yaO0Tzs,69 +onnx/backend/test/data/node/test_dequantizelinear_blocked/test_data_set_0/input_2.pb,sha256=mo5zdy6QWdX0GdSEWMTVpD4XzPbTC51Z-AklKdIcfmw,38 +onnx/backend/test/data/node/test_dequantizelinear_blocked/test_data_set_0/output_0.pb,sha256=aBoBdmpRQFaYR0_h7OkQmHQmgr4-A7HoNa28QLdbvW0,111 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn/model.onnx,sha256=cbnWr-YWZIifAfDJJiX_9qJHz6ti1tTf12UOqTg4OHo,156 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn/test_data_set_0/input_0.pb,sha256=ouNewVo-woBhiPhPqys2tzIXPZTj1jRxBMX8D4ENTOY,15 
+onnx/backend/test/data/node/test_dequantizelinear_e4m3fn/test_data_set_0/input_1.pb,sha256=qN7egCoW3Tix8QKnMxT243blKL-WqkjKisULKpcdBdY,17 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn/test_data_set_0/output_0.pb,sha256=HoScRA1An9eM-pF3W0B6jeMjkwVrEBJSFFqrSI8JPgY,29 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn_float16/model.onnx,sha256=jClXu4TGlTCT-rx34XewJ62-vBuFd7hS3tNh_zJRJc0,164 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn_float16/test_data_set_0/input_0.pb,sha256=ouNewVo-woBhiPhPqys2tzIXPZTj1jRxBMX8D4ENTOY,15 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn_float16/test_data_set_0/input_1.pb,sha256=60dxb5sNyankjSpTaguNhgOa3MiKy1KQMQNSDbmOr6Q,15 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn_float16/test_data_set_0/output_0.pb,sha256=Vri-LRqOSx9Zy_Sh00jBhW36uqkowDRmKj75DDCxvp0,19 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn_zero_point/model.onnx,sha256=2HgM2AHlHDD8ngDS88wQBV6nkSiSbTV4DbryZ2YCgXM,205 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn_zero_point/test_data_set_0/input_0.pb,sha256=ouNewVo-woBhiPhPqys2tzIXPZTj1jRxBMX8D4ENTOY,15 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn_zero_point/test_data_set_0/input_1.pb,sha256=qN7egCoW3Tix8QKnMxT243blKL-WqkjKisULKpcdBdY,17 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn_zero_point/test_data_set_0/input_2.pb,sha256=3qDZIcWHdcD6sR2bi0d9zxwFMbgB-8xVA8QC78gP0h4,19 +onnx/backend/test/data/node/test_dequantizelinear_e4m3fn_zero_point/test_data_set_0/output_0.pb,sha256=HoScRA1An9eM-pF3W0B6jeMjkwVrEBJSFFqrSI8JPgY,29 +onnx/backend/test/data/node/test_dequantizelinear_e5m2/model.onnx,sha256=DC2gj23QXwNtHa4JUdsdofG969BuYkFO7Cfy5X1MZk8,154 +onnx/backend/test/data/node/test_dequantizelinear_e5m2/test_data_set_0/input_0.pb,sha256=yw3eIa6_XeFddym2QAP7S_IfKNNy-bUYwPABatQogxU,15 +onnx/backend/test/data/node/test_dequantizelinear_e5m2/test_data_set_0/input_1.pb,sha256=qN7egCoW3Tix8QKnMxT243blKL-WqkjKisULKpcdBdY,17 +onnx/backend/test/data/node/test_dequantizelinear_e5m2/test_data_set_0/output_0.pb,sha256=rowj1Mm8ky6Puzl1Gvwlb8okeNXIqZKLe_Y3L6K-xyY,29 +onnx/backend/test/data/node/test_dequantizelinear_int16/model.onnx,sha256=O2H29plZ51ekIcWkbPCIZlFa3fjtGLWpBMJEHTThWcQ,180 +onnx/backend/test/data/node/test_dequantizelinear_int16/test_data_set_0/input_0.pb,sha256=5kwTISfoYEY-gS6vbfkx0YG9bb_QT9eLgZ2nWJgUSks,17 +onnx/backend/test/data/node/test_dequantizelinear_int16/test_data_set_0/input_1.pb,sha256=qN7egCoW3Tix8QKnMxT243blKL-WqkjKisULKpcdBdY,17 +onnx/backend/test/data/node/test_dequantizelinear_int16/test_data_set_0/input_2.pb,sha256=aA9bTmWOD6-LSfDuU18yhKzxxLN8k7baAz0o2RU2Tjk,20 +onnx/backend/test/data/node/test_dequantizelinear_int16/test_data_set_0/output_0.pb,sha256=4CMGzWoZPL0Pe6ra48nb6IyJcJ5pPIn8egmX-CisQr0,25 +onnx/backend/test/data/node/test_dequantizelinear_int4/model.onnx,sha256=9LK35NYKjHLC6JmUJVZSXxI7vCSfoVOyZvVCxLwu4L8,196 +onnx/backend/test/data/node/test_dequantizelinear_int4/test_data_set_0/input_0.pb,sha256=8w_hl7JvcDysb07OiHDWHUWGW-hOCSWr8h5wywBEbS0,21 +onnx/backend/test/data/node/test_dequantizelinear_int4/test_data_set_0/input_1.pb,sha256=qN7egCoW3Tix8QKnMxT243blKL-WqkjKisULKpcdBdY,17 +onnx/backend/test/data/node/test_dequantizelinear_int4/test_data_set_0/input_2.pb,sha256=GxHG9IjbNdY0hTEZRYeNssvlZnFbX4ouLHJww6ODDzg,21 +onnx/backend/test/data/node/test_dequantizelinear_int4/test_data_set_0/output_0.pb,sha256=AB-QMRL0DZclEXLlbWVi-ti0NMUhxGgqeAKzRFgL-rM,29 
+onnx/backend/test/data/node/test_dequantizelinear_uint16/model.onnx,sha256=z3gpBXrtiijpDEjhgO8EFJWJR4_EZBJ8ei1dpUPMca0,181 +onnx/backend/test/data/node/test_dequantizelinear_uint16/test_data_set_0/input_0.pb,sha256=lXtHv2NCkXFokLmUC4XcHZ0KObiQXiRreUIK-COCeLU,17 +onnx/backend/test/data/node/test_dequantizelinear_uint16/test_data_set_0/input_1.pb,sha256=qN7egCoW3Tix8QKnMxT243blKL-WqkjKisULKpcdBdY,17 +onnx/backend/test/data/node/test_dequantizelinear_uint16/test_data_set_0/input_2.pb,sha256=YQZmuOZhu1-ia2hm8uiNP_tZt4UzyU2zqiQqwD1rXTQ,20 +onnx/backend/test/data/node/test_dequantizelinear_uint16/test_data_set_0/output_0.pb,sha256=1DaudDf_dFSb6ER64wWqnX0fuF09V-qLVWFygx4LlMM,25 +onnx/backend/test/data/node/test_dequantizelinear_uint4/model.onnx,sha256=doOZg8lFykcn-ZiGUM7t9eNIe8AXSPsFvhE_q9pNKmw,197 +onnx/backend/test/data/node/test_dequantizelinear_uint4/test_data_set_0/input_0.pb,sha256=DujJO55o1k-7hYjfqgLjHUHvFWS9ME3xYq410pSWAPk,13 +onnx/backend/test/data/node/test_dequantizelinear_uint4/test_data_set_0/input_1.pb,sha256=qN7egCoW3Tix8QKnMxT243blKL-WqkjKisULKpcdBdY,17 +onnx/backend/test/data/node/test_dequantizelinear_uint4/test_data_set_0/input_2.pb,sha256=Uy6DIdWiaShjYZoqvhZytFOW-nrnkq_jDxhSya5GM1Q,21 +onnx/backend/test/data/node/test_dequantizelinear_uint4/test_data_set_0/output_0.pb,sha256=xQMWX_7whFxNBSm64slp3ncuglAEdprKtaQD0zkjTqI,29 +onnx/backend/test/data/node/test_det_2d/model.onnx,sha256=OW5mSOi7fO6hao91GdGbPZF9JKegWWmf7ZkgSHmS4vM,84 +onnx/backend/test/data/node/test_det_2d/test_data_set_0/input_0.pb,sha256=0phUVbSY99aYNDvbb4I7ZwTF4_oqB7I9Teh_3LZTlfk,27 +onnx/backend/test/data/node/test_det_2d/test_data_set_0/output_0.pb,sha256=b25eApoGtGI7BEKuAIjKZCC5IY5G7F_xvMlh9QxE7Og,11 +onnx/backend/test/data/node/test_det_nd/model.onnx,sha256=3IqPBQkthO88FwEuPKYbVpNE9Q_fDbVtXLkI03BFrIg,92 +onnx/backend/test/data/node/test_det_nd/test_data_set_0/input_0.pb,sha256=8YluWZgsAsrlB4CYnJbRuf7gscfo9QKQNTqVdfYttvE,61 +onnx/backend/test/data/node/test_det_nd/test_data_set_0/output_0.pb,sha256=4i7Hs35s1K_MXnQ7ljAM-dfHG1YeQVsSls_jDMSmxWM,21 +onnx/backend/test/data/node/test_dft/model.onnx,sha256=OC6v3RTWTdS5XFaZLtKboBg60uglHMkzqrGkcynDPQI,129 +onnx/backend/test/data/node/test_dft/test_data_set_0/input_0.pb,sha256=BIHO98lzVkUyYknuhq6si9MRulBqSgXJNCwPwn7ZzUA,416 +onnx/backend/test/data/node/test_dft/test_data_set_0/input_1.pb,sha256=9FhB30n-ceweWvV552dURW3S3pEzQl90sx9pYsJ2W_A,18 +onnx/backend/test/data/node/test_dft/test_data_set_0/output_0.pb,sha256=FIC9_IUQkvqOO3xkJXw7X5pYOXf6MV_hYPccpDfuUtA,816 +onnx/backend/test/data/node/test_dft_axis/model.onnx,sha256=CSEAyMihmvsyCVdRYs9_bCr_sBrREnWkj6coDhVhKKE,134 +onnx/backend/test/data/node/test_dft_axis/test_data_set_0/input_0.pb,sha256=BIHO98lzVkUyYknuhq6si9MRulBqSgXJNCwPwn7ZzUA,416 +onnx/backend/test/data/node/test_dft_axis/test_data_set_0/input_1.pb,sha256=_08JRy_4qCwnWW9gzVVqB_WYpllZrQZTWfv4xG3HQ3k,18 +onnx/backend/test/data/node/test_dft_axis/test_data_set_0/output_0.pb,sha256=HDPqr6hb2bvmQljkRaFU670JrBKdKHGBQDQHONjjr8Y,816 +onnx/backend/test/data/node/test_dft_axis_opset19/model.onnx,sha256=E-3UD4-UxE1w1WHlKsCkc8T__PsmV07nOQrd3Gkhi1M,131 +onnx/backend/test/data/node/test_dft_axis_opset19/test_data_set_0/input_0.pb,sha256=BIHO98lzVkUyYknuhq6si9MRulBqSgXJNCwPwn7ZzUA,416 +onnx/backend/test/data/node/test_dft_axis_opset19/test_data_set_0/output_0.pb,sha256=HDPqr6hb2bvmQljkRaFU670JrBKdKHGBQDQHONjjr8Y,816 +onnx/backend/test/data/node/test_dft_inverse/model.onnx,sha256=hwvNFltz3x9Zkrk4j-Y-vjVp1WPTHFH364LBTtqFa8s,154 
+onnx/backend/test/data/node/test_dft_inverse/test_data_set_0/input_0.pb,sha256=7PtwIwLGEO0OFI_rXNZqOmtnlBQExbdZcxeeOFm4aug,816 +onnx/backend/test/data/node/test_dft_inverse/test_data_set_0/input_1.pb,sha256=9FhB30n-ceweWvV552dURW3S3pEzQl90sx9pYsJ2W_A,18 +onnx/backend/test/data/node/test_dft_inverse/test_data_set_0/output_0.pb,sha256=tKtsuSQuh-ZK-2V8d9lDhjcIq6KtEVGaJmoGT_TXxQc,816 +onnx/backend/test/data/node/test_dft_inverse_opset19/model.onnx,sha256=H4smfc3KYTCDYTcvLjTJTAHl_YExtNaAyXYmR1ip5cw,150 +onnx/backend/test/data/node/test_dft_inverse_opset19/test_data_set_0/input_0.pb,sha256=7PtwIwLGEO0OFI_rXNZqOmtnlBQExbdZcxeeOFm4aug,816 +onnx/backend/test/data/node/test_dft_inverse_opset19/test_data_set_0/output_0.pb,sha256=tKtsuSQuh-ZK-2V8d9lDhjcIq6KtEVGaJmoGT_TXxQc,816 +onnx/backend/test/data/node/test_dft_opset19/model.onnx,sha256=8Ifb5vDjFlWzj_I7Om2zdm5hnfFfb5HlZz6c2vm0uRc,126 +onnx/backend/test/data/node/test_dft_opset19/test_data_set_0/input_0.pb,sha256=BIHO98lzVkUyYknuhq6si9MRulBqSgXJNCwPwn7ZzUA,416 +onnx/backend/test/data/node/test_dft_opset19/test_data_set_0/output_0.pb,sha256=FIC9_IUQkvqOO3xkJXw7X5pYOXf6MV_hYPccpDfuUtA,816 +onnx/backend/test/data/node/test_div/model.onnx,sha256=BC7Wj-nywr5jTxV_jbCcDkgEkPD_yoQ7KQRk5zTinKs,125 +onnx/backend/test/data/node/test_div/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_div/test_data_set_0/input_1.pb,sha256=EdnSdDX16HmReb_YAemagUKjdzJfVyWKaqThtG0XUdo,254 +onnx/backend/test/data/node/test_div/test_data_set_0/output_0.pb,sha256=oDeGZ2kWqr-qxEt70OvpI9yKHxxk-U0_Fd5toUxgNO0,254 +onnx/backend/test/data/node/test_div_bcast/model.onnx,sha256=m0B1sh2gk5LB6GaJ3ncV0reTkUkWdxBk3TafRd8MPW8,123 +onnx/backend/test/data/node/test_div_bcast/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_div_bcast/test_data_set_0/input_1.pb,sha256=MqH-1RsmHArvXfx7K35Qp2E8yr83igIRnQTj67suDHA,29 +onnx/backend/test/data/node/test_div_bcast/test_data_set_0/output_0.pb,sha256=2yycZqpnMyKEloUowJhkwXjK-IE9yPRd1T7TWPBxvvk,254 +onnx/backend/test/data/node/test_div_example/model.onnx,sha256=-V4U6W3JAnNtGcoXj-90cNrefeLcd1WzODbJxSLNCLo,109 +onnx/backend/test/data/node/test_div_example/test_data_set_0/input_0.pb,sha256=5m2PEEsKXQbJNrLR6JC_nvLRZCEVZ89U3jSEZ7Zcn2g,17 +onnx/backend/test/data/node/test_div_example/test_data_set_0/input_1.pb,sha256=UpYzYhBYwWnWsKo320h6NtTBuKOsfvvObUYElou6U1Y,17 +onnx/backend/test/data/node/test_div_example/test_data_set_0/output_0.pb,sha256=F-A7vcHWnvj2qwZVhcZLvB4sACk6jmYJEidt7RNhvcE,17 +onnx/backend/test/data/node/test_div_uint8/model.onnx,sha256=73xGoanAmgvFoxnmeY70cMDJMj_tiw_G2UXfL1xHP3Q,131 +onnx/backend/test/data/node/test_div_uint8/test_data_set_0/input_0.pb,sha256=o1qOEpD4OnCp_Ziad-Zczbkof8qMJSVo_uGta8KkmYg,73 +onnx/backend/test/data/node/test_div_uint8/test_data_set_0/input_1.pb,sha256=00CeSgyLXpakzaeaUlt-h0hU3j3N0Lc1Vt0LPjvqa1Q,73 +onnx/backend/test/data/node/test_div_uint8/test_data_set_0/output_0.pb,sha256=Bh8cQ09M3qXcpCqGWHhkvbw0L_aYN2JcPN1UVDo-ZV4,73 +onnx/backend/test/data/node/test_dropout_default/model.onnx,sha256=c512IB8jo0CP0zbx_CJu7shhixogAi6OttlsCDO_KpM,126 +onnx/backend/test/data/node/test_dropout_default/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_dropout_default/test_data_set_0/output_0.pb,sha256=4E7JZEL-6RPD2ZoGlbe4UZXnssgawDGOHg15V0RhX58,254 
+onnx/backend/test/data/node/test_dropout_default_mask/model.onnx,sha256=EtzWiXHC7M1GwwL0errd19mG5mSaIxdkob5OzzBWYyc,160 +onnx/backend/test/data/node/test_dropout_default_mask/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_dropout_default_mask/test_data_set_0/output_0.pb,sha256=4E7JZEL-6RPD2ZoGlbe4UZXnssgawDGOHg15V0RhX58,254 +onnx/backend/test/data/node/test_dropout_default_mask/test_data_set_0/output_1.pb,sha256=4sWzAHgaDiZSCGEr949BKLl9dNfAyqyQpzL1ccqJJ2M,73 +onnx/backend/test/data/node/test_dropout_default_mask_ratio/model.onnx,sha256=xPZNDfgDNzJeTxiVHxuHQiVl1QbxqswOY3APQZxkALY,182 +onnx/backend/test/data/node/test_dropout_default_mask_ratio/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_dropout_default_mask_ratio/test_data_set_0/input_1.pb,sha256=9FZuwRdGL9lHbnD4w8PpG6fdJjuBpsU9Ei8UsUTXsgA,11 +onnx/backend/test/data/node/test_dropout_default_mask_ratio/test_data_set_0/output_0.pb,sha256=4E7JZEL-6RPD2ZoGlbe4UZXnssgawDGOHg15V0RhX58,254 +onnx/backend/test/data/node/test_dropout_default_mask_ratio/test_data_set_0/output_1.pb,sha256=4sWzAHgaDiZSCGEr949BKLl9dNfAyqyQpzL1ccqJJ2M,73 +onnx/backend/test/data/node/test_dropout_default_old/model.onnx,sha256=ukHf_-XR4gMOPC5xA7oRg6a5E0bdXsrkl7H6oScsLBE,101 +onnx/backend/test/data/node/test_dropout_default_old/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_dropout_default_old/test_data_set_0/output_0.pb,sha256=lezr2GRMo0JWUSPIQ-nlrz1YNhYT_XpoHnO0oq6y-ic,21 +onnx/backend/test/data/node/test_dropout_default_ratio/model.onnx,sha256=LFW31ngqrdSkn0eoF1qWXpkVVWwaJZID64p_UFiciKE,148 +onnx/backend/test/data/node/test_dropout_default_ratio/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_dropout_default_ratio/test_data_set_0/input_1.pb,sha256=9FZuwRdGL9lHbnD4w8PpG6fdJjuBpsU9Ei8UsUTXsgA,11 +onnx/backend/test/data/node/test_dropout_default_ratio/test_data_set_0/output_0.pb,sha256=4E7JZEL-6RPD2ZoGlbe4UZXnssgawDGOHg15V0RhX58,254 +onnx/backend/test/data/node/test_dropout_random_old/model.onnx,sha256=indJUl0_xdJkjHL8itDQnja4iDBaut3bkRryOv65Ajs,133 +onnx/backend/test/data/node/test_dropout_random_old/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_dropout_random_old/test_data_set_0/output_0.pb,sha256=4E7JZEL-6RPD2ZoGlbe4UZXnssgawDGOHg15V0RhX58,254 +onnx/backend/test/data/node/test_dynamicquantizelinear/model.onnx,sha256=I5Nk7KgdM4PiZcv5toOWiQZNASMP7H1-BVip_TM4zVM,184 +onnx/backend/test/data/node/test_dynamicquantizelinear/test_data_set_0/input_0.pb,sha256=GzpMBt69V1vDS1x0m43plOXtiv0KATlGTtFlf6wKT58,33 +onnx/backend/test/data/node/test_dynamicquantizelinear/test_data_set_0/output_0.pb,sha256=ry3_qJJ8NX3XhMicOdeNEia49jnRgxG2mYxsRuCNIqA,15 +onnx/backend/test/data/node/test_dynamicquantizelinear/test_data_set_0/output_1.pb,sha256=_qXCs5BSE3SUvlSSmCKQs09zGQDHtxBxiiZA6tT1pds,17 +onnx/backend/test/data/node/test_dynamicquantizelinear/test_data_set_0/output_2.pb,sha256=AlfPMoClolzBruTVYhCw3YMjy4RinXVFQpOJ4568epw,19 +onnx/backend/test/data/node/test_dynamicquantizelinear_expanded/model.onnx,sha256=y0RptQ5zEM33enw0M1lqkW8WMzTf1M1OU5CXFRV_shc,3145 +onnx/backend/test/data/node/test_dynamicquantizelinear_expanded/test_data_set_0/input_0.pb,sha256=GzpMBt69V1vDS1x0m43plOXtiv0KATlGTtFlf6wKT58,33 
+onnx/backend/test/data/node/test_dynamicquantizelinear_expanded/test_data_set_0/output_0.pb,sha256=ry3_qJJ8NX3XhMicOdeNEia49jnRgxG2mYxsRuCNIqA,15 +onnx/backend/test/data/node/test_dynamicquantizelinear_expanded/test_data_set_0/output_1.pb,sha256=_qXCs5BSE3SUvlSSmCKQs09zGQDHtxBxiiZA6tT1pds,17 +onnx/backend/test/data/node/test_dynamicquantizelinear_expanded/test_data_set_0/output_2.pb,sha256=AlfPMoClolzBruTVYhCw3YMjy4RinXVFQpOJ4568epw,19 +onnx/backend/test/data/node/test_dynamicquantizelinear_max_adjusted/model.onnx,sha256=sQDOYQy4OZq0Hh-EeM0hb5nEsMZaNAGnOzeDniTnIJY,197 +onnx/backend/test/data/node/test_dynamicquantizelinear_max_adjusted/test_data_set_0/input_0.pb,sha256=5Sw2guhx2Fx5oLhz2en7hvFuDC4Ch7l4rDCamVC0X94,33 +onnx/backend/test/data/node/test_dynamicquantizelinear_max_adjusted/test_data_set_0/output_0.pb,sha256=o7JvKwH0WX9r2TNocBXFjxns3RpyEmLMEPxlzT225VM,15 +onnx/backend/test/data/node/test_dynamicquantizelinear_max_adjusted/test_data_set_0/output_1.pb,sha256=pVez9xxtD7ltZc4oLTF3NrV30IFL9ogVC7eseamvAco,17 +onnx/backend/test/data/node/test_dynamicquantizelinear_max_adjusted/test_data_set_0/output_2.pb,sha256=Ew0CDXUBMf1O1PCpkeQ-lLgahqVnxe0ydyS5jeyNuCk,19 +onnx/backend/test/data/node/test_dynamicquantizelinear_max_adjusted_expanded/model.onnx,sha256=TehN-V9Yqegtvra-N4OTEq_TJ5LfgjOpihSxyogHuI8,3600 +onnx/backend/test/data/node/test_dynamicquantizelinear_max_adjusted_expanded/test_data_set_0/input_0.pb,sha256=5Sw2guhx2Fx5oLhz2en7hvFuDC4Ch7l4rDCamVC0X94,33 +onnx/backend/test/data/node/test_dynamicquantizelinear_max_adjusted_expanded/test_data_set_0/output_0.pb,sha256=o7JvKwH0WX9r2TNocBXFjxns3RpyEmLMEPxlzT225VM,15 +onnx/backend/test/data/node/test_dynamicquantizelinear_max_adjusted_expanded/test_data_set_0/output_1.pb,sha256=pVez9xxtD7ltZc4oLTF3NrV30IFL9ogVC7eseamvAco,17 +onnx/backend/test/data/node/test_dynamicquantizelinear_max_adjusted_expanded/test_data_set_0/output_2.pb,sha256=Ew0CDXUBMf1O1PCpkeQ-lLgahqVnxe0ydyS5jeyNuCk,19 +onnx/backend/test/data/node/test_dynamicquantizelinear_min_adjusted/model.onnx,sha256=my7WaurE-jr43TsIE5cuj87Sy7n28aDqgboGieSu0Rg,205 +onnx/backend/test/data/node/test_dynamicquantizelinear_min_adjusted/test_data_set_0/input_0.pb,sha256=8wCIWAmNy9W1cv2n1Cqyir5HR5RnEIoXCiO3nqkK_kU,59 +onnx/backend/test/data/node/test_dynamicquantizelinear_min_adjusted/test_data_set_0/output_0.pb,sha256=iwSG6hTgbf_TtcdovQLDsNtTlht754rvSfq5hg2SP-g,23 +onnx/backend/test/data/node/test_dynamicquantizelinear_min_adjusted/test_data_set_0/output_1.pb,sha256=pVez9xxtD7ltZc4oLTF3NrV30IFL9ogVC7eseamvAco,17 +onnx/backend/test/data/node/test_dynamicquantizelinear_min_adjusted/test_data_set_0/output_2.pb,sha256=L2h2DxO9c7IdJJkT8HsuYfIREZMgRoyy1cA8eeD-F6g,19 +onnx/backend/test/data/node/test_dynamicquantizelinear_min_adjusted_expanded/model.onnx,sha256=TVtsc5K3LrPgDkW648mwTQ7Oq73tVvfGy7INcf3gg0k,3608 +onnx/backend/test/data/node/test_dynamicquantizelinear_min_adjusted_expanded/test_data_set_0/input_0.pb,sha256=8wCIWAmNy9W1cv2n1Cqyir5HR5RnEIoXCiO3nqkK_kU,59 +onnx/backend/test/data/node/test_dynamicquantizelinear_min_adjusted_expanded/test_data_set_0/output_0.pb,sha256=iwSG6hTgbf_TtcdovQLDsNtTlht754rvSfq5hg2SP-g,23 +onnx/backend/test/data/node/test_dynamicquantizelinear_min_adjusted_expanded/test_data_set_0/output_1.pb,sha256=pVez9xxtD7ltZc4oLTF3NrV30IFL9ogVC7eseamvAco,17 +onnx/backend/test/data/node/test_dynamicquantizelinear_min_adjusted_expanded/test_data_set_0/output_2.pb,sha256=L2h2DxO9c7IdJJkT8HsuYfIREZMgRoyy1cA8eeD-F6g,19 
+onnx/backend/test/data/node/test_edge_pad/model.onnx,sha256=Yl8YxyLvwIycP4okywat3S8bNdNMZluO9fFieQ-UN84,154 +onnx/backend/test/data/node/test_edge_pad/test_data_set_0/input_0.pb,sha256=cOtF8vlVdG7QMT-RPMc2QVp7RFd7GBwL0kJdZpJtZMc,256 +onnx/backend/test/data/node/test_edge_pad/test_data_set_0/input_1.pb,sha256=8s4uRhNf19t1kZ-BWDxuitcnY9MeHgkG6w6i1Sghnz0,76 +onnx/backend/test/data/node/test_edge_pad/test_data_set_0/output_0.pb,sha256=Qv3dfFBzQiAv5a2UJMcD3Imge8rs9Lhddyi-ZA2vxXw,520 +onnx/backend/test/data/node/test_einsum_batch_diagonal/model.onnx,sha256=4zTeWtGLnr0OVE8J7llox-D3bgsWZlqPWZAYOTzVPSU,143 +onnx/backend/test/data/node/test_einsum_batch_diagonal/test_data_set_0/input_0.pb,sha256=sHXqijBwpgvbMjoAuwT6ee_BgNE5XxZNjbh9ezoAJ5A,614 +onnx/backend/test/data/node/test_einsum_batch_diagonal/test_data_set_0/output_0.pb,sha256=bW-C_Ix72YDKjjTC486yi59ehsLSuK6zTphLx60y15w,131 +onnx/backend/test/data/node/test_einsum_batch_matmul/model.onnx,sha256=JLgXzilgY10tl-yILQhGZI2n83SmH9sjBZYamf6_FOE,177 +onnx/backend/test/data/node/test_einsum_batch_matmul/test_data_set_0/input_0.pb,sha256=XA6uNuRQNZT9t697igygft7fS_BrWdyvbkDTJ1ycv9o,254 +onnx/backend/test/data/node/test_einsum_batch_matmul/test_data_set_0/input_1.pb,sha256=Rb-T5LOsdM55duFCcf7_kuymX-A1Fh1JItpbSdYdohQ,494 +onnx/backend/test/data/node/test_einsum_batch_matmul/test_data_set_0/output_0.pb,sha256=btxQVcOiJhZxHjzD9JlDtX7mMumNmOmQNf4XQwP_AnA,334 +onnx/backend/test/data/node/test_einsum_inner_prod/model.onnx,sha256=m0BWV7SrmniOu6XKl0_czSFiH5asoxj7aJtes0U6YPM,134 +onnx/backend/test/data/node/test_einsum_inner_prod/test_data_set_0/input_0.pb,sha256=6ALN9kajvwedycdrXcEwbCbOklCh2-IRC2ow--sBu6w,49 +onnx/backend/test/data/node/test_einsum_inner_prod/test_data_set_0/input_1.pb,sha256=3zewJx5xB7-0SZf2259wx2ILafmvfob_BJq1cOs2fdM,49 +onnx/backend/test/data/node/test_einsum_inner_prod/test_data_set_0/output_0.pb,sha256=MUB8WvKbPcte5zUbCNfBkkxdgJyMr-1_W3OnGT4dL9s,15 +onnx/backend/test/data/node/test_einsum_sum/model.onnx,sha256=QWE5X5J5jTX5g5X9_cNfJ_Wx54sighP2-RzVLmvIA9s,117 +onnx/backend/test/data/node/test_einsum_sum/test_data_set_0/input_0.pb,sha256=kaRz7Spymj6w1xeS_WNmXkA4FBwzOeDwZxGKrDqtMUM,107 +onnx/backend/test/data/node/test_einsum_sum/test_data_set_0/output_0.pb,sha256=mmH-SDsmj8Pcm24YqzLTL4nZ6CPuL6qZdB7UBTKmDTM,33 +onnx/backend/test/data/node/test_einsum_transpose/model.onnx,sha256=HmVAMMWlGNC3EwszWdTE3kVxq1mZYxlpAbLe5VJkpFM,128 +onnx/backend/test/data/node/test_einsum_transpose/test_data_set_0/input_0.pb,sha256=kaRz7Spymj6w1xeS_WNmXkA4FBwzOeDwZxGKrDqtMUM,107 +onnx/backend/test/data/node/test_einsum_transpose/test_data_set_0/output_0.pb,sha256=D8CgoMhtm2b7opWaiN6AUgqbOtpSxhagaUexmJr0-pE,107 +onnx/backend/test/data/node/test_elu/model.onnx,sha256=0XbXdjHnYZN-wl-d6J6EdWFr8JT4-EsTazhUfnmtLg0,114 +onnx/backend/test/data/node/test_elu/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_elu/test_data_set_0/output_0.pb,sha256=1qikqd90KnVBAo1LXaZuZ_plZqqyNCGPaXg7nndxHaQ,254 +onnx/backend/test/data/node/test_elu_default/model.onnx,sha256=XQTxlJDS6hzf-zznRbwP_zUULWOBUkogcPbheH7JmRs,105 +onnx/backend/test/data/node/test_elu_default/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_elu_default/test_data_set_0/output_0.pb,sha256=R10e2MFkJdT7LXU0o6fWwpUeMNIrnT9xSP45r1r6KIU,254 +onnx/backend/test/data/node/test_elu_default_expanded_ver18/model.onnx,sha256=cLYK-eJjTul0ne919XzExJWx4eqzVZOo1v1F5QMblas,1317 
+onnx/backend/test/data/node/test_elu_default_expanded_ver18/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_elu_default_expanded_ver18/test_data_set_0/output_0.pb,sha256=R10e2MFkJdT7LXU0o6fWwpUeMNIrnT9xSP45r1r6KIU,254 +onnx/backend/test/data/node/test_elu_example/model.onnx,sha256=KrYe-L_vMuNzyLPvF4t-jDUWQVoVIEHOVdtOiBvsxqA,106 +onnx/backend/test/data/node/test_elu_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_elu_example/test_data_set_0/output_0.pb,sha256=BagtTGVCh4Fl4wKScganoM7CS_gWaMK6AlPDEDAksnw,21 +onnx/backend/test/data/node/test_elu_example_expanded_ver18/model.onnx,sha256=KR8nIfX48GdYT6AZkavN6z305oe7Hw7gcK04dShq2g0,1301 +onnx/backend/test/data/node/test_elu_example_expanded_ver18/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_elu_example_expanded_ver18/test_data_set_0/output_0.pb,sha256=BagtTGVCh4Fl4wKScganoM7CS_gWaMK6AlPDEDAksnw,21 +onnx/backend/test/data/node/test_elu_expanded_ver18/model.onnx,sha256=2JjMB9lsE_l7VxXofbpqL9pqcTcKwubvEzDzODSxt78,1147 +onnx/backend/test/data/node/test_elu_expanded_ver18/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_elu_expanded_ver18/test_data_set_0/output_0.pb,sha256=1qikqd90KnVBAo1LXaZuZ_plZqqyNCGPaXg7nndxHaQ,254 +onnx/backend/test/data/node/test_equal/model.onnx,sha256=lwmcVb5UafztmJnMu-R95GH3ckvfjLXyLtiN8MxniVc,129 +onnx/backend/test/data/node/test_equal/test_data_set_0/input_0.pb,sha256=MKLPWH4JsioIIdvp1FcMaVkAvmJInLU3evdsDMTfU_k,254 +onnx/backend/test/data/node/test_equal/test_data_set_0/input_1.pb,sha256=4NP29YAJAKfKqC62zG5fj4kjHkgVtZNubPKm6-TtGiM,254 +onnx/backend/test/data/node/test_equal/test_data_set_0/output_0.pb,sha256=ttqpyVFrpeKQuRmyvcAQBtR70YTBB24AYZ3ObZ-r4hQ,73 +onnx/backend/test/data/node/test_equal_bcast/model.onnx,sha256=bFxea4JWRlSryQN79E6QcVCanmIKm_3FvCiUpvY2Zac,127 +onnx/backend/test/data/node/test_equal_bcast/test_data_set_0/input_0.pb,sha256=MKLPWH4JsioIIdvp1FcMaVkAvmJInLU3evdsDMTfU_k,254 +onnx/backend/test/data/node/test_equal_bcast/test_data_set_0/input_1.pb,sha256=PSjreW5uEBtMh7ZA7HYMm-dNGY8_FOGUWvaQf6vqo1I,29 +onnx/backend/test/data/node/test_equal_bcast/test_data_set_0/output_0.pb,sha256=FDjiiT-UI1Vmyv7YZPwrXo1-PrlbunLjj4OJ41_aAjs,73 +onnx/backend/test/data/node/test_equal_string/model.onnx,sha256=0FKXgIuUUzbp_zfW68dk4nfjNABehY7u60xHCnjROZc,112 +onnx/backend/test/data/node/test_equal_string/test_data_set_0/input_0.pb,sha256=JL7-CwtjtjFEHwmsDSxw-GA7U5qY0iV3bQVs_6zFmTw,25 +onnx/backend/test/data/node/test_equal_string/test_data_set_0/input_1.pb,sha256=MuifKJHibr3zcpvSLMjcRVz4751u9shjGf9us1VbCI4,25 +onnx/backend/test/data/node/test_equal_string/test_data_set_0/output_0.pb,sha256=vuGdPlSHGdS-OGbB7phc5TqRuC3BQcpJCN7BrvD7pMA,11 +onnx/backend/test/data/node/test_equal_string_broadcast/model.onnx,sha256=_48qehgyKwmUXNKC97eMVRBYwZg5KSt-o_jE3K1KW2A,122 +onnx/backend/test/data/node/test_equal_string_broadcast/test_data_set_0/input_0.pb,sha256=JL7-CwtjtjFEHwmsDSxw-GA7U5qY0iV3bQVs_6zFmTw,25 +onnx/backend/test/data/node/test_equal_string_broadcast/test_data_set_0/input_1.pb,sha256=isHWUq2j_Iwbwc2dyAmnF813WI3zs4sENKnIvov_e50,16 +onnx/backend/test/data/node/test_equal_string_broadcast/test_data_set_0/output_0.pb,sha256=vuGdPlSHGdS-OGbB7phc5TqRuC3BQcpJCN7BrvD7pMA,11 
+onnx/backend/test/data/node/test_erf/model.onnx,sha256=gZmISY2s3vLkp-p3hGrtYQ7g7x1ZyxPp7qBCk_BtuLo,105 +onnx/backend/test/data/node/test_erf/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_erf/test_data_set_0/output_0.pb,sha256=9sqo5UrZ7X_DtEbtytD-4yhCG-fxZN8AtpfVZnDq7IM,12304 +onnx/backend/test/data/node/test_exp/model.onnx,sha256=g1MTYxYg4MkE_ehqbgO_MDDr5izLU4lyhd2-C_dRejo,97 +onnx/backend/test/data/node/test_exp/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_exp/test_data_set_0/output_0.pb,sha256=jP7r-z7yLediEhV36ovwLP0z0r1PtySuyJ7u9FQI_jo,254 +onnx/backend/test/data/node/test_exp_example/model.onnx,sha256=I6CSuYgNQyGhCW3VGbAoB0xv_oKXZSCeFahI_t_zAuk,89 +onnx/backend/test/data/node/test_exp_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_exp_example/test_data_set_0/output_0.pb,sha256=8Z6Sn41Qmu-5DSO90Vhz6iYIY8302yNCBwlDvnxlW9c,21 +onnx/backend/test/data/node/test_expand_dim_changed/model.onnx,sha256=c4_vEqKUcEBPs4Vd9l-OA6h5GN5_rTBFnc6zkcOFe_s,168 +onnx/backend/test/data/node/test_expand_dim_changed/test_data_set_0/input_0.pb,sha256=AoPnKt-H_dcTIDGK9TessImdkEH3HfmiW-77-3ZZIqY,26 +onnx/backend/test/data/node/test_expand_dim_changed/test_data_set_0/input_1.pb,sha256=nz_Os8au3_CRXPk6uOHXGma3TwkLsQ0R0ZWJdZl2wYU,41 +onnx/backend/test/data/node/test_expand_dim_changed/test_data_set_0/output_0.pb,sha256=Xeuh-wyUv3E_ihC7s1rogDDZI7IiZZX2Rvq8DW5LwiA,165 +onnx/backend/test/data/node/test_expand_dim_unchanged/model.onnx,sha256=_P8uTx5R3WMbSbUV8du2SdE6XirZGS41H0jzpGc8Syo,166 +onnx/backend/test/data/node/test_expand_dim_unchanged/test_data_set_0/input_0.pb,sha256=AoPnKt-H_dcTIDGK9TessImdkEH3HfmiW-77-3ZZIqY,26 +onnx/backend/test/data/node/test_expand_dim_unchanged/test_data_set_0/input_1.pb,sha256=DvZj0CscD2fjY1rTp_dvS7OobwuGgeXFLEfUroZjXRk,33 +onnx/backend/test/data/node/test_expand_dim_unchanged/test_data_set_0/output_0.pb,sha256=W0FAvWPJ1K2wCIJxDV-G4Ejv6MMLgP8xZGwN_KKOEXI,66 +onnx/backend/test/data/node/test_eyelike_populate_off_main_diagonal/model.onnx,sha256=2nzLdB3VP7s2WzF3PkJvvTnngfXsk9wzN48W6JK_GuY,148 +onnx/backend/test/data/node/test_eyelike_populate_off_main_diagonal/test_data_set_0/input_0.pb,sha256=Tx9m2RIZI9vFt7CE3QTvOTK4cLjEh5MACeSTP1egDeA,91 +onnx/backend/test/data/node/test_eyelike_populate_off_main_diagonal/test_data_set_0/output_0.pb,sha256=gXtL9SCTNQYHwx7EeNmFhxq1VmtV-WtGGJq-Jk7W4n8,91 +onnx/backend/test/data/node/test_eyelike_with_dtype/model.onnx,sha256=VZP49xyDolwSdm-hp57IdUsRsq684Z6cLo7cDoXGrbQ,122 +onnx/backend/test/data/node/test_eyelike_with_dtype/test_data_set_0/input_0.pb,sha256=l4NjktRUQoCUFQXwkl4ZkuFX0uN_a4U3otSqPmmAL_E,59 +onnx/backend/test/data/node/test_eyelike_with_dtype/test_data_set_0/output_0.pb,sha256=0QxDxZ7AFNB3zkhhpmKoveOZZG-yj8CEbcbprLAxmL0,107 +onnx/backend/test/data/node/test_eyelike_without_dtype/model.onnx,sha256=uwNzuWn2cEP1YL8V5T_7VUMFMZCxPnJdZTw69QXSUpQ,111 +onnx/backend/test/data/node/test_eyelike_without_dtype/test_data_set_0/input_0.pb,sha256=1UjqCqHuod0Uy98DDsmwuxJylfDmD1WC6n1cuIaueAg,75 +onnx/backend/test/data/node/test_eyelike_without_dtype/test_data_set_0/output_0.pb,sha256=41IuN8qkdjiw-rvD37Xoci-okMoFlzst_lOEmxMN22k,75 +onnx/backend/test/data/node/test_flatten_axis0/model.onnx,sha256=oZx69fnX_e_UM0qy_10YpmMshtGllk_7Mj0PCe8WOYU,124 
+onnx/backend/test/data/node/test_flatten_axis0/test_data_set_0/input_0.pb,sha256=Tmfo0GyWZmCyZgxTurluTU7UqCKaAMO269Nmvnlc5Z8,496 +onnx/backend/test/data/node/test_flatten_axis0/test_data_set_0/output_0.pb,sha256=rK76rD-Gd7pN7MFiCa4gnGDsCIaupauMseig-khoG4A,492 +onnx/backend/test/data/node/test_flatten_axis1/model.onnx,sha256=pS1b-XTpXA_-etV-JVtwfMwz0QlYdXjRThCeKRdtqXs,124 +onnx/backend/test/data/node/test_flatten_axis1/test_data_set_0/input_0.pb,sha256=Tmfo0GyWZmCyZgxTurluTU7UqCKaAMO269Nmvnlc5Z8,496 +onnx/backend/test/data/node/test_flatten_axis1/test_data_set_0/output_0.pb,sha256=gp4cg8vDGCIxsjAVH_C6zXUfFdU8e5Zt2rFrNtLE4qM,492 +onnx/backend/test/data/node/test_flatten_axis2/model.onnx,sha256=lDdk-qk8Yt_FT4HJcoJ2OyZLj1xz_20uqhwfcpPoPo4,124 +onnx/backend/test/data/node/test_flatten_axis2/test_data_set_0/input_0.pb,sha256=Tmfo0GyWZmCyZgxTurluTU7UqCKaAMO269Nmvnlc5Z8,496 +onnx/backend/test/data/node/test_flatten_axis2/test_data_set_0/output_0.pb,sha256=WxLYJ0ShJO5yi1cemMDvetHIRHssm7QET5WHVyhCz4w,492 +onnx/backend/test/data/node/test_flatten_axis3/model.onnx,sha256=hgW-r_hV817rE4qFvk8384933GPMh46Ph66l5UR3mUY,124 +onnx/backend/test/data/node/test_flatten_axis3/test_data_set_0/input_0.pb,sha256=Tmfo0GyWZmCyZgxTurluTU7UqCKaAMO269Nmvnlc5Z8,496 +onnx/backend/test/data/node/test_flatten_axis3/test_data_set_0/output_0.pb,sha256=yPyzGHWmluHXMDP7BvL5BElzL7KCgMtwEH3tLKTwJoE,492 +onnx/backend/test/data/node/test_flatten_default_axis/model.onnx,sha256=qdpn0U_qLsojXMLcGNVc_T6cCVDPJTVLs2zQq3rfao8,118 +onnx/backend/test/data/node/test_flatten_default_axis/test_data_set_0/input_0.pb,sha256=hLRdvgAmmF6-VWT36JhFYz7XSQ8QBpZyobohXpWIaSE,496 +onnx/backend/test/data/node/test_flatten_default_axis/test_data_set_0/output_0.pb,sha256=6rx1onyZ2U_4kN9SI-xNMUkluod27NrzqMkvO1TH9-Q,492 +onnx/backend/test/data/node/test_flatten_negative_axis1/model.onnx,sha256=IxHo1zq1T3MjdAYnKXLHS-gFVZ09h_Yg_X5cTOq4TKQ,142 +onnx/backend/test/data/node/test_flatten_negative_axis1/test_data_set_0/input_0.pb,sha256=Tmfo0GyWZmCyZgxTurluTU7UqCKaAMO269Nmvnlc5Z8,496 +onnx/backend/test/data/node/test_flatten_negative_axis1/test_data_set_0/output_0.pb,sha256=yPyzGHWmluHXMDP7BvL5BElzL7KCgMtwEH3tLKTwJoE,492 +onnx/backend/test/data/node/test_flatten_negative_axis2/model.onnx,sha256=TWinCF0peGanm-Oe4dQpx2ZK3xg30ZWDlWiBXMzb8Pc,142 +onnx/backend/test/data/node/test_flatten_negative_axis2/test_data_set_0/input_0.pb,sha256=Tmfo0GyWZmCyZgxTurluTU7UqCKaAMO269Nmvnlc5Z8,496 +onnx/backend/test/data/node/test_flatten_negative_axis2/test_data_set_0/output_0.pb,sha256=WxLYJ0ShJO5yi1cemMDvetHIRHssm7QET5WHVyhCz4w,492 +onnx/backend/test/data/node/test_flatten_negative_axis3/model.onnx,sha256=uAwwzHOGIBgxBoZVs6T_32i-NwumuohO4IxOs9ZuEsM,142 +onnx/backend/test/data/node/test_flatten_negative_axis3/test_data_set_0/input_0.pb,sha256=Tmfo0GyWZmCyZgxTurluTU7UqCKaAMO269Nmvnlc5Z8,496 +onnx/backend/test/data/node/test_flatten_negative_axis3/test_data_set_0/output_0.pb,sha256=gp4cg8vDGCIxsjAVH_C6zXUfFdU8e5Zt2rFrNtLE4qM,492 +onnx/backend/test/data/node/test_flatten_negative_axis4/model.onnx,sha256=lFgl86S3q3WFqaxHaaxE35cED2bP5R79X7ae-dtjCAo,142 +onnx/backend/test/data/node/test_flatten_negative_axis4/test_data_set_0/input_0.pb,sha256=Tmfo0GyWZmCyZgxTurluTU7UqCKaAMO269Nmvnlc5Z8,496 +onnx/backend/test/data/node/test_flatten_negative_axis4/test_data_set_0/output_0.pb,sha256=rK76rD-Gd7pN7MFiCa4gnGDsCIaupauMseig-khoG4A,492 +onnx/backend/test/data/node/test_floor/model.onnx,sha256=ys4pd-oBAvmFqHS_YNndTptJf8jJUJ25303yszFXYz0,101 
+onnx/backend/test/data/node/test_floor/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_floor/test_data_set_0/output_0.pb,sha256=SLn5L75ClP1fBiBE6ZDE4VZzTrXCiWcTMy7QXKa-Ikw,254 +onnx/backend/test/data/node/test_floor_example/model.onnx,sha256=VdIVjxbhQSgQ3qGk0N-ADc6pxiVzMX5yIrBQgg2bFy8,93 +onnx/backend/test/data/node/test_floor_example/test_data_set_0/input_0.pb,sha256=ILNT9pLgfeiHDm9wzsrp3xhT2j2j7e4JJZnFxDDExm4,21 +onnx/backend/test/data/node/test_floor_example/test_data_set_0/output_0.pb,sha256=R2jpEs28Fpb5O1rged2oKCrC6rKz2V4AZANFkIZWhvo,21 +onnx/backend/test/data/node/test_gather_0/model.onnx,sha256=BVuBNnaBUTjWY0CQmPobcWUlvKhO90GmSJ0_Q-K6qos,165 +onnx/backend/test/data/node/test_gather_0/test_data_set_0/input_0.pb,sha256=v9ro1rIfI6GdfDAOlfzMtvUqIdKvWlTBkhnXf794Nm8,499 +onnx/backend/test/data/node/test_gather_0/test_data_set_0/input_1.pb,sha256=0Ccyp4GZ5Le6DSa9l6Ll54TxclZLEJcFSzyZNmAGtOM,39 +onnx/backend/test/data/node/test_gather_0/test_data_set_0/output_0.pb,sha256=09KeAIrn_T0fae6_KPa4XtXP6OS1dQUtxXObxyVKA3o,304 +onnx/backend/test/data/node/test_gather_1/model.onnx,sha256=-einNP84SiRXQdfqLWV_ptKuGZOfDZhxRR6PkxoA2Sg,165 +onnx/backend/test/data/node/test_gather_1/test_data_set_0/input_0.pb,sha256=v9ro1rIfI6GdfDAOlfzMtvUqIdKvWlTBkhnXf794Nm8,499 +onnx/backend/test/data/node/test_gather_1/test_data_set_0/input_1.pb,sha256=0Ccyp4GZ5Le6DSa9l6Ll54TxclZLEJcFSzyZNmAGtOM,39 +onnx/backend/test/data/node/test_gather_1/test_data_set_0/output_0.pb,sha256=ieyU1qyiHYO1t5Ab7_nxdVGd_OaMgc3YbKzYuqajEPg,376 +onnx/backend/test/data/node/test_gather_2d_indices/model.onnx,sha256=DPra5GUI_sv0fTGRUntpMSCozKIDCyJJgGnsrORMvcM,166 +onnx/backend/test/data/node/test_gather_2d_indices/test_data_set_0/input_0.pb,sha256=SOaFrsa79PCS2ugXp1X-Zq0oKZzsDF1ZhhCp-IilMbI,50 +onnx/backend/test/data/node/test_gather_2d_indices/test_data_set_0/input_1.pb,sha256=bXdhgzbcEgKzjvDLVobDKoZnWAbC9zGJZFC0lj7Y_o8,33 +onnx/backend/test/data/node/test_gather_2d_indices/test_data_set_0/output_0.pb,sha256=w7ycEGtr1LhcasCY6CV0lbteDV95mdJMiOSF6kAeL68,37 +onnx/backend/test/data/node/test_gather_elements_0/model.onnx,sha256=-UKGxaVDmtpIiOxLp3hdBW50jufIf6WzYL2x9CXsna0,170 +onnx/backend/test/data/node/test_gather_elements_0/test_data_set_0/input_0.pb,sha256=wRC2L4HlxZ6ZgrkzAQSaPSyf6PJwdYTMGNVxHYmTBOE,30 +onnx/backend/test/data/node/test_gather_elements_0/test_data_set_0/input_1.pb,sha256=wSm3mqUOOxYumS8IIOegQAGpVj-DJa5dmRvLafVm1go,49 +onnx/backend/test/data/node/test_gather_elements_0/test_data_set_0/output_0.pb,sha256=F1BDlx55q8vtfYsgvENwpn1xht12Z7Dsw5tL1fkhntI,27 +onnx/backend/test/data/node/test_gather_elements_1/model.onnx,sha256=qhtzp0PVLrQhxI8DObE-nI7P7zDA8JgKTo8pliOpxW8,170 +onnx/backend/test/data/node/test_gather_elements_1/test_data_set_0/input_0.pb,sha256=Wm59aOyyoZOZv5YocFWSzXPg25a-rdTtaDgEYaWC5kc,50 +onnx/backend/test/data/node/test_gather_elements_1/test_data_set_0/input_1.pb,sha256=5pgWxgEKVysUsnq2IsW-NVvjJquNbcZKBWa1yFTP0VQ,65 +onnx/backend/test/data/node/test_gather_elements_1/test_data_set_0/output_0.pb,sha256=h_nydv1J1lfEmGnseH4P5IDLSjCcUDxV0wK-aQfoPKY,35 +onnx/backend/test/data/node/test_gather_elements_negative_indices/model.onnx,sha256=eFwUhoyzlAwCXYUqo2uMg9At6nY630sSPZYiO6xYER0,185 +onnx/backend/test/data/node/test_gather_elements_negative_indices/test_data_set_0/input_0.pb,sha256=Wm59aOyyoZOZv5YocFWSzXPg25a-rdTtaDgEYaWC5kc,50 
+onnx/backend/test/data/node/test_gather_elements_negative_indices/test_data_set_0/input_1.pb,sha256=0VU9o2--B-qtP4gN3jZZoUPLGH2P_3RcU3JEkOZ9xEo,65 +onnx/backend/test/data/node/test_gather_elements_negative_indices/test_data_set_0/output_0.pb,sha256=fibWdZ4Mvy73m47uPMZAAKLshVsu3yP3zerYkK7Tqbw,35 +onnx/backend/test/data/node/test_gather_negative_indices/model.onnx,sha256=LsHLhrGMuLkwmRgyZ2IQDIFSPV3498o2_GBtp54cnQk,156 +onnx/backend/test/data/node/test_gather_negative_indices/test_data_set_0/input_0.pb,sha256=GXKNcQ-uaECnpF8lviHCWJqTZop-OiUYjf15GjWXEdM,52 +onnx/backend/test/data/node/test_gather_negative_indices/test_data_set_0/input_1.pb,sha256=5A7wfS6LnXClLPQu_ACgve0ttcPbYZSfwUcXrHRn7f8,39 +onnx/backend/test/data/node/test_gather_negative_indices/test_data_set_0/output_0.pb,sha256=A5dULKLVuSszRV-Lr4tSbcCXE8SmBGXGEt2PBEUnrC0,21 +onnx/backend/test/data/node/test_gathernd_example_float32/model.onnx,sha256=qXeqzRaQl1k20yQxj66_8OMPrjB8hhbVLTAIP6Mhwrw,180 +onnx/backend/test/data/node/test_gathernd_example_float32/test_data_set_0/input_0.pb,sha256=5cz0WLnzXzK7tNORYLLJt7M4cEl5voO6weP87g10uzY,48 +onnx/backend/test/data/node/test_gathernd_example_float32/test_data_set_0/input_1.pb,sha256=jbBMayVnL94pY1Yt8CqgATmUpNvs4tIWsExStJnd_mU,51 +onnx/backend/test/data/node/test_gathernd_example_float32/test_data_set_0/output_0.pb,sha256=mji8xWMFyFYSHhjyJ7DCrYk0VKFyB-A-UXkQ4i_FRsY,34 +onnx/backend/test/data/node/test_gathernd_example_int32/model.onnx,sha256=BNQJZvLmpsNQWy9awDjRUyKJUFvbW6KFlHgtnp31l4c,162 +onnx/backend/test/data/node/test_gathernd_example_int32/test_data_set_0/input_0.pb,sha256=9zPZQrD5dWr-AFtjXJKRMv3rliBFCWE67e5HYlFgI8Q,30 +onnx/backend/test/data/node/test_gathernd_example_int32/test_data_set_0/input_1.pb,sha256=IgYXoL1hPZ5AWWRDlAg52hQcugR9rQ315g3gS5h2SKU,49 +onnx/backend/test/data/node/test_gathernd_example_int32/test_data_set_0/output_0.pb,sha256=wBiSs17OlKdFQgS0dKfm8s3KikSCYY3DCqHVJLqTax0,22 +onnx/backend/test/data/node/test_gathernd_example_int32_batch_dim1/model.onnx,sha256=6VH-THSJwzI7Z3ZzLqkZIu8raqAyYJ5cYsBtZOTrDSM,200 +onnx/backend/test/data/node/test_gathernd_example_int32_batch_dim1/test_data_set_0/input_0.pb,sha256=_Ufgo65torZP3BJXMuNV_I5bjGxlFeNnmYbzBQDtGgk,48 +onnx/backend/test/data/node/test_gathernd_example_int32_batch_dim1/test_data_set_0/input_1.pb,sha256=PYURLeB1BEiLLcnnTviJ8DKU5TaUbxAMmuEs-y9aEPc,33 +onnx/backend/test/data/node/test_gathernd_example_int32_batch_dim1/test_data_set_0/output_0.pb,sha256=SevtZOQwcp9sevBCWvvSXtPStmtLO1deGNkftiCcUK4,32 +onnx/backend/test/data/node/test_gelu_default_1/model.onnx,sha256=mwFqEZ51y0LvHRM5IsCHKreJZ9KXEyX5zpk4_QX32nA,93 +onnx/backend/test/data/node/test_gelu_default_1/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_gelu_default_1/test_data_set_0/output_0.pb,sha256=4bjdfbLPTMUgx0SayiTA0G96UOGIGX2PKAxfYQeRkyU,21 +onnx/backend/test/data/node/test_gelu_default_1_expanded/model.onnx,sha256=8BP72Oh43uhKm9rtAdckT1VxWGqEeUXkY1_6E2EHLL8,1429 +onnx/backend/test/data/node/test_gelu_default_1_expanded/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_gelu_default_1_expanded/test_data_set_0/output_0.pb,sha256=4bjdfbLPTMUgx0SayiTA0G96UOGIGX2PKAxfYQeRkyU,21 +onnx/backend/test/data/node/test_gelu_default_2/model.onnx,sha256=QKh66wLVRddslXbWjz9w5qMZaiNSjSuha7xpkQSyiIs,109 +onnx/backend/test/data/node/test_gelu_default_2/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 
+onnx/backend/test/data/node/test_gelu_default_2/test_data_set_0/output_0.pb,sha256=pVbTvOHkijnz8EDFKTln9j4pxiht56VbV3uhv4gzQAQ,254 +onnx/backend/test/data/node/test_gelu_default_2_expanded/model.onnx,sha256=WF1LGm5Sk7UrVsSVxArXm_T8CAoFIB_pwbNhMLDoIxA,1445 +onnx/backend/test/data/node/test_gelu_default_2_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_gelu_default_2_expanded/test_data_set_0/output_0.pb,sha256=pVbTvOHkijnz8EDFKTln9j4pxiht56VbV3uhv4gzQAQ,254 +onnx/backend/test/data/node/test_gelu_tanh_1/model.onnx,sha256=VLP4GruStaso802a_IGfYbdxZ7qR5TacEx330pS5xcc,114 +onnx/backend/test/data/node/test_gelu_tanh_1/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_gelu_tanh_1/test_data_set_0/output_0.pb,sha256=zOPUFL9N2bIgMy9AISAxC9H_2OkMgH08bIVd-BLSf4E,21 +onnx/backend/test/data/node/test_gelu_tanh_1_expanded/model.onnx,sha256=QL1UVQadFUDytZKnFhkSKJcLhYwDOKmk3LtuYLtPrVE,2239 +onnx/backend/test/data/node/test_gelu_tanh_1_expanded/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_gelu_tanh_1_expanded/test_data_set_0/output_0.pb,sha256=zOPUFL9N2bIgMy9AISAxC9H_2OkMgH08bIVd-BLSf4E,21 +onnx/backend/test/data/node/test_gelu_tanh_2/model.onnx,sha256=xRiWp7RIE4Jo9IoWKwCUPU5sWho5uxSsVPtNciPFWWo,130 +onnx/backend/test/data/node/test_gelu_tanh_2/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_gelu_tanh_2/test_data_set_0/output_0.pb,sha256=YtELNtZvMXqM_O4qUaKFRUz209LqKrODM63qs7DOOEE,254 +onnx/backend/test/data/node/test_gelu_tanh_2_expanded/model.onnx,sha256=65XfLWnYZKKO3UPutOkQXXY9sotXRG-Im9CJinKBmEM,2255 +onnx/backend/test/data/node/test_gelu_tanh_2_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_gelu_tanh_2_expanded/test_data_set_0/output_0.pb,sha256=YtELNtZvMXqM_O4qUaKFRUz209LqKrODM63qs7DOOEE,254 +onnx/backend/test/data/node/test_gemm_all_attributes/model.onnx,sha256=HTR5DpeE8XQRryF_KzHEKihBvN_IRKm7yHy0WKvGjBI,218 +onnx/backend/test/data/node/test_gemm_all_attributes/test_data_set_0/input_0.pb,sha256=RXR_ByfkxKZMoWCAHpiivbcuBwFAKhfBUVkyCln3T0o,59 +onnx/backend/test/data/node/test_gemm_all_attributes/test_data_set_0/input_1.pb,sha256=aVoY8BMzhNSlp1xP2b-jQ6REpi2Dtc1qnEAPQSMYGKY,91 +onnx/backend/test/data/node/test_gemm_all_attributes/test_data_set_0/input_2.pb,sha256=8gbLQicg2cZTYZpzj5BI-b4-9XtcCgaLXiQM1a6sZo0,31 +onnx/backend/test/data/node/test_gemm_all_attributes/test_data_set_0/output_0.pb,sha256=M8ow4VnzrBXwrvTAwznwIas4zwLusLNUS7u7MWkQsYY,71 +onnx/backend/test/data/node/test_gemm_alpha/model.onnx,sha256=SzQN_zHM5FNWh8GdU0P7nGh5JurL_z5yCQ1xbB8EfmM,163 +onnx/backend/test/data/node/test_gemm_alpha/test_data_set_0/input_0.pb,sha256=MILDtCDczrrmVCYbqkuMR6oYZmKaoVlWgARdLbzo9B4,71 +onnx/backend/test/data/node/test_gemm_alpha/test_data_set_0/input_1.pb,sha256=1xWZAGPREKiwEVDAShgn4ytsgBqHdZiahRzc34kWXIo,91 +onnx/backend/test/data/node/test_gemm_alpha/test_data_set_0/input_2.pb,sha256=_0XG54N6Zz5zb0Nxb201xMsoJvRtQ47nblJ4VfMvvXA,27 +onnx/backend/test/data/node/test_gemm_alpha/test_data_set_0/output_0.pb,sha256=0QrvhY8NjgaHuyrMkfNjHiN8EGUrrI2ik9s8pmKXLVM,59 +onnx/backend/test/data/node/test_gemm_beta/model.onnx,sha256=lbbrtTgcdg49mL9b9JhVQLbSNXp7HnHHN4P0XoFkuto,161 
+onnx/backend/test/data/node/test_gemm_beta/test_data_set_0/input_0.pb,sha256=r5WMCnSToJqMbEYSeANqSG5Nbsp3PtGfofCSBKWZxEw,67 +onnx/backend/test/data/node/test_gemm_beta/test_data_set_0/input_1.pb,sha256=JeOCNEjJirYi_V4m0bfCyIwat86xDBD3ZA9OL6u1lPM,123 +onnx/backend/test/data/node/test_gemm_beta/test_data_set_0/input_2.pb,sha256=IB3Jspke-7-5qQWMVzj62hbsVQ_lbvNUYjXvtFGl5IM,27 +onnx/backend/test/data/node/test_gemm_beta/test_data_set_0/output_0.pb,sha256=JVxD5TKmPSVG8a_4F5GxWsxGnVh_R0L9rDe2k3xCqYg,43 +onnx/backend/test/data/node/test_gemm_default_matrix_bias/model.onnx,sha256=EncOjVzLufDY4k7CdiKPnwWV3QqoASAgUDlw9q_f0z0,160 +onnx/backend/test/data/node/test_gemm_default_matrix_bias/test_data_set_0/input_0.pb,sha256=XvAUu3zVMoHoUthPBoozrxWJMZk4TyEmquNiplVhb5g,83 +onnx/backend/test/data/node/test_gemm_default_matrix_bias/test_data_set_0/input_1.pb,sha256=L69sZ8XUqDcre4xBAI4kPV0TarQpqwHvAdTg5f7WW9Y,107 +onnx/backend/test/data/node/test_gemm_default_matrix_bias/test_data_set_0/input_2.pb,sha256=muCFB3W2JEK0ta9OShyFBcQwmhqbNzDHbY3sUDkVkew,59 +onnx/backend/test/data/node/test_gemm_default_matrix_bias/test_data_set_0/output_0.pb,sha256=iQ3LpdiK7rS38o00o7UMFfXGstIaDyxsk2fileNMB1Q,59 +onnx/backend/test/data/node/test_gemm_default_no_bias/model.onnx,sha256=kLKHvgantAA1L4aWXYOBFpK_y_6ZYUd8sDM0HuauoYk,131 +onnx/backend/test/data/node/test_gemm_default_no_bias/test_data_set_0/input_0.pb,sha256=cTbU9_Eif-2LEv4q5aKaThA_zQhShNPDIiZjXSRhMxs,91 +onnx/backend/test/data/node/test_gemm_default_no_bias/test_data_set_0/input_1.pb,sha256=9trDrSVCbecGUg-TpqlqdscizRriba9XmLEIAPekmDU,131 +onnx/backend/test/data/node/test_gemm_default_no_bias/test_data_set_0/output_0.pb,sha256=KC0JMcrMnA0iDy2_QfcyfQQs8KWTbYA9kLb9aSbETMw,35 +onnx/backend/test/data/node/test_gemm_default_scalar_bias/model.onnx,sha256=kyvYbbUiBOl43sEzFkP1NX2h-4EHAR5BuTkNyiPUIKs,151 +onnx/backend/test/data/node/test_gemm_default_scalar_bias/test_data_set_0/input_0.pb,sha256=VtTpPce13D-fdCP-802VIbl4EIVLgrtF7mtl517CpIE,35 +onnx/backend/test/data/node/test_gemm_default_scalar_bias/test_data_set_0/input_1.pb,sha256=SZG6iS5JbxzyssmlpNhOPGFP_iOd8yfd9sbSGvm4LIA,59 +onnx/backend/test/data/node/test_gemm_default_scalar_bias/test_data_set_0/input_2.pb,sha256=zRb1hwJ_IqbwUHQg5cFKeoV_wK7hFWJCLGv5_3lfDoU,11 +onnx/backend/test/data/node/test_gemm_default_scalar_bias/test_data_set_0/output_0.pb,sha256=EINJmRzyMakSMOcv5FRxJpwJuzuYxYFp_vFX3lPPz1M,43 +onnx/backend/test/data/node/test_gemm_default_single_elem_vector_bias/model.onnx,sha256=mUQWPy0JVeYPaVvAvb0jdSDFOKLpwda0OxRe6GBE3Ic,168 +onnx/backend/test/data/node/test_gemm_default_single_elem_vector_bias/test_data_set_0/input_0.pb,sha256=KtFIVi8IliPuo9E23jP3YKzU-jsTyv306GzQu64WNRM,95 +onnx/backend/test/data/node/test_gemm_default_single_elem_vector_bias/test_data_set_0/input_1.pb,sha256=QZrZwYak1ep3E164r-vGwhW0neLicmkAgQwUYgKQpG0,95 +onnx/backend/test/data/node/test_gemm_default_single_elem_vector_bias/test_data_set_0/input_2.pb,sha256=qqc1BKD0WlwHFwNCosttdZqyvg0EznvDfX4sN4oLiGY,13 +onnx/backend/test/data/node/test_gemm_default_single_elem_vector_bias/test_data_set_0/output_0.pb,sha256=uhIwMJjEzSp8y0mRbvsD9SClUujmS9ze-3vbtZmpAK8,47 +onnx/backend/test/data/node/test_gemm_default_vector_bias/model.onnx,sha256=d3yUgH0ldHi5SmINnvKCBg81rqZf64z97wzDRixnEtU,160 +onnx/backend/test/data/node/test_gemm_default_vector_bias/test_data_set_0/input_0.pb,sha256=r5WMCnSToJqMbEYSeANqSG5Nbsp3PtGfofCSBKWZxEw,67 
+onnx/backend/test/data/node/test_gemm_default_vector_bias/test_data_set_0/input_1.pb,sha256=JeOCNEjJirYi_V4m0bfCyIwat86xDBD3ZA9OL6u1lPM,123 +onnx/backend/test/data/node/test_gemm_default_vector_bias/test_data_set_0/input_2.pb,sha256=IB3Jspke-7-5qQWMVzj62hbsVQ_lbvNUYjXvtFGl5IM,27 +onnx/backend/test/data/node/test_gemm_default_vector_bias/test_data_set_0/output_0.pb,sha256=-bT3X5G1IREouSTeHUyj7eZeA6zlCC5U25jNljD222A,43 +onnx/backend/test/data/node/test_gemm_default_zero_bias/model.onnx,sha256=fKG8IMPCAtOZnskNM_HhHhGPX54KM7mbRafsJxcKmAY,158 +onnx/backend/test/data/node/test_gemm_default_zero_bias/test_data_set_0/input_0.pb,sha256=MILDtCDczrrmVCYbqkuMR6oYZmKaoVlWgARdLbzo9B4,71 +onnx/backend/test/data/node/test_gemm_default_zero_bias/test_data_set_0/input_1.pb,sha256=1xWZAGPREKiwEVDAShgn4ytsgBqHdZiahRzc34kWXIo,91 +onnx/backend/test/data/node/test_gemm_default_zero_bias/test_data_set_0/input_2.pb,sha256=_0XG54N6Zz5zb0Nxb201xMsoJvRtQ47nblJ4VfMvvXA,27 +onnx/backend/test/data/node/test_gemm_default_zero_bias/test_data_set_0/output_0.pb,sha256=-fJCSSDCHHgenicNtmUhcWgfyxt9tadr5CZPutY1ol4,59 +onnx/backend/test/data/node/test_gemm_transposeA/model.onnx,sha256=VRg_UfqynzEdxEmglw_sL9ebMTbIvMUjxH2OmGPCIYY,166 +onnx/backend/test/data/node/test_gemm_transposeA/test_data_set_0/input_0.pb,sha256=8v_ybDUH0fDPp5sZoIMfWJvtnweReOYdHp7Iu7qGYCk,83 +onnx/backend/test/data/node/test_gemm_transposeA/test_data_set_0/input_1.pb,sha256=L69sZ8XUqDcre4xBAI4kPV0TarQpqwHvAdTg5f7WW9Y,107 +onnx/backend/test/data/node/test_gemm_transposeA/test_data_set_0/input_2.pb,sha256=_0XG54N6Zz5zb0Nxb201xMsoJvRtQ47nblJ4VfMvvXA,27 +onnx/backend/test/data/node/test_gemm_transposeA/test_data_set_0/output_0.pb,sha256=k-IwJyiSI1g8vypylfNjds4-VnWY6uME1fx6BH2lYZU,59 +onnx/backend/test/data/node/test_gemm_transposeB/model.onnx,sha256=qF4Yla3z628tEgTZQuvk_LRwoGr5nh7FJGJZxu3S4hQ,166 +onnx/backend/test/data/node/test_gemm_transposeB/test_data_set_0/input_0.pb,sha256=XvAUu3zVMoHoUthPBoozrxWJMZk4TyEmquNiplVhb5g,83 +onnx/backend/test/data/node/test_gemm_transposeB/test_data_set_0/input_1.pb,sha256=VKG3Ykxfro1pdhKVtzUNYRYQjysnnn3_P7ZkAKfVQm8,107 +onnx/backend/test/data/node/test_gemm_transposeB/test_data_set_0/input_2.pb,sha256=_0XG54N6Zz5zb0Nxb201xMsoJvRtQ47nblJ4VfMvvXA,27 +onnx/backend/test/data/node/test_gemm_transposeB/test_data_set_0/output_0.pb,sha256=RPKyOLcHHw4m-bpJDc5OB_oLunmPJGdD9FYP-keAVHU,59 +onnx/backend/test/data/node/test_globalaveragepool/model.onnx,sha256=O8bUH8bIsDlQm_GcKCIZX15KygH3tB-BOAui31v3RqU,133 +onnx/backend/test/data/node/test_globalaveragepool/test_data_set_0/input_0.pb,sha256=qmHY5ZOgfUeKlGr6gKjS6R4nQl61rIb7E8ZrdQSs3HQ,316 +onnx/backend/test/data/node/test_globalaveragepool/test_data_set_0/output_0.pb,sha256=xNA3TgQwfvnKKtJLHlP8BGoYSyY42pXVndHvtMzKNSw,27 +onnx/backend/test/data/node/test_globalaveragepool_precomputed/model.onnx,sha256=fYYxYRB9qBy5cVtNA5H_od0ZzETP2-629m4F94mACDE,145 +onnx/backend/test/data/node/test_globalaveragepool_precomputed/test_data_set_0/input_0.pb,sha256=VEPr9C-MPAPrWNFhCqj3re8AshFCwfNIPN9i9k2EvMo,51 +onnx/backend/test/data/node/test_globalaveragepool_precomputed/test_data_set_0/output_0.pb,sha256=otE0U2fczeenDUSTqh59jqDAfCNOJMoQjqE0pLtL-KQ,19 +onnx/backend/test/data/node/test_globalmaxpool/model.onnx,sha256=Z4zJa5gYvDzHTdML1nc82x9KOR97Ir3LF5TOdvAhRhE,125 +onnx/backend/test/data/node/test_globalmaxpool/test_data_set_0/input_0.pb,sha256=qmHY5ZOgfUeKlGr6gKjS6R4nQl61rIb7E8ZrdQSs3HQ,316 
+onnx/backend/test/data/node/test_globalmaxpool/test_data_set_0/output_0.pb,sha256=56GJcF-qYJQhHJoqwvMOiOlFGpGfi3tVUUGyrRdycLs,27 +onnx/backend/test/data/node/test_globalmaxpool_precomputed/model.onnx,sha256=w92_r0TUpATDaoaqn7YgupNN2MJKWEtXy-t2ff3mEH8,137 +onnx/backend/test/data/node/test_globalmaxpool_precomputed/test_data_set_0/input_0.pb,sha256=VEPr9C-MPAPrWNFhCqj3re8AshFCwfNIPN9i9k2EvMo,51 +onnx/backend/test/data/node/test_globalmaxpool_precomputed/test_data_set_0/output_0.pb,sha256=BIUCmZ29Gugpj6pl82Cf1tuj26kP34Mbp33VqWuFmnI,19 +onnx/backend/test/data/node/test_greater/model.onnx,sha256=ewXoLOj7YNobggSofGk__hmXs7RamQ2IosA_fQ3Eaos,145 +onnx/backend/test/data/node/test_greater/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_greater/test_data_set_0/input_1.pb,sha256=qlmuuQNl3eLtxabKfNE6cbZHjYcj-DzNBr1M4rz7GSo,254 +onnx/backend/test/data/node/test_greater/test_data_set_0/output_0.pb,sha256=uDTgBvvrdZBv60SwtKEuuMrGFn3ip783yOujJbu2_wc,79 +onnx/backend/test/data/node/test_greater_bcast/model.onnx,sha256=JL5xVnCCPgeF5ILL-bLmKpmrfSU-CfJXL1FBhe4GW7E,143 +onnx/backend/test/data/node/test_greater_bcast/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_greater_bcast/test_data_set_0/input_1.pb,sha256=jYn9FLUzE9ETJcxFW48XqSehRlyePPjUoI-gtNyozBs,29 +onnx/backend/test/data/node/test_greater_bcast/test_data_set_0/output_0.pb,sha256=ZferDXWUCmTQTquCgvAxWYEnjQ_b1B-dOjznTOngcdQ,79 +onnx/backend/test/data/node/test_greater_equal/model.onnx,sha256=pWC2qrXxm0Vbjj2m8omzBcdrjBVlVCxRRpqg3rY8Lx0,171 +onnx/backend/test/data/node/test_greater_equal/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_greater_equal/test_data_set_0/input_1.pb,sha256=qlmuuQNl3eLtxabKfNE6cbZHjYcj-DzNBr1M4rz7GSo,254 +onnx/backend/test/data/node/test_greater_equal/test_data_set_0/output_0.pb,sha256=mzT9JswDPorPoAK92dgoB5a05d4MI3Tx5985GQu3Ntw,85 +onnx/backend/test/data/node/test_greater_equal_bcast/model.onnx,sha256=E1Hy02R8SQc_F_PlX-ArhzZQJwPyTmPnw2z_mxDP-kE,169 +onnx/backend/test/data/node/test_greater_equal_bcast/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_greater_equal_bcast/test_data_set_0/input_1.pb,sha256=jYn9FLUzE9ETJcxFW48XqSehRlyePPjUoI-gtNyozBs,29 +onnx/backend/test/data/node/test_greater_equal_bcast/test_data_set_0/output_0.pb,sha256=oDBk3zKJWIs6Tb8nJcKwh-Fuv0CHlJ8PtGAuV9byRuw,85 +onnx/backend/test/data/node/test_greater_equal_bcast_expanded/model.onnx,sha256=fATWTpPG4PUHGDL0tDnliPidXPuXmVd2GPSryeXnkmQ,447 +onnx/backend/test/data/node/test_greater_equal_bcast_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_greater_equal_bcast_expanded/test_data_set_0/input_1.pb,sha256=jYn9FLUzE9ETJcxFW48XqSehRlyePPjUoI-gtNyozBs,29 +onnx/backend/test/data/node/test_greater_equal_bcast_expanded/test_data_set_0/output_0.pb,sha256=oDBk3zKJWIs6Tb8nJcKwh-Fuv0CHlJ8PtGAuV9byRuw,85 +onnx/backend/test/data/node/test_greater_equal_expanded/model.onnx,sha256=5stox5Y1fFCl4dX3MuRH40TfIeiJHg4_zUwRAt0FyBQ,425 +onnx/backend/test/data/node/test_greater_equal_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_greater_equal_expanded/test_data_set_0/input_1.pb,sha256=qlmuuQNl3eLtxabKfNE6cbZHjYcj-DzNBr1M4rz7GSo,254 
+onnx/backend/test/data/node/test_greater_equal_expanded/test_data_set_0/output_0.pb,sha256=mzT9JswDPorPoAK92dgoB5a05d4MI3Tx5985GQu3Ntw,85 +onnx/backend/test/data/node/test_gridsample/model.onnx,sha256=Y5taQZPMQ3tH-j6quHLs65SS66KnAys189L6YIKg1Lg,225 +onnx/backend/test/data/node/test_gridsample/test_data_set_0/input_0.pb,sha256=Er_KMA_bpGQqp91W7npSqfc4KcISgiYpOOth1aVtUoI,79 +onnx/backend/test/data/node/test_gridsample/test_data_set_0/input_1.pb,sha256=VIc-rbiGUK4N9CBPdEUTgumay_Uf6CQo8S8rSodbCdo,307 +onnx/backend/test/data/node/test_gridsample/test_data_set_0/output_0.pb,sha256=35yxRZMYSQ1swNbyzFBFwVsmmeUdZR7iduxEuQ3I0NM,160 +onnx/backend/test/data/node/test_gridsample_aligncorners_true/model.onnx,sha256=gIUAf9d-myMl0eBUPmBLWI688_xSvUt6GHQWbY3M7ac,217 +onnx/backend/test/data/node/test_gridsample_aligncorners_true/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_aligncorners_true/test_data_set_0/input_1.pb,sha256=-sgoYS71T1zyadZBJamze1_tDkLWp1RdqtziONB_zus,82 +onnx/backend/test/data/node/test_gridsample_aligncorners_true/test_data_set_0/output_0.pb,sha256=LKkY9CQbQvs4La-iuW0PzhH7gUCo-vr7friq3vXiyRw,47 +onnx/backend/test/data/node/test_gridsample_bicubic/model.onnx,sha256=QJFtvpq57DHb-6CHFbgT9VSK0GIiTZx6A3-sa0FPU8g,184 +onnx/backend/test/data/node/test_gridsample_bicubic/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_bicubic/test_data_set_0/input_1.pb,sha256=-sgoYS71T1zyadZBJamze1_tDkLWp1RdqtziONB_zus,82 +onnx/backend/test/data/node/test_gridsample_bicubic/test_data_set_0/output_0.pb,sha256=iWyxqWQU2N7P8sBWOhtYGlbaZOizT5bWdJyERH9Vmd4,47 +onnx/backend/test/data/node/test_gridsample_bicubic_align_corners_0_additional_1/model.onnx,sha256=fX_RCGD8Fe7aHKaPSUnIXsf2v4zAN1fm8dTtgbeMawg,235 +onnx/backend/test/data/node/test_gridsample_bicubic_align_corners_0_additional_1/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_bicubic_align_corners_0_additional_1/test_data_set_0/input_1.pb,sha256=S-pBmDHOidx8aoIg0YScdo5k3XAxEqy94qiZuwzIg5E,82 +onnx/backend/test/data/node/test_gridsample_bicubic_align_corners_0_additional_1/test_data_set_0/output_0.pb,sha256=CH1eGEUSYtT6gnhdwtywpw00oxGO061P3KX40yu1pNE,47 +onnx/backend/test/data/node/test_gridsample_bicubic_align_corners_1_additional_1/model.onnx,sha256=ICdDK7uhaVQNAn-JnN5IWJSwZfBwVMIkam0Pnupooxo,235 +onnx/backend/test/data/node/test_gridsample_bicubic_align_corners_1_additional_1/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_bicubic_align_corners_1_additional_1/test_data_set_0/input_1.pb,sha256=S-pBmDHOidx8aoIg0YScdo5k3XAxEqy94qiZuwzIg5E,82 +onnx/backend/test/data/node/test_gridsample_bicubic_align_corners_1_additional_1/test_data_set_0/output_0.pb,sha256=FA2Hwp_kMsEDsazjxJCrhvf9B1b6sG4cCYriEcmOLPw,47 +onnx/backend/test/data/node/test_gridsample_bilinear/model.onnx,sha256=tOzktSXr7QWFk9cnI_qa2XoX2ViXUVFgjV1mHX8Tw9k,186 +onnx/backend/test/data/node/test_gridsample_bilinear/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_bilinear/test_data_set_0/input_1.pb,sha256=-sgoYS71T1zyadZBJamze1_tDkLWp1RdqtziONB_zus,82 +onnx/backend/test/data/node/test_gridsample_bilinear/test_data_set_0/output_0.pb,sha256=JT3KtQjkMJaSS4Apt00mMjXzrXSol3CsMvGX-0_2-w8,47 
+onnx/backend/test/data/node/test_gridsample_bilinear_align_corners_0_additional_1/model.onnx,sha256=MtXiA9UDvQ6f4Rcw7WOwucp7Zu3Fn-EebrV0XIhw2AQ,237 +onnx/backend/test/data/node/test_gridsample_bilinear_align_corners_0_additional_1/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_bilinear_align_corners_0_additional_1/test_data_set_0/input_1.pb,sha256=S-pBmDHOidx8aoIg0YScdo5k3XAxEqy94qiZuwzIg5E,82 +onnx/backend/test/data/node/test_gridsample_bilinear_align_corners_0_additional_1/test_data_set_0/output_0.pb,sha256=Hia4lHz9zXuIbUxTcFdXcKVmSfITXZX2Nv3iWTsYPc8,47 +onnx/backend/test/data/node/test_gridsample_bilinear_align_corners_1_additional_1/model.onnx,sha256=PRN0rfnJuEkG3ALA576Yxhi7t5bTh7wXq_9Akb_lbhA,237 +onnx/backend/test/data/node/test_gridsample_bilinear_align_corners_1_additional_1/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_bilinear_align_corners_1_additional_1/test_data_set_0/input_1.pb,sha256=S-pBmDHOidx8aoIg0YScdo5k3XAxEqy94qiZuwzIg5E,82 +onnx/backend/test/data/node/test_gridsample_bilinear_align_corners_1_additional_1/test_data_set_0/output_0.pb,sha256=2lnf2H2pxf833H5TR83M3Ie7RrrEwAq95jOFyr1n6I4,47 +onnx/backend/test/data/node/test_gridsample_border_padding/model.onnx,sha256=pyu6QYZLC2IW6bfaeiia9tN-9bbYyUx0FdSLKVmdmXU,200 +onnx/backend/test/data/node/test_gridsample_border_padding/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_border_padding/test_data_set_0/input_1.pb,sha256=uIKQ9S1EJfGQDnIqDNFDZbPRcI4P9J11zZOQAFxwJGA,82 +onnx/backend/test/data/node/test_gridsample_border_padding/test_data_set_0/output_0.pb,sha256=9rfdAbkyFmuRjDoU9VsoDOaem0ChoKqoz4aBl_pcsGk,47 +onnx/backend/test/data/node/test_gridsample_nearest/model.onnx,sha256=Rju8O5VexzU3kNqPA8uxj7NQs7B95_BAEUaclho0Sf0,186 +onnx/backend/test/data/node/test_gridsample_nearest/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_nearest/test_data_set_0/input_1.pb,sha256=-sgoYS71T1zyadZBJamze1_tDkLWp1RdqtziONB_zus,82 +onnx/backend/test/data/node/test_gridsample_nearest/test_data_set_0/output_0.pb,sha256=RM6pagyCCllTI5PrwIPKwCNW9RocxzB7ktzcax9cNiM,47 +onnx/backend/test/data/node/test_gridsample_nearest_align_corners_0_additional_1/model.onnx,sha256=EB3bPuLQQBu3Sh-QuBZHq8p7LZbYNW0nL78qqysJifg,237 +onnx/backend/test/data/node/test_gridsample_nearest_align_corners_0_additional_1/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_nearest_align_corners_0_additional_1/test_data_set_0/input_1.pb,sha256=S-pBmDHOidx8aoIg0YScdo5k3XAxEqy94qiZuwzIg5E,82 +onnx/backend/test/data/node/test_gridsample_nearest_align_corners_0_additional_1/test_data_set_0/output_0.pb,sha256=pprCS5vT63wV2Kn1n1Vm8G_lSlOxeKhGCoWiNFmhJWg,47 +onnx/backend/test/data/node/test_gridsample_nearest_align_corners_1_additional_1/model.onnx,sha256=5xb97cMD3DHOWLTFeNrbIrBgPM8ahe26nb8EPgd5ZYc,237 +onnx/backend/test/data/node/test_gridsample_nearest_align_corners_1_additional_1/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_nearest_align_corners_1_additional_1/test_data_set_0/input_1.pb,sha256=S-pBmDHOidx8aoIg0YScdo5k3XAxEqy94qiZuwzIg5E,82 
+onnx/backend/test/data/node/test_gridsample_nearest_align_corners_1_additional_1/test_data_set_0/output_0.pb,sha256=AzIm5H-C20yEZs3qQNunUmyAwLF5U2ziZlH17iYr7EM,47 +onnx/backend/test/data/node/test_gridsample_reflection_padding/model.onnx,sha256=31P3a0rcwL2v8CNOnePd82cxzrQpzRpiOpu13R_QccQ,208 +onnx/backend/test/data/node/test_gridsample_reflection_padding/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 +onnx/backend/test/data/node/test_gridsample_reflection_padding/test_data_set_0/input_1.pb,sha256=uIKQ9S1EJfGQDnIqDNFDZbPRcI4P9J11zZOQAFxwJGA,82 +onnx/backend/test/data/node/test_gridsample_reflection_padding/test_data_set_0/output_0.pb,sha256=8Ovh-MCNiNQzDI6mWj3wv0noANRS_mwlPRnZPtEz7Pk,47 +onnx/backend/test/data/node/test_gridsample_volumetric_bilinear_align_corners_0/model.onnx,sha256=109XZARKZ-ALKmdI1r4qPB_ZRpde8KftZEHZKkayp1w,247 +onnx/backend/test/data/node/test_gridsample_volumetric_bilinear_align_corners_0/test_data_set_0/input_0.pb,sha256=w3udC33Xk5943mn4uUmJjjfWWlSTh4VpmQ6jcfBb39c,65 +onnx/backend/test/data/node/test_gridsample_volumetric_bilinear_align_corners_0/test_data_set_0/input_1.pb,sha256=j0XIrTf0kge2JI0qWKCw8Pgklg_WapelI24JtKplbYI,213 +onnx/backend/test/data/node/test_gridsample_volumetric_bilinear_align_corners_0/test_data_set_0/output_0.pb,sha256=4RZ1gXSksQEIzTr2XANATu1u1zucNYe-SP9YA84ksg4,81 +onnx/backend/test/data/node/test_gridsample_volumetric_bilinear_align_corners_1/model.onnx,sha256=IzXpsWlysmwNbvekQZh0CofFYM-Nyx3Rv34XjCNfaW4,247 +onnx/backend/test/data/node/test_gridsample_volumetric_bilinear_align_corners_1/test_data_set_0/input_0.pb,sha256=w3udC33Xk5943mn4uUmJjjfWWlSTh4VpmQ6jcfBb39c,65 +onnx/backend/test/data/node/test_gridsample_volumetric_bilinear_align_corners_1/test_data_set_0/input_1.pb,sha256=j0XIrTf0kge2JI0qWKCw8Pgklg_WapelI24JtKplbYI,213 +onnx/backend/test/data/node/test_gridsample_volumetric_bilinear_align_corners_1/test_data_set_0/output_0.pb,sha256=HxSumNRz-peyT_0-4Ic5h34T9DFf6vqx5KobtnJCVvQ,81 +onnx/backend/test/data/node/test_gridsample_volumetric_nearest_align_corners_0/model.onnx,sha256=jsaB4NmUTJr672hwcxER7M_IEmOZi2MSGo91d6dvGIU,247 +onnx/backend/test/data/node/test_gridsample_volumetric_nearest_align_corners_0/test_data_set_0/input_0.pb,sha256=w3udC33Xk5943mn4uUmJjjfWWlSTh4VpmQ6jcfBb39c,65 +onnx/backend/test/data/node/test_gridsample_volumetric_nearest_align_corners_0/test_data_set_0/input_1.pb,sha256=j0XIrTf0kge2JI0qWKCw8Pgklg_WapelI24JtKplbYI,213 +onnx/backend/test/data/node/test_gridsample_volumetric_nearest_align_corners_0/test_data_set_0/output_0.pb,sha256=qpKayAfI6ebDn-hG3_zWQEN-ktIeocC7cnuXbe6dBi8,81 +onnx/backend/test/data/node/test_gridsample_volumetric_nearest_align_corners_1/model.onnx,sha256=F4iUvnBU8njXkNcay_oG3q-jaH2nBn8uZAQ1iUAz-BY,247 +onnx/backend/test/data/node/test_gridsample_volumetric_nearest_align_corners_1/test_data_set_0/input_0.pb,sha256=w3udC33Xk5943mn4uUmJjjfWWlSTh4VpmQ6jcfBb39c,65 +onnx/backend/test/data/node/test_gridsample_volumetric_nearest_align_corners_1/test_data_set_0/input_1.pb,sha256=j0XIrTf0kge2JI0qWKCw8Pgklg_WapelI24JtKplbYI,213 +onnx/backend/test/data/node/test_gridsample_volumetric_nearest_align_corners_1/test_data_set_0/output_0.pb,sha256=91xEyuO_lD8wWYy5PndQ9qwj-Cu4dTvoPM7eJzu9Ww0,81 +onnx/backend/test/data/node/test_gridsample_zeros_padding/model.onnx,sha256=Bi93sE9_j6hEFEcwdHjGJ2pPxcsrtd-WZMb14MigeoE,198 +onnx/backend/test/data/node/test_gridsample_zeros_padding/test_data_set_0/input_0.pb,sha256=eusk3-63HiQIJBbA_iwre2x4nSbdmUrKrC1hscOgPoQ,39 
+onnx/backend/test/data/node/test_gridsample_zeros_padding/test_data_set_0/input_1.pb,sha256=uIKQ9S1EJfGQDnIqDNFDZbPRcI4P9J11zZOQAFxwJGA,82 +onnx/backend/test/data/node/test_gridsample_zeros_padding/test_data_set_0/output_0.pb,sha256=jRhFqa9skdV2ePTbDrQAFklZOaUhfsUJp4FA8TgrUq8,47 +onnx/backend/test/data/node/test_group_normalization_epsilon/model.onnx,sha256=IWmWvvQALlCFUQsKw2vuzBZVCNLgorkLrWCJEPcJG1k,237 +onnx/backend/test/data/node/test_group_normalization_epsilon/test_data_set_0/input_0.pb,sha256=692CieKQgoIgcJDXYv6vb3tpyZcEpU2pA8gk_cgjurc,208 +onnx/backend/test/data/node/test_group_normalization_epsilon/test_data_set_0/input_1.pb,sha256=V8-sP2yOFVu15tceGemZwbw8kYpaxnUBnMrD1VV5uPs,29 +onnx/backend/test/data/node/test_group_normalization_epsilon/test_data_set_0/input_2.pb,sha256=67rO8d_pa-UblpMvR2LTgAvqsWOcSl3Y90e1kFz0LhQ,28 +onnx/backend/test/data/node/test_group_normalization_epsilon/test_data_set_0/output_0.pb,sha256=JjaAD2JmnNphn2cTa3r6PnSW2T8YEzXcgzuS2UfJlfw,208 +onnx/backend/test/data/node/test_group_normalization_epsilon_expanded/model.onnx,sha256=XTSHGpSU1AfC_DjsnV3SNqGnu-_e64LZr7i_BGE0GRg,7018 +onnx/backend/test/data/node/test_group_normalization_epsilon_expanded/test_data_set_0/input_0.pb,sha256=692CieKQgoIgcJDXYv6vb3tpyZcEpU2pA8gk_cgjurc,208 +onnx/backend/test/data/node/test_group_normalization_epsilon_expanded/test_data_set_0/input_1.pb,sha256=V8-sP2yOFVu15tceGemZwbw8kYpaxnUBnMrD1VV5uPs,29 +onnx/backend/test/data/node/test_group_normalization_epsilon_expanded/test_data_set_0/input_2.pb,sha256=67rO8d_pa-UblpMvR2LTgAvqsWOcSl3Y90e1kFz0LhQ,28 +onnx/backend/test/data/node/test_group_normalization_epsilon_expanded/test_data_set_0/output_0.pb,sha256=JjaAD2JmnNphn2cTa3r6PnSW2T8YEzXcgzuS2UfJlfw,208 +onnx/backend/test/data/node/test_group_normalization_example/model.onnx,sha256=ru04WRQ8XYo0fGdeEs1sDftlHVr9yto9Gtf3nQKPIQI,218 +onnx/backend/test/data/node/test_group_normalization_example/test_data_set_0/input_0.pb,sha256=692CieKQgoIgcJDXYv6vb3tpyZcEpU2pA8gk_cgjurc,208 +onnx/backend/test/data/node/test_group_normalization_example/test_data_set_0/input_1.pb,sha256=V8-sP2yOFVu15tceGemZwbw8kYpaxnUBnMrD1VV5uPs,29 +onnx/backend/test/data/node/test_group_normalization_example/test_data_set_0/input_2.pb,sha256=67rO8d_pa-UblpMvR2LTgAvqsWOcSl3Y90e1kFz0LhQ,28 +onnx/backend/test/data/node/test_group_normalization_example/test_data_set_0/output_0.pb,sha256=BVpVQ2TcrdSkl3KPb8GNgtKDPozLxYe0be9x7C0MDHE,208 +onnx/backend/test/data/node/test_group_normalization_example_expanded/model.onnx,sha256=09n99FpzxcHOp6Io2Vb46zem0PtnRxmMr7v0qMnPSPQ,7018 +onnx/backend/test/data/node/test_group_normalization_example_expanded/test_data_set_0/input_0.pb,sha256=692CieKQgoIgcJDXYv6vb3tpyZcEpU2pA8gk_cgjurc,208 +onnx/backend/test/data/node/test_group_normalization_example_expanded/test_data_set_0/input_1.pb,sha256=V8-sP2yOFVu15tceGemZwbw8kYpaxnUBnMrD1VV5uPs,29 +onnx/backend/test/data/node/test_group_normalization_example_expanded/test_data_set_0/input_2.pb,sha256=67rO8d_pa-UblpMvR2LTgAvqsWOcSl3Y90e1kFz0LhQ,28 +onnx/backend/test/data/node/test_group_normalization_example_expanded/test_data_set_0/output_0.pb,sha256=BVpVQ2TcrdSkl3KPb8GNgtKDPozLxYe0be9x7C0MDHE,208 +onnx/backend/test/data/node/test_gru_batchwise/model.onnx,sha256=EJ7A4Lcnv9u2MxIAe6aUd7SRfGWA8rsG2_KbvUJ7sWQ,235 +onnx/backend/test/data/node/test_gru_batchwise/test_data_set_0/input_0.pb,sha256=X4Jx731skujCbqYlfE79v4ovLqbqAar9VfhQTegviw8,37 
+onnx/backend/test/data/node/test_gru_batchwise/test_data_set_0/input_1.pb,sha256=1XWIhQUoTeNdQ7bZCX8yrBOX7Q92l7BeORAPrRY1avU,158 +onnx/backend/test/data/node/test_gru_batchwise/test_data_set_0/input_2.pb,sha256=oTE_0fIu0UOnnYZ7CNygBadHRlesmV1ETn3TvYDEya4,446 +onnx/backend/test/data/node/test_gru_batchwise/test_data_set_0/output_0.pb,sha256=9JVi2_8o_taU1q2mC9RFUb8_KHoyJ-J4xI0rXrK5LYs,87 +onnx/backend/test/data/node/test_gru_batchwise/test_data_set_0/output_1.pb,sha256=CnxT40oj7emJcthZanYarqegr_Zsvg3qkro1M3x3dUU,87 +onnx/backend/test/data/node/test_gru_defaults/model.onnx,sha256=pDyjizKvec0NBmGlQm_mVeKKbVfEZqsb007hZq5Bo8A,189 +onnx/backend/test/data/node/test_gru_defaults/test_data_set_0/input_0.pb,sha256=CfdyEi1TF1qNXdr7S9ZjPY0_c7pWtxM7ou3bNl3fNIo,37 +onnx/backend/test/data/node/test_gru_defaults/test_data_set_0/input_1.pb,sha256=Z_bpWjBL52Ni7_VmBBnx9GHq8oGZzSILXNiGdNDDxiQ,133 +onnx/backend/test/data/node/test_gru_defaults/test_data_set_0/input_2.pb,sha256=blVg0U6-PdjmVF5azf2rAQafYFnzJtcTojgoF5OKCak,314 +onnx/backend/test/data/node/test_gru_defaults/test_data_set_0/output_0.pb,sha256=xhW9IjUNOKkLWOQu1imw5N_rLbHxi-vqJStJf-FksVM,75 +onnx/backend/test/data/node/test_gru_seq_length/model.onnx,sha256=-5slDVVQLeKrsx2oXppph0Ecn5-FyJsyKNJFl6NCcjM,215 +onnx/backend/test/data/node/test_gru_seq_length/test_data_set_0/input_0.pb,sha256=fFR4OIXWBj5wmFEuZbfl7GOhhRaSJv9tEZM8VbjQ72Q,85 +onnx/backend/test/data/node/test_gru_seq_length/test_data_set_0/input_1.pb,sha256=tdQyLDrgooqElZX1X_yvVWQ3l77QTVxNMuJ9Mc3my5A,194 +onnx/backend/test/data/node/test_gru_seq_length/test_data_set_0/input_2.pb,sha256=bGMNqQFklQXONtCc1NZ73_jH8eb-uYXHaNyqNefJOzg,314 +onnx/backend/test/data/node/test_gru_seq_length/test_data_set_0/input_3.pb,sha256=pj0sMaV4OPwfxoqssa4Ub4IA_xoFv1hFo--pPjxftQ4,131 +onnx/backend/test/data/node/test_gru_seq_length/test_data_set_0/output_0.pb,sha256=ANqsfpHbnQLw0XMH1goz9oheZN79GTowkv3yVIr6l8A,75 +onnx/backend/test/data/node/test_gru_with_initial_bias/model.onnx,sha256=7ujI1hPo9JwV7jRibMTJudZbvNfm8b6HDmbtRLzRFQk,222 +onnx/backend/test/data/node/test_gru_with_initial_bias/test_data_set_0/input_0.pb,sha256=eujm2YGHVRU93EAGK-aPA-aFEublku2wP288wJjVc4M,49 +onnx/backend/test/data/node/test_gru_with_initial_bias/test_data_set_0/input_1.pb,sha256=EfCDZYPnRWB7IsE8QGLIxMMz1xsQspu2INrBzW0mBtw,121 +onnx/backend/test/data/node/test_gru_with_initial_bias/test_data_set_0/input_2.pb,sha256=NuX-1ShTCBOI4m3H5fPAevf2oD_13zSsGNnhuLqlfwg,121 +onnx/backend/test/data/node/test_gru_with_initial_bias/test_data_set_0/input_3.pb,sha256=SVjGa5u2ro0p1fUr5PEg1iOb7l12XHqFk9ZhQdMB2uQ,83 +onnx/backend/test/data/node/test_gru_with_initial_bias/test_data_set_0/output_0.pb,sha256=HqlZXwUiQd76iSbd2zcKuJXxDM4Xe2av5ojJ5ndZ4Vk,51 +onnx/backend/test/data/node/test_hammingwindow/model.onnx,sha256=XmWXPruOkAjvFooZAUhkRImHklBQyIc-oNDAnk6Kp_s,97 +onnx/backend/test/data/node/test_hammingwindow/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 +onnx/backend/test/data/node/test_hammingwindow/test_data_set_0/output_0.pb,sha256=KHRxapvLm3gwLRyjDLCTSUw2LYu0R-4Ine-q075Xs4A,49 +onnx/backend/test/data/node/test_hammingwindow_expanded/model.onnx,sha256=q-Ts9UxK0oBEFLslo8fT_xaD5tAIfY0LUc9DqeVTYk8,4131 +onnx/backend/test/data/node/test_hammingwindow_expanded/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 +onnx/backend/test/data/node/test_hammingwindow_expanded/test_data_set_0/output_0.pb,sha256=KHRxapvLm3gwLRyjDLCTSUw2LYu0R-4Ine-q075Xs4A,49 
+onnx/backend/test/data/node/test_hammingwindow_symmetric/model.onnx,sha256=8JOAx7sAb5z3xfSuOwq8AD30ghEjTR1iH06VS9uqAsM,124 +onnx/backend/test/data/node/test_hammingwindow_symmetric/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 +onnx/backend/test/data/node/test_hammingwindow_symmetric/test_data_set_0/output_0.pb,sha256=joP1pO8FY4x2Ztta52__5Q78G3RP-3zOo9yp_Np0rLY,49 +onnx/backend/test/data/node/test_hammingwindow_symmetric_expanded/model.onnx,sha256=n6fC4unzUlQFNiC0fiAPQRhIuTRkdwb-xIbr2ucCxks,4701 +onnx/backend/test/data/node/test_hammingwindow_symmetric_expanded/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 +onnx/backend/test/data/node/test_hammingwindow_symmetric_expanded/test_data_set_0/output_0.pb,sha256=joP1pO8FY4x2Ztta52__5Q78G3RP-3zOo9yp_Np0rLY,49 +onnx/backend/test/data/node/test_hannwindow/model.onnx,sha256=fpu3WfmAnvLPZokShbCTr9zhweBMiIs4BSWCJ6kM14E,91 +onnx/backend/test/data/node/test_hannwindow/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 +onnx/backend/test/data/node/test_hannwindow/test_data_set_0/output_0.pb,sha256=3vdVUKH0CbdNd09aJUNTXAxwWQ4hNyJMLBFsekCaXgU,49 +onnx/backend/test/data/node/test_hannwindow_expanded/model.onnx,sha256=ghAYZSC8XHRMj1HAkopEVhR1VEM8XJ_JCD0RcUFmJ_o,3792 +onnx/backend/test/data/node/test_hannwindow_expanded/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 +onnx/backend/test/data/node/test_hannwindow_expanded/test_data_set_0/output_0.pb,sha256=3vdVUKH0CbdNd09aJUNTXAxwWQ4hNyJMLBFsekCaXgU,49 +onnx/backend/test/data/node/test_hannwindow_symmetric/model.onnx,sha256=ieBsEc3yA82fJuZLOPWaGQXzY1oC639oG1dzpxVjR8g,118 +onnx/backend/test/data/node/test_hannwindow_symmetric/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 +onnx/backend/test/data/node/test_hannwindow_symmetric/test_data_set_0/output_0.pb,sha256=3mRQHHxV0WLzf57XNm_Y-ao24EVy5cxRSWlrJle_Uak,49 +onnx/backend/test/data/node/test_hannwindow_symmetric_expanded/model.onnx,sha256=Xpsrjinpeu2i0nz2_8Jm6nIHzLhVwNLxc1w5QM8kr3Q,4362 +onnx/backend/test/data/node/test_hannwindow_symmetric_expanded/test_data_set_0/input_0.pb,sha256=xTJpebPqFxfV2WSsCwKnXNthXGyvwqNu-H_FWa5J6cA,11 +onnx/backend/test/data/node/test_hannwindow_symmetric_expanded/test_data_set_0/output_0.pb,sha256=3mRQHHxV0WLzf57XNm_Y-ao24EVy5cxRSWlrJle_Uak,49 +onnx/backend/test/data/node/test_hardmax_axis_0/model.onnx,sha256=moiCGKB0Oc3qJtx6dMYz2IfuBDJShvTEZHTGzMR7tBM,125 +onnx/backend/test/data/node/test_hardmax_axis_0/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_hardmax_axis_0/test_data_set_0/output_0.pb,sha256=-jupCDBJs-WrmX2w6Gbwg76FW-XG0RzAhHYZeclMMHM,254 +onnx/backend/test/data/node/test_hardmax_axis_1/model.onnx,sha256=lLcJFwZG9SxP1SR1ip1xRH_GWjwwUgZSGcDmK_DiOKg,125 +onnx/backend/test/data/node/test_hardmax_axis_1/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_hardmax_axis_1/test_data_set_0/output_0.pb,sha256=ShJTro9DVDSj1gN_QIhaNfi3aDsXbW0xlw5He2wbAF0,254 +onnx/backend/test/data/node/test_hardmax_axis_2/model.onnx,sha256=Aq5v4HywyLNApU7lL1ge1Izhbwamc7jnq2GVxB27hkA,125 +onnx/backend/test/data/node/test_hardmax_axis_2/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_hardmax_axis_2/test_data_set_0/output_0.pb,sha256=p6ts1e5G3uPByR7frd8okvp0wsK8j91dRLPkDbq5Yf4,254 
+onnx/backend/test/data/node/test_hardmax_default_axis/model.onnx,sha256=K3iL4jVTDBNWZxWOSU7B9U-AP6EStZbEW4EfvDR8YKI,118 +onnx/backend/test/data/node/test_hardmax_default_axis/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_hardmax_default_axis/test_data_set_0/output_0.pb,sha256=p6ts1e5G3uPByR7frd8okvp0wsK8j91dRLPkDbq5Yf4,254 +onnx/backend/test/data/node/test_hardmax_example/model.onnx,sha256=tAlOkY2DZGE9MiBisTVqXU-wggSqTxs7DOiy6piZ-wQ,105 +onnx/backend/test/data/node/test_hardmax_example/test_data_set_0/input_0.pb,sha256=OzGq9WBWdosYSRvhnaiLhc7KylgWFWhYq8uaEybAeNs,75 +onnx/backend/test/data/node/test_hardmax_example/test_data_set_0/output_0.pb,sha256=M4Nd1Dixr1GZVG8M8nttCzW57XEJpxlL3pT_jKit-as,75 +onnx/backend/test/data/node/test_hardmax_negative_axis/model.onnx,sha256=GEMroWxNGa7Kdjf5rFkOztNV5167ywdGdc2gIlLGXmI,141 +onnx/backend/test/data/node/test_hardmax_negative_axis/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_hardmax_negative_axis/test_data_set_0/output_0.pb,sha256=p6ts1e5G3uPByR7frd8okvp0wsK8j91dRLPkDbq5Yf4,254 +onnx/backend/test/data/node/test_hardmax_one_hot/model.onnx,sha256=_QtPLoPEGl0HTFjIH6AWIiqzLwUzHdOc2SjWXHiK2bI,105 +onnx/backend/test/data/node/test_hardmax_one_hot/test_data_set_0/input_0.pb,sha256=DLuz3VAkGId5FXX8jyoCfePLABiQklqpVIJJXzXOvsc,27 +onnx/backend/test/data/node/test_hardmax_one_hot/test_data_set_0/output_0.pb,sha256=P6WwvTGfwLDhJb72onY1iPvk3b0s1_tn5fMLATQ1JtU,27 +onnx/backend/test/data/node/test_hardsigmoid/model.onnx,sha256=x1jxdfM48F0WGkijtGaaTVbTtdPHxnjTRlCk6DB0FSs,146 +onnx/backend/test/data/node/test_hardsigmoid/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_hardsigmoid/test_data_set_0/output_0.pb,sha256=TqEPthNFWy5d78pFlUAqA8L0OV5_7USxH89x_hDCGSY,254 +onnx/backend/test/data/node/test_hardsigmoid_default/model.onnx,sha256=Y7ODdiBbpLK5fxFTSMMTIMqPfBaA7lwgo47S8xmOt1Y,121 +onnx/backend/test/data/node/test_hardsigmoid_default/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_hardsigmoid_default/test_data_set_0/output_0.pb,sha256=9uWdF9Of6-C1Xl4uBs_ViDKh_BS2f-kxq05kKF6CL8E,254 +onnx/backend/test/data/node/test_hardsigmoid_default_expanded_ver18/model.onnx,sha256=abINq0Rs2GihOQ0A0ZGP-wrzpbfi1vQw8-zXbMtUCKs,1832 +onnx/backend/test/data/node/test_hardsigmoid_default_expanded_ver18/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_hardsigmoid_default_expanded_ver18/test_data_set_0/output_0.pb,sha256=9uWdF9Of6-C1Xl4uBs_ViDKh_BS2f-kxq05kKF6CL8E,254 +onnx/backend/test/data/node/test_hardsigmoid_example/model.onnx,sha256=zXIxR7vCrEiVbhZT-8cy11aWjdJVvGMst4HEfjk3mo4,138 +onnx/backend/test/data/node/test_hardsigmoid_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_hardsigmoid_example/test_data_set_0/output_0.pb,sha256=PHqA-MDOmKyDONKLmnSan5kJG8kPdXMzAzKuVLYvEGI,21 +onnx/backend/test/data/node/test_hardsigmoid_example_expanded_ver18/model.onnx,sha256=C5ZbLmEUIJ2Cx3uX_V9D5hOk28lCRZCoNp_pstKJr4A,1816 +onnx/backend/test/data/node/test_hardsigmoid_example_expanded_ver18/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 
+onnx/backend/test/data/node/test_hardsigmoid_example_expanded_ver18/test_data_set_0/output_0.pb,sha256=PHqA-MDOmKyDONKLmnSan5kJG8kPdXMzAzKuVLYvEGI,21 +onnx/backend/test/data/node/test_hardsigmoid_expanded_ver18/model.onnx,sha256=S9Qspqr4OliFKvUDw3SzZdX-rsMZhcEzOoYvaq8XiLU,1643 +onnx/backend/test/data/node/test_hardsigmoid_expanded_ver18/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_hardsigmoid_expanded_ver18/test_data_set_0/output_0.pb,sha256=TqEPthNFWy5d78pFlUAqA8L0OV5_7USxH89x_hDCGSY,254 +onnx/backend/test/data/node/test_hardswish/model.onnx,sha256=L-oSF3JZQ1u0bOfve5WvbO1oKug4e0XpoY5FXCEKc1A,109 +onnx/backend/test/data/node/test_hardswish/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_hardswish/test_data_set_0/output_0.pb,sha256=GmF26kbYvCTZt0rhtvIs_790x5A_8j5LG4zpVHM8BaI,254 +onnx/backend/test/data/node/test_hardswish_expanded/model.onnx,sha256=94S0Yt1sfvNl1oFwM_9VttQwAQsshx2--_2U5brMOw8,266 +onnx/backend/test/data/node/test_hardswish_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_hardswish_expanded/test_data_set_0/output_0.pb,sha256=GmF26kbYvCTZt0rhtvIs_790x5A_8j5LG4zpVHM8BaI,254 +onnx/backend/test/data/node/test_identity/model.onnx,sha256=uSLPZlspmrNbsmXfTe4UizZVB1sQz3vx0S01TQ9nmsw,115 +onnx/backend/test/data/node/test_identity/test_data_set_0/input_0.pb,sha256=jVVsnPYTtqJkezoas6FKgAvUJVH-Z4OI0Eocd13lMSg,31 +onnx/backend/test/data/node/test_identity/test_data_set_0/output_0.pb,sha256=gGR3wxRX145Jj-O43a8Qh6gPsPhFfPlEHErpPYQTP4Q,31 +onnx/backend/test/data/node/test_identity_opt/model.onnx,sha256=O_lLBDX5CpDSi7O2MOl6WIg4QEQHw36dxVPbu3ejBoA,133 +onnx/backend/test/data/node/test_identity_opt/test_data_set_0/input_0.pb,sha256=YddZEUyYhxMxNF8EBJChB6-dIseFq_7iAqB4GMdZr5s,42 +onnx/backend/test/data/node/test_identity_opt/test_data_set_0/output_0.pb,sha256=eh6A9N--g1PUhUrMyCSJUT3XmOiwx8LdKFOQ96LXotM,43 +onnx/backend/test/data/node/test_identity_sequence/model.onnx,sha256=WS5dB97IqwRuWLibzy-iQDdSiN9sjv7FVyYrIg3WneA,96 +onnx/backend/test/data/node/test_identity_sequence/test_data_set_0/input_0.pb,sha256=OtWKEZPsdistnxRCHxBveuTAodHN1mzb88-jDLRqGWM,65 +onnx/backend/test/data/node/test_identity_sequence/test_data_set_0/output_0.pb,sha256=Nzoz0AbflpzSX5mlHltvaOoEuIt3h7yohG3-3hUdgXs,65 +onnx/backend/test/data/node/test_if/model.onnx,sha256=Kpbswl4zKb87TSAXvwkKrXZ-pz5_FiaP8_q9CBT4q_g,321 +onnx/backend/test/data/node/test_if/test_data_set_0/input_0.pb,sha256=OH775Ik9hGMBcOHijMYFq_DdWSSqgPdT-CZfhNjcOpw,11 +onnx/backend/test/data/node/test_if/test_data_set_0/output_0.pb,sha256=6Z-sAab-Ll2mHzCTuxkbUjBy_RmQb_4im7xdYzJzlbA,31 +onnx/backend/test/data/node/test_if_opt/model.onnx,sha256=bHltYx2DCg9ppabsHWPOdZ6ovPN8-fRYxKS9kcHBsOU,419 +onnx/backend/test/data/node/test_if_opt/test_data_set_0/input_0.pb,sha256=9kbmn5T_KJnQNPG1ixMvyTnx_50JkgYRl4SGlqjdY00,11 +onnx/backend/test/data/node/test_if_opt/test_data_set_0/output_0.pb,sha256=CD_cSpf7zENvc3TWqcIg_NGBAiNDKNKlQTYwwfMpe04,44 +onnx/backend/test/data/node/test_if_seq/model.onnx,sha256=OzaqxTiIf5D_51--3X-8qhjyGSpDus2SnZ5ez_ImuRc,389 +onnx/backend/test/data/node/test_if_seq/test_data_set_0/input_0.pb,sha256=OH775Ik9hGMBcOHijMYFq_DdWSSqgPdT-CZfhNjcOpw,11 +onnx/backend/test/data/node/test_if_seq/test_data_set_0/output_0.pb,sha256=nrKwZGW_LIu6T51XGN9q76qgQlPcdYwJp1dZ-icyV8w,35 
+onnx/backend/test/data/node/test_image_decoder_decode_bmp_rgb/model.onnx,sha256=WliphL_oL1UgrTQ1-onJLzDK2VWpGKC1PZv9UYUvpms,165 +onnx/backend/test/data/node/test_image_decoder_decode_bmp_rgb/test_data_set_0/input_0.pb,sha256=3WphR9u0eYaYc-XgtQK4UjGM2EWmQuMDht0k3NM8pno,3140 +onnx/backend/test/data/node/test_image_decoder_decode_bmp_rgb/test_data_set_0/output_0.pb,sha256=r6Ad2pCIMQzYmpKqj7lhmrshM4Y3dcEU9S8Zq198uu4,3091 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg2k_rgb/model.onnx,sha256=V2srJk-PqZKcghl-dalGTNsCSdQBJT1r_YW370_f5KY,168 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg2k_rgb/test_data_set_0/input_0.pb,sha256=7YHyMa_rxzJi3ShqJQGrZFTRg43HAoDL2ceMop4Xtgc,1901 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg2k_rgb/test_data_set_0/output_0.pb,sha256=r6Ad2pCIMQzYmpKqj7lhmrshM4Y3dcEU9S8Zq198uu4,3091 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg_bgr/model.onnx,sha256=KZTC2Ru7YUTy5NjFmXUTQq-ePrTUJaFBfiVKIeIBFF4,166 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg_bgr/test_data_set_0/input_0.pb,sha256=jKWRdIilLcY9ra7Mef0dyEutF3bREESbKpY4cmvtSJQ,1072 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg_bgr/test_data_set_0/output_0.pb,sha256=KTYAFI9diH71TAb3rsZda44mlhUJIwZ_ycbZHHAfLcU,3091 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg_grayscale/model.onnx,sha256=m0y8LkT1za8NIMxf3O1bfv_C3gmNuiwYdusshrHVmXQ,178 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg_grayscale/test_data_set_0/input_0.pb,sha256=jKWRdIilLcY9ra7Mef0dyEutF3bREESbKpY4cmvtSJQ,1072 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg_grayscale/test_data_set_0/output_0.pb,sha256=4H1hbiSnnwUsVmyPLBXa_zr9-g-C-YzTKm65FTR1Oss,1043 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg_rgb/model.onnx,sha256=QJiqi-SilkGw8BNNNjyYYutAlpKXb0mMJxng8Rk-w-c,166 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg_rgb/test_data_set_0/input_0.pb,sha256=jKWRdIilLcY9ra7Mef0dyEutF3bREESbKpY4cmvtSJQ,1072 +onnx/backend/test/data/node/test_image_decoder_decode_jpeg_rgb/test_data_set_0/output_0.pb,sha256=vad7ak4qNxXFLEep24zF3V7qbgUwsBhchfYXRgiwaF8,3091 +onnx/backend/test/data/node/test_image_decoder_decode_png_rgb/model.onnx,sha256=Lk8tvz3NDlmWsFgNjpMXCEWobGs325Dvc_iBMp8F9jM,165 +onnx/backend/test/data/node/test_image_decoder_decode_png_rgb/test_data_set_0/input_0.pb,sha256=hc97gZD_jsHH5MsJv_kg1yDuFuv2D7AOBy_cc7wxQVw,326 +onnx/backend/test/data/node/test_image_decoder_decode_png_rgb/test_data_set_0/output_0.pb,sha256=r6Ad2pCIMQzYmpKqj7lhmrshM4Y3dcEU9S8Zq198uu4,3091 +onnx/backend/test/data/node/test_image_decoder_decode_pnm_rgb/model.onnx,sha256=uHFPiiOb_TqZHQv_8SbuyOpv9D_OWgzPUxKWVnzdLoc,165 +onnx/backend/test/data/node/test_image_decoder_decode_pnm_rgb/test_data_set_0/input_0.pb,sha256=rpeeyR0Xyhfm7zJsp8FaufpFQOs1bPv63WHw6GKVHms,3099 +onnx/backend/test/data/node/test_image_decoder_decode_pnm_rgb/test_data_set_0/output_0.pb,sha256=r6Ad2pCIMQzYmpKqj7lhmrshM4Y3dcEU9S8Zq198uu4,3091 +onnx/backend/test/data/node/test_image_decoder_decode_tiff_rgb/model.onnx,sha256=KYvXOq_PJ6sJ3oFpxmOwxh0e3A-TE10scd2HEGo8Ea8,166 +onnx/backend/test/data/node/test_image_decoder_decode_tiff_rgb/test_data_set_0/input_0.pb,sha256=lSd_mOxQHA0RvJH_4r8Wf6_idVKu3KGiHDa8qxfqev4,3226 +onnx/backend/test/data/node/test_image_decoder_decode_tiff_rgb/test_data_set_0/output_0.pb,sha256=r6Ad2pCIMQzYmpKqj7lhmrshM4Y3dcEU9S8Zq198uu4,3091 +onnx/backend/test/data/node/test_image_decoder_decode_webp_rgb/model.onnx,sha256=RmXX3Rk82ET7z66fYUlNmsO2TGz0oe49CBUHb3RI7k4,166 
+onnx/backend/test/data/node/test_image_decoder_decode_webp_rgb/test_data_set_0/input_0.pb,sha256=MdIansNcRXVyK-xR3xH1YeH1H6PGC5J0a13FFNcTffk,566 +onnx/backend/test/data/node/test_image_decoder_decode_webp_rgb/test_data_set_0/output_0.pb,sha256=0pMvVMzifzYFw9TGLLCNQwbz1y3sy1s2IO0eV4SsYpA,3091 +onnx/backend/test/data/node/test_instancenorm_epsilon/model.onnx,sha256=Jy0FweIbj7aH1vnDHW5FACBUrua21w-GX7WrKmaJtKA,206 +onnx/backend/test/data/node/test_instancenorm_epsilon/test_data_set_0/input_0.pb,sha256=SYKd5PF5vM-bKNrV8oTKPGHvYE0twyvHWdFt3T67zDY,496 +onnx/backend/test/data/node/test_instancenorm_epsilon/test_data_set_0/input_1.pb,sha256=AI-3RnrtRdqYWsuL7R4_zbZ0Vb7zfnaDEBYsm0i2kvY,21 +onnx/backend/test/data/node/test_instancenorm_epsilon/test_data_set_0/input_2.pb,sha256=2oNlXxvO0WnQ0oLXr0SuQwOm4ErOca8R9dkNLwewDK4,24 +onnx/backend/test/data/node/test_instancenorm_epsilon/test_data_set_0/output_0.pb,sha256=kViHfy66VxtlADF2mSyn7ORx1duw-RQk1rfI2xGzAf0,496 +onnx/backend/test/data/node/test_instancenorm_example/model.onnx,sha256=LLUFeAIjH4RnEcejNWi-T2Sb5jrD6MImRG__380WiMo,187 +onnx/backend/test/data/node/test_instancenorm_example/test_data_set_0/input_0.pb,sha256=VUIPX62VbmFbVNHPK2_fBC1r_H9D8f-WYOR64GExRxg,39 +onnx/backend/test/data/node/test_instancenorm_example/test_data_set_0/input_1.pb,sha256=LAyTcjTe_XhxYy49mFzDu7kzZbw1weK3MkBpQcDLY7Y,17 +onnx/backend/test/data/node/test_instancenorm_example/test_data_set_0/input_2.pb,sha256=Qr3ZK8mEuzcmIiC7CqVjLO10zP1ZUJSVhf57xgIViJc,20 +onnx/backend/test/data/node/test_instancenorm_example/test_data_set_0/output_0.pb,sha256=UXtFI_9wnq2LC-OKKrnRlR7OkEDwtzSz6huAdy3yogI,39 +onnx/backend/test/data/node/test_isinf/model.onnx,sha256=Ex9vI0MtCeoLMMqgxcI6tE_Dwy1_8sl2TSUh-QcK3p4,85 +onnx/backend/test/data/node/test_isinf/test_data_set_0/input_0.pb,sha256=DUSqI3C3X65jGkuhMXunUVTh3fc8Y2RfKVcIgqF2PSI,33 +onnx/backend/test/data/node/test_isinf/test_data_set_0/output_0.pb,sha256=IU0OrIPsDhiQy6zcHECAVjDvfVr6qR_yXQEvoNCEOAQ,15 +onnx/backend/test/data/node/test_isinf_float16/model.onnx,sha256=21EyffqOPA0mw4_IyeP4qxaA_v6dDHcP74Mz0Lsl3tY,93 +onnx/backend/test/data/node/test_isinf_float16/test_data_set_0/input_0.pb,sha256=GzL9GHU-GuROgkjEhBQpi7v5zDspP_CGz6TBesGOmAQ,21 +onnx/backend/test/data/node/test_isinf_float16/test_data_set_0/output_0.pb,sha256=IU0OrIPsDhiQy6zcHECAVjDvfVr6qR_yXQEvoNCEOAQ,15 +onnx/backend/test/data/node/test_isinf_negative/model.onnx,sha256=F48N3xhREkkIfjYZ0l52JgEXFFhpb2BwkS1sF1uIN6A,118 +onnx/backend/test/data/node/test_isinf_negative/test_data_set_0/input_0.pb,sha256=TZCOthXvSS1htnpkqRg4pC6K8OoGrAnxG_8lBZ2_jKk,33 +onnx/backend/test/data/node/test_isinf_negative/test_data_set_0/output_0.pb,sha256=EkxCQSMrh0vkL8zRXyX5wQasiMwKXB41vEbwM-mJ9Sk,15 +onnx/backend/test/data/node/test_isinf_positive/model.onnx,sha256=jSWVzWtPahdbEnkGYKT0NMWkv10gbP6_X9rrT8k3QYI,118 +onnx/backend/test/data/node/test_isinf_positive/test_data_set_0/input_0.pb,sha256=EszjPltq6ca9PXAPoXK2UayVbMS6fQ4AMMUExeYrUZc,33 +onnx/backend/test/data/node/test_isinf_positive/test_data_set_0/output_0.pb,sha256=k8MIxh7aAeuvyLaQHb3xCbJ1zjetTSbQ9jrGyvmdMAA,15 +onnx/backend/test/data/node/test_isnan/model.onnx,sha256=xGcIz2X4lzM8eWIH8SUBVuDzeZcvNvMrU6NDaMvrqo8,85 +onnx/backend/test/data/node/test_isnan/test_data_set_0/input_0.pb,sha256=DUSqI3C3X65jGkuhMXunUVTh3fc8Y2RfKVcIgqF2PSI,33 +onnx/backend/test/data/node/test_isnan/test_data_set_0/output_0.pb,sha256=6w7AlYmICAOPY5SerH7LcWt1NXFIk6l61AOzgFibC3w,15 
+onnx/backend/test/data/node/test_isnan_float16/model.onnx,sha256=OHNj0HW8VurC7SGHHtkgZknPOIO0ci4L_p7L9CIBkxU,93 +onnx/backend/test/data/node/test_isnan_float16/test_data_set_0/input_0.pb,sha256=GzL9GHU-GuROgkjEhBQpi7v5zDspP_CGz6TBesGOmAQ,21 +onnx/backend/test/data/node/test_isnan_float16/test_data_set_0/output_0.pb,sha256=6w7AlYmICAOPY5SerH7LcWt1NXFIk6l61AOzgFibC3w,15 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0/model.onnx,sha256=Qu0CT3bG2vP6ou0oyq0FeISUX1PBaBGR3O6yFpUv_P4,261 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0/test_data_set_0/input_1.pb,sha256=c5v62j2q513u7QaD0Kck1K-O-g3Q6LiJWVgNaVZ3umg,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0/test_data_set_0/input_2.pb,sha256=LXv8aWQikWfT9D0FYEGMsl-g-FZoYCh4YhCuC_3uprM,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0/test_data_set_0/output_0.pb,sha256=iVt1SwGe_eyWjGblJNeTU6rlSc-4YC-xkTWTBTmHODI,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0/test_data_set_0/output_1.pb,sha256=0ejfMOUdKfdOLL6TPubooul674_H7ZZecRkExYlE40Y,18 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0/test_data_set_0/output_2.pb,sha256=_lB0ZM2LsSWG1KALOG-iiMDIp75mHdZ3BlGa0KiPZL8,23 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded/model.onnx,sha256=DUxhEduHYmkto1q2Uv_ED0HtSnJ0Hl7aU-3PIS9Veqw,6048 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded/test_data_set_0/input_1.pb,sha256=c5v62j2q513u7QaD0Kck1K-O-g3Q6LiJWVgNaVZ3umg,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded/test_data_set_0/input_2.pb,sha256=LXv8aWQikWfT9D0FYEGMsl-g-FZoYCh4YhCuC_3uprM,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded/test_data_set_0/output_0.pb,sha256=iVt1SwGe_eyWjGblJNeTU6rlSc-4YC-xkTWTBTmHODI,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded/test_data_set_0/output_1.pb,sha256=0ejfMOUdKfdOLL6TPubooul674_H7ZZecRkExYlE40Y,18 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded/test_data_set_0/output_2.pb,sha256=_lB0ZM2LsSWG1KALOG-iiMDIp75mHdZ3BlGa0KiPZL8,23 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded_ver18/model.onnx,sha256=tMjoGfaPiZVV21CHq_LjcOc68zQW8yZ35H-1gxQYvkI,6300 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded_ver18/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded_ver18/test_data_set_0/input_1.pb,sha256=c5v62j2q513u7QaD0Kck1K-O-g3Q6LiJWVgNaVZ3umg,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded_ver18/test_data_set_0/input_2.pb,sha256=LXv8aWQikWfT9D0FYEGMsl-g-FZoYCh4YhCuC_3uprM,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded_ver18/test_data_set_0/output_0.pb,sha256=iVt1SwGe_eyWjGblJNeTU6rlSc-4YC-xkTWTBTmHODI,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded_ver18/test_data_set_0/output_1.pb,sha256=0ejfMOUdKfdOLL6TPubooul674_H7ZZecRkExYlE40Y,18 +onnx/backend/test/data/node/test_layer_normalization_2d_axis0_expanded_ver18/test_data_set_0/output_2.pb,sha256=_lB0ZM2LsSWG1KALOG-iiMDIp75mHdZ3BlGa0KiPZL8,23 
+onnx/backend/test/data/node/test_layer_normalization_2d_axis1/model.onnx,sha256=sfa3c68-O_GcmBe4JJTF-vU1Y7ITngIUR1G5uPwynfk,253 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1/test_data_set_0/input_1.pb,sha256=EZKA7ForrN2YairSPGNOCkCyOm1H2fD6otmjaVO4YdQ,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1/test_data_set_0/input_2.pb,sha256=RbUzETROopB8rHchzsZs6Msdde_nz6eGAAvGd5telR4,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1/test_data_set_0/output_0.pb,sha256=_7YDN6UEtee83kkh5wsnjvgdzJLyTGqAqlcF1Mig-8s,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1/test_data_set_0/output_1.pb,sha256=5ECA6Cn2IbIzbiRuOg38eKBimy42xy4LEHe1g1EYte0,26 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1/test_data_set_0/output_2.pb,sha256=tHQXIVTQIuP89WdedOAN8Ba1rw4PrttrBKH3B23-9RE,31 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded/model.onnx,sha256=yiizqo2aVKXpdnyNQtQN9Aaox20FkEpMwkgewv1Qnfk,6040 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded/test_data_set_0/input_1.pb,sha256=EZKA7ForrN2YairSPGNOCkCyOm1H2fD6otmjaVO4YdQ,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded/test_data_set_0/input_2.pb,sha256=RbUzETROopB8rHchzsZs6Msdde_nz6eGAAvGd5telR4,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded/test_data_set_0/output_0.pb,sha256=_7YDN6UEtee83kkh5wsnjvgdzJLyTGqAqlcF1Mig-8s,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded/test_data_set_0/output_1.pb,sha256=5ECA6Cn2IbIzbiRuOg38eKBimy42xy4LEHe1g1EYte0,26 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded/test_data_set_0/output_2.pb,sha256=tHQXIVTQIuP89WdedOAN8Ba1rw4PrttrBKH3B23-9RE,31 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded_ver18/model.onnx,sha256=-ZUiNrDjVRz4py1EbR--rS26VWfdv9y6fKtQvlYWNHA,6292 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded_ver18/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded_ver18/test_data_set_0/input_1.pb,sha256=EZKA7ForrN2YairSPGNOCkCyOm1H2fD6otmjaVO4YdQ,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded_ver18/test_data_set_0/input_2.pb,sha256=RbUzETROopB8rHchzsZs6Msdde_nz6eGAAvGd5telR4,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded_ver18/test_data_set_0/output_0.pb,sha256=_7YDN6UEtee83kkh5wsnjvgdzJLyTGqAqlcF1Mig-8s,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded_ver18/test_data_set_0/output_1.pb,sha256=5ECA6Cn2IbIzbiRuOg38eKBimy42xy4LEHe1g1EYte0,26 +onnx/backend/test/data/node/test_layer_normalization_2d_axis1_expanded_ver18/test_data_set_0/output_2.pb,sha256=tHQXIVTQIuP89WdedOAN8Ba1rw4PrttrBKH3B23-9RE,31 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1/model.onnx,sha256=pZlPnXiGuXY7ZUkh8lgWBpkktPV_OzY6efzG3KfQpoo,272 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 
+onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1/test_data_set_0/input_1.pb,sha256=Z9KJWMim8_HHQqx310sN-MwR8HbbppmvpY0Gd7oGIIY,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1/test_data_set_0/input_2.pb,sha256=E-0WQ1hbMoH0mpC3NBjz-AzskpOkxKDYBugpDFckqXY,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1/test_data_set_0/output_0.pb,sha256=VnTGQAZfGqzqbsQv6BCWCuKG17-1be_ARnOAG2KQni8,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1/test_data_set_0/output_1.pb,sha256=5ECA6Cn2IbIzbiRuOg38eKBimy42xy4LEHe1g1EYte0,26 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1/test_data_set_0/output_2.pb,sha256=tHQXIVTQIuP89WdedOAN8Ba1rw4PrttrBKH3B23-9RE,31 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded/model.onnx,sha256=D-htrwW-dU676NZDIItiLrkYjhMTUNmHSFcMiJEs7IQ,6633 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded/test_data_set_0/input_1.pb,sha256=Z9KJWMim8_HHQqx310sN-MwR8HbbppmvpY0Gd7oGIIY,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded/test_data_set_0/input_2.pb,sha256=E-0WQ1hbMoH0mpC3NBjz-AzskpOkxKDYBugpDFckqXY,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded/test_data_set_0/output_0.pb,sha256=VnTGQAZfGqzqbsQv6BCWCuKG17-1be_ARnOAG2KQni8,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded/test_data_set_0/output_1.pb,sha256=5ECA6Cn2IbIzbiRuOg38eKBimy42xy4LEHe1g1EYte0,26 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded/test_data_set_0/output_2.pb,sha256=tHQXIVTQIuP89WdedOAN8Ba1rw4PrttrBKH3B23-9RE,31 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded_ver18/model.onnx,sha256=POGSoiJko5Xu7inl9n9LtBqkhkIMpdjOgKZ5Drwrano,6915 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded_ver18/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded_ver18/test_data_set_0/input_1.pb,sha256=Z9KJWMim8_HHQqx310sN-MwR8HbbppmvpY0Gd7oGIIY,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded_ver18/test_data_set_0/input_2.pb,sha256=E-0WQ1hbMoH0mpC3NBjz-AzskpOkxKDYBugpDFckqXY,25 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded_ver18/test_data_set_0/output_0.pb,sha256=VnTGQAZfGqzqbsQv6BCWCuKG17-1be_ARnOAG2KQni8,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded_ver18/test_data_set_0/output_1.pb,sha256=5ECA6Cn2IbIzbiRuOg38eKBimy42xy4LEHe1g1EYte0,26 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_1_expanded_ver18/test_data_set_0/output_2.pb,sha256=tHQXIVTQIuP89WdedOAN8Ba1rw4PrttrBKH3B23-9RE,31 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2/model.onnx,sha256=lFCifpXsH80wLKo1i6kqVLL9bL6nZVC6d4EoRp6FCyw,280 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2/test_data_set_0/input_1.pb,sha256=xCO5Odqhx6GmjsUlwlrHLnjfm63TjRJb3rzZndZCYyQ,59 
+onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2/test_data_set_0/input_2.pb,sha256=Pd-TMGMv2FcDr-jRpePp8LPEJICDh8AD1HX04T6MrLI,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2/test_data_set_0/output_0.pb,sha256=1QvjVDP3oBuUy26wURfUK_9v8pj-Jr8iXjjvMpm0Lp4,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2/test_data_set_0/output_1.pb,sha256=0ejfMOUdKfdOLL6TPubooul674_H7ZZecRkExYlE40Y,18 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2/test_data_set_0/output_2.pb,sha256=_lB0ZM2LsSWG1KALOG-iiMDIp75mHdZ3BlGa0KiPZL8,23 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded/model.onnx,sha256=MrIobPhwcsTjlPntx0-XmOZSh2jahDomXYmftvYNOeI,6641 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded/test_data_set_0/input_1.pb,sha256=xCO5Odqhx6GmjsUlwlrHLnjfm63TjRJb3rzZndZCYyQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded/test_data_set_0/input_2.pb,sha256=Pd-TMGMv2FcDr-jRpePp8LPEJICDh8AD1HX04T6MrLI,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded/test_data_set_0/output_0.pb,sha256=1QvjVDP3oBuUy26wURfUK_9v8pj-Jr8iXjjvMpm0Lp4,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded/test_data_set_0/output_1.pb,sha256=0ejfMOUdKfdOLL6TPubooul674_H7ZZecRkExYlE40Y,18 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded/test_data_set_0/output_2.pb,sha256=_lB0ZM2LsSWG1KALOG-iiMDIp75mHdZ3BlGa0KiPZL8,23 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded_ver18/model.onnx,sha256=FT55lf5rew2LfzX4yYFsYutgHfAPJMl2TmTz3kcIg4A,6923 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded_ver18/test_data_set_0/input_0.pb,sha256=z0sxUFTPXC7xORC3TXH-lrSh-0Dvo0Izr6rqpN7XmMQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded_ver18/test_data_set_0/input_1.pb,sha256=xCO5Odqhx6GmjsUlwlrHLnjfm63TjRJb3rzZndZCYyQ,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded_ver18/test_data_set_0/input_2.pb,sha256=Pd-TMGMv2FcDr-jRpePp8LPEJICDh8AD1HX04T6MrLI,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded_ver18/test_data_set_0/output_0.pb,sha256=1QvjVDP3oBuUy26wURfUK_9v8pj-Jr8iXjjvMpm0Lp4,59 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded_ver18/test_data_set_0/output_1.pb,sha256=0ejfMOUdKfdOLL6TPubooul674_H7ZZecRkExYlE40Y,18 +onnx/backend/test/data/node/test_layer_normalization_2d_axis_negative_2_expanded_ver18/test_data_set_0/output_2.pb,sha256=_lB0ZM2LsSWG1KALOG-iiMDIp75mHdZ3BlGa0KiPZL8,23 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon/model.onnx,sha256=nWu0d8mPuo_q-6EbxNldBgOwAHLI6r82ofpwMOeY3VE,312 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon/test_data_set_0/input_1.pb,sha256=7opcXxjA9jj8tE0kwn6o2zODfFoW-_nJ5TGxMKGx3Fs,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon/test_data_set_0/input_2.pb,sha256=5PyU6fVFO0EEZAhy7iw-cH3feNNieB9cflROAd5uNrE,133 
+onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon/test_data_set_0/output_0.pb,sha256=v4LJo9woQRls5a_jNs6Rjayjn62ji3ZGj_FOZjvOR6w,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon/test_data_set_0/output_1.pb,sha256=H81tuC3okK3RUjvS60hBfM9-STj5kuLiQ45PaSjvw04,20 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon/test_data_set_0/output_2.pb,sha256=TssEW9-a_1tOb8XcyDe8IiJJqERhn0g-u0tEX7bp9Ak,25 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded/model.onnx,sha256=0ngaHnVQCQtDLmTPUnsltXdMbsef1Zao-_xztRUrJqg,6600 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded/test_data_set_0/input_1.pb,sha256=7opcXxjA9jj8tE0kwn6o2zODfFoW-_nJ5TGxMKGx3Fs,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded/test_data_set_0/input_2.pb,sha256=5PyU6fVFO0EEZAhy7iw-cH3feNNieB9cflROAd5uNrE,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded/test_data_set_0/output_0.pb,sha256=v4LJo9woQRls5a_jNs6Rjayjn62ji3ZGj_FOZjvOR6w,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded/test_data_set_0/output_1.pb,sha256=H81tuC3okK3RUjvS60hBfM9-STj5kuLiQ45PaSjvw04,20 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded/test_data_set_0/output_2.pb,sha256=TssEW9-a_1tOb8XcyDe8IiJJqERhn0g-u0tEX7bp9Ak,25 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded_ver18/model.onnx,sha256=dlYp8YolSE9qPr-UD8df4ajih6-FKKW4S7wvGSpwwGE,6876 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded_ver18/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded_ver18/test_data_set_0/input_1.pb,sha256=7opcXxjA9jj8tE0kwn6o2zODfFoW-_nJ5TGxMKGx3Fs,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded_ver18/test_data_set_0/input_2.pb,sha256=5PyU6fVFO0EEZAhy7iw-cH3feNNieB9cflROAd5uNrE,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded_ver18/test_data_set_0/output_0.pb,sha256=v4LJo9woQRls5a_jNs6Rjayjn62ji3ZGj_FOZjvOR6w,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded_ver18/test_data_set_0/output_1.pb,sha256=H81tuC3okK3RUjvS60hBfM9-STj5kuLiQ45PaSjvw04,20 +onnx/backend/test/data/node/test_layer_normalization_3d_axis0_epsilon_expanded_ver18/test_data_set_0/output_2.pb,sha256=TssEW9-a_1tOb8XcyDe8IiJJqERhn0g-u0tEX7bp9Ak,25 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon/model.onnx,sha256=L1n3cJ5FYR20w-NL_S3NqlfSja8XnlEEZGDUXF5_Lp4,304 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon/test_data_set_0/input_1.pb,sha256=ZPe50IuXkbPIN0CuON0wV-i4zLn3HPRsxO5bwFT9bkw,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon/test_data_set_0/input_2.pb,sha256=ZKaykW9NECftmK4UFhp4YwKdZ066BMxKP2xmGzUHx28,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon/test_data_set_0/output_0.pb,sha256=d2aHuLpzy26hKZfRcdjI52hGJm4-Rnvt4OA7yaHmC70,133 
+onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon/test_data_set_0/output_1.pb,sha256=PHTleirt5gAWtXMHawJcVrMcujCETXqf1i89SdGF-ho,24 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon/test_data_set_0/output_2.pb,sha256=AcKwzHM9Di6J1dyJw8oVyabffaBkKvZ2XqmMiJormnc,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded/model.onnx,sha256=aPdwPxyqI8o_B1EVa074QMWy1LFSRTbDx0hGkB_VWNI,6592 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded/test_data_set_0/input_1.pb,sha256=ZPe50IuXkbPIN0CuON0wV-i4zLn3HPRsxO5bwFT9bkw,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded/test_data_set_0/input_2.pb,sha256=ZKaykW9NECftmK4UFhp4YwKdZ066BMxKP2xmGzUHx28,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded/test_data_set_0/output_0.pb,sha256=d2aHuLpzy26hKZfRcdjI52hGJm4-Rnvt4OA7yaHmC70,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded/test_data_set_0/output_1.pb,sha256=PHTleirt5gAWtXMHawJcVrMcujCETXqf1i89SdGF-ho,24 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded/test_data_set_0/output_2.pb,sha256=AcKwzHM9Di6J1dyJw8oVyabffaBkKvZ2XqmMiJormnc,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded_ver18/model.onnx,sha256=Tf3OQ_KUgR3XL4Q8Uno1nuzP5lghFAUzrUSCNbvqLyk,6868 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded_ver18/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded_ver18/test_data_set_0/input_1.pb,sha256=ZPe50IuXkbPIN0CuON0wV-i4zLn3HPRsxO5bwFT9bkw,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded_ver18/test_data_set_0/input_2.pb,sha256=ZKaykW9NECftmK4UFhp4YwKdZ066BMxKP2xmGzUHx28,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded_ver18/test_data_set_0/output_0.pb,sha256=d2aHuLpzy26hKZfRcdjI52hGJm4-Rnvt4OA7yaHmC70,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded_ver18/test_data_set_0/output_1.pb,sha256=PHTleirt5gAWtXMHawJcVrMcujCETXqf1i89SdGF-ho,24 +onnx/backend/test/data/node/test_layer_normalization_3d_axis1_epsilon_expanded_ver18/test_data_set_0/output_2.pb,sha256=AcKwzHM9Di6J1dyJw8oVyabffaBkKvZ2XqmMiJormnc,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon/model.onnx,sha256=CpeyhCB4UgjtGlVuw-BMJErTR4D3zFHDIZGjjcDBlzE,296 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon/test_data_set_0/input_1.pb,sha256=va8YRICxaQUTR-F6lUfwmRzaiwoaXpFUgZQvNMMtd2Y,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon/test_data_set_0/input_2.pb,sha256=k3SelyY2T9UlihBpgMkCl7r0uQ1BnbqvAaqnmxhD3BU,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon/test_data_set_0/output_0.pb,sha256=9UZHfb3EbKd1lt9AAGTPK2rFA9w_3PYU1FfL5IMX3aM,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon/test_data_set_0/output_1.pb,sha256=AaAYL201wLhEwUVhQKgq49OBfx_80pQdi_Wo33isQNY,40 
+onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon/test_data_set_0/output_2.pb,sha256=KkksKiORHOMDplW6506hU0pgFYBUTSjkAndy3JMiaic,45 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded/model.onnx,sha256=TcFs6aY6Oar3gTdtyKmzSxynu1bE4j3XLQ746ZeeLQU,6584 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded/test_data_set_0/input_1.pb,sha256=va8YRICxaQUTR-F6lUfwmRzaiwoaXpFUgZQvNMMtd2Y,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded/test_data_set_0/input_2.pb,sha256=k3SelyY2T9UlihBpgMkCl7r0uQ1BnbqvAaqnmxhD3BU,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded/test_data_set_0/output_0.pb,sha256=9UZHfb3EbKd1lt9AAGTPK2rFA9w_3PYU1FfL5IMX3aM,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded/test_data_set_0/output_1.pb,sha256=AaAYL201wLhEwUVhQKgq49OBfx_80pQdi_Wo33isQNY,40 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded/test_data_set_0/output_2.pb,sha256=KkksKiORHOMDplW6506hU0pgFYBUTSjkAndy3JMiaic,45 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded_ver18/model.onnx,sha256=ZwEvYPFp-eSF4B-jfEdEX3aDSTNZdjjo9L4BlwwVVyw,6860 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded_ver18/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded_ver18/test_data_set_0/input_1.pb,sha256=va8YRICxaQUTR-F6lUfwmRzaiwoaXpFUgZQvNMMtd2Y,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded_ver18/test_data_set_0/input_2.pb,sha256=k3SelyY2T9UlihBpgMkCl7r0uQ1BnbqvAaqnmxhD3BU,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded_ver18/test_data_set_0/output_0.pb,sha256=9UZHfb3EbKd1lt9AAGTPK2rFA9w_3PYU1FfL5IMX3aM,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded_ver18/test_data_set_0/output_1.pb,sha256=AaAYL201wLhEwUVhQKgq49OBfx_80pQdi_Wo33isQNY,40 +onnx/backend/test/data/node/test_layer_normalization_3d_axis2_epsilon_expanded_ver18/test_data_set_0/output_2.pb,sha256=KkksKiORHOMDplW6506hU0pgFYBUTSjkAndy3JMiaic,45 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon/model.onnx,sha256=Mgyj5v0-FOddHlPK2b49zSnTzPhLyEM5FiHI5R637uQ,315 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon/test_data_set_0/input_1.pb,sha256=4PBrvKy3rxl13drhLIE5bL7rYJLK7SoYRzFAKTTxLy0,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon/test_data_set_0/input_2.pb,sha256=QoSWvX-d04Zqaupf99gDg5LEvvIx4A1qA6BgR8pJKMg,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon/test_data_set_0/output_0.pb,sha256=lXpnAlkFz26dL_kmQMXJJBuuk88Vtr6FMhlY7fLtBEY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon/test_data_set_0/output_1.pb,sha256=AaAYL201wLhEwUVhQKgq49OBfx_80pQdi_Wo33isQNY,40 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon/test_data_set_0/output_2.pb,sha256=KkksKiORHOMDplW6506hU0pgFYBUTSjkAndy3JMiaic,45 
+onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded/model.onnx,sha256=XURUaN2LvupLmJtwiMxdDYqC93dBWn4aiBVkkFlsYW0,7171 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded/test_data_set_0/input_1.pb,sha256=4PBrvKy3rxl13drhLIE5bL7rYJLK7SoYRzFAKTTxLy0,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded/test_data_set_0/input_2.pb,sha256=QoSWvX-d04Zqaupf99gDg5LEvvIx4A1qA6BgR8pJKMg,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded/test_data_set_0/output_0.pb,sha256=lXpnAlkFz26dL_kmQMXJJBuuk88Vtr6FMhlY7fLtBEY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded/test_data_set_0/output_1.pb,sha256=AaAYL201wLhEwUVhQKgq49OBfx_80pQdi_Wo33isQNY,40 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded/test_data_set_0/output_2.pb,sha256=KkksKiORHOMDplW6506hU0pgFYBUTSjkAndy3JMiaic,45 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded_ver18/model.onnx,sha256=Czi1LTqxIkTQ2PXFHqbp6R5Z1Y4XXM70c6r_hgc8k7o,7478 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded_ver18/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded_ver18/test_data_set_0/input_1.pb,sha256=4PBrvKy3rxl13drhLIE5bL7rYJLK7SoYRzFAKTTxLy0,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded_ver18/test_data_set_0/input_2.pb,sha256=QoSWvX-d04Zqaupf99gDg5LEvvIx4A1qA6BgR8pJKMg,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded_ver18/test_data_set_0/output_0.pb,sha256=lXpnAlkFz26dL_kmQMXJJBuuk88Vtr6FMhlY7fLtBEY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded_ver18/test_data_set_0/output_1.pb,sha256=AaAYL201wLhEwUVhQKgq49OBfx_80pQdi_Wo33isQNY,40 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_1_epsilon_expanded_ver18/test_data_set_0/output_2.pb,sha256=KkksKiORHOMDplW6506hU0pgFYBUTSjkAndy3JMiaic,45 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon/model.onnx,sha256=tfYuT0x48pO3zOdPvb2af-sgrWXGAtDgigBlY_f2A8E,323 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon/test_data_set_0/input_1.pb,sha256=RsFIHUaVXXkVUuqNiwwNCyD0G-fnbmxu-VtjpWDCr-g,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon/test_data_set_0/input_2.pb,sha256=sHcZJ9BtHritZi7auxmp2MQdRfb6VioBIMj37gdsuUQ,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon/test_data_set_0/output_0.pb,sha256=OvSGu63DhH8Q6FOw4vCzNGh3CbvdnCI3IWrxEogZ7OM,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon/test_data_set_0/output_1.pb,sha256=PHTleirt5gAWtXMHawJcVrMcujCETXqf1i89SdGF-ho,24 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon/test_data_set_0/output_2.pb,sha256=AcKwzHM9Di6J1dyJw8oVyabffaBkKvZ2XqmMiJormnc,29 
+onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded/model.onnx,sha256=1aszJdGvZzf6nhA_2iFchJivNtGVEIY3hLsuqQQ-U0U,7179 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded/test_data_set_0/input_1.pb,sha256=RsFIHUaVXXkVUuqNiwwNCyD0G-fnbmxu-VtjpWDCr-g,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded/test_data_set_0/input_2.pb,sha256=sHcZJ9BtHritZi7auxmp2MQdRfb6VioBIMj37gdsuUQ,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded/test_data_set_0/output_0.pb,sha256=OvSGu63DhH8Q6FOw4vCzNGh3CbvdnCI3IWrxEogZ7OM,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded/test_data_set_0/output_1.pb,sha256=PHTleirt5gAWtXMHawJcVrMcujCETXqf1i89SdGF-ho,24 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded/test_data_set_0/output_2.pb,sha256=AcKwzHM9Di6J1dyJw8oVyabffaBkKvZ2XqmMiJormnc,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded_ver18/model.onnx,sha256=mXqVqfsfyDeK9O9mvz6w8dBQsW4pScdPJF_aXhD0-EY,7486 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded_ver18/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded_ver18/test_data_set_0/input_1.pb,sha256=RsFIHUaVXXkVUuqNiwwNCyD0G-fnbmxu-VtjpWDCr-g,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded_ver18/test_data_set_0/input_2.pb,sha256=sHcZJ9BtHritZi7auxmp2MQdRfb6VioBIMj37gdsuUQ,71 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded_ver18/test_data_set_0/output_0.pb,sha256=OvSGu63DhH8Q6FOw4vCzNGh3CbvdnCI3IWrxEogZ7OM,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded_ver18/test_data_set_0/output_1.pb,sha256=PHTleirt5gAWtXMHawJcVrMcujCETXqf1i89SdGF-ho,24 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_2_epsilon_expanded_ver18/test_data_set_0/output_2.pb,sha256=AcKwzHM9Di6J1dyJw8oVyabffaBkKvZ2XqmMiJormnc,29 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon/model.onnx,sha256=EsaeIyu7HXxvvWKBbE4MtVCAVtZSc8dLn-xEL_r1WEQ,331 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon/test_data_set_0/input_1.pb,sha256=-_y6ugkVRAuX6VtIyUN56Ja8hsdgc-uYpjQhH9o3wgM,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon/test_data_set_0/input_2.pb,sha256=pL40Oaw2IK9LzoPOZDaP75bc2lI0nDGGPsLMtsEA7CE,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon/test_data_set_0/output_0.pb,sha256=_jNYy4sNYdyeP5vTYQs63WW9_PpdR1ObiJalbo7T1rc,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon/test_data_set_0/output_1.pb,sha256=H81tuC3okK3RUjvS60hBfM9-STj5kuLiQ45PaSjvw04,20 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon/test_data_set_0/output_2.pb,sha256=TssEW9-a_1tOb8XcyDe8IiJJqERhn0g-u0tEX7bp9Ak,25 
+onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded/model.onnx,sha256=D6LP8qum68kfFVrPWqQpubdeeIJbDzrBY-kkaNgzfRQ,7187 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded/test_data_set_0/input_1.pb,sha256=-_y6ugkVRAuX6VtIyUN56Ja8hsdgc-uYpjQhH9o3wgM,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded/test_data_set_0/input_2.pb,sha256=pL40Oaw2IK9LzoPOZDaP75bc2lI0nDGGPsLMtsEA7CE,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded/test_data_set_0/output_0.pb,sha256=_jNYy4sNYdyeP5vTYQs63WW9_PpdR1ObiJalbo7T1rc,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded/test_data_set_0/output_1.pb,sha256=H81tuC3okK3RUjvS60hBfM9-STj5kuLiQ45PaSjvw04,20 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded/test_data_set_0/output_2.pb,sha256=TssEW9-a_1tOb8XcyDe8IiJJqERhn0g-u0tEX7bp9Ak,25 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded_ver18/model.onnx,sha256=-UB36cfKrf0whCnJPy35TDp_2DBfxSt_8Bt_dWcwDGM,7494 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded_ver18/test_data_set_0/input_0.pb,sha256=x8hqB6r-sU_tXut9YmsSmWXCDcdeW-IZc_2vzMnZegY,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded_ver18/test_data_set_0/input_1.pb,sha256=-_y6ugkVRAuX6VtIyUN56Ja8hsdgc-uYpjQhH9o3wgM,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded_ver18/test_data_set_0/input_2.pb,sha256=pL40Oaw2IK9LzoPOZDaP75bc2lI0nDGGPsLMtsEA7CE,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded_ver18/test_data_set_0/output_0.pb,sha256=_jNYy4sNYdyeP5vTYQs63WW9_PpdR1ObiJalbo7T1rc,133 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded_ver18/test_data_set_0/output_1.pb,sha256=H81tuC3okK3RUjvS60hBfM9-STj5kuLiQ45PaSjvw04,20 +onnx/backend/test/data/node/test_layer_normalization_3d_axis_negative_3_epsilon_expanded_ver18/test_data_set_0/output_2.pb,sha256=TssEW9-a_1tOb8XcyDe8IiJJqERhn0g-u0tEX7bp9Ak,25 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0/model.onnx,sha256=6FTymSB7VUYcPZKAp3b5kYQ87SaZnfg-FQZohEflz3c,309 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0/test_data_set_0/input_1.pb,sha256=SQ1M8CaJ0qLmcLTvIkOMlgNO0L8HwaXc_m0K26vIbnI,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0/test_data_set_0/input_2.pb,sha256=__QE7EAq717I04_JG7I_51Z5sC2scQ1_WLMFDvKlhl4,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0/test_data_set_0/output_0.pb,sha256=oMQtjXP8GiZgJtDBJ1pNz2JI7FdH7M-rQiiT4b63_rY,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0/test_data_set_0/output_1.pb,sha256=h_PlYPPc_oA1NHtZrWv0D55Gb9RZgmTgYnO0VZ2TI34,22 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0/test_data_set_0/output_2.pb,sha256=hVXqX9wgl-HPPlQrrhqClVhRvWG1HC21LrbjWPq5HcA,27 
+onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded/model.onnx,sha256=yzgNEYHW9v9br3Uvujgff1JNa3T_oqsWiRP1T09zcP0,6096 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded/test_data_set_0/input_1.pb,sha256=SQ1M8CaJ0qLmcLTvIkOMlgNO0L8HwaXc_m0K26vIbnI,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded/test_data_set_0/input_2.pb,sha256=__QE7EAq717I04_JG7I_51Z5sC2scQ1_WLMFDvKlhl4,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded/test_data_set_0/output_0.pb,sha256=oMQtjXP8GiZgJtDBJ1pNz2JI7FdH7M-rQiiT4b63_rY,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded/test_data_set_0/output_1.pb,sha256=h_PlYPPc_oA1NHtZrWv0D55Gb9RZgmTgYnO0VZ2TI34,22 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded/test_data_set_0/output_2.pb,sha256=hVXqX9wgl-HPPlQrrhqClVhRvWG1HC21LrbjWPq5HcA,27 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded_ver18/model.onnx,sha256=gCIb_iUdusfUvRyCuxAqngine6lFStpfNZ69Ob9IM5E,6348 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded_ver18/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded_ver18/test_data_set_0/input_1.pb,sha256=SQ1M8CaJ0qLmcLTvIkOMlgNO0L8HwaXc_m0K26vIbnI,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded_ver18/test_data_set_0/input_2.pb,sha256=__QE7EAq717I04_JG7I_51Z5sC2scQ1_WLMFDvKlhl4,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded_ver18/test_data_set_0/output_0.pb,sha256=oMQtjXP8GiZgJtDBJ1pNz2JI7FdH7M-rQiiT4b63_rY,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded_ver18/test_data_set_0/output_1.pb,sha256=h_PlYPPc_oA1NHtZrWv0D55Gb9RZgmTgYnO0VZ2TI34,22 +onnx/backend/test/data/node/test_layer_normalization_4d_axis0_expanded_ver18/test_data_set_0/output_2.pb,sha256=hVXqX9wgl-HPPlQrrhqClVhRvWG1HC21LrbjWPq5HcA,27 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1/model.onnx,sha256=hnQKrdk74MuMs4Tg3gWL2wnn6FImP1OfquiXvAdW62g,301 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1/test_data_set_0/input_1.pb,sha256=7zaL3tDlpElVdcFGJowAUcp6j2Nq_xL1ZjeEUWn73QQ,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1/test_data_set_0/input_2.pb,sha256=lLuiBlurFP19GIwIpD-S93B2EPY2gV630HErOlKNK-Q,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1/test_data_set_0/output_0.pb,sha256=SEut2uddWoC5hL8h16RBz-7H9XquUwDwmbYco_WFWEw,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1/test_data_set_0/output_1.pb,sha256=div91pv-EhX8CtojPjzcl-OCV7-3ucF2WMaqIA4Owi0,26 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1/test_data_set_0/output_2.pb,sha256=SFEhUFLf29xLZCuQskP1rqZTOtPHZurgQaHJ7iIloMg,31 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded/model.onnx,sha256=jNy7YkdY02dnpplGtFAzO06i_D9fVavlHJo5YFx_hlc,6088 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 
+onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded/test_data_set_0/input_1.pb,sha256=7zaL3tDlpElVdcFGJowAUcp6j2Nq_xL1ZjeEUWn73QQ,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded/test_data_set_0/input_2.pb,sha256=lLuiBlurFP19GIwIpD-S93B2EPY2gV630HErOlKNK-Q,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded/test_data_set_0/output_0.pb,sha256=SEut2uddWoC5hL8h16RBz-7H9XquUwDwmbYco_WFWEw,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded/test_data_set_0/output_1.pb,sha256=div91pv-EhX8CtojPjzcl-OCV7-3ucF2WMaqIA4Owi0,26 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded/test_data_set_0/output_2.pb,sha256=SFEhUFLf29xLZCuQskP1rqZTOtPHZurgQaHJ7iIloMg,31 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded_ver18/model.onnx,sha256=P9ZmRA9zsujJH7q1P2GU2ZhZy7XO_vyTJZ2YBLP6SnI,6340 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded_ver18/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded_ver18/test_data_set_0/input_1.pb,sha256=7zaL3tDlpElVdcFGJowAUcp6j2Nq_xL1ZjeEUWn73QQ,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded_ver18/test_data_set_0/input_2.pb,sha256=lLuiBlurFP19GIwIpD-S93B2EPY2gV630HErOlKNK-Q,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded_ver18/test_data_set_0/output_0.pb,sha256=SEut2uddWoC5hL8h16RBz-7H9XquUwDwmbYco_WFWEw,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded_ver18/test_data_set_0/output_1.pb,sha256=div91pv-EhX8CtojPjzcl-OCV7-3ucF2WMaqIA4Owi0,26 +onnx/backend/test/data/node/test_layer_normalization_4d_axis1_expanded_ver18/test_data_set_0/output_2.pb,sha256=SFEhUFLf29xLZCuQskP1rqZTOtPHZurgQaHJ7iIloMg,31 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2/model.onnx,sha256=sFRKeanTQ5lDP1B82P-m3otZ3wZLy3bFjx2UASnY-yY,293 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2/test_data_set_0/input_1.pb,sha256=qe-XVfBNefBCFc7BqwFVPauls8p6xc7Kg_zN9Z7Z37E,91 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2/test_data_set_0/input_2.pb,sha256=FJdFCcsdB1KsWHVHfzL8qSH4MOsZeA-MpqXybaSUyj4,91 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2/test_data_set_0/output_0.pb,sha256=bZ21DorsMQaEkatMnhbenFA8rDNstpgIdOKwN8CvD30,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2/test_data_set_0/output_1.pb,sha256=Jdb9L1HSJpPk-w9babAzkefVo1_1OtnrycFuzhsqWyc,42 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2/test_data_set_0/output_2.pb,sha256=bTgUzSWmCvzUbVNgz5UitP82lYNZb6gp_d3N6yGwNMI,47 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded/model.onnx,sha256=mVKO0Uefb9JPe411LiAS2tT1MD0uA-b595TyKUVxLEE,6080 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded/test_data_set_0/input_1.pb,sha256=qe-XVfBNefBCFc7BqwFVPauls8p6xc7Kg_zN9Z7Z37E,91 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded/test_data_set_0/input_2.pb,sha256=FJdFCcsdB1KsWHVHfzL8qSH4MOsZeA-MpqXybaSUyj4,91 
+onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded/test_data_set_0/output_0.pb,sha256=bZ21DorsMQaEkatMnhbenFA8rDNstpgIdOKwN8CvD30,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded/test_data_set_0/output_1.pb,sha256=Jdb9L1HSJpPk-w9babAzkefVo1_1OtnrycFuzhsqWyc,42 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded/test_data_set_0/output_2.pb,sha256=bTgUzSWmCvzUbVNgz5UitP82lYNZb6gp_d3N6yGwNMI,47 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded_ver18/model.onnx,sha256=K6H8uEh5a0FOxFCaQJPHsxnyj8Js9OL3g7Wcu9onojc,6332 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded_ver18/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded_ver18/test_data_set_0/input_1.pb,sha256=qe-XVfBNefBCFc7BqwFVPauls8p6xc7Kg_zN9Z7Z37E,91 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded_ver18/test_data_set_0/input_2.pb,sha256=FJdFCcsdB1KsWHVHfzL8qSH4MOsZeA-MpqXybaSUyj4,91 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded_ver18/test_data_set_0/output_0.pb,sha256=bZ21DorsMQaEkatMnhbenFA8rDNstpgIdOKwN8CvD30,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded_ver18/test_data_set_0/output_1.pb,sha256=Jdb9L1HSJpPk-w9babAzkefVo1_1OtnrycFuzhsqWyc,42 +onnx/backend/test/data/node/test_layer_normalization_4d_axis2_expanded_ver18/test_data_set_0/output_2.pb,sha256=bTgUzSWmCvzUbVNgz5UitP82lYNZb6gp_d3N6yGwNMI,47 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3/model.onnx,sha256=f3vB99GHKLnzZ6VMiUiLAYGhsUS_p9rv5q7xLL9arig,285 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3/test_data_set_0/input_1.pb,sha256=dhPFEwCCeDrZzyZzzTc4B8YdEbZB8eOdvjTBRWTna0o,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3/test_data_set_0/input_2.pb,sha256=I3r-GMpY-sWqu4lHyIlkD5s7nRziiDkrGNkhZxJ6PpQ,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3/test_data_set_0/output_0.pb,sha256=ZZgPs-pyo4XnW-gGUahZhyyefO-v_IsCwu4fdYG-jGU,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3/test_data_set_0/output_1.pb,sha256=PyQxLcwwC8Zmn6ihwCHpRkKnLdOtTKa19fmW23TrU9g,114 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3/test_data_set_0/output_2.pb,sha256=OHN3HBAA9M0qcG3nWWb7JqxwLw8gfKJMlIsb3xK2S3E,119 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded/model.onnx,sha256=wHnllra7f3F0Yn7bx76ltd0uF4Jo38Bxh9cHiobLFR0,6072 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded/test_data_set_0/input_1.pb,sha256=dhPFEwCCeDrZzyZzzTc4B8YdEbZB8eOdvjTBRWTna0o,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded/test_data_set_0/input_2.pb,sha256=I3r-GMpY-sWqu4lHyIlkD5s7nRziiDkrGNkhZxJ6PpQ,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded/test_data_set_0/output_0.pb,sha256=ZZgPs-pyo4XnW-gGUahZhyyefO-v_IsCwu4fdYG-jGU,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded/test_data_set_0/output_1.pb,sha256=PyQxLcwwC8Zmn6ihwCHpRkKnLdOtTKa19fmW23TrU9g,114 
+onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded/test_data_set_0/output_2.pb,sha256=OHN3HBAA9M0qcG3nWWb7JqxwLw8gfKJMlIsb3xK2S3E,119 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded_ver18/model.onnx,sha256=tc5gpKsV8OptgZw0XhllKuJ_e5D23kH9Zba_-PA8hTc,6324 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded_ver18/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded_ver18/test_data_set_0/input_1.pb,sha256=dhPFEwCCeDrZzyZzzTc4B8YdEbZB8eOdvjTBRWTna0o,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded_ver18/test_data_set_0/input_2.pb,sha256=I3r-GMpY-sWqu4lHyIlkD5s7nRziiDkrGNkhZxJ6PpQ,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded_ver18/test_data_set_0/output_0.pb,sha256=ZZgPs-pyo4XnW-gGUahZhyyefO-v_IsCwu4fdYG-jGU,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded_ver18/test_data_set_0/output_1.pb,sha256=PyQxLcwwC8Zmn6ihwCHpRkKnLdOtTKa19fmW23TrU9g,114 +onnx/backend/test/data/node/test_layer_normalization_4d_axis3_expanded_ver18/test_data_set_0/output_2.pb,sha256=OHN3HBAA9M0qcG3nWWb7JqxwLw8gfKJMlIsb3xK2S3E,119 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1/model.onnx,sha256=73UhfFj6hqI5f7kRWoruEDcfbuEaSx1xviw5cYFsRR4,304 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1/test_data_set_0/input_1.pb,sha256=6GZ_k7Bg5S-sVwagkPPRjF351HRT5efq3L0RXbbFfPo,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1/test_data_set_0/input_2.pb,sha256=XJX83EpHjA-svtp5ZUT-cGHo6rmxPJPuge7JtGCcIL4,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1/test_data_set_0/output_0.pb,sha256=ZuFfotGGllfUiOngMYM4jmdmjdC-Jrrz9jj9tpFRkzQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1/test_data_set_0/output_1.pb,sha256=PyQxLcwwC8Zmn6ihwCHpRkKnLdOtTKa19fmW23TrU9g,114 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1/test_data_set_0/output_2.pb,sha256=OHN3HBAA9M0qcG3nWWb7JqxwLw8gfKJMlIsb3xK2S3E,119 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded/model.onnx,sha256=7PhXg1U9I2yFSYJKePbnKTjGHublQ2h8zflWqeuMuDU,6665 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded/test_data_set_0/input_1.pb,sha256=6GZ_k7Bg5S-sVwagkPPRjF351HRT5efq3L0RXbbFfPo,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded/test_data_set_0/input_2.pb,sha256=XJX83EpHjA-svtp5ZUT-cGHo6rmxPJPuge7JtGCcIL4,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded/test_data_set_0/output_0.pb,sha256=ZuFfotGGllfUiOngMYM4jmdmjdC-Jrrz9jj9tpFRkzQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded/test_data_set_0/output_1.pb,sha256=PyQxLcwwC8Zmn6ihwCHpRkKnLdOtTKa19fmW23TrU9g,114 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded/test_data_set_0/output_2.pb,sha256=OHN3HBAA9M0qcG3nWWb7JqxwLw8gfKJMlIsb3xK2S3E,119 
+onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded_ver18/model.onnx,sha256=fl0G-dS-cClTWvnynmx9TjXyx5KKrO4jhlL00yvyoxE,6947 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded_ver18/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded_ver18/test_data_set_0/input_1.pb,sha256=6GZ_k7Bg5S-sVwagkPPRjF351HRT5efq3L0RXbbFfPo,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded_ver18/test_data_set_0/input_2.pb,sha256=XJX83EpHjA-svtp5ZUT-cGHo6rmxPJPuge7JtGCcIL4,29 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded_ver18/test_data_set_0/output_0.pb,sha256=ZuFfotGGllfUiOngMYM4jmdmjdC-Jrrz9jj9tpFRkzQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded_ver18/test_data_set_0/output_1.pb,sha256=PyQxLcwwC8Zmn6ihwCHpRkKnLdOtTKa19fmW23TrU9g,114 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_1_expanded_ver18/test_data_set_0/output_2.pb,sha256=OHN3HBAA9M0qcG3nWWb7JqxwLw8gfKJMlIsb3xK2S3E,119 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2/model.onnx,sha256=A2htY41elXdKJX_T2mPfTFyS0GqQWit50Be8HTzM7a4,312 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2/test_data_set_0/input_1.pb,sha256=lldUvoN8cEAHdj14iPu_W2NaWno-DAHLBIhPF_p4Pd8,91 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2/test_data_set_0/input_2.pb,sha256=IdlwA87uvKYeYSPClPtWIUU9EwPQigWZfgLGhsI5E-8,91 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2/test_data_set_0/output_0.pb,sha256=asFNXrgO-KAfHYrriGkiIwAWBnnnEv6efe4rmSzGxIQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2/test_data_set_0/output_1.pb,sha256=Jdb9L1HSJpPk-w9babAzkefVo1_1OtnrycFuzhsqWyc,42 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2/test_data_set_0/output_2.pb,sha256=bTgUzSWmCvzUbVNgz5UitP82lYNZb6gp_d3N6yGwNMI,47 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded/model.onnx,sha256=pF9o1Z9It45xK3tp6cYAld9W76DFjgGbAevaW6WotEY,6673 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded/test_data_set_0/input_1.pb,sha256=lldUvoN8cEAHdj14iPu_W2NaWno-DAHLBIhPF_p4Pd8,91 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded/test_data_set_0/input_2.pb,sha256=IdlwA87uvKYeYSPClPtWIUU9EwPQigWZfgLGhsI5E-8,91 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded/test_data_set_0/output_0.pb,sha256=asFNXrgO-KAfHYrriGkiIwAWBnnnEv6efe4rmSzGxIQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded/test_data_set_0/output_1.pb,sha256=Jdb9L1HSJpPk-w9babAzkefVo1_1OtnrycFuzhsqWyc,42 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded/test_data_set_0/output_2.pb,sha256=bTgUzSWmCvzUbVNgz5UitP82lYNZb6gp_d3N6yGwNMI,47 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded_ver18/model.onnx,sha256=8Y-ybd42hcymPB_-xOT7GF0u6VtKzQypdE97bEGFX_Y,6955 
+onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded_ver18/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded_ver18/test_data_set_0/input_1.pb,sha256=lldUvoN8cEAHdj14iPu_W2NaWno-DAHLBIhPF_p4Pd8,91 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded_ver18/test_data_set_0/input_2.pb,sha256=IdlwA87uvKYeYSPClPtWIUU9EwPQigWZfgLGhsI5E-8,91 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded_ver18/test_data_set_0/output_0.pb,sha256=asFNXrgO-KAfHYrriGkiIwAWBnnnEv6efe4rmSzGxIQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded_ver18/test_data_set_0/output_1.pb,sha256=Jdb9L1HSJpPk-w9babAzkefVo1_1OtnrycFuzhsqWyc,42 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_2_expanded_ver18/test_data_set_0/output_2.pb,sha256=bTgUzSWmCvzUbVNgz5UitP82lYNZb6gp_d3N6yGwNMI,47 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3/model.onnx,sha256=VNCqFZoNsJFTB0ngEmUrJH1xWBQz_NgmSPvTEir5W0w,320 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3/test_data_set_0/input_1.pb,sha256=Gq90Mctuyji8qFBteJmVpb2Hrpyq8LdiSb3PnC7udRs,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3/test_data_set_0/input_2.pb,sha256=HF2D2CKuGbjo2s8uM5wNXzz7x55YyplI1OBvIXzUr2g,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3/test_data_set_0/output_0.pb,sha256=l5U23ab2-R8Md6IYMyC72oD0P2uU52TH0JhH_VPDIA0,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3/test_data_set_0/output_1.pb,sha256=div91pv-EhX8CtojPjzcl-OCV7-3ucF2WMaqIA4Owi0,26 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3/test_data_set_0/output_2.pb,sha256=SFEhUFLf29xLZCuQskP1rqZTOtPHZurgQaHJ7iIloMg,31 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded/model.onnx,sha256=Kby-ajsIb_vu3rEUCBYufMh2qVybDA7iRnakIPqUqhc,6681 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded/test_data_set_0/input_1.pb,sha256=Gq90Mctuyji8qFBteJmVpb2Hrpyq8LdiSb3PnC7udRs,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded/test_data_set_0/input_2.pb,sha256=HF2D2CKuGbjo2s8uM5wNXzz7x55YyplI1OBvIXzUr2g,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded/test_data_set_0/output_0.pb,sha256=l5U23ab2-R8Md6IYMyC72oD0P2uU52TH0JhH_VPDIA0,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded/test_data_set_0/output_1.pb,sha256=div91pv-EhX8CtojPjzcl-OCV7-3ucF2WMaqIA4Owi0,26 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded/test_data_set_0/output_2.pb,sha256=SFEhUFLf29xLZCuQskP1rqZTOtPHZurgQaHJ7iIloMg,31 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded_ver18/model.onnx,sha256=nRuSFMXeVKgz0etyhiyLfadJW1Azl8Y7xaGnwBjg9No,6963 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded_ver18/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 
+onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded_ver18/test_data_set_0/input_1.pb,sha256=Gq90Mctuyji8qFBteJmVpb2Hrpyq8LdiSb3PnC7udRs,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded_ver18/test_data_set_0/input_2.pb,sha256=HF2D2CKuGbjo2s8uM5wNXzz7x55YyplI1OBvIXzUr2g,254 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded_ver18/test_data_set_0/output_0.pb,sha256=l5U23ab2-R8Md6IYMyC72oD0P2uU52TH0JhH_VPDIA0,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded_ver18/test_data_set_0/output_1.pb,sha256=div91pv-EhX8CtojPjzcl-OCV7-3ucF2WMaqIA4Owi0,26 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_3_expanded_ver18/test_data_set_0/output_2.pb,sha256=SFEhUFLf29xLZCuQskP1rqZTOtPHZurgQaHJ7iIloMg,31 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4/model.onnx,sha256=70k7y1Dw1jZs9VJuQpe1maGoy9T8ZBGvVFF7ufVF5NU,328 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4/test_data_set_0/input_1.pb,sha256=pjzWsxYof-fpDuo60I8TJ_IoOXGfsn9uGNzmD-dqse8,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4/test_data_set_0/input_2.pb,sha256=F4lODpYxgJ7bw__BS9HzQiEIvcezx2NAUr-bdpeWLME,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4/test_data_set_0/output_0.pb,sha256=CNZnGSYfMqynnB9RBPfSfz0Z107X_uZQ5YyxsQHrZPo,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4/test_data_set_0/output_1.pb,sha256=h_PlYPPc_oA1NHtZrWv0D55Gb9RZgmTgYnO0VZ2TI34,22 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4/test_data_set_0/output_2.pb,sha256=hVXqX9wgl-HPPlQrrhqClVhRvWG1HC21LrbjWPq5HcA,27 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded/model.onnx,sha256=4kuJXvNy2sKGNjaWL-_137AAKHEwKq985u-YXKNKGKo,6689 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded/test_data_set_0/input_1.pb,sha256=pjzWsxYof-fpDuo60I8TJ_IoOXGfsn9uGNzmD-dqse8,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded/test_data_set_0/input_2.pb,sha256=F4lODpYxgJ7bw__BS9HzQiEIvcezx2NAUr-bdpeWLME,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded/test_data_set_0/output_0.pb,sha256=CNZnGSYfMqynnB9RBPfSfz0Z107X_uZQ5YyxsQHrZPo,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded/test_data_set_0/output_1.pb,sha256=h_PlYPPc_oA1NHtZrWv0D55Gb9RZgmTgYnO0VZ2TI34,22 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded/test_data_set_0/output_2.pb,sha256=hVXqX9wgl-HPPlQrrhqClVhRvWG1HC21LrbjWPq5HcA,27 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded_ver18/model.onnx,sha256=NW8yWja0CKsEwYw_M9mHJ0nj-Jc5dVEXcO5foPKFQcU,6971 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded_ver18/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded_ver18/test_data_set_0/input_1.pb,sha256=pjzWsxYof-fpDuo60I8TJ_IoOXGfsn9uGNzmD-dqse8,496 
+onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded_ver18/test_data_set_0/input_2.pb,sha256=F4lODpYxgJ7bw__BS9HzQiEIvcezx2NAUr-bdpeWLME,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded_ver18/test_data_set_0/output_0.pb,sha256=CNZnGSYfMqynnB9RBPfSfz0Z107X_uZQ5YyxsQHrZPo,496 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded_ver18/test_data_set_0/output_1.pb,sha256=h_PlYPPc_oA1NHtZrWv0D55Gb9RZgmTgYnO0VZ2TI34,22 +onnx/backend/test/data/node/test_layer_normalization_4d_axis_negative_4_expanded_ver18/test_data_set_0/output_2.pb,sha256=hVXqX9wgl-HPPlQrrhqClVhRvWG1HC21LrbjWPq5HcA,27 +onnx/backend/test/data/node/test_layer_normalization_default_axis/model.onnx,sha256=b_2kEk9Ayq6XBTG-oSOrfJFe_FKs_ZyYmWkU9OxDOSU,276 +onnx/backend/test/data/node/test_layer_normalization_default_axis/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_default_axis/test_data_set_0/input_1.pb,sha256=lnzferwhTiJ1206NWKt09uMfuERk-l8gPAOC9Rz5TEI,29 +onnx/backend/test/data/node/test_layer_normalization_default_axis/test_data_set_0/input_2.pb,sha256=XsRGoQhLOWNEyIeZqVVjYnHk4mZKF8lajaF9UI8GZts,29 +onnx/backend/test/data/node/test_layer_normalization_default_axis/test_data_set_0/output_0.pb,sha256=RfWw3_rb9bhFMIefbpqMRHBeSAkrDZx9kMlblgbFlT8,496 +onnx/backend/test/data/node/test_layer_normalization_default_axis/test_data_set_0/output_1.pb,sha256=PyQxLcwwC8Zmn6ihwCHpRkKnLdOtTKa19fmW23TrU9g,114 +onnx/backend/test/data/node/test_layer_normalization_default_axis/test_data_set_0/output_2.pb,sha256=OHN3HBAA9M0qcG3nWWb7JqxwLw8gfKJMlIsb3xK2S3E,119 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded/model.onnx,sha256=TDt2_Jc5tCXHj2l5O7QY9-ItZFhp2rUg2XN88nXzgBc,6273 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded/test_data_set_0/input_1.pb,sha256=lnzferwhTiJ1206NWKt09uMfuERk-l8gPAOC9Rz5TEI,29 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded/test_data_set_0/input_2.pb,sha256=XsRGoQhLOWNEyIeZqVVjYnHk4mZKF8lajaF9UI8GZts,29 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded/test_data_set_0/output_0.pb,sha256=RfWw3_rb9bhFMIefbpqMRHBeSAkrDZx9kMlblgbFlT8,496 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded/test_data_set_0/output_1.pb,sha256=PyQxLcwwC8Zmn6ihwCHpRkKnLdOtTKa19fmW23TrU9g,114 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded/test_data_set_0/output_2.pb,sha256=OHN3HBAA9M0qcG3nWWb7JqxwLw8gfKJMlIsb3xK2S3E,119 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded_ver18/model.onnx,sha256=HO6zcuo5N3mdS6EXK2OYuFoECMlKUYJhfOxhZw232DE,6537 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded_ver18/test_data_set_0/input_0.pb,sha256=x-FxhkwixA66m5bcrvZSS0AmXxQGtgXOW9RaiNdUFtQ,496 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded_ver18/test_data_set_0/input_1.pb,sha256=lnzferwhTiJ1206NWKt09uMfuERk-l8gPAOC9Rz5TEI,29 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded_ver18/test_data_set_0/input_2.pb,sha256=XsRGoQhLOWNEyIeZqVVjYnHk4mZKF8lajaF9UI8GZts,29 
+onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded_ver18/test_data_set_0/output_0.pb,sha256=RfWw3_rb9bhFMIefbpqMRHBeSAkrDZx9kMlblgbFlT8,496 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded_ver18/test_data_set_0/output_1.pb,sha256=PyQxLcwwC8Zmn6ihwCHpRkKnLdOtTKa19fmW23TrU9g,114 +onnx/backend/test/data/node/test_layer_normalization_default_axis_expanded_ver18/test_data_set_0/output_2.pb,sha256=OHN3HBAA9M0qcG3nWWb7JqxwLw8gfKJMlIsb3xK2S3E,119 +onnx/backend/test/data/node/test_leakyrelu/model.onnx,sha256=_22Ni5RQMG7uIlr6XEeVpMPtd6m-wghY5OPw7MX_Z0o,126 +onnx/backend/test/data/node/test_leakyrelu/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_leakyrelu/test_data_set_0/output_0.pb,sha256=HL5T46BD6dJDcU8eZMF5Lm5I4maShdEDBguWdgLux3c,254 +onnx/backend/test/data/node/test_leakyrelu_default/model.onnx,sha256=I1tKwPadbD9YaefW-ZBgElwp3GNleBeR8pzzGqi0PA0,117 +onnx/backend/test/data/node/test_leakyrelu_default/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_leakyrelu_default/test_data_set_0/output_0.pb,sha256=n198khm90YbTozNWp1IATonknkccZtYunsb6PRQA_hU,254 +onnx/backend/test/data/node/test_leakyrelu_default_expanded/model.onnx,sha256=pc1KytIHlwlrT6uSt0Aar9M3KrHiUadTpnjEmhAKaf4,996 +onnx/backend/test/data/node/test_leakyrelu_default_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_leakyrelu_default_expanded/test_data_set_0/output_0.pb,sha256=n198khm90YbTozNWp1IATonknkccZtYunsb6PRQA_hU,254 +onnx/backend/test/data/node/test_leakyrelu_example/model.onnx,sha256=4uswPgT5c4NX9AAy8k97e9wHTixFkyH3MI35Ea63G3o,118 +onnx/backend/test/data/node/test_leakyrelu_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_leakyrelu_example/test_data_set_0/output_0.pb,sha256=MY0cN1MP7gOByzkKkJceoWxHG-kwUcs4D8IgLgDgXdo,21 +onnx/backend/test/data/node/test_leakyrelu_example_expanded/model.onnx,sha256=HqNSxSkEqZ4H6f2T-afxoG18D1pwpNVVzgBZgQLHBlo,980 +onnx/backend/test/data/node/test_leakyrelu_example_expanded/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_leakyrelu_example_expanded/test_data_set_0/output_0.pb,sha256=MY0cN1MP7gOByzkKkJceoWxHG-kwUcs4D8IgLgDgXdo,21 +onnx/backend/test/data/node/test_leakyrelu_expanded/model.onnx,sha256=m3NuI8g_U_7Czdc9tX1bnLwAaSoohmMDg3cgl6gdock,887 +onnx/backend/test/data/node/test_leakyrelu_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_leakyrelu_expanded/test_data_set_0/output_0.pb,sha256=HL5T46BD6dJDcU8eZMF5Lm5I4maShdEDBguWdgLux3c,254 +onnx/backend/test/data/node/test_less/model.onnx,sha256=VarII_RAwHj31PsI-Pxtwxnn9uJ3impXIoeHdX8xupU,133 +onnx/backend/test/data/node/test_less/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_less/test_data_set_0/input_1.pb,sha256=qlmuuQNl3eLtxabKfNE6cbZHjYcj-DzNBr1M4rz7GSo,254 +onnx/backend/test/data/node/test_less/test_data_set_0/output_0.pb,sha256=oLRYA8-ghpFNYXX8_SU6S82c8LMGyLC3hcXpYcRAegA,76 +onnx/backend/test/data/node/test_less_bcast/model.onnx,sha256=iTJmFimcwmkhrembVS2cERHKyTfUr2y2cFiOo3AAbMI,131 +onnx/backend/test/data/node/test_less_bcast/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 
+onnx/backend/test/data/node/test_less_bcast/test_data_set_0/input_1.pb,sha256=jYn9FLUzE9ETJcxFW48XqSehRlyePPjUoI-gtNyozBs,29 +onnx/backend/test/data/node/test_less_bcast/test_data_set_0/output_0.pb,sha256=ISvgvc7TKhgnUzs1arbsFxBlMJnFlb3SSY3MxVCm2NU,76 +onnx/backend/test/data/node/test_less_equal/model.onnx,sha256=G7cUKwHYYg_vsrRmj_m_BQXHzgjG_OYxEGAdhDxDid4,159 +onnx/backend/test/data/node/test_less_equal/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_less_equal/test_data_set_0/input_1.pb,sha256=qlmuuQNl3eLtxabKfNE6cbZHjYcj-DzNBr1M4rz7GSo,254 +onnx/backend/test/data/node/test_less_equal/test_data_set_0/output_0.pb,sha256=6KQRJD-Zzbqgjq8LLBi1XYJ2HcB9SkzS4oPm8hDTT3U,82 +onnx/backend/test/data/node/test_less_equal_bcast/model.onnx,sha256=R73v6AOmEUmMTYS4DEzM6rgp89TErgdIpAzq3GcDdfU,157 +onnx/backend/test/data/node/test_less_equal_bcast/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_less_equal_bcast/test_data_set_0/input_1.pb,sha256=jYn9FLUzE9ETJcxFW48XqSehRlyePPjUoI-gtNyozBs,29 +onnx/backend/test/data/node/test_less_equal_bcast/test_data_set_0/output_0.pb,sha256=6OSOoce2x4z76WkKaCJFJbNncAEOkLSY8hY7Mp7kfNk,82 +onnx/backend/test/data/node/test_less_equal_bcast_expanded/model.onnx,sha256=4ArSKpbhsIhQh51IyTmlp2jxRjtlRmWYhxJEZWDmhSs,411 +onnx/backend/test/data/node/test_less_equal_bcast_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_less_equal_bcast_expanded/test_data_set_0/input_1.pb,sha256=jYn9FLUzE9ETJcxFW48XqSehRlyePPjUoI-gtNyozBs,29 +onnx/backend/test/data/node/test_less_equal_bcast_expanded/test_data_set_0/output_0.pb,sha256=6OSOoce2x4z76WkKaCJFJbNncAEOkLSY8hY7Mp7kfNk,82 +onnx/backend/test/data/node/test_less_equal_expanded/model.onnx,sha256=jCJ-6W4caO1SFch4j0QbuPwSEd4GVRJHcwhwBGxeWKU,388 +onnx/backend/test/data/node/test_less_equal_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_less_equal_expanded/test_data_set_0/input_1.pb,sha256=qlmuuQNl3eLtxabKfNE6cbZHjYcj-DzNBr1M4rz7GSo,254 +onnx/backend/test/data/node/test_less_equal_expanded/test_data_set_0/output_0.pb,sha256=6KQRJD-Zzbqgjq8LLBi1XYJ2HcB9SkzS4oPm8hDTT3U,82 +onnx/backend/test/data/node/test_log/model.onnx,sha256=cjzxxnA2vdNVbEX5HvAKpSmjYnWrEJoMDppGn_nVs6U,97 +onnx/backend/test/data/node/test_log/test_data_set_0/input_0.pb,sha256=h8CKUWP5FlTAgSDNi1yNwj568pm_N5ceqp_-U0ll_Vk,254 +onnx/backend/test/data/node/test_log/test_data_set_0/output_0.pb,sha256=pETp9F1Xftoyb5uzN-CDODSiMM1O3kKb6bqDTq8nloo,254 +onnx/backend/test/data/node/test_log_example/model.onnx,sha256=XVquiQ33XPpN2dX6vINv5rNtk-sTtg9bXICaZ2bPjwM,89 +onnx/backend/test/data/node/test_log_example/test_data_set_0/input_0.pb,sha256=0YatubYDmMpLjT3RWtFeMNO0bgyvikdU41tE6Or7TXw,17 +onnx/backend/test/data/node/test_log_example/test_data_set_0/output_0.pb,sha256=uhyyuENMdNx-nQ9styrIbgpA_f86OaR-2N7rPvmRBMk,17 +onnx/backend/test/data/node/test_logsoftmax_axis_0/model.onnx,sha256=VJwFm_Yn_uQ5JJ8u_eWlbgtiWZrtUi6NAPAsOJ8-AGE,131 +onnx/backend/test/data/node/test_logsoftmax_axis_0/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_axis_0/test_data_set_0/output_0.pb,sha256=rxRI3f_tjrg9jlkJETIIY0Ezuf2mdi8sJVui7jN45os,254 
+onnx/backend/test/data/node/test_logsoftmax_axis_0_expanded/model.onnx,sha256=YfCbAB070y4n4iY1Jd1GKDYl9dLoNDmQUPi6hfk3H6Q,1058 +onnx/backend/test/data/node/test_logsoftmax_axis_0_expanded/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_axis_0_expanded/test_data_set_0/output_0.pb,sha256=rxRI3f_tjrg9jlkJETIIY0Ezuf2mdi8sJVui7jN45os,254 +onnx/backend/test/data/node/test_logsoftmax_axis_0_expanded_ver18/model.onnx,sha256=0xlpmgQ7u3QkgZrwjSAQKfoM_I0RCzyThOhRPHvS85A,1110 +onnx/backend/test/data/node/test_logsoftmax_axis_0_expanded_ver18/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_axis_0_expanded_ver18/test_data_set_0/output_0.pb,sha256=rxRI3f_tjrg9jlkJETIIY0Ezuf2mdi8sJVui7jN45os,254 +onnx/backend/test/data/node/test_logsoftmax_axis_1/model.onnx,sha256=ecVwQeM2qxClBhjEWUbOb2ag80E0M72V3PKfPoB107c,131 +onnx/backend/test/data/node/test_logsoftmax_axis_1/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_axis_1/test_data_set_0/output_0.pb,sha256=y-ROhxTKXw_3lML2t7eeO6eumoUcDiK7HUCWDrM0zjQ,254 +onnx/backend/test/data/node/test_logsoftmax_axis_1_expanded/model.onnx,sha256=g5Dj4agZBHB5oYZpG5fDhDSzlP6r5eAnq7lclEn7XH4,1058 +onnx/backend/test/data/node/test_logsoftmax_axis_1_expanded/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_axis_1_expanded/test_data_set_0/output_0.pb,sha256=y-ROhxTKXw_3lML2t7eeO6eumoUcDiK7HUCWDrM0zjQ,254 +onnx/backend/test/data/node/test_logsoftmax_axis_1_expanded_ver18/model.onnx,sha256=DFEInUgsiO7XfgcAXECqwsmPRs5vxeg1jMAy3drbkS8,1110 +onnx/backend/test/data/node/test_logsoftmax_axis_1_expanded_ver18/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_axis_1_expanded_ver18/test_data_set_0/output_0.pb,sha256=y-ROhxTKXw_3lML2t7eeO6eumoUcDiK7HUCWDrM0zjQ,254 +onnx/backend/test/data/node/test_logsoftmax_axis_2/model.onnx,sha256=nshnXZ3VAXw3BDp8QNoyBWN8szqKSIfh2w6v5fuFMeY,131 +onnx/backend/test/data/node/test_logsoftmax_axis_2/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_axis_2/test_data_set_0/output_0.pb,sha256=msG-YqFWPcBJzTIYQ7pC_R9R9WTqi6MP7WC-fm4Ysi8,254 +onnx/backend/test/data/node/test_logsoftmax_axis_2_expanded/model.onnx,sha256=MiHL1XMsKoyIm9MPa-EeZTOBmmIShCvteQR0igOGgug,1058 +onnx/backend/test/data/node/test_logsoftmax_axis_2_expanded/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_axis_2_expanded/test_data_set_0/output_0.pb,sha256=msG-YqFWPcBJzTIYQ7pC_R9R9WTqi6MP7WC-fm4Ysi8,254 +onnx/backend/test/data/node/test_logsoftmax_axis_2_expanded_ver18/model.onnx,sha256=cWJhXBgARwv2HFpK0boBVxxbAB-uJqiRJEEYn6z9ZSs,1110 +onnx/backend/test/data/node/test_logsoftmax_axis_2_expanded_ver18/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_axis_2_expanded_ver18/test_data_set_0/output_0.pb,sha256=msG-YqFWPcBJzTIYQ7pC_R9R9WTqi6MP7WC-fm4Ysi8,254 +onnx/backend/test/data/node/test_logsoftmax_default_axis/model.onnx,sha256=W_xTO0KtwNOY1vT1hr9XmelZo54m6yXz4lPD4aNYnbQ,124 
+onnx/backend/test/data/node/test_logsoftmax_default_axis/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_default_axis/test_data_set_0/output_0.pb,sha256=msG-YqFWPcBJzTIYQ7pC_R9R9WTqi6MP7WC-fm4Ysi8,254 +onnx/backend/test/data/node/test_logsoftmax_default_axis_expanded/model.onnx,sha256=KlfdRLL8jlBhSlKlS-GX9L8nQ_gVqqH9Rm7zMQaDNZE,1161 +onnx/backend/test/data/node/test_logsoftmax_default_axis_expanded/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_default_axis_expanded/test_data_set_0/output_0.pb,sha256=msG-YqFWPcBJzTIYQ7pC_R9R9WTqi6MP7WC-fm4Ysi8,254 +onnx/backend/test/data/node/test_logsoftmax_default_axis_expanded_ver18/model.onnx,sha256=wC2W2D5LPxEUCiGU9aAutzkUbr_SK4TJwRP35vfq6tQ,1210 +onnx/backend/test/data/node/test_logsoftmax_default_axis_expanded_ver18/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_default_axis_expanded_ver18/test_data_set_0/output_0.pb,sha256=msG-YqFWPcBJzTIYQ7pC_R9R9WTqi6MP7WC-fm4Ysi8,254 +onnx/backend/test/data/node/test_logsoftmax_example_1/model.onnx,sha256=0AlAWkvbmN5EuyqEmOQTqSwtUfKwopJz1z5RgxXbS9w,113 +onnx/backend/test/data/node/test_logsoftmax_example_1/test_data_set_0/input_0.pb,sha256=NHt6xFA_Q0se0ekO4FsGJVcWaEWivQQoZluHhSWn6JY,23 +onnx/backend/test/data/node/test_logsoftmax_example_1/test_data_set_0/output_0.pb,sha256=0E88Sg_EHhUCNlKNOtJhRSD4YLju2X8tGUYcZ-68M98,23 +onnx/backend/test/data/node/test_logsoftmax_example_1_expanded/model.onnx,sha256=BzlBt5Ku0H1N8HqQOwSqH8TgegEiWJ0uOTiXPpp5gmw,1111 +onnx/backend/test/data/node/test_logsoftmax_example_1_expanded/test_data_set_0/input_0.pb,sha256=NHt6xFA_Q0se0ekO4FsGJVcWaEWivQQoZluHhSWn6JY,23 +onnx/backend/test/data/node/test_logsoftmax_example_1_expanded/test_data_set_0/output_0.pb,sha256=0E88Sg_EHhUCNlKNOtJhRSD4YLju2X8tGUYcZ-68M98,23 +onnx/backend/test/data/node/test_logsoftmax_example_1_expanded_ver18/model.onnx,sha256=Fm1B-d4hmXuY6aIMje5xrjHpaj4QxJgz1_Vk0ZBG9to,1157 +onnx/backend/test/data/node/test_logsoftmax_example_1_expanded_ver18/test_data_set_0/input_0.pb,sha256=NHt6xFA_Q0se0ekO4FsGJVcWaEWivQQoZluHhSWn6JY,23 +onnx/backend/test/data/node/test_logsoftmax_example_1_expanded_ver18/test_data_set_0/output_0.pb,sha256=0E88Sg_EHhUCNlKNOtJhRSD4YLju2X8tGUYcZ-68M98,23 +onnx/backend/test/data/node/test_logsoftmax_large_number/model.onnx,sha256=gJWUGFTLCqc41krZNiUU9j2zySmHPqJK2ZJbgP5o0ng,116 +onnx/backend/test/data/node/test_logsoftmax_large_number/test_data_set_0/input_0.pb,sha256=8aqq9T0ktdil6R8zGhL-Sdrp2dpDCnSGzKl7HZiLcaI,43 +onnx/backend/test/data/node/test_logsoftmax_large_number/test_data_set_0/output_0.pb,sha256=Sr55i9PgvLCs-n1Ew4mlUpnkhsgmu3BaizigOb_SFwk,43 +onnx/backend/test/data/node/test_logsoftmax_large_number_expanded/model.onnx,sha256=byHDQ5CYCFgvkuFSiTxaETwHkpsasZMFBUfq1-AWUjE,1153 +onnx/backend/test/data/node/test_logsoftmax_large_number_expanded/test_data_set_0/input_0.pb,sha256=8aqq9T0ktdil6R8zGhL-Sdrp2dpDCnSGzKl7HZiLcaI,43 +onnx/backend/test/data/node/test_logsoftmax_large_number_expanded/test_data_set_0/output_0.pb,sha256=Sr55i9PgvLCs-n1Ew4mlUpnkhsgmu3BaizigOb_SFwk,43 +onnx/backend/test/data/node/test_logsoftmax_large_number_expanded_ver18/model.onnx,sha256=fXI6GvlhebnyoFrv0QPQ98kJQksjCFpjlYleYdUlnCU,1202 +onnx/backend/test/data/node/test_logsoftmax_large_number_expanded_ver18/test_data_set_0/input_0.pb,sha256=8aqq9T0ktdil6R8zGhL-Sdrp2dpDCnSGzKl7HZiLcaI,43 
+onnx/backend/test/data/node/test_logsoftmax_large_number_expanded_ver18/test_data_set_0/output_0.pb,sha256=Sr55i9PgvLCs-n1Ew4mlUpnkhsgmu3BaizigOb_SFwk,43 +onnx/backend/test/data/node/test_logsoftmax_negative_axis/model.onnx,sha256=eycSf6ZZu87Dr64zGxydYF7uwDJNon6U0YOxlJVZ5Ms,147 +onnx/backend/test/data/node/test_logsoftmax_negative_axis/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_negative_axis/test_data_set_0/output_0.pb,sha256=msG-YqFWPcBJzTIYQ7pC_R9R9WTqi6MP7WC-fm4Ysi8,254 +onnx/backend/test/data/node/test_logsoftmax_negative_axis_expanded/model.onnx,sha256=4URNtnb9AximMCbvDlR6JOkAot2n-0bqXA9aImyKIys,1175 +onnx/backend/test/data/node/test_logsoftmax_negative_axis_expanded/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_negative_axis_expanded/test_data_set_0/output_0.pb,sha256=msG-YqFWPcBJzTIYQ7pC_R9R9WTqi6MP7WC-fm4Ysi8,254 +onnx/backend/test/data/node/test_logsoftmax_negative_axis_expanded_ver18/model.onnx,sha256=ISmFRynfhw5CyXIuuuQKQRhZ7SIOGGuVD34vSDUCquM,1225 +onnx/backend/test/data/node/test_logsoftmax_negative_axis_expanded_ver18/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_logsoftmax_negative_axis_expanded_ver18/test_data_set_0/output_0.pb,sha256=msG-YqFWPcBJzTIYQ7pC_R9R9WTqi6MP7WC-fm4Ysi8,254 +onnx/backend/test/data/node/test_loop11/model.onnx,sha256=NUlbusd6NPWkUocdEIyVtNhyLkrj8dNo-Jt4G1Cmv9c,726 +onnx/backend/test/data/node/test_loop11/test_data_set_0/input_0.pb,sha256=21JE-MBIhsvfmum8LEJZRp-Ye4W2jOC8yOKzQDWQeSg,24 +onnx/backend/test/data/node/test_loop11/test_data_set_0/input_1.pb,sha256=OH775Ik9hGMBcOHijMYFq_DdWSSqgPdT-CZfhNjcOpw,11 +onnx/backend/test/data/node/test_loop11/test_data_set_0/input_2.pb,sha256=odGLrEmd0nK29vu1cr7vt82v8nFq43Ps-bhrbn3_7rc,13 +onnx/backend/test/data/node/test_loop11/test_data_set_0/output_0.pb,sha256=4ToJMgBIR2YxjkszwBk7SawzzOTz3WUAz8cgo_aEH0E,17 +onnx/backend/test/data/node/test_loop11/test_data_set_0/output_1.pb,sha256=4FgI1CLKyj4rzxQPcdL6YDctQ1tijp2ao1XFgbLEtX8,38 +onnx/backend/test/data/node/test_loop13_seq/model.onnx,sha256=X2_q1DMYFPlgKOXWtNil8nGpg77BOc5BTqwTOKNPeGY,735 +onnx/backend/test/data/node/test_loop13_seq/test_data_set_0/input_0.pb,sha256=21JE-MBIhsvfmum8LEJZRp-Ye4W2jOC8yOKzQDWQeSg,24 +onnx/backend/test/data/node/test_loop13_seq/test_data_set_0/input_1.pb,sha256=OH775Ik9hGMBcOHijMYFq_DdWSSqgPdT-CZfhNjcOpw,11 +onnx/backend/test/data/node/test_loop13_seq/test_data_set_0/input_2.pb,sha256=06j9wXmbVFTVecpbq-DEWVtWWQSzCcllrwnC--Jf54g,13 +onnx/backend/test/data/node/test_loop13_seq/test_data_set_0/output_0.pb,sha256=zTPY0hjFSeAJ8JDXtLJ9RqaiGe3bkvi-kIZnICmmnrw,111 +onnx/backend/test/data/node/test_loop16_seq_none/model.onnx,sha256=qI0mQQyRbK_o6Xkf0p_SX-TsdIMCfm8SIktEfIJwNgE,1149 +onnx/backend/test/data/node/test_loop16_seq_none/test_data_set_0/input_0.pb,sha256=21JE-MBIhsvfmum8LEJZRp-Ye4W2jOC8yOKzQDWQeSg,24 +onnx/backend/test/data/node/test_loop16_seq_none/test_data_set_0/input_1.pb,sha256=OH775Ik9hGMBcOHijMYFq_DdWSSqgPdT-CZfhNjcOpw,11 +onnx/backend/test/data/node/test_loop16_seq_none/test_data_set_0/input_2.pb,sha256=eJaYeF3jiE9xLuq2AoSHII-61tNtlFUkew3iRtmr60A,25 +onnx/backend/test/data/node/test_loop16_seq_none/test_data_set_0/output_0.pb,sha256=9SLymmgUBDSk1J-jBePC9CBoLiabCczNkHDaJMMZzwM,121 +onnx/backend/test/data/node/test_lppool_1d_default/model.onnx,sha256=eZ94zxHGXJjX9W9M8A5R1BOzT3z-UmsGo57CSeAzkVw,162 
+onnx/backend/test/data/node/test_lppool_1d_default/test_data_set_0/input_0.pb,sha256=Za3StahBL7BbhQGm5tBIbmHL40mPVKMeoJms01DCbkU,398 +onnx/backend/test/data/node/test_lppool_1d_default/test_data_set_0/output_0.pb,sha256=zs7RzbAqfv4nBOBDrk4T9ljczt1-lu_rzDcznI8szDI,386 +onnx/backend/test/data/node/test_lppool_2d_default/model.onnx,sha256=8POaI7nLcfBRgRb5Ehb93XIpNzeb2bEG5L81IJGORV0,156 +onnx/backend/test/data/node/test_lppool_2d_default/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_lppool_2d_default/test_data_set_0/output_0.pb,sha256=nhYIrtRs818qGeRFyl185QgEG7gCQfPdFmlYK7L_b3k,11548 +onnx/backend/test/data/node/test_lppool_2d_dilations/model.onnx,sha256=FahwMMQWbiZcLyAt-hf7mPfHOSjSeAAk_jrjvGzsjKs,196 +onnx/backend/test/data/node/test_lppool_2d_dilations/test_data_set_0/input_0.pb,sha256=wWnR5nCmHbKNWucpc2qwGWA_byviZxAN4EWpmBWxWjA,79 +onnx/backend/test/data/node/test_lppool_2d_dilations/test_data_set_0/output_0.pb,sha256=4a9-Q0pFJed5AhCQ2R01nG38O2j5TevoKwad9oslwnY,31 +onnx/backend/test/data/node/test_lppool_2d_pads/model.onnx,sha256=BONj-FtCLGFPUpqRfMLNKlVjKjP2utxDESkvfhfUeuE,172 +onnx/backend/test/data/node/test_lppool_2d_pads/test_data_set_0/input_0.pb,sha256=HFGpOb8k-_eSox3wcj8SOjH7WcbqDYrAaqiceZ_ZzdI,9424 +onnx/backend/test/data/node/test_lppool_2d_pads/test_data_set_0/output_0.pb,sha256=AsJ1Wg4_Ven9yPRn6Rj0e_-BIrAdDlGfPp01JgBfx8U,10816 +onnx/backend/test/data/node/test_lppool_2d_same_lower/model.onnx,sha256=P1FRJ1ql2Herm3URmFZC7HjlHhMXEPjIApV518vzj98,186 +onnx/backend/test/data/node/test_lppool_2d_same_lower/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_lppool_2d_same_lower/test_data_set_0/output_0.pb,sha256=37Fol-hh2swC4VufGVXZ7yw7E_W4rFTJhIcTENHf5Y0,12304 +onnx/backend/test/data/node/test_lppool_2d_same_upper/model.onnx,sha256=l3auNj5Udd_-copC8dLj3g7Ptiq2zuqKiHZcSLYO2Q8,186 +onnx/backend/test/data/node/test_lppool_2d_same_upper/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_lppool_2d_same_upper/test_data_set_0/output_0.pb,sha256=QIHS98H9r-Pvz4VM4cR7eYH6WUhsJdAomJkAnV3Dweo,12304 +onnx/backend/test/data/node/test_lppool_2d_strides/model.onnx,sha256=9qlwhLSX9ErP3L2AMUS5qJd0bi0oqrhb8iPHQ-O66WI,174 +onnx/backend/test/data/node/test_lppool_2d_strides/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_lppool_2d_strides/test_data_set_0/output_0.pb,sha256=BDPKhQX2BBLBEwyfV7s5KfL2ua_fmKJAKOaLe983ohQ,1216 +onnx/backend/test/data/node/test_lppool_3d_default/model.onnx,sha256=dXN3axvo21IAXIkTeiezdD7BUVOvH95iRYV7EHAMLTs,166 +onnx/backend/test/data/node/test_lppool_3d_default/test_data_set_0/input_0.pb,sha256=L_c9P56CmA7WzPjy1TlkWrKgYm0YkeXEYxiPZ6VV1wY,393235 +onnx/backend/test/data/node/test_lppool_3d_default/test_data_set_0/output_0.pb,sha256=6i0TyhC634cHG3TRMNQOmHsvHvFJzz5J6g1IrRxxojA,357511 +onnx/backend/test/data/node/test_lrn/model.onnx,sha256=2RmKdi_xdJKG2nQ6Sydr0RhKp9hx2sYugj0rrLJiwHg,168 +onnx/backend/test/data/node/test_lrn/test_data_set_0/input_0.pb,sha256=WTjaurAfJF9L9t73S5CfE2SmsNIVXuYgfYNgVEkypVQ,2516 +onnx/backend/test/data/node/test_lrn/test_data_set_0/output_0.pb,sha256=xdnB1rOmadLvzwf321Xc8uXA01Lyr_k6mac3gS-LQAg,2516 +onnx/backend/test/data/node/test_lrn_default/model.onnx,sha256=pLo59lyQp0avFhRmH_TiiS2qWXApwiREShuiL_3uuVE,126 
+onnx/backend/test/data/node/test_lrn_default/test_data_set_0/input_0.pb,sha256=WTjaurAfJF9L9t73S5CfE2SmsNIVXuYgfYNgVEkypVQ,2516 +onnx/backend/test/data/node/test_lrn_default/test_data_set_0/output_0.pb,sha256=cxUk8-yvr-GWQUwRs5k296MxN76GuC3f9Ry4MYC-nhg,2516 +onnx/backend/test/data/node/test_lstm_batchwise/model.onnx,sha256=YlBIlwsPqPjo_z1dSVNgavYhwzivz_6IEq9lRh1xwKw,237 +onnx/backend/test/data/node/test_lstm_batchwise/test_data_set_0/input_0.pb,sha256=X4Jx731skujCbqYlfE79v4ovLqbqAar9VfhQTegviw8,37 +onnx/backend/test/data/node/test_lstm_batchwise/test_data_set_0/input_1.pb,sha256=OtNadmJqOUdzMif6TE-Lr_MvFZLW5uPxpNN307RTPqI,238 +onnx/backend/test/data/node/test_lstm_batchwise/test_data_set_0/input_2.pb,sha256=4TgQIT8ZsnJmaBYxUhHn-MwfrldN0Nl5ubXhWUqr11A,798 +onnx/backend/test/data/node/test_lstm_batchwise/test_data_set_0/output_0.pb,sha256=OTkQCUErjFkNHjBcm6xzsKYQ0Ihg5HMl5uSOi7oN-Rw,99 +onnx/backend/test/data/node/test_lstm_batchwise/test_data_set_0/output_1.pb,sha256=5GQt1_4Z8Z1ArFiNEbzFUZxEAMVDr9AsjRXeAg1tiME,99 +onnx/backend/test/data/node/test_lstm_defaults/model.onnx,sha256=YYraEws0PGMvXwyc6jyGNu4GJxpY7sOvo5ARcygZ2dE,191 +onnx/backend/test/data/node/test_lstm_defaults/test_data_set_0/input_0.pb,sha256=CfdyEi1TF1qNXdr7S9ZjPY0_c7pWtxM7ou3bNl3fNIo,37 +onnx/backend/test/data/node/test_lstm_defaults/test_data_set_0/input_1.pb,sha256=TdJ94c4zwrACHQohXoMpgqyrwu4mcgsXbjFRuNlhp1k,109 +onnx/backend/test/data/node/test_lstm_defaults/test_data_set_0/input_2.pb,sha256=J0_sd5TU-GsTX-rbQovkFPVlVl7vgYypkPSUwo5ssZM,158 +onnx/backend/test/data/node/test_lstm_defaults/test_data_set_0/output_0.pb,sha256=lFvUjpPnQcNn5X_C-qFjM4CvBRwylGP0bgCPpFV9u20,51 +onnx/backend/test/data/node/test_lstm_with_initial_bias/model.onnx,sha256=7ckQkqxrwg1YaAcYM4rylKDnN8Md9MOr05cXOjbuC8E,224 +onnx/backend/test/data/node/test_lstm_with_initial_bias/test_data_set_0/input_0.pb,sha256=eujm2YGHVRU93EAGK-aPA-aFEublku2wP288wJjVc4M,49 +onnx/backend/test/data/node/test_lstm_with_initial_bias/test_data_set_0/input_1.pb,sha256=4nJnesA4ulDJrcu7HmldoPAKiHBRrrYEixxYzM93W8Y,206 +onnx/backend/test/data/node/test_lstm_with_initial_bias/test_data_set_0/input_2.pb,sha256=B_-IsKA8_GE6IY8PnBmwX9m6t6_3vdFJM9Z2dHMOHC4,270 +onnx/backend/test/data/node/test_lstm_with_initial_bias/test_data_set_0/input_3.pb,sha256=FWf6rrp6unNAWgeBMcpNQbQGnOLdeqkuEb3JpC0hKGI,140 +onnx/backend/test/data/node/test_lstm_with_initial_bias/test_data_set_0/output_0.pb,sha256=bnEbfd3S79ZtqIA_BeGIOrBw9dxm1KGb_czY6mtL6iU,63 +onnx/backend/test/data/node/test_lstm_with_peepholes/model.onnx,sha256=3EA-TGf7bLWpLmGO2FAtpvnhioklrKZEh2kzWniU_cI,377 +onnx/backend/test/data/node/test_lstm_with_peepholes/test_data_set_0/input_0.pb,sha256=2OeoosrOatiTi0V9BOYjvhD88vWXbcfkKA6c20gxRr0,45 +onnx/backend/test/data/node/test_lstm_with_peepholes/test_data_set_0/input_1.pb,sha256=YkSD-HUoRWHWy5VblSruzpovxbXfIx3hpt_ixzQtv1Q,206 +onnx/backend/test/data/node/test_lstm_with_peepholes/test_data_set_0/input_2.pb,sha256=J0_sd5TU-GsTX-rbQovkFPVlVl7vgYypkPSUwo5ssZM,158 +onnx/backend/test/data/node/test_lstm_with_peepholes/test_data_set_0/input_3.pb,sha256=UTQhV68H3RnHN8YKybJuSU0jokxWMdiPQJ8MvnNYS6U,107 +onnx/backend/test/data/node/test_lstm_with_peepholes/test_data_set_0/input_4.pb,sha256=MkJoqFV7-u_t8_o6Y2rIguYZimj9D8eeb341jV847QI,29 +onnx/backend/test/data/node/test_lstm_with_peepholes/test_data_set_0/input_5.pb,sha256=-V_w7_cGyZ0GDnxvC1kRRN6fgpl0cpKetpIig_LQwC0,45 +onnx/backend/test/data/node/test_lstm_with_peepholes/test_data_set_0/input_6.pb,sha256=ZyP6k0P4yDzfvH4T-npTdIMB-DmtFmKrZbXjRhpq1MM,45 
+onnx/backend/test/data/node/test_lstm_with_peepholes/test_data_set_0/input_7.pb,sha256=o1t21DuNUncpuswkkA69WCsPzw0sPPeGbw4AO6heHEQ,47 +onnx/backend/test/data/node/test_lstm_with_peepholes/test_data_set_0/output_0.pb,sha256=QcMlaZa8V3bViVMaUZ4VCqhpktnx3QHRL8mp2b1kUkw,39 +onnx/backend/test/data/node/test_matmul_2d/model.onnx,sha256=lG86Scz9r2JYa7roQ4Bl_kGn5qm4AGsUrzIZmZtJu3Y,122 +onnx/backend/test/data/node/test_matmul_2d/test_data_set_0/input_0.pb,sha256=XJ-hOynTG0EF6hYsDK7MlRgESZsBa98mi49HC0BMH3k,59 +onnx/backend/test/data/node/test_matmul_2d/test_data_set_0/input_1.pb,sha256=fZUNbWrynfo6yrjJ_N6nbVU8ERJAE7EG62YivA6_PdQ,59 +onnx/backend/test/data/node/test_matmul_2d/test_data_set_0/output_0.pb,sha256=x5KLo-j-hQdpdMlVIAUywqJGlxGetUszj7x8Jjkijkg,47 +onnx/backend/test/data/node/test_matmul_3d/model.onnx,sha256=DSaZ3zVKvneKN1wcjZAH2WW5yjn7StDbbPU_zOA1lC0,134 +onnx/backend/test/data/node/test_matmul_3d/test_data_set_0/input_0.pb,sha256=X2JZRe6Q-1UZLzTUnGefHdp5psiyGGbOZUoOU2PJJFY,109 +onnx/backend/test/data/node/test_matmul_3d/test_data_set_0/input_1.pb,sha256=uNcHeZg62FvPXXLbAOIfP2JA-S0GbYLdWCPiOFju_B0,109 +onnx/backend/test/data/node/test_matmul_3d/test_data_set_0/output_0.pb,sha256=Q2pR-WqVLQPaV8_iSRR_Yl4z_ddIBOA_7qJ72M9CFW4,85 +onnx/backend/test/data/node/test_matmul_4d/model.onnx,sha256=zvG_PFUSeYw4uirzowT8rtfUHmwcngKIpEpcxTaymq8,146 +onnx/backend/test/data/node/test_matmul_4d/test_data_set_0/input_0.pb,sha256=BOQ_EXZfe_quR07HGszrPSBrS2-k42x1hH8FiMBFPAs,111 +onnx/backend/test/data/node/test_matmul_4d/test_data_set_0/input_1.pb,sha256=6jox02jdTNxPHGwscLuIGr_GWZZhcG9iyOvRGH3Wlwg,111 +onnx/backend/test/data/node/test_matmul_4d/test_data_set_0/output_0.pb,sha256=koxqNtVvc6ykaa0u9WWTchqTWy-WxdyQJYMV4-LdDUI,87 +onnx/backend/test/data/node/test_matmulinteger/model.onnx,sha256=80liWhDJ10RJr-RA7pL_LffuyFV9P-fu4u1J4QO6yCI,218 +onnx/backend/test/data/node/test_matmulinteger/test_data_set_0/input_0.pb,sha256=CW_qu6MGZeHIi4xtdzFAE3EunQBARAnSpXpphiq4n3E,23 +onnx/backend/test/data/node/test_matmulinteger/test_data_set_0/input_1.pb,sha256=Zi7ccul40b_4WbGzddk6zQxNcul5WpeybbqROgjR_fE,17 +onnx/backend/test/data/node/test_matmulinteger/test_data_set_0/input_2.pb,sha256=n9w_0UFZSEO_KZ8ixK8FVb_f0hz4De5Q3qVfuoU90WE,21 +onnx/backend/test/data/node/test_matmulinteger/test_data_set_0/input_3.pb,sha256=2QOpPgCU1uUvr5u3dTi6vhiiHB_Rp5lh2LZGrHlup38,21 +onnx/backend/test/data/node/test_matmulinteger/test_data_set_0/output_0.pb,sha256=q285XbBU3WGQ67jFVoD_LqN-Zc_Fs5chpbtCmJh8mm4,43 +onnx/backend/test/data/node/test_max_example/model.onnx,sha256=uZcZ8DpvjVFEZwu03Aa3A7twKGWU6ZduHpPZOIYiN3g,170 +onnx/backend/test/data/node/test_max_example/test_data_set_0/input_0.pb,sha256=HJclva4-TFD98X2o011t-i_w7K5ow9hG8aJER5V_xN8,26 +onnx/backend/test/data/node/test_max_example/test_data_set_0/input_1.pb,sha256=2V5ZDb_t112Xa7JZY7c78huQL9pnJ4-jQGxtlvuagks,26 +onnx/backend/test/data/node/test_max_example/test_data_set_0/input_2.pb,sha256=AIB1cbcFh3kj1j43N81kkDuwnI8-0qQyvmV-b-FJ_UI,26 +onnx/backend/test/data/node/test_max_example/test_data_set_0/output_0.pb,sha256=sJp2P7F8KKH4NdxwIWBG9p6S_EpBkKyZjvqeEH9hs1k,26 +onnx/backend/test/data/node/test_max_float16/model.onnx,sha256=PrWzBjjbzgVYz_zPCtTL1HpfXqW2njVwePab2C6gn8M,139 +onnx/backend/test/data/node/test_max_float16/test_data_set_0/input_0.pb,sha256=XkpNeOliXBMqFxuPluFGL90YeFBKPQgFn1VCcdN0JTc,20 +onnx/backend/test/data/node/test_max_float16/test_data_set_0/input_1.pb,sha256=PGuS9cFumaH9L8uba6S7fx1HAjkPixoPdhEnLzKsJR0,20 
+onnx/backend/test/data/node/test_max_float16/test_data_set_0/output_0.pb,sha256=WubrX5BBksfMhnwR41XLC7FM_0k0Ilv6nLUrrDRyGFc,20 +onnx/backend/test/data/node/test_max_float32/model.onnx,sha256=tOJ-16c5LKcjYgoFmD7SSZ-3zsw5jR_1PcakIUGgEYk,139 +onnx/backend/test/data/node/test_max_float32/test_data_set_0/input_0.pb,sha256=HJclva4-TFD98X2o011t-i_w7K5ow9hG8aJER5V_xN8,26 +onnx/backend/test/data/node/test_max_float32/test_data_set_0/input_1.pb,sha256=2V5ZDb_t112Xa7JZY7c78huQL9pnJ4-jQGxtlvuagks,26 +onnx/backend/test/data/node/test_max_float32/test_data_set_0/output_0.pb,sha256=E_wXnHEmLizJ3s9zseH5tQKuAokfDXYQ7so-RUpR2SI,26 +onnx/backend/test/data/node/test_max_float64/model.onnx,sha256=adsd71YIOGdvQR0grieRN8r5IWqah5goY0iXR9K9brs,139 +onnx/backend/test/data/node/test_max_float64/test_data_set_0/input_0.pb,sha256=aYo1cyxhD5UJSoRB6MYQY5rIIODrGNSUJtchZ3acHIo,38 +onnx/backend/test/data/node/test_max_float64/test_data_set_0/input_1.pb,sha256=ZVu4MYPV1wvct-Tk1Ju5kemkLyKfY1O7yyOxDXn3HBU,38 +onnx/backend/test/data/node/test_max_float64/test_data_set_0/output_0.pb,sha256=4oiQWJRtVlEw2oLoRfFsiwZOfTb3ABrE3kuqiC106sw,38 +onnx/backend/test/data/node/test_max_int16/model.onnx,sha256=aUD1AZjE3QcudcAaK6RHNWJW1Ge0YxEpkyJa8t1iTI0,137 +onnx/backend/test/data/node/test_max_int16/test_data_set_0/input_0.pb,sha256=FJtiLllRWIS-a9GZyj5gx3m04341cxeH0urJ1Z68vfc,20 +onnx/backend/test/data/node/test_max_int16/test_data_set_0/input_1.pb,sha256=PsSyF9rrkKuUygjrqNhdMCyZQCKu6RXs5lgH-MYow1Q,20 +onnx/backend/test/data/node/test_max_int16/test_data_set_0/output_0.pb,sha256=OGejQtNKbP-XeLonJgWAEuoOxN0YVuXhppnh8Wyd2Rc,20 +onnx/backend/test/data/node/test_max_int32/model.onnx,sha256=2fg800_ywUTwN8eNexg2EsRsLEHDaMCiJDxQWeV1_24,137 +onnx/backend/test/data/node/test_max_int32/test_data_set_0/input_0.pb,sha256=oMt8bgYATqCj4PhzKh_oNtszKREjsNTuk5YwoMxWvHk,26 +onnx/backend/test/data/node/test_max_int32/test_data_set_0/input_1.pb,sha256=dv4_C3z4H-aIoWXzNNmORQ6jQ1A2eaaR0CJVwTkSs38,26 +onnx/backend/test/data/node/test_max_int32/test_data_set_0/output_0.pb,sha256=kRy9ZYyhzDGeGwhqNnHQhpZ7-0MXIpwXqo8gSY_BF0Q,26 +onnx/backend/test/data/node/test_max_int64/model.onnx,sha256=9fkGP9GdQqsU3B7HM4sBXodpxVwO4x_MRv2pZ0jqmuo,137 +onnx/backend/test/data/node/test_max_int64/test_data_set_0/input_0.pb,sha256=KjwVyx6CIaL3FdqxbpcjXEFaaGaO_Ru1yHutAcTm64Y,38 +onnx/backend/test/data/node/test_max_int64/test_data_set_0/input_1.pb,sha256=fvapJViThkYxR2fL98GASrAg9vLpzpzDQsyrcHE0g8s,38 +onnx/backend/test/data/node/test_max_int64/test_data_set_0/output_0.pb,sha256=VycEHo6uFBG7JFssCyGxuXiyiRHCPeHzAukqVU3jMMk,38 +onnx/backend/test/data/node/test_max_int8/model.onnx,sha256=swSBQ_LLuz_5QhQTeYRZJaAPhoxSfnyQEnm56BGoY5Y,136 +onnx/backend/test/data/node/test_max_int8/test_data_set_0/input_0.pb,sha256=wcyuFxX3kcHkurfMWEZ7SIbnKsYcn2OL5ybQBrJoGOI,17 +onnx/backend/test/data/node/test_max_int8/test_data_set_0/input_1.pb,sha256=jMvP1oUjUx-YfHhzRx9y6TktJZrXCoGCA_1TqSRUVIA,17 +onnx/backend/test/data/node/test_max_int8/test_data_set_0/output_0.pb,sha256=MN1XN7VS9eX5ZE3BGlpy8uL0gGug-JVx67jOYzPoX8s,17 +onnx/backend/test/data/node/test_max_one_input/model.onnx,sha256=KE6w2WFpaTpjbAlELW2vIRYjuUSWK5usM_kZW10ORtk,111 +onnx/backend/test/data/node/test_max_one_input/test_data_set_0/input_0.pb,sha256=HJclva4-TFD98X2o011t-i_w7K5ow9hG8aJER5V_xN8,26 +onnx/backend/test/data/node/test_max_one_input/test_data_set_0/output_0.pb,sha256=2zyDGx5VM6wREENPQvmSlNd-hsuWcMKvHDS30GC4txo,26 +onnx/backend/test/data/node/test_max_two_inputs/model.onnx,sha256=b0f8X9_6y9g7RdIRdWk7INpwxcEHb0jWe9njs5PNtk8,142 
+onnx/backend/test/data/node/test_max_two_inputs/test_data_set_0/input_0.pb,sha256=HJclva4-TFD98X2o011t-i_w7K5ow9hG8aJER5V_xN8,26 +onnx/backend/test/data/node/test_max_two_inputs/test_data_set_0/input_1.pb,sha256=2V5ZDb_t112Xa7JZY7c78huQL9pnJ4-jQGxtlvuagks,26 +onnx/backend/test/data/node/test_max_two_inputs/test_data_set_0/output_0.pb,sha256=E_wXnHEmLizJ3s9zseH5tQKuAokfDXYQ7so-RUpR2SI,26 +onnx/backend/test/data/node/test_max_uint16/model.onnx,sha256=gh5elrUdqR6TAE-9GLd73ezLANIdM5iVT6A49uy-cR4,138 +onnx/backend/test/data/node/test_max_uint16/test_data_set_0/input_0.pb,sha256=X-cnRcEYXi6OPoDAGivylhC6W0b97RiSbZYosOrTjyk,20 +onnx/backend/test/data/node/test_max_uint16/test_data_set_0/input_1.pb,sha256=LNaHLa0pYg31eVGgKD2j2_K0qV9p_04E7rwNwE58WIM,20 +onnx/backend/test/data/node/test_max_uint16/test_data_set_0/output_0.pb,sha256=MwEWUB-O9UEcFkKe1qFJ31bNOlYhbzMNYHkKuZeoKz4,20 +onnx/backend/test/data/node/test_max_uint32/model.onnx,sha256=5ss8IzMCUZIYRXZUvXDC2ytzjEYjYeWrChcyCtgAsY4,138 +onnx/backend/test/data/node/test_max_uint32/test_data_set_0/input_0.pb,sha256=GUldKT7G3azwH1ALqcSPzCRwnvYQaQYORo_nwdufOvA,26 +onnx/backend/test/data/node/test_max_uint32/test_data_set_0/input_1.pb,sha256=HxXkqFhIrH8fFY0dh2U29hfzGO_kE_47ahyZ9QH3oBU,26 +onnx/backend/test/data/node/test_max_uint32/test_data_set_0/output_0.pb,sha256=wJRLHAF8aMq0b6QMJ4nioLkGeZ7QN9h8U-YMkRo9AU4,26 +onnx/backend/test/data/node/test_max_uint64/model.onnx,sha256=twFkVgUAjihfUBCujVhIWcHk58B9SLDOTeIa963Y8fI,138 +onnx/backend/test/data/node/test_max_uint64/test_data_set_0/input_0.pb,sha256=URHhAcMIJ52W8ExjVsDmTYPLDu0cRKN9sXcoOPF6nds,38 +onnx/backend/test/data/node/test_max_uint64/test_data_set_0/input_1.pb,sha256=Kc229gfiuo2eKmyts9JJNXFrnA0U1sKhgM6uyban3W4,38 +onnx/backend/test/data/node/test_max_uint64/test_data_set_0/output_0.pb,sha256=6r1bXvn_3-S04qjHSshAQ-EGpXeZ01H-4Ii3WlAivm8,38 +onnx/backend/test/data/node/test_max_uint8/model.onnx,sha256=9xvgReV9iBBgU617ADLrYDh8X5aPZey_TcTGqPhtRXM,137 +onnx/backend/test/data/node/test_max_uint8/test_data_set_0/input_0.pb,sha256=d8-V_cmlfWKcdUnF5wYjohZ3PM7DtOeQaLO_BWcOZ44,17 +onnx/backend/test/data/node/test_max_uint8/test_data_set_0/input_1.pb,sha256=u_Ig9DtP5w3-ADv0CKHHwWzzexXoKKuQXHENbJNK8Jw,17 +onnx/backend/test/data/node/test_max_uint8/test_data_set_0/output_0.pb,sha256=4ijjUbkUJeRJUYuCQBu54lcJJsZEVH34jiQCzHBzZ0E,17 +onnx/backend/test/data/node/test_maxpool_1d_default/model.onnx,sha256=Ica_ck9iK0z2PAGUc2S94cyXFqBP-AQKLCcf9PWv7Pw,137 +onnx/backend/test/data/node/test_maxpool_1d_default/test_data_set_0/input_0.pb,sha256=Za3StahBL7BbhQGm5tBIbmHL40mPVKMeoJms01DCbkU,398 +onnx/backend/test/data/node/test_maxpool_1d_default/test_data_set_0/output_0.pb,sha256=ztthcPetAeoKY7HTdeVucMBBtGPI0tjx42jpBggsano,386 +onnx/backend/test/data/node/test_maxpool_2d_ceil/model.onnx,sha256=A3VRi6VRTkaI0wi_Xw-mmMc0twQQ-ImWSSKOS2L8njo,181 +onnx/backend/test/data/node/test_maxpool_2d_ceil/test_data_set_0/input_0.pb,sha256=wWnR5nCmHbKNWucpc2qwGWA_byviZxAN4EWpmBWxWjA,79 +onnx/backend/test/data/node/test_maxpool_2d_ceil/test_data_set_0/output_0.pb,sha256=BhWwtQ7jPUcvpLyfxmX6TehlXYva35_ohmw3Zjd4Hj8,31 +onnx/backend/test/data/node/test_maxpool_2d_ceil_output_size_reduce_by_one/model.onnx,sha256=eRD_Zogg8Doigo1UbC1tMqFQtgtVFIf3adXK4_nalug,207 +onnx/backend/test/data/node/test_maxpool_2d_ceil_output_size_reduce_by_one/test_data_set_0/input_0.pb,sha256=jVVsnPYTtqJkezoas6FKgAvUJVH-Z4OI0Eocd13lMSg,31 
+onnx/backend/test/data/node/test_maxpool_2d_ceil_output_size_reduce_by_one/test_data_set_0/output_0.pb,sha256=UfXp_oNmvaQtHL218Gt4BcyJqwrkzYsfHOpJ50BoRlg,19 +onnx/backend/test/data/node/test_maxpool_2d_default/model.onnx,sha256=kjsexTcdtUU-oREpJ9HsZI-8pHXriIB3dgTD398O7J8,147 +onnx/backend/test/data/node/test_maxpool_2d_default/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_maxpool_2d_default/test_data_set_0/output_0.pb,sha256=JsM1y5aj-I4KCDSIoohJxNtHERuuFh1BDJk7FagRyUg,11548 +onnx/backend/test/data/node/test_maxpool_2d_dilations/model.onnx,sha256=pG6syTutDItXnvxvElEVlvY3EAL7guLwyeAAdn_hoRQ,188 +onnx/backend/test/data/node/test_maxpool_2d_dilations/test_data_set_0/input_0.pb,sha256=wWnR5nCmHbKNWucpc2qwGWA_byviZxAN4EWpmBWxWjA,79 +onnx/backend/test/data/node/test_maxpool_2d_dilations/test_data_set_0/output_0.pb,sha256=BhWwtQ7jPUcvpLyfxmX6TehlXYva35_ohmw3Zjd4Hj8,31 +onnx/backend/test/data/node/test_maxpool_2d_pads/model.onnx,sha256=pQ8lgZCwgmiZY8lCKh9arfJJc5v_dCg4mtO5lBaehH0,164 +onnx/backend/test/data/node/test_maxpool_2d_pads/test_data_set_0/input_0.pb,sha256=HFGpOb8k-_eSox3wcj8SOjH7WcbqDYrAaqiceZ_ZzdI,9424 +onnx/backend/test/data/node/test_maxpool_2d_pads/test_data_set_0/output_0.pb,sha256=xqjyYXTRUlDv6gfMp9O3f_nmQTqOzwl31kYWzyocyA8,10816 +onnx/backend/test/data/node/test_maxpool_2d_precomputed_pads/model.onnx,sha256=-1tvv9xOnkTGpEB10W19BxAdJeKKEswMOeuZLRmC3aU,176 +onnx/backend/test/data/node/test_maxpool_2d_precomputed_pads/test_data_set_0/input_0.pb,sha256=OTlNmprtJOUuLYiDg3HHSwDSMZ9I07l2M5FcxdQLysw,115 +onnx/backend/test/data/node/test_maxpool_2d_precomputed_pads/test_data_set_0/output_0.pb,sha256=BHyTkA2yfKPVso9bJ4yBq_7GlaexvNXyNEixQHDZfKk,115 +onnx/backend/test/data/node/test_maxpool_2d_precomputed_same_upper/model.onnx,sha256=AKun967BpHFPVrX9SxrEFpVlu1ziEgh-W1KEJiEO11Q,208 +onnx/backend/test/data/node/test_maxpool_2d_precomputed_same_upper/test_data_set_0/input_0.pb,sha256=OTlNmprtJOUuLYiDg3HHSwDSMZ9I07l2M5FcxdQLysw,115 +onnx/backend/test/data/node/test_maxpool_2d_precomputed_same_upper/test_data_set_0/output_0.pb,sha256=NwkDSgVwZffOfc-RYMiB-T4Bi9cG3Hu6ckE1QL5KUOQ,51 +onnx/backend/test/data/node/test_maxpool_2d_precomputed_strides/model.onnx,sha256=aHJcqm9hHB6lDfmJRrpSswoUjBNl8ClJoOZVZOrYUBc,178 +onnx/backend/test/data/node/test_maxpool_2d_precomputed_strides/test_data_set_0/input_0.pb,sha256=OTlNmprtJOUuLYiDg3HHSwDSMZ9I07l2M5FcxdQLysw,115 +onnx/backend/test/data/node/test_maxpool_2d_precomputed_strides/test_data_set_0/output_0.pb,sha256=uvNdXstYljuvnE014-jjf3-QdQ47ixmPovGW56jxeOM,31 +onnx/backend/test/data/node/test_maxpool_2d_same_lower/model.onnx,sha256=2lpm_yBrdHAOJDbbNUOM1ImQapnODjREJpKut7WV3gI,178 +onnx/backend/test/data/node/test_maxpool_2d_same_lower/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_maxpool_2d_same_lower/test_data_set_0/output_0.pb,sha256=uY9algpaX_ocBYHvZlcdg9ZIduWkiisEWHy-6ZF_cD8,12304 +onnx/backend/test/data/node/test_maxpool_2d_same_upper/model.onnx,sha256=YwQles_itOJm9q6XujHU7K7F9WQlnFKrnlCMQeboJTI,178 +onnx/backend/test/data/node/test_maxpool_2d_same_upper/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_maxpool_2d_same_upper/test_data_set_0/output_0.pb,sha256=3VZC2meFU8-OLnTuJM31Fm_TltaQS6RZEXBWSy-lo70,12304 +onnx/backend/test/data/node/test_maxpool_2d_strides/model.onnx,sha256=Dm7cd-yopY6-ubCzmXH7zvNww6Ppu9W0geE_whdcCQE,166 
+onnx/backend/test/data/node/test_maxpool_2d_strides/test_data_set_0/input_0.pb,sha256=Unze_of2KARaDBbo9Z3SVvlMUfaP1AUSekfdK0VNWoY,12304 +onnx/backend/test/data/node/test_maxpool_2d_strides/test_data_set_0/output_0.pb,sha256=ydD0_CK6cx87T-WWLIm5fN5Y5FVnMjR3dqVLlDAkrkY,1216 +onnx/backend/test/data/node/test_maxpool_2d_uint8/model.onnx,sha256=5thf5Kmyvy4_0RdO-65xcdOAj4_4R7QDldBEOtHTngk,165 +onnx/backend/test/data/node/test_maxpool_2d_uint8/test_data_set_0/input_0.pb,sha256=3rECJgx1OCrUoFpmAiMYxDU35mjjrL9I1QJb0vTgL_U,40 +onnx/backend/test/data/node/test_maxpool_2d_uint8/test_data_set_0/output_0.pb,sha256=gEtwsq33uBnzsUvNyx84xTRk7AyduVLPFIMUBbzryas,40 +onnx/backend/test/data/node/test_maxpool_3d_default/model.onnx,sha256=a4GfZ3wd_kHRx-jZXlxF_-kHT7AbHOyaFNgFo_Qcva4,158 +onnx/backend/test/data/node/test_maxpool_3d_default/test_data_set_0/input_0.pb,sha256=L_c9P56CmA7WzPjy1TlkWrKgYm0YkeXEYxiPZ6VV1wY,393235 +onnx/backend/test/data/node/test_maxpool_3d_default/test_data_set_0/output_0.pb,sha256=_bML65YsBeYvjV6NIF53p3bWICy0g7hiTWhjfhv5K94,357511 +onnx/backend/test/data/node/test_maxpool_3d_dilations/model.onnx,sha256=sv05cfkAjRZNbI1eAe1k6Wo4iFrdoywvFWz0xfkuV6U,202 +onnx/backend/test/data/node/test_maxpool_3d_dilations/test_data_set_0/input_0.pb,sha256=eUBZRSXolhPmPqfzFXG-IJegZfzGOQpfdDoeK2OEkrA,274 +onnx/backend/test/data/node/test_maxpool_3d_dilations/test_data_set_0/output_0.pb,sha256=SMcrnt8KLpOf6P5d7j5qhvmbm6dCgLH-q7qso_mKpGM,49 +onnx/backend/test/data/node/test_maxpool_3d_dilations_use_ref_impl/model.onnx,sha256=CfkR-R6hlIWg0rAP14JOaDB8d5jM9kpLH1SKLRZsWIs,215 +onnx/backend/test/data/node/test_maxpool_3d_dilations_use_ref_impl/test_data_set_0/input_0.pb,sha256=eUBZRSXolhPmPqfzFXG-IJegZfzGOQpfdDoeK2OEkrA,274 +onnx/backend/test/data/node/test_maxpool_3d_dilations_use_ref_impl/test_data_set_0/output_0.pb,sha256=SMcrnt8KLpOf6P5d7j5qhvmbm6dCgLH-q7qso_mKpGM,49 +onnx/backend/test/data/node/test_maxpool_3d_dilations_use_ref_impl_large/model.onnx,sha256=6_iEyoNHIi_Sc51x_1LIOE7kW8qh6OoLteZZbWX1NU8,239 +onnx/backend/test/data/node/test_maxpool_3d_dilations_use_ref_impl_large/test_data_set_0/input_0.pb,sha256=VKR6T99kXsypZCXWOcGPYobnNiPdA95uKzDtur6-Wps,131091 +onnx/backend/test/data/node/test_maxpool_3d_dilations_use_ref_impl_large/test_data_set_0/output_0.pb,sha256=J_8LdXcDVVGkKWIOaG-4eOAtR-vehlDDoVOQS9zG5n0,2934 +onnx/backend/test/data/node/test_maxpool_with_argmax_2d_precomputed_pads/model.onnx,sha256=2OHtcgNf_Mfv6eQ5NOSVXf9TydPHyLmLdV9qXOVz9VU,220 +onnx/backend/test/data/node/test_maxpool_with_argmax_2d_precomputed_pads/test_data_set_0/input_0.pb,sha256=OTlNmprtJOUuLYiDg3HHSwDSMZ9I07l2M5FcxdQLysw,115 +onnx/backend/test/data/node/test_maxpool_with_argmax_2d_precomputed_pads/test_data_set_0/output_0.pb,sha256=BHyTkA2yfKPVso9bJ4yBq_7GlaexvNXyNEixQHDZfKk,115 +onnx/backend/test/data/node/test_maxpool_with_argmax_2d_precomputed_pads/test_data_set_0/output_1.pb,sha256=Dc74UT0aIeGqzqxQHRA5XIEuMc4LMA78gwdBT3gYEUc,216 +onnx/backend/test/data/node/test_maxpool_with_argmax_2d_precomputed_strides/model.onnx,sha256=LXWZxBBdo8Ho4BOQM0S-5k6PNVjTjSLIKkbaI6sk1tA,244 +onnx/backend/test/data/node/test_maxpool_with_argmax_2d_precomputed_strides/test_data_set_0/input_0.pb,sha256=OTlNmprtJOUuLYiDg3HHSwDSMZ9I07l2M5FcxdQLysw,115 +onnx/backend/test/data/node/test_maxpool_with_argmax_2d_precomputed_strides/test_data_set_0/output_0.pb,sha256=uvNdXstYljuvnE014-jjf3-QdQ47ixmPovGW56jxeOM,31 
+onnx/backend/test/data/node/test_maxpool_with_argmax_2d_precomputed_strides/test_data_set_0/output_1.pb,sha256=ux7kfRW2OZDqGYUGfIbVRLuPOCkiwlShBxVO0syCvqA,47 +onnx/backend/test/data/node/test_maxunpool_export_with_output_shape/model.onnx,sha256=hQLf8vbf8SQgSIshrsgdJ4zwjqfJostZ4oqV-HvI9_A,262 +onnx/backend/test/data/node/test_maxunpool_export_with_output_shape/test_data_set_0/input_0.pb,sha256=v2Jafk_IShwMwa2Gew0QBJuJenG-XnnlkQktS0E-suM,32 +onnx/backend/test/data/node/test_maxunpool_export_with_output_shape/test_data_set_0/input_1.pb,sha256=_Ho_y8dI0kAzxqDHfrdxQahOFIrlabEN2V3_9FrvihY,48 +onnx/backend/test/data/node/test_maxunpool_export_with_output_shape/test_data_set_0/input_2.pb,sha256=DjgMaIl2oDbkGYsMWUYGn1-JTp6N9w_xP-ayKrmNEbk,52 +onnx/backend/test/data/node/test_maxunpool_export_with_output_shape/test_data_set_0/output_0.pb,sha256=R33r1KeeZjicEOJg6Ib1Suhskf1qCU1s68adr1fqBhA,115 +onnx/backend/test/data/node/test_maxunpool_export_without_output_shape/model.onnx,sha256=vWjlosFnheXRXuquF4d5Ua4QWOV8jZi2pSEgBUYB3S4,223 +onnx/backend/test/data/node/test_maxunpool_export_without_output_shape/test_data_set_0/input_0.pb,sha256=6AsUaxumi7ZjpBxazO9ZJIE1HyGmav5abfKCoArXX_A,32 +onnx/backend/test/data/node/test_maxunpool_export_without_output_shape/test_data_set_0/input_1.pb,sha256=_Ho_y8dI0kAzxqDHfrdxQahOFIrlabEN2V3_9FrvihY,48 +onnx/backend/test/data/node/test_maxunpool_export_without_output_shape/test_data_set_0/output_0.pb,sha256=AsBxPx9tutDDcg5YVJB63RC9URcoMzdGfCiIj4aX-oE,79 +onnx/backend/test/data/node/test_mean_example/model.onnx,sha256=L0nI3wSlMT3WoBWPnGRvOCjwszB5AOmVQ9sfNALXXqs,172 +onnx/backend/test/data/node/test_mean_example/test_data_set_0/input_0.pb,sha256=N_bAzbz_zIhMsxt_JXexH-iLaQmHJs7Ci6YiUFcbYqo,26 +onnx/backend/test/data/node/test_mean_example/test_data_set_0/input_1.pb,sha256=CT8EqMoWnZgfvIbZCTZCcy5yi-scc6HUEY-s5dz8uC4,26 +onnx/backend/test/data/node/test_mean_example/test_data_set_0/input_2.pb,sha256=_PLxlJ7xl9XIGa6nbKkonHq25eO5mYsALw5JQS7zyx8,26 +onnx/backend/test/data/node/test_mean_example/test_data_set_0/output_0.pb,sha256=KG7ceFD_UsUNZYzkjIM8TFMO7G0-cmnzZLgXel7i9vk,26 +onnx/backend/test/data/node/test_mean_one_input/model.onnx,sha256=1XwAN1QuSDBuIxW8R6ln8m0SYjj6YA86i5X_5wuWO1k,113 +onnx/backend/test/data/node/test_mean_one_input/test_data_set_0/input_0.pb,sha256=N_bAzbz_zIhMsxt_JXexH-iLaQmHJs7Ci6YiUFcbYqo,26 +onnx/backend/test/data/node/test_mean_one_input/test_data_set_0/output_0.pb,sha256=ZKhVafwrQna2mYQVSKmI2eivwdOVRllNuFJ0a3S4awM,26 +onnx/backend/test/data/node/test_mean_two_inputs/model.onnx,sha256=g00GzthG6o0H_aStXffMWED1rUw5J2kFr8pW9AqsoRk,144 +onnx/backend/test/data/node/test_mean_two_inputs/test_data_set_0/input_0.pb,sha256=N_bAzbz_zIhMsxt_JXexH-iLaQmHJs7Ci6YiUFcbYqo,26 +onnx/backend/test/data/node/test_mean_two_inputs/test_data_set_0/input_1.pb,sha256=CT8EqMoWnZgfvIbZCTZCcy5yi-scc6HUEY-s5dz8uC4,26 +onnx/backend/test/data/node/test_mean_two_inputs/test_data_set_0/output_0.pb,sha256=nqC_wFwz02oBVUimtu5pY2Nga3AVKriaEL7M6uMdyC4,26 +onnx/backend/test/data/node/test_melweightmatrix/model.onnx,sha256=b9l8xULlNZVrhxOduh7oVLNuVrdfPgNSyOL9Lh2rbGs,300 +onnx/backend/test/data/node/test_melweightmatrix/test_data_set_0/input_0.pb,sha256=NEPQIRs3FEL6UGqz8HzVTd0uBDI52jfUHtmsPDo6xh8,22 +onnx/backend/test/data/node/test_melweightmatrix/test_data_set_0/input_1.pb,sha256=I3E3ucT0v3A3C96bJeRt6VJerY7Vvo_1jYX-wFC1Xio,20 +onnx/backend/test/data/node/test_melweightmatrix/test_data_set_0/input_2.pb,sha256=riXIveX4Y4MT3-Xz3tvYXF5jNkOeUjLJDYNYzLhmQMA,21 
+onnx/backend/test/data/node/test_melweightmatrix/test_data_set_0/input_3.pb,sha256=e8IILeTioZtXTnxsVBJEM_k9Uee58y2clJvgOuz_zmk,26 +onnx/backend/test/data/node/test_melweightmatrix/test_data_set_0/input_4.pb,sha256=RXrXHnMYPUVp0r__a6otNlGy9yyeq6P4B5YXO_47Ors,26 +onnx/backend/test/data/node/test_melweightmatrix/test_data_set_0/output_0.pb,sha256=sBNBK-d3RGC0wetdS2QB7w63nuLz04fTVQBV0AumZuo,305 +onnx/backend/test/data/node/test_min_example/model.onnx,sha256=Jl71j3_A-QdMP_yyVfKSP8ZONEAFXuO_sqCAGWkoKbk,170 +onnx/backend/test/data/node/test_min_example/test_data_set_0/input_0.pb,sha256=HJclva4-TFD98X2o011t-i_w7K5ow9hG8aJER5V_xN8,26 +onnx/backend/test/data/node/test_min_example/test_data_set_0/input_1.pb,sha256=2V5ZDb_t112Xa7JZY7c78huQL9pnJ4-jQGxtlvuagks,26 +onnx/backend/test/data/node/test_min_example/test_data_set_0/input_2.pb,sha256=QpbKt69Jv0zAdktqC0_-h0v6bwKo1Uj7aPUx26Wv0zU,26 +onnx/backend/test/data/node/test_min_example/test_data_set_0/output_0.pb,sha256=NiybG918Px_YARZTv8xLcKeVMg4_DawQIjEP6N-3ax8,26 +onnx/backend/test/data/node/test_min_float16/model.onnx,sha256=7ZSX7Ps22Dx_ZYoC52rHlfMazBGQAIXt1odd1IyfKX4,139 +onnx/backend/test/data/node/test_min_float16/test_data_set_0/input_0.pb,sha256=XkpNeOliXBMqFxuPluFGL90YeFBKPQgFn1VCcdN0JTc,20 +onnx/backend/test/data/node/test_min_float16/test_data_set_0/input_1.pb,sha256=PGuS9cFumaH9L8uba6S7fx1HAjkPixoPdhEnLzKsJR0,20 +onnx/backend/test/data/node/test_min_float16/test_data_set_0/output_0.pb,sha256=RK5AMEDDe5TRXOuJSAaQBPF3jJPVwvGUmDrQl2mFR5E,20 +onnx/backend/test/data/node/test_min_float32/model.onnx,sha256=24f8iC-6SRbQWm-irEf-Pvve6NWJ45BCNk9OfzS-kxs,139 +onnx/backend/test/data/node/test_min_float32/test_data_set_0/input_0.pb,sha256=HJclva4-TFD98X2o011t-i_w7K5ow9hG8aJER5V_xN8,26 +onnx/backend/test/data/node/test_min_float32/test_data_set_0/input_1.pb,sha256=2V5ZDb_t112Xa7JZY7c78huQL9pnJ4-jQGxtlvuagks,26 +onnx/backend/test/data/node/test_min_float32/test_data_set_0/output_0.pb,sha256=l4xYUpOgO-wd16dHDwhvlQnq3U_PRCoHyWTjTgDgWfk,26 +onnx/backend/test/data/node/test_min_float64/model.onnx,sha256=RY2IJ7_4rbMt2CTikHnyafV2QZeC3xeEPhMl7Hqn4Pk,139 +onnx/backend/test/data/node/test_min_float64/test_data_set_0/input_0.pb,sha256=aYo1cyxhD5UJSoRB6MYQY5rIIODrGNSUJtchZ3acHIo,38 +onnx/backend/test/data/node/test_min_float64/test_data_set_0/input_1.pb,sha256=ZVu4MYPV1wvct-Tk1Ju5kemkLyKfY1O7yyOxDXn3HBU,38 +onnx/backend/test/data/node/test_min_float64/test_data_set_0/output_0.pb,sha256=GEKafc3aNO_TQNdJae9RihD3K85Onv7GJAH9KyvLX1c,38 +onnx/backend/test/data/node/test_min_int16/model.onnx,sha256=NRHzzE2zXmhpkuMX8U8EqYxKCmV2X5lpKpr8-lbp8eo,137 +onnx/backend/test/data/node/test_min_int16/test_data_set_0/input_0.pb,sha256=FJtiLllRWIS-a9GZyj5gx3m04341cxeH0urJ1Z68vfc,20 +onnx/backend/test/data/node/test_min_int16/test_data_set_0/input_1.pb,sha256=PsSyF9rrkKuUygjrqNhdMCyZQCKu6RXs5lgH-MYow1Q,20 +onnx/backend/test/data/node/test_min_int16/test_data_set_0/output_0.pb,sha256=hASsoRQp3LTFS1EuynL9QOnLfVLX5MzZWo3ZyB7uMfw,20 +onnx/backend/test/data/node/test_min_int32/model.onnx,sha256=z-JDavbfz2-uoCSQQejk_o8NWsQDQblAjaL2QWVy5r0,137 +onnx/backend/test/data/node/test_min_int32/test_data_set_0/input_0.pb,sha256=oMt8bgYATqCj4PhzKh_oNtszKREjsNTuk5YwoMxWvHk,26 +onnx/backend/test/data/node/test_min_int32/test_data_set_0/input_1.pb,sha256=dv4_C3z4H-aIoWXzNNmORQ6jQ1A2eaaR0CJVwTkSs38,26 +onnx/backend/test/data/node/test_min_int32/test_data_set_0/output_0.pb,sha256=QOgWmBfpG2ZTFzEZ6O0QZQWkTyvU4ka4qkFLJUqBOCg,26 
+onnx/backend/test/data/node/test_min_int64/model.onnx,sha256=XhaI6ewm7TosjMjsg3RNo_g79YobPtaFhbAVuc8Nll0,137 +onnx/backend/test/data/node/test_min_int64/test_data_set_0/input_0.pb,sha256=KjwVyx6CIaL3FdqxbpcjXEFaaGaO_Ru1yHutAcTm64Y,38 +onnx/backend/test/data/node/test_min_int64/test_data_set_0/input_1.pb,sha256=fvapJViThkYxR2fL98GASrAg9vLpzpzDQsyrcHE0g8s,38 +onnx/backend/test/data/node/test_min_int64/test_data_set_0/output_0.pb,sha256=qf27EVakskgj0Bv0aNtW4WJQrgME131LP74nHqx6MgM,38 +onnx/backend/test/data/node/test_min_int8/model.onnx,sha256=y3gjGQ2UzLg9LEU1Nr1Hkcee-zv6Q-JRgg5VcSKr-F4,136 +onnx/backend/test/data/node/test_min_int8/test_data_set_0/input_0.pb,sha256=wcyuFxX3kcHkurfMWEZ7SIbnKsYcn2OL5ybQBrJoGOI,17 +onnx/backend/test/data/node/test_min_int8/test_data_set_0/input_1.pb,sha256=jMvP1oUjUx-YfHhzRx9y6TktJZrXCoGCA_1TqSRUVIA,17 +onnx/backend/test/data/node/test_min_int8/test_data_set_0/output_0.pb,sha256=mVByJIahRHozMhvyDghT17o7iB2vYsDonWzby7nB04A,17 +onnx/backend/test/data/node/test_min_one_input/model.onnx,sha256=s8sXPKEgzYrTnfd6dJWh0rYujFVjIad7AalqDtrLqhc,111 +onnx/backend/test/data/node/test_min_one_input/test_data_set_0/input_0.pb,sha256=HJclva4-TFD98X2o011t-i_w7K5ow9hG8aJER5V_xN8,26 +onnx/backend/test/data/node/test_min_one_input/test_data_set_0/output_0.pb,sha256=2zyDGx5VM6wREENPQvmSlNd-hsuWcMKvHDS30GC4txo,26 +onnx/backend/test/data/node/test_min_two_inputs/model.onnx,sha256=LX78oggqjZfNUGhN1iipvJTOINLQp7LIp6YWjRAbC60,142 +onnx/backend/test/data/node/test_min_two_inputs/test_data_set_0/input_0.pb,sha256=HJclva4-TFD98X2o011t-i_w7K5ow9hG8aJER5V_xN8,26 +onnx/backend/test/data/node/test_min_two_inputs/test_data_set_0/input_1.pb,sha256=2V5ZDb_t112Xa7JZY7c78huQL9pnJ4-jQGxtlvuagks,26 +onnx/backend/test/data/node/test_min_two_inputs/test_data_set_0/output_0.pb,sha256=l4xYUpOgO-wd16dHDwhvlQnq3U_PRCoHyWTjTgDgWfk,26 +onnx/backend/test/data/node/test_min_uint16/model.onnx,sha256=hDS6KkmbuOOKDdODax1bcXI9CCZD8cqfctQ4ok719bw,138 +onnx/backend/test/data/node/test_min_uint16/test_data_set_0/input_0.pb,sha256=X-cnRcEYXi6OPoDAGivylhC6W0b97RiSbZYosOrTjyk,20 +onnx/backend/test/data/node/test_min_uint16/test_data_set_0/input_1.pb,sha256=LNaHLa0pYg31eVGgKD2j2_K0qV9p_04E7rwNwE58WIM,20 +onnx/backend/test/data/node/test_min_uint16/test_data_set_0/output_0.pb,sha256=AASj0clqDfoUl5BuA9hzYSB90G5ESdetIgx2CjmGQcE,20 +onnx/backend/test/data/node/test_min_uint32/model.onnx,sha256=-Tpxeb4-r6EAeAhoKZdxYBXO_errt6m1nTLPm8VQtmk,138 +onnx/backend/test/data/node/test_min_uint32/test_data_set_0/input_0.pb,sha256=GUldKT7G3azwH1ALqcSPzCRwnvYQaQYORo_nwdufOvA,26 +onnx/backend/test/data/node/test_min_uint32/test_data_set_0/input_1.pb,sha256=HxXkqFhIrH8fFY0dh2U29hfzGO_kE_47ahyZ9QH3oBU,26 +onnx/backend/test/data/node/test_min_uint32/test_data_set_0/output_0.pb,sha256=7xTVNXY6HjeciLcJV6zdkMW0unSZrIrZn7nibEMoZV4,26 +onnx/backend/test/data/node/test_min_uint64/model.onnx,sha256=spJUPN6TOkif8NhqEN4XnSWOqEhSJ6fyUYdXNsMqnlY,138 +onnx/backend/test/data/node/test_min_uint64/test_data_set_0/input_0.pb,sha256=URHhAcMIJ52W8ExjVsDmTYPLDu0cRKN9sXcoOPF6nds,38 +onnx/backend/test/data/node/test_min_uint64/test_data_set_0/input_1.pb,sha256=Kc229gfiuo2eKmyts9JJNXFrnA0U1sKhgM6uyban3W4,38 +onnx/backend/test/data/node/test_min_uint64/test_data_set_0/output_0.pb,sha256=FE2KqNIEFUjpIl7oaUVxtvE_4BB019djTS7PGK4Im8Q,38 +onnx/backend/test/data/node/test_min_uint8/model.onnx,sha256=g8P4RJbhTlZYfM-uXsCrCLTzUHsvejgu4LYRx7AZaEg,137 +onnx/backend/test/data/node/test_min_uint8/test_data_set_0/input_0.pb,sha256=d8-V_cmlfWKcdUnF5wYjohZ3PM7DtOeQaLO_BWcOZ44,17 
+onnx/backend/test/data/node/test_min_uint8/test_data_set_0/input_1.pb,sha256=u_Ig9DtP5w3-ADv0CKHHwWzzexXoKKuQXHENbJNK8Jw,17 +onnx/backend/test/data/node/test_min_uint8/test_data_set_0/output_0.pb,sha256=A08AZUxIdlvdVsUZwY2HJY7t-weOBX3f6n7noU2Uoh4,17 +onnx/backend/test/data/node/test_mish/model.onnx,sha256=tlcpsOd7XTlJMlypvI59-lgGWXBbILeDpOYOmv3-F6w,85 +onnx/backend/test/data/node/test_mish/test_data_set_0/input_0.pb,sha256=RV1aKvQqmlX789Wd5GM1iOelHOcq7mpK2AOc67HTnds,40012 +onnx/backend/test/data/node/test_mish/test_data_set_0/output_0.pb,sha256=1DBosWwhpYvnLv0wX078mh5oPZKA8ajvKIY1nEGCbxw,40012 +onnx/backend/test/data/node/test_mish_expanded/model.onnx,sha256=8t9m016_0LoMctl5Opjg7Ne38xazAVZxkI65R4Y_95E,309 +onnx/backend/test/data/node/test_mish_expanded/test_data_set_0/input_0.pb,sha256=RV1aKvQqmlX789Wd5GM1iOelHOcq7mpK2AOc67HTnds,40012 +onnx/backend/test/data/node/test_mish_expanded/test_data_set_0/output_0.pb,sha256=1DBosWwhpYvnLv0wX078mh5oPZKA8ajvKIY1nEGCbxw,40012 +onnx/backend/test/data/node/test_mod_broadcast/model.onnx,sha256=UZosTR7ILJwdb-R8mjUh0dV6Yto8ml7Hs9Y9Qsc_VoY,127 +onnx/backend/test/data/node/test_mod_broadcast/test_data_set_0/input_0.pb,sha256=3qcX3Wo1hXBUsLziRjzEWPSLBxAIkA6VkDFfdA1ODMU,133 +onnx/backend/test/data/node/test_mod_broadcast/test_data_set_0/input_1.pb,sha256=lhpCdi7Z7eKMpbL-r8ctfPGvcP5tqKpCYNwyvTL_R30,13 +onnx/backend/test/data/node/test_mod_broadcast/test_data_set_0/output_0.pb,sha256=XH7PNJTdEM2rYc6l2D7GV682V_c2XKf8T5IMzZ1_esw,133 +onnx/backend/test/data/node/test_mod_int64_fmod/model.onnx,sha256=Sh2X6OdVI6ekT4fRR4TRVhNxnSh_qOfb2D1XfiEzjLM,125 +onnx/backend/test/data/node/test_mod_int64_fmod/test_data_set_0/input_0.pb,sha256=nOKhA4S_k4oLRRJtc_sBIsbaabKM4Kp44EPsTtGDZhc,57 +onnx/backend/test/data/node/test_mod_int64_fmod/test_data_set_0/input_1.pb,sha256=flHpVhHA6WGEfMmq0TjdhZcLKpScvTBl45zZ8KCgQHU,57 +onnx/backend/test/data/node/test_mod_int64_fmod/test_data_set_0/output_0.pb,sha256=wyOpSZFx-PejmYFgPDzyIxKN4uk7JQ9whEwb-YUuZts,57 +onnx/backend/test/data/node/test_mod_mixed_sign_float16/model.onnx,sha256=gto1zqyjHBJAoruIoES9llZMP-VflH0elbJ6k0Sgcg0,133 +onnx/backend/test/data/node/test_mod_mixed_sign_float16/test_data_set_0/input_0.pb,sha256=MVkaogxKWSWLKCwAWIYIK6ka31cTnE5k7AGtluEGF98,21 +onnx/backend/test/data/node/test_mod_mixed_sign_float16/test_data_set_0/input_1.pb,sha256=v0Ok1u195wYs7KWZpMK_-kRpCU-kJEdKLi2dkVWPFWo,21 +onnx/backend/test/data/node/test_mod_mixed_sign_float16/test_data_set_0/output_0.pb,sha256=XoZwwT9Mq8IIe68Fvh9txJxngP333hN7nptDt1NxAS8,21 +onnx/backend/test/data/node/test_mod_mixed_sign_float32/model.onnx,sha256=fyl1SXwq0Ep0mmmnPCGwg585zjCQ9bT6D-6OHz4Fglc,133 +onnx/backend/test/data/node/test_mod_mixed_sign_float32/test_data_set_0/input_0.pb,sha256=QIyPABIKtekKty_eC1fS4BQXb9SAfOc95d3C-hWuOaI,33 +onnx/backend/test/data/node/test_mod_mixed_sign_float32/test_data_set_0/input_1.pb,sha256=TlfuxHXSKKO59_3ot-9_8TkSOEghBMa1JttpvwE_9HY,33 +onnx/backend/test/data/node/test_mod_mixed_sign_float32/test_data_set_0/output_0.pb,sha256=fThMIPzW6l8EFZIP-cjpRxJ_5SA5LPFmB2iBDgr3aqs,33 +onnx/backend/test/data/node/test_mod_mixed_sign_float64/model.onnx,sha256=ukr-kauED-l7VCdewRDZxkC7WrkJPUbtnobG1U6jrHs,133 +onnx/backend/test/data/node/test_mod_mixed_sign_float64/test_data_set_0/input_0.pb,sha256=jsA_RIDwWdbwaf1pa-Eg2TOKQVSW-InXqUAGRGoIgtI,57 +onnx/backend/test/data/node/test_mod_mixed_sign_float64/test_data_set_0/input_1.pb,sha256=ZkwMjo9QFC5z4WWHXnidDvETp_Vg7HxYH8IW4QH56-M,57 
+onnx/backend/test/data/node/test_mod_mixed_sign_float64/test_data_set_0/output_0.pb,sha256=LAlPMxtTz5ncBIakv4KmstJhSW3IVxQuauGbzWqk2GM,57 +onnx/backend/test/data/node/test_mod_mixed_sign_int16/model.onnx,sha256=MGsxSYQNbI0OQCZFNRF3E0pv_T0KeHIVaEMF6U4bOUQ,118 +onnx/backend/test/data/node/test_mod_mixed_sign_int16/test_data_set_0/input_0.pb,sha256=UEIoI4Qd-kq43WszIVwerQdDc-kCz1SdUQlJdigMHec,21 +onnx/backend/test/data/node/test_mod_mixed_sign_int16/test_data_set_0/input_1.pb,sha256=AjsGow0MFJSwTd9jddqniDZA4pk1CiWh-vgZt-Egfn4,21 +onnx/backend/test/data/node/test_mod_mixed_sign_int16/test_data_set_0/output_0.pb,sha256=jv4-0meAPqn4mynSfgFV1NNOq3UKKsgeM1to4cDxn_4,21 +onnx/backend/test/data/node/test_mod_mixed_sign_int32/model.onnx,sha256=PgK31M04H60Q5fcHtpoaQKj-5wiRoDBkb3eVLRr04xM,118 +onnx/backend/test/data/node/test_mod_mixed_sign_int32/test_data_set_0/input_0.pb,sha256=giEjjHtUYRpU_BEU217_pCgE_OEA3y9ZleuAgN-typU,33 +onnx/backend/test/data/node/test_mod_mixed_sign_int32/test_data_set_0/input_1.pb,sha256=22wYw0JM7tNQ401SFLtgGcQ-ne90tvpWWTReoytj7Bo,33 +onnx/backend/test/data/node/test_mod_mixed_sign_int32/test_data_set_0/output_0.pb,sha256=_a4Yjx7uvnZPG-TkIb341yGjdeliR--d3Fu5Trgxpho,33 +onnx/backend/test/data/node/test_mod_mixed_sign_int64/model.onnx,sha256=nTw6OR_BMSC0Y_JjnrTnr5BXwUOMJ9tA9B9R1VljY2Y,118 +onnx/backend/test/data/node/test_mod_mixed_sign_int64/test_data_set_0/input_0.pb,sha256=nOKhA4S_k4oLRRJtc_sBIsbaabKM4Kp44EPsTtGDZhc,57 +onnx/backend/test/data/node/test_mod_mixed_sign_int64/test_data_set_0/input_1.pb,sha256=flHpVhHA6WGEfMmq0TjdhZcLKpScvTBl45zZ8KCgQHU,57 +onnx/backend/test/data/node/test_mod_mixed_sign_int64/test_data_set_0/output_0.pb,sha256=jnR4bDtZ93G6nMkj2CAaTaWk0UxaTaA5AonWF1KjDKY,57 +onnx/backend/test/data/node/test_mod_mixed_sign_int8/model.onnx,sha256=CC0VBcKcNW2g4-82riqZy2byWRYIeaE4Xs_SIfdtBSQ,117 +onnx/backend/test/data/node/test_mod_mixed_sign_int8/test_data_set_0/input_0.pb,sha256=e4SdPaUz7v7WV4GqACVkvDDWv3VVRGyh6zY736g7Avg,15 +onnx/backend/test/data/node/test_mod_mixed_sign_int8/test_data_set_0/input_1.pb,sha256=QWRmvzgxJcXKMTv1vhMCT_n4SZgHtga6NLK0sm17tH4,15 +onnx/backend/test/data/node/test_mod_mixed_sign_int8/test_data_set_0/output_0.pb,sha256=3dXZkOSPNjX9Qe0FFIjUZ9sYNqJBrpeBuDRK9z1gwYo,15 +onnx/backend/test/data/node/test_mod_uint16/model.onnx,sha256=alKlDqYYFEBTlxoP_KUnA0ujiOdmDAPCUpPlN4hBM2E,108 +onnx/backend/test/data/node/test_mod_uint16/test_data_set_0/input_0.pb,sha256=tCsmCRMblNFjQrp58dHq-WdUlovdrwQtDw18AEeCa6M,15 +onnx/backend/test/data/node/test_mod_uint16/test_data_set_0/input_1.pb,sha256=2LAcmWXmGxfnOGwxfznTysJ_tjFk5VK8q_Rr7uAaWL4,15 +onnx/backend/test/data/node/test_mod_uint16/test_data_set_0/output_0.pb,sha256=MKxPEomEIEe4a3G8KHviE6UTv5yi-EfSi_IFkqHypkI,15 +onnx/backend/test/data/node/test_mod_uint32/model.onnx,sha256=MukL3eMwWSctzgKqJFqFkmQdbAVYtzx94H5sVfPcs9M,108 +onnx/backend/test/data/node/test_mod_uint32/test_data_set_0/input_0.pb,sha256=RMNhUhYgBTdqxllQCSq5-7jdbrhadyk-Y67u6inphCc,21 +onnx/backend/test/data/node/test_mod_uint32/test_data_set_0/input_1.pb,sha256=CTViFOfkAPQBZePJI7qcH3xxx-o60oHKZ_Ke-zJVidc,21 +onnx/backend/test/data/node/test_mod_uint32/test_data_set_0/output_0.pb,sha256=6ZpnoFc5CJba_bTAYvYaZLlfHn2oYWn5z16ykLEjYCQ,21 +onnx/backend/test/data/node/test_mod_uint64/model.onnx,sha256=RkACaHHi03wcrloxfUcHz1XxirhsdUkmpyGYR0D09Qw,108 +onnx/backend/test/data/node/test_mod_uint64/test_data_set_0/input_0.pb,sha256=ntDx28CL_G8v9dPpttRHXJ8h6FsFGA0GNQipBt-ouwk,33 
+onnx/backend/test/data/node/test_mod_uint64/test_data_set_0/input_1.pb,sha256=4NWx2-tUm-ZC_B5WjZqZzVwE9MdhMRRt38ohiDV6WjI,33 +onnx/backend/test/data/node/test_mod_uint64/test_data_set_0/output_0.pb,sha256=1V8VuyVx0lcS2Df0tt-JrILo_d95XKV4ksyO2OV7ZM8,33 +onnx/backend/test/data/node/test_mod_uint8/model.onnx,sha256=5qmvMgMmlxRslaSdJSYy4VqUhnPZAHEC4h6l6vB8Pjk,107 +onnx/backend/test/data/node/test_mod_uint8/test_data_set_0/input_0.pb,sha256=hDHI1DHkbLpXzkvBdwdVqa0ReVZMhcpGiIlhep4W9YU,12 +onnx/backend/test/data/node/test_mod_uint8/test_data_set_0/input_1.pb,sha256=slzwobrbN-njwaehacvZjMt6LUfm49Bl_dHEOCuB5ZI,12 +onnx/backend/test/data/node/test_mod_uint8/test_data_set_0/output_0.pb,sha256=QiipYfVIuwPVRtPVyMv5lZN-q2JuQys4ocuT4c3dCZ0,12 +onnx/backend/test/data/node/test_momentum/model.onnx,sha256=-5oApOVFhBs0bz9FDBG8ldvVBgrAhU-avnjryfajF6I,333 +onnx/backend/test/data/node/test_momentum/test_data_set_0/input_0.pb,sha256=CxMIfRjPZEo5uxAaZt0Go3_5W6wA6Dv71f9iG1FoqiI,11 +onnx/backend/test/data/node/test_momentum/test_data_set_0/input_1.pb,sha256=xEpBjafU-CYCl1V5uhzdqP2rUfBgaj7glskxTqDGi-k,15 +onnx/backend/test/data/node/test_momentum/test_data_set_0/input_2.pb,sha256=q6Wsydvsci5ZJNiqDej1Kq17QzS3xOPWEruuLIgoorQ,17 +onnx/backend/test/data/node/test_momentum/test_data_set_0/input_3.pb,sha256=NQkKr3rClZp31gGf1Oyh_ECrr2_5AMVYn_fNdDazT8U,17 +onnx/backend/test/data/node/test_momentum/test_data_set_0/input_4.pb,sha256=6upGRVU5-O-5zKXoDu6cRsc2F036k6N02w_CNPHBGqA,17 +onnx/backend/test/data/node/test_momentum/test_data_set_0/output_0.pb,sha256=1cxy6HxWJTlaTZSXXWCoR56T9BWkUpKG7saLuYwgXyc,21 +onnx/backend/test/data/node/test_momentum/test_data_set_0/output_1.pb,sha256=Ci12duo6EDJXkQplTj0GZH3T5xTG-BDRHfUN7eIaR0M,21 +onnx/backend/test/data/node/test_momentum_multiple/model.onnx,sha256=5h0MIOkr-byri8c8cozhvIeSHzz5mfmTnFt7uOclUvk,478 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/input_0.pb,sha256=CxMIfRjPZEo5uxAaZt0Go3_5W6wA6Dv71f9iG1FoqiI,11 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/input_1.pb,sha256=xEpBjafU-CYCl1V5uhzdqP2rUfBgaj7glskxTqDGi-k,15 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/input_2.pb,sha256=zSCQaDJuSfjV89lOKl_e8FAvaYdDg6UrH2n4eykC9bI,14 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/input_3.pb,sha256=A-40FgJNucPNFb_WksbMy0Z-E_EIV7RF0DP90kqbbC4,18 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/input_4.pb,sha256=CxeZGajewrwPha-MBoCdOi3nXJYzF9DgbGhOIwIqx3A,14 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/input_5.pb,sha256=stCGvEbBW8itgJbkRX1pnKZir080kytSIuTmZAkh41Y,18 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/input_6.pb,sha256=1-flVrzPmKolD0GSh-ld1wYZxSEIUx0TUpi_0ywedqA,14 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/input_7.pb,sha256=elUimbVps7sroUEVgzsr7m7OElV1YF29zVkzYA0YS-s,18 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/output_0.pb,sha256=4sSVWd8Z9X1CPKfShQixSU6j1XAuHKwTbI1HDWDK05c,18 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/output_1.pb,sha256=VIx5aXfPMBO-XRc7JlxZvj1LRnOeikNDOmR-uqHntzs,22 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/output_2.pb,sha256=QpvgrRN5sklswqZAtEjKXsjjnjKNTvmEd8eJz4v8VQc,18 +onnx/backend/test/data/node/test_momentum_multiple/test_data_set_0/output_3.pb,sha256=y6bsm396HxD86Z8Gx4SrXpBeBLu1t4YviYpxT9Zp3Bg,22 +onnx/backend/test/data/node/test_mul/model.onnx,sha256=ZuS6sRZvUEzjJgVPrTahZSjon8MxxwR7LmtefNnB24M,125 
+onnx/backend/test/data/node/test_mul/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_mul/test_data_set_0/input_1.pb,sha256=qlmuuQNl3eLtxabKfNE6cbZHjYcj-DzNBr1M4rz7GSo,254 +onnx/backend/test/data/node/test_mul/test_data_set_0/output_0.pb,sha256=GvmV3GKzE3tMQaUbtsliIf7YaUyHtSitkjeF-u4Qptw,254 +onnx/backend/test/data/node/test_mul_bcast/model.onnx,sha256=FmrNZbakJHVt0sk6JDgHNkK08Xla6MrTE3U_qv0LntY,123 +onnx/backend/test/data/node/test_mul_bcast/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_mul_bcast/test_data_set_0/input_1.pb,sha256=jYn9FLUzE9ETJcxFW48XqSehRlyePPjUoI-gtNyozBs,29 +onnx/backend/test/data/node/test_mul_bcast/test_data_set_0/output_0.pb,sha256=xVi0Xf_iQ-yNWQKzRUGY86LcvRmnwTV2W9U7iPvAXyc,254 +onnx/backend/test/data/node/test_mul_example/model.onnx,sha256=BbkaFyBUf7-QS7ZH0F3kuoLpjnMhMWXxMFYZ8nTaUU4,109 +onnx/backend/test/data/node/test_mul_example/test_data_set_0/input_0.pb,sha256=07NXD9M8ncP5OIBozL5eyrRmifhNqQKh-yXwmBAQ5_Y,21 +onnx/backend/test/data/node/test_mul_example/test_data_set_0/input_1.pb,sha256=CRO596M4hKvMtegi6TFu8eNUs-prhD7vkphHojkzBj4,21 +onnx/backend/test/data/node/test_mul_example/test_data_set_0/output_0.pb,sha256=tVxYlgVT1AqY2a1VMrqOyiLZAn9wRBDCh7Fdffib0uA,21 +onnx/backend/test/data/node/test_mul_uint8/model.onnx,sha256=D6fmj7leZM-7NYYKX7rKyNcmRryGvdyndMqlNnBrIhM,131 +onnx/backend/test/data/node/test_mul_uint8/test_data_set_0/input_0.pb,sha256=lDVrTjZITNWkVtasiKIF2ak3tbuhepNRhxrgudf0KTs,73 +onnx/backend/test/data/node/test_mul_uint8/test_data_set_0/input_1.pb,sha256=jx9eQGpCxoKBWgfPS0KfbG40pw6mS3ewQqDH5ksDHls,73 +onnx/backend/test/data/node/test_mul_uint8/test_data_set_0/output_0.pb,sha256=y8L5yIKZz6xnyDczj7pFRWo1-I21Bvn-HMVwXU_h2QM,73 +onnx/backend/test/data/node/test_mvn/model.onnx,sha256=epvYrtzDMGUCQaTNZrsSaaJv6sj4Z93jWBiIdkZuTtI,127 +onnx/backend/test/data/node/test_mvn/test_data_set_0/input_0.pb,sha256=JgQmvqlXl4zwhOoqE3bqOj-0uikQHKd2j0Gt1aMhG3Y,123 +onnx/backend/test/data/node/test_mvn/test_data_set_0/output_0.pb,sha256=E83O-chiuVDaVh2K4dg2eJ0Xg40h2shNgDjy6prT-mQ,123 +onnx/backend/test/data/node/test_mvn_expanded/model.onnx,sha256=EZuZmf6XgC8sidl6q5Uf8Gvse-2rTXU8dX8ZeGGRpiQ,1714 +onnx/backend/test/data/node/test_mvn_expanded/test_data_set_0/input_0.pb,sha256=JgQmvqlXl4zwhOoqE3bqOj-0uikQHKd2j0Gt1aMhG3Y,123 +onnx/backend/test/data/node/test_mvn_expanded/test_data_set_0/output_0.pb,sha256=E83O-chiuVDaVh2K4dg2eJ0Xg40h2shNgDjy6prT-mQ,123 +onnx/backend/test/data/node/test_mvn_expanded_ver18/model.onnx,sha256=GUDLfw0FyhbNGi6k2wyaZMfyk1GuZYJD5fwIZjhUDrY,1901 +onnx/backend/test/data/node/test_mvn_expanded_ver18/test_data_set_0/input_0.pb,sha256=JgQmvqlXl4zwhOoqE3bqOj-0uikQHKd2j0Gt1aMhG3Y,123 +onnx/backend/test/data/node/test_mvn_expanded_ver18/test_data_set_0/output_0.pb,sha256=E83O-chiuVDaVh2K4dg2eJ0Xg40h2shNgDjy6prT-mQ,123 +onnx/backend/test/data/node/test_neg/model.onnx,sha256=RG6cCaIqfUQ34xlba_AMQOwsIBnpTHgEE4_2K3bWURs,97 +onnx/backend/test/data/node/test_neg/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_neg/test_data_set_0/output_0.pb,sha256=Ts5Na8UG6-tCk6szO_WjtcUAMMIPYVYm3I09yt3IuAs,254 +onnx/backend/test/data/node/test_neg_example/model.onnx,sha256=1xGuNAOWCh8G6u2QwbF8-n6qtNrao0AvKJ70iKtMX0s,89 +onnx/backend/test/data/node/test_neg_example/test_data_set_0/input_0.pb,sha256=hIDv23oySvd30UHcdMkH6btwYls47z-2HWS1hMB2wdg,17 
+onnx/backend/test/data/node/test_neg_example/test_data_set_0/output_0.pb,sha256=4EW4AVpPlfhx4g9wwktECbBn42vmT0mhx-eztsZhymE,17 +onnx/backend/test/data/node/test_nesterov_momentum/model.onnx,sha256=0PJxU7A_pXiJUQYJqlEwQhgUz-7Y4D-K11Y6wItheKI,342 +onnx/backend/test/data/node/test_nesterov_momentum/test_data_set_0/input_0.pb,sha256=CxMIfRjPZEo5uxAaZt0Go3_5W6wA6Dv71f9iG1FoqiI,11 +onnx/backend/test/data/node/test_nesterov_momentum/test_data_set_0/input_1.pb,sha256=xEpBjafU-CYCl1V5uhzdqP2rUfBgaj7glskxTqDGi-k,15 +onnx/backend/test/data/node/test_nesterov_momentum/test_data_set_0/input_2.pb,sha256=q6Wsydvsci5ZJNiqDej1Kq17QzS3xOPWEruuLIgoorQ,17 +onnx/backend/test/data/node/test_nesterov_momentum/test_data_set_0/input_3.pb,sha256=NQkKr3rClZp31gGf1Oyh_ECrr2_5AMVYn_fNdDazT8U,17 +onnx/backend/test/data/node/test_nesterov_momentum/test_data_set_0/input_4.pb,sha256=6upGRVU5-O-5zKXoDu6cRsc2F036k6N02w_CNPHBGqA,17 +onnx/backend/test/data/node/test_nesterov_momentum/test_data_set_0/output_0.pb,sha256=78D4vxBLlg3_1l7fVOJsUgiud1GhCpYye_F23yOxGfQ,21 +onnx/backend/test/data/node/test_nesterov_momentum/test_data_set_0/output_1.pb,sha256=u3TIKVQhJ12mswy7LJAMitidrIrQwfmHZc95LItmh38,21 +onnx/backend/test/data/node/test_nllloss_NC/model.onnx,sha256=k9wuaUyG4vqUGdOasux7BAiNAWXsS-HRQ8Rw5SXZ7TI,181 +onnx/backend/test/data/node/test_nllloss_NC/test_data_set_0/input_0.pb,sha256=old7l3-e0xBCJ0lTuYT6UfvHSpi0zYXGQc1FsncLpIg,75 +onnx/backend/test/data/node/test_nllloss_NC/test_data_set_0/input_1.pb,sha256=4CX0ut_YErV-LAF0q3XBesrAedTFBSOEEoLncdkhSCo,38 +onnx/backend/test/data/node/test_nllloss_NC/test_data_set_0/output_0.pb,sha256=FzPp1aOpi6ruhclBnofi7t6rPu1JBh__2nUlEKB8jRg,24 +onnx/backend/test/data/node/test_nllloss_NC_expanded/model.onnx,sha256=oVgjxEzxZOPMU9_sFPezyzxZFhp6u1bsSCH92XGha2k,1487 +onnx/backend/test/data/node/test_nllloss_NC_expanded/test_data_set_0/input_0.pb,sha256=old7l3-e0xBCJ0lTuYT6UfvHSpi0zYXGQc1FsncLpIg,75 +onnx/backend/test/data/node/test_nllloss_NC_expanded/test_data_set_0/input_1.pb,sha256=4CX0ut_YErV-LAF0q3XBesrAedTFBSOEEoLncdkhSCo,38 +onnx/backend/test/data/node/test_nllloss_NC_expanded/test_data_set_0/output_0.pb,sha256=FzPp1aOpi6ruhclBnofi7t6rPu1JBh__2nUlEKB8jRg,24 +onnx/backend/test/data/node/test_nllloss_NCd1/model.onnx,sha256=BBF-XfZL4KxoptstuVpwnLrKg9aS3GejJTE7ygSD7oQ,187 +onnx/backend/test/data/node/test_nllloss_NCd1/test_data_set_0/input_0.pb,sha256=Y7fKVXkDFZWQMad8JysQ8iP5c3_MKMk6RACAU-4HTfI,137 +onnx/backend/test/data/node/test_nllloss_NCd1/test_data_set_0/input_1.pb,sha256=TxDsHYS2QuW7G4Ta0rbBDixXELufO1KTJTJKsj5PDh8,64 +onnx/backend/test/data/node/test_nllloss_NCd1/test_data_set_0/output_0.pb,sha256=Wy80hsUPrWr21jyHz00NtQPKFLFbm9jkiMOVGrkzYOM,14 +onnx/backend/test/data/node/test_nllloss_NCd1_expanded/model.onnx,sha256=pT81kC4Y2cO4C4ymweviL6GJVxKLoPzcVzDIqQX6IBw,1702 +onnx/backend/test/data/node/test_nllloss_NCd1_expanded/test_data_set_0/input_0.pb,sha256=Y7fKVXkDFZWQMad8JysQ8iP5c3_MKMk6RACAU-4HTfI,137 +onnx/backend/test/data/node/test_nllloss_NCd1_expanded/test_data_set_0/input_1.pb,sha256=TxDsHYS2QuW7G4Ta0rbBDixXELufO1KTJTJKsj5PDh8,64 +onnx/backend/test/data/node/test_nllloss_NCd1_expanded/test_data_set_0/output_0.pb,sha256=Wy80hsUPrWr21jyHz00NtQPKFLFbm9jkiMOVGrkzYOM,14 +onnx/backend/test/data/node/test_nllloss_NCd1_ii/model.onnx,sha256=JtRnrOytz9VvC0WEpD9C42I4WZXvdvSau99Jx4SShq8,211 +onnx/backend/test/data/node/test_nllloss_NCd1_ii/test_data_set_0/input_0.pb,sha256=Y7fKVXkDFZWQMad8JysQ8iP5c3_MKMk6RACAU-4HTfI,137 
+onnx/backend/test/data/node/test_nllloss_NCd1_ii/test_data_set_0/input_1.pb,sha256=Ur_oz-w-7AwpWPcD7rOje_KVu5kEOh8mhVS8xvWexXA,64 +onnx/backend/test/data/node/test_nllloss_NCd1_ii/test_data_set_0/output_0.pb,sha256=mvZzSWz0AU3Qvv7pm7RZtTx91YugRDgGdDqN4IjfhRE,14 +onnx/backend/test/data/node/test_nllloss_NCd1_ii_expanded/model.onnx,sha256=azLZlbzUqCzx2WGHZ2NhdpBVKzlToR8eZcttjg1s-F0,4799 +onnx/backend/test/data/node/test_nllloss_NCd1_ii_expanded/test_data_set_0/input_0.pb,sha256=Y7fKVXkDFZWQMad8JysQ8iP5c3_MKMk6RACAU-4HTfI,137 +onnx/backend/test/data/node/test_nllloss_NCd1_ii_expanded/test_data_set_0/input_1.pb,sha256=Ur_oz-w-7AwpWPcD7rOje_KVu5kEOh8mhVS8xvWexXA,64 +onnx/backend/test/data/node/test_nllloss_NCd1_ii_expanded/test_data_set_0/output_0.pb,sha256=mvZzSWz0AU3Qvv7pm7RZtTx91YugRDgGdDqN4IjfhRE,14 +onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii/model.onnx,sha256=_SKEOgXURs-d05J0uofV5nVrvyK28SlJP1AI4R99iFg,271 +onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii/test_data_set_0/input_0.pb,sha256=EV-DL1xuw3Ssoc13Ru3vNTP97mPKvc4FanaWAaurb30,378 +onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii/test_data_set_0/input_1.pb,sha256=jCgGbI3X6Kh1LiIy-D1u1T83sddtcdLsgSGGSfnxv34,161 +onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii/test_data_set_0/input_2.pb,sha256=2pdGNpbiJ_sObKTCzmn5NvpYSnz7nmZf6k9pSWyaiyk,34 +onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii/test_data_set_0/output_0.pb,sha256=xVxmU46seyPHXo5LO5_H1OTgdz4paTCSI5_vP1GO3rY,14 +onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii_expanded/model.onnx,sha256=eFqDQXZs2jCOK35-_sZbmxoUaCCCf4VH5tzFjpDUNQY,6057 +onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii_expanded/test_data_set_0/input_0.pb,sha256=EV-DL1xuw3Ssoc13Ru3vNTP97mPKvc4FanaWAaurb30,378 +onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii_expanded/test_data_set_0/input_1.pb,sha256=jCgGbI3X6Kh1LiIy-D1u1T83sddtcdLsgSGGSfnxv34,161 +onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii_expanded/test_data_set_0/input_2.pb,sha256=2pdGNpbiJ_sObKTCzmn5NvpYSnz7nmZf6k9pSWyaiyk,34 +onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii_expanded/test_data_set_0/output_0.pb,sha256=xVxmU46seyPHXo5LO5_H1OTgdz4paTCSI5_vP1GO3rY,14 +onnx/backend/test/data/node/test_nllloss_NCd1_weight/model.onnx,sha256=chgwfvx1L7zLJdzVZGzzFStfkrodMNRwUpVqEcGnODc,224 +onnx/backend/test/data/node/test_nllloss_NCd1_weight/test_data_set_0/input_0.pb,sha256=Y7fKVXkDFZWQMad8JysQ8iP5c3_MKMk6RACAU-4HTfI,137 +onnx/backend/test/data/node/test_nllloss_NCd1_weight/test_data_set_0/input_1.pb,sha256=TxDsHYS2QuW7G4Ta0rbBDixXELufO1KTJTJKsj5PDh8,64 +onnx/backend/test/data/node/test_nllloss_NCd1_weight/test_data_set_0/input_2.pb,sha256=7DDbvKGGDdq188bMvD_LwMlrhgJmPpCuh6-TbVZJglk,34 +onnx/backend/test/data/node/test_nllloss_NCd1_weight/test_data_set_0/output_0.pb,sha256=VMqJ-Ig4hx6FdES7_UNH17eu-1_iOnoQZdk5N9VOgKk,14 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_expanded/model.onnx,sha256=tSLBSPP-84mbZROlNAFPRntvG541XvV4ocn-j5a15uM,2696 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_expanded/test_data_set_0/input_0.pb,sha256=Y7fKVXkDFZWQMad8JysQ8iP5c3_MKMk6RACAU-4HTfI,137 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_expanded/test_data_set_0/input_1.pb,sha256=TxDsHYS2QuW7G4Ta0rbBDixXELufO1KTJTJKsj5PDh8,64 
+onnx/backend/test/data/node/test_nllloss_NCd1_weight_expanded/test_data_set_0/input_2.pb,sha256=7DDbvKGGDdq188bMvD_LwMlrhgJmPpCuh6-TbVZJglk,34 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_expanded/test_data_set_0/output_0.pb,sha256=VMqJ-Ig4hx6FdES7_UNH17eu-1_iOnoQZdk5N9VOgKk,14 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii/model.onnx,sha256=0i2ax35T04SSaY_zKFcEXWutZuP9fw2-C8_xmMVZ83A,248 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii/test_data_set_0/input_0.pb,sha256=Y7fKVXkDFZWQMad8JysQ8iP5c3_MKMk6RACAU-4HTfI,137 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii/test_data_set_0/input_1.pb,sha256=Ur_oz-w-7AwpWPcD7rOje_KVu5kEOh8mhVS8xvWexXA,64 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii/test_data_set_0/input_2.pb,sha256=7DDbvKGGDdq188bMvD_LwMlrhgJmPpCuh6-TbVZJglk,34 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii/test_data_set_0/output_0.pb,sha256=8_UkUhjk3o7HpAwm_8qujCN_005PKGId3cvF1VYaCTQ,14 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii_expanded/model.onnx,sha256=1bYMav1I-YQjA-mvde3-GlufskTDZ0kRSQ03YBk3eJk,5288 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii_expanded/test_data_set_0/input_0.pb,sha256=Y7fKVXkDFZWQMad8JysQ8iP5c3_MKMk6RACAU-4HTfI,137 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii_expanded/test_data_set_0/input_1.pb,sha256=Ur_oz-w-7AwpWPcD7rOje_KVu5kEOh8mhVS8xvWexXA,64 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii_expanded/test_data_set_0/input_2.pb,sha256=7DDbvKGGDdq188bMvD_LwMlrhgJmPpCuh6-TbVZJglk,34 +onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii_expanded/test_data_set_0/output_0.pb,sha256=8_UkUhjk3o7HpAwm_8qujCN_005PKGId3cvF1VYaCTQ,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2/model.onnx,sha256=1sYM_xrJw6DuaaweSKCZyKivC3advgOBU1nxoXAANsI,209 +onnx/backend/test/data/node/test_nllloss_NCd1d2/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2/test_data_set_0/output_0.pb,sha256=i2iYMamczG-7HCsEJauZO4tMxXxOii0ei0TGmGaTimM,449 +onnx/backend/test/data/node/test_nllloss_NCd1d2_expanded/model.onnx,sha256=NfgOlvNOYP_ooJgeBEZwp_vNS2vg43WI6uxYNxRrrqo,1579 +onnx/backend/test/data/node/test_nllloss_NCd1d2_expanded/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_expanded/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_expanded/test_data_set_0/output_0.pb,sha256=i2iYMamczG-7HCsEJauZO4tMxXxOii0ei0TGmGaTimM,449 +onnx/backend/test/data/node/test_nllloss_NCd1d2_no_weight_reduction_mean_ii/model.onnx,sha256=ued4Hm19VkYsOFUK_EZwYy-EdhgraMSRnvUN1XtsD6o,246 +onnx/backend/test/data/node/test_nllloss_NCd1d2_no_weight_reduction_mean_ii/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_no_weight_reduction_mean_ii/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_no_weight_reduction_mean_ii/test_data_set_0/output_0.pb,sha256=hSGnu1TNFV4r9LcNJTndtBoNX_1GblNESfOk-W2HrLQ,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2_no_weight_reduction_mean_ii_expanded/model.onnx,sha256=2sUj1G2s_CorrTH1oQB9wsQ9aeJNZi7uxVyAt5YfneE,6244 
+onnx/backend/test/data/node/test_nllloss_NCd1d2_no_weight_reduction_mean_ii_expanded/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_no_weight_reduction_mean_ii_expanded/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_no_weight_reduction_mean_ii_expanded/test_data_set_0/output_0.pb,sha256=hSGnu1TNFV4r9LcNJTndtBoNX_1GblNESfOk-W2HrLQ,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_mean/model.onnx,sha256=WhjQvu4HNTpdmXKYeCF665Njs0wi3zI2-fbJxL-2KZQ,212 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_mean/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_mean/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_mean/test_data_set_0/output_0.pb,sha256=qSq6WlgazeDCIm1mTyLxUKhzYL5DcoLCqbHb-BXtdAM,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_mean_expanded/model.onnx,sha256=X3zd_AXuTxG5E9V3pPosriKKcqq7XaB6ieoUql_3Vcg,2033 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_mean_expanded/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_mean_expanded/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_mean_expanded/test_data_set_0/output_0.pb,sha256=qSq6WlgazeDCIm1mTyLxUKhzYL5DcoLCqbHb-BXtdAM,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_sum/model.onnx,sha256=Q0glDHkUuBJv3FfCzBtylxEHBoafQ1jx1F7I-VVeIxA,210 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_sum/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_sum/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_sum/test_data_set_0/output_0.pb,sha256=x1AM6O_evyx9hb1eICxW4zTGdItDWz9rf-65meUkzCM,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_sum_expanded/model.onnx,sha256=fGcWHO2ULrCKPBiCitaaniXeXHapQK_57w5gF6IkkoE,2013 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_sum_expanded/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_sum_expanded/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_sum_expanded/test_data_set_0/output_0.pb,sha256=x1AM6O_evyx9hb1eICxW4zTGdItDWz9rf-65meUkzCM,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight/model.onnx,sha256=NJpYNKc_dKkK2LxipaT2IdVTz2WKKWZhQ1DhcWnTLUY,251 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight/test_data_set_0/input_2.pb,sha256=LXGbi9KzO_VgKlKgxnnFfqBGcYMX6pKqnQCsK7rip2w,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight/test_data_set_0/output_0.pb,sha256=JSqPDGiOsobP6xup-vxOXxOSOJTnn6J3C0Z4VCboW54,449 
+onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_expanded/model.onnx,sha256=GEgEGirkeM__zc31nmNNNY1y-6AsaYmLwDMBy0m0rfk,2211 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_expanded/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_expanded/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_expanded/test_data_set_0/input_2.pb,sha256=LXGbi9KzO_VgKlKgxnnFfqBGcYMX6pKqnQCsK7rip2w,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_expanded/test_data_set_0/output_0.pb,sha256=JSqPDGiOsobP6xup-vxOXxOSOJTnn6J3C0Z4VCboW54,449 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean/model.onnx,sha256=r5ln075G2yIhyJQlk3PFmAN-YO3z708FbKu35HbHXOY,254 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean/test_data_set_0/input_2.pb,sha256=LXGbi9KzO_VgKlKgxnnFfqBGcYMX6pKqnQCsK7rip2w,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean/test_data_set_0/output_0.pb,sha256=Y2jLiaibOR3bdOa9HsLCodhKnjvPmkYX4oUXLJlpg7Q,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean_expanded/model.onnx,sha256=zFnBrMN4Un0uquM2wpsyJNYG98h3aWW4DAmGy9KhDzY,3324 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean_expanded/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean_expanded/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean_expanded/test_data_set_0/input_2.pb,sha256=LXGbi9KzO_VgKlKgxnnFfqBGcYMX6pKqnQCsK7rip2w,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean_expanded/test_data_set_0/output_0.pb,sha256=Y2jLiaibOR3bdOa9HsLCodhKnjvPmkYX4oUXLJlpg7Q,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum/model.onnx,sha256=9Hj1soS3_ZubUoKxZv8nr4V11M-cPuoPefooNsQ_DTU,252 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum/test_data_set_0/input_2.pb,sha256=LXGbi9KzO_VgKlKgxnnFfqBGcYMX6pKqnQCsK7rip2w,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum/test_data_set_0/output_0.pb,sha256=ttk6qgLV4PrBOJwBA0MNagYSn2xbblmFgdT5G4Y6DSo,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_expanded/model.onnx,sha256=VxEpAEiKzOrH7ZaTnJI1KehBctZIaBt4k6A8uj0yuuA,2730 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_expanded/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 
+onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_expanded/test_data_set_0/input_1.pb,sha256=Ze7CWNk2I2nyDVtw4aEqkvKfchvRITCMQlg1iMQTqPI,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_expanded/test_data_set_0/input_2.pb,sha256=LXGbi9KzO_VgKlKgxnnFfqBGcYMX6pKqnQCsK7rip2w,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_expanded/test_data_set_0/output_0.pb,sha256=ttk6qgLV4PrBOJwBA0MNagYSn2xbblmFgdT5G4Y6DSo,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii/model.onnx,sha256=7eEbvlOB8Q16vcAlutJ66OeTSkpOYPNMUfDMRn-QoNY,276 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii/test_data_set_0/input_1.pb,sha256=t37hGGOZpY76Z076TWnyUXHJ9RhjExXsTMzsFDongOE,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii/test_data_set_0/input_2.pb,sha256=LXGbi9KzO_VgKlKgxnnFfqBGcYMX6pKqnQCsK7rip2w,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii/test_data_set_0/output_0.pb,sha256=G1tuk8Dm8zZmN2j-1zlY1fE7Dhiqg62AePzHDxzw9Vk,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii_expanded/model.onnx,sha256=uxWTspWFWjnXaTYEYFdTNQ3hHg8U4dpUhabJuDjT3rc,5854 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii_expanded/test_data_set_0/input_0.pb,sha256=-hNsEYq4QMPiZ8vIgKKttbbEJpcJ58FgVS-Bzh0VRY0,2180 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii_expanded/test_data_set_0/input_1.pb,sha256=t37hGGOZpY76Z076TWnyUXHJ9RhjExXsTMzsFDongOE,883 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii_expanded/test_data_set_0/input_2.pb,sha256=LXGbi9KzO_VgKlKgxnnFfqBGcYMX6pKqnQCsK7rip2w,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii_expanded/test_data_set_0/output_0.pb,sha256=G1tuk8Dm8zZmN2j-1zlY1fE7Dhiqg62AePzHDxzw9Vk,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_none_no_weight_negative_ii/model.onnx,sha256=aGnx-ACxAObbXX1-FTiwAEV-PlY0J0IB4pAL0FdZlCs,280 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_none_no_weight_negative_ii/test_data_set_0/input_0.pb,sha256=z23LiYXVLuyr6YhOh59dhvGqVqx8aoJP7eOh4qE51QM,10822 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_none_no_weight_negative_ii/test_data_set_0/input_1.pb,sha256=cKLm3K_3JxpfYsG4am6zDDJskYvJQbVzjuENYNBUkpw,4341 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_none_no_weight_negative_ii/test_data_set_0/output_0.pb,sha256=S_h5iqr0rOuKrKp6BbeSJ3fJ-2kKAljKD94lVeL46Wc,2179 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_none_no_weight_negative_ii_expanded/model.onnx,sha256=jYR1ngpcN28t90XDR4WuP1Sl7BSAMrUv4YNoJ8_dWek,5510 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_none_no_weight_negative_ii_expanded/test_data_set_0/input_0.pb,sha256=z23LiYXVLuyr6YhOh59dhvGqVqx8aoJP7eOh4qE51QM,10822 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_none_no_weight_negative_ii_expanded/test_data_set_0/input_1.pb,sha256=cKLm3K_3JxpfYsG4am6zDDJskYvJQbVzjuENYNBUkpw,4341 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_none_no_weight_negative_ii_expanded/test_data_set_0/output_0.pb,sha256=S_h5iqr0rOuKrKp6BbeSJ3fJ-2kKAljKD94lVeL46Wc,2179 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii/model.onnx,sha256=Pakdemg0frIEpUOxbNd0yTFCShGCoUPTzt3i_7y9HHk,252 
+onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii/test_data_set_0/input_0.pb,sha256=old7l3-e0xBCJ0lTuYT6UfvHSpi0zYXGQc1FsncLpIg,75 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii/test_data_set_0/input_1.pb,sha256=RXI-xC4uNH5h_9bRIqFZxVMyWsu6CV5b0pbQecU4Bgs,38 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii/test_data_set_0/input_2.pb,sha256=Jm5nsejoZUXJLQiQBzP-LlZ0LpN6o5WgziqWVVxouXg,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii/test_data_set_0/output_0.pb,sha256=nZ7GyTo8pI1otqZg8N04qEXGK8KMRoAVsVCuNp6KZKs,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii_expanded/model.onnx,sha256=VKl8WWNMlOMsMw2OraymtpuZkDulF6radt80AhXTw-M,5445 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii_expanded/test_data_set_0/input_0.pb,sha256=old7l3-e0xBCJ0lTuYT6UfvHSpi0zYXGQc1FsncLpIg,75 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii_expanded/test_data_set_0/input_1.pb,sha256=RXI-xC4uNH5h_9bRIqFZxVMyWsu6CV5b0pbQecU4Bgs,38 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii_expanded/test_data_set_0/input_2.pb,sha256=Jm5nsejoZUXJLQiQBzP-LlZ0LpN6o5WgziqWVVxouXg,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii_expanded/test_data_set_0/output_0.pb,sha256=nZ7GyTo8pI1otqZg8N04qEXGK8KMRoAVsVCuNp6KZKs,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight/model.onnx,sha256=B4i75xU-VtMflOT8rnkwNW1-ovGUD8O6aYX2uLM2b7A,269 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight/test_data_set_0/input_0.pb,sha256=P-KwI3TkPu9I4UH_2t-0e1iCCIJe7S3kGZNXzc_N2GU,129627 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight/test_data_set_0/input_1.pb,sha256=2oaVEIGngV21FQn4FgoG7qbtTYELaSA8w1z1HdyQSIc,51866 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight/test_data_set_0/input_2.pb,sha256=ELwcD97zClJjm-0q0aWGNMO2lFpemh7tQC1XoEkDCCo,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight/test_data_set_0/output_0.pb,sha256=PSbLzivPmtfKKKqqHg4CfuFqAdZRxWJCEJN4QLYvFbQ,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight_expanded/model.onnx,sha256=F1Q3B2R3NCpGRozhHe5BBtOLdP4KdxzQepZ_oPYKjC0,3092 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight_expanded/test_data_set_0/input_0.pb,sha256=P-KwI3TkPu9I4UH_2t-0e1iCCIJe7S3kGZNXzc_N2GU,129627 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight_expanded/test_data_set_0/input_1.pb,sha256=2oaVEIGngV21FQn4FgoG7qbtTYELaSA8w1z1HdyQSIc,51866 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight_expanded/test_data_set_0/input_2.pb,sha256=ELwcD97zClJjm-0q0aWGNMO2lFpemh7tQC1XoEkDCCo,34 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight_expanded/test_data_set_0/output_0.pb,sha256=PSbLzivPmtfKKKqqHg4CfuFqAdZRxWJCEJN4QLYvFbQ,14 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_none_no_weight/model.onnx,sha256=gB0J5vDo6X0PqjjVfUAGFTX51lzgAmCoII_lfIi7qSA,266 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_none_no_weight/test_data_set_0/input_0.pb,sha256=P-KwI3TkPu9I4UH_2t-0e1iCCIJe7S3kGZNXzc_N2GU,129627 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_none_no_weight/test_data_set_0/input_1.pb,sha256=2oaVEIGngV21FQn4FgoG7qbtTYELaSA8w1z1HdyQSIc,51866 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_none_no_weight/test_data_set_0/output_0.pb,sha256=y3Zghr4cDyuqm8pbBCrybjPAkSLe_eJ7sR0zCMdCpyM,25944 
+onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_none_no_weight_expanded/model.onnx,sha256=KLP6Qfeaq9OXA3sRbOAVPKN0yUIqF67Akz5NAbuZWsQ,1974 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_none_no_weight_expanded/test_data_set_0/input_0.pb,sha256=P-KwI3TkPu9I4UH_2t-0e1iCCIJe7S3kGZNXzc_N2GU,129627 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_none_no_weight_expanded/test_data_set_0/input_1.pb,sha256=2oaVEIGngV21FQn4FgoG7qbtTYELaSA8w1z1HdyQSIc,51866 +onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_none_no_weight_expanded/test_data_set_0/output_0.pb,sha256=y3Zghr4cDyuqm8pbBCrybjPAkSLe_eJ7sR0zCMdCpyM,25944 +onnx/backend/test/data/node/test_nonmaxsuppression_center_point_box_format/model.onnx,sha256=b5A4lcIFtquT_uLGjS0-Yh_HxEPuxsj-sfdxboSMfhE,410 +onnx/backend/test/data/node/test_nonmaxsuppression_center_point_box_format/test_data_set_0/input_0.pb,sha256=aQPSV_4Z6zQcZDVGmekHfnVc5BHUPfOMSfgoCOUCiX8,113 +onnx/backend/test/data/node/test_nonmaxsuppression_center_point_box_format/test_data_set_0/input_1.pb,sha256=F8Eh7WUjXtKojmIpSeXsbYD2qPsRcLXA3WtXC4vwkNw,42 +onnx/backend/test/data/node/test_nonmaxsuppression_center_point_box_format/test_data_set_0/input_2.pb,sha256=wHAZTQlX9ckNeo0B8bi7mO9CufjQz6hVH5xSg04PXwY,42 +onnx/backend/test/data/node/test_nonmaxsuppression_center_point_box_format/test_data_set_0/input_3.pb,sha256=B6qRKb1_O_DZYhoAr4ofYWN_KOBsRzWR3G4qNUeFsi4,25 +onnx/backend/test/data/node/test_nonmaxsuppression_center_point_box_format/test_data_set_0/input_4.pb,sha256=udYwTTSV1VPje8RjiHGMu34WDMTRVKTSlblxW4Q8w6Q,27 +onnx/backend/test/data/node/test_nonmaxsuppression_center_point_box_format/test_data_set_0/output_0.pb,sha256=VkYmKplg2aAE_7p6EVba0_QxYiTcUtLIAud4BVNFCVc,98 +onnx/backend/test/data/node/test_nonmaxsuppression_flipped_coordinates/model.onnx,sha256=ChIjMwo0mYkmvrx5mruIWwH6vffhTspoARXoUnTObqM,380 +onnx/backend/test/data/node/test_nonmaxsuppression_flipped_coordinates/test_data_set_0/input_0.pb,sha256=vYpz_7-JGMz48Rd76XAJEU5ZJP9EbbVdbojxgbVQG3Q,113 +onnx/backend/test/data/node/test_nonmaxsuppression_flipped_coordinates/test_data_set_0/input_1.pb,sha256=F8Eh7WUjXtKojmIpSeXsbYD2qPsRcLXA3WtXC4vwkNw,42 +onnx/backend/test/data/node/test_nonmaxsuppression_flipped_coordinates/test_data_set_0/input_2.pb,sha256=wHAZTQlX9ckNeo0B8bi7mO9CufjQz6hVH5xSg04PXwY,42 +onnx/backend/test/data/node/test_nonmaxsuppression_flipped_coordinates/test_data_set_0/input_3.pb,sha256=B6qRKb1_O_DZYhoAr4ofYWN_KOBsRzWR3G4qNUeFsi4,25 +onnx/backend/test/data/node/test_nonmaxsuppression_flipped_coordinates/test_data_set_0/input_4.pb,sha256=udYwTTSV1VPje8RjiHGMu34WDMTRVKTSlblxW4Q8w6Q,27 +onnx/backend/test/data/node/test_nonmaxsuppression_flipped_coordinates/test_data_set_0/output_0.pb,sha256=VkYmKplg2aAE_7p6EVba0_QxYiTcUtLIAud4BVNFCVc,98 +onnx/backend/test/data/node/test_nonmaxsuppression_identical_boxes/model.onnx,sha256=pRqjxjc6bOsMwBV7BFZH11P88ZSmYkxEvjfviO3c0ss,376 +onnx/backend/test/data/node/test_nonmaxsuppression_identical_boxes/test_data_set_0/input_0.pb,sha256=CmhJacibwTo53qN1x9N-gVK36bwbgCdCDi8wFskbaAk,178 +onnx/backend/test/data/node/test_nonmaxsuppression_identical_boxes/test_data_set_0/input_1.pb,sha256=HvcpOmM7nTwjD3KeqZ-FkgFW0xAT9ltbwiA0Db3UmjI,58 +onnx/backend/test/data/node/test_nonmaxsuppression_identical_boxes/test_data_set_0/input_2.pb,sha256=wHAZTQlX9ckNeo0B8bi7mO9CufjQz6hVH5xSg04PXwY,42 +onnx/backend/test/data/node/test_nonmaxsuppression_identical_boxes/test_data_set_0/input_3.pb,sha256=B6qRKb1_O_DZYhoAr4ofYWN_KOBsRzWR3G4qNUeFsi4,25 
+onnx/backend/test/data/node/test_nonmaxsuppression_identical_boxes/test_data_set_0/input_4.pb,sha256=udYwTTSV1VPje8RjiHGMu34WDMTRVKTSlblxW4Q8w6Q,27 +onnx/backend/test/data/node/test_nonmaxsuppression_identical_boxes/test_data_set_0/output_0.pb,sha256=9YHRVPgbYrY2cQnDRar4KnLaeonSVWSdU89tbBN7vso,50 +onnx/backend/test/data/node/test_nonmaxsuppression_limit_output_size/model.onnx,sha256=ruPA6FNX-f2C8YKjU9f-fGPtWs8E14vAbin4aStZ7do,378 +onnx/backend/test/data/node/test_nonmaxsuppression_limit_output_size/test_data_set_0/input_0.pb,sha256=QcocbmgMfN9gR0hQmlBCkMN1MOqyzDtOyL0wDo3HH-s,113 +onnx/backend/test/data/node/test_nonmaxsuppression_limit_output_size/test_data_set_0/input_1.pb,sha256=F8Eh7WUjXtKojmIpSeXsbYD2qPsRcLXA3WtXC4vwkNw,42 +onnx/backend/test/data/node/test_nonmaxsuppression_limit_output_size/test_data_set_0/input_2.pb,sha256=2P30Cd3fFGFSGa7DF8F9KCKvZGxbK5uNUhmU-k69NzE,42 +onnx/backend/test/data/node/test_nonmaxsuppression_limit_output_size/test_data_set_0/input_3.pb,sha256=B6qRKb1_O_DZYhoAr4ofYWN_KOBsRzWR3G4qNUeFsi4,25 +onnx/backend/test/data/node/test_nonmaxsuppression_limit_output_size/test_data_set_0/input_4.pb,sha256=udYwTTSV1VPje8RjiHGMu34WDMTRVKTSlblxW4Q8w6Q,27 +onnx/backend/test/data/node/test_nonmaxsuppression_limit_output_size/test_data_set_0/output_0.pb,sha256=6I0l4kmldzPrF5onX4EWG6aE2qEYmjhx2g0NhS6sr0A,74 +onnx/backend/test/data/node/test_nonmaxsuppression_single_box/model.onnx,sha256=G9qIpCAAKZfUE4vQaN5VqiYlR75wqD2z8cu3k21RkFo,371 +onnx/backend/test/data/node/test_nonmaxsuppression_single_box/test_data_set_0/input_0.pb,sha256=b1Nue7dL-O5ckB2PtWiRUbFhbD4I6NhHOAK0KaFIvko,33 +onnx/backend/test/data/node/test_nonmaxsuppression_single_box/test_data_set_0/input_1.pb,sha256=taq4F_r3_H_0TP2WdMSsl7q-8GBinBXy0GgNfbcJT6Q,22 +onnx/backend/test/data/node/test_nonmaxsuppression_single_box/test_data_set_0/input_2.pb,sha256=wHAZTQlX9ckNeo0B8bi7mO9CufjQz6hVH5xSg04PXwY,42 +onnx/backend/test/data/node/test_nonmaxsuppression_single_box/test_data_set_0/input_3.pb,sha256=B6qRKb1_O_DZYhoAr4ofYWN_KOBsRzWR3G4qNUeFsi4,25 +onnx/backend/test/data/node/test_nonmaxsuppression_single_box/test_data_set_0/input_4.pb,sha256=udYwTTSV1VPje8RjiHGMu34WDMTRVKTSlblxW4Q8w6Q,27 +onnx/backend/test/data/node/test_nonmaxsuppression_single_box/test_data_set_0/output_0.pb,sha256=9YHRVPgbYrY2cQnDRar4KnLaeonSVWSdU89tbBN7vso,50 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU/model.onnx,sha256=c4__cnmz8GcfTRmVUcaeSWMKiEgWmYkmN_SOYqSMzAE,376 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU/test_data_set_0/input_0.pb,sha256=QcocbmgMfN9gR0hQmlBCkMN1MOqyzDtOyL0wDo3HH-s,113 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU/test_data_set_0/input_1.pb,sha256=F8Eh7WUjXtKojmIpSeXsbYD2qPsRcLXA3WtXC4vwkNw,42 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU/test_data_set_0/input_2.pb,sha256=wHAZTQlX9ckNeo0B8bi7mO9CufjQz6hVH5xSg04PXwY,42 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU/test_data_set_0/input_3.pb,sha256=B6qRKb1_O_DZYhoAr4ofYWN_KOBsRzWR3G4qNUeFsi4,25 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU/test_data_set_0/input_4.pb,sha256=udYwTTSV1VPje8RjiHGMu34WDMTRVKTSlblxW4Q8w6Q,27 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU/test_data_set_0/output_0.pb,sha256=VkYmKplg2aAE_7p6EVba0_QxYiTcUtLIAud4BVNFCVc,98 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU_and_scores/model.onnx,sha256=I4ZPZIQDwB_6DWvtoH7Jw7StjGTDmldjhkloHZnt_0Y,387 
+onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU_and_scores/test_data_set_0/input_0.pb,sha256=QcocbmgMfN9gR0hQmlBCkMN1MOqyzDtOyL0wDo3HH-s,113 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU_and_scores/test_data_set_0/input_1.pb,sha256=F8Eh7WUjXtKojmIpSeXsbYD2qPsRcLXA3WtXC4vwkNw,42 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU_and_scores/test_data_set_0/input_2.pb,sha256=wHAZTQlX9ckNeo0B8bi7mO9CufjQz6hVH5xSg04PXwY,42 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU_and_scores/test_data_set_0/input_3.pb,sha256=B6qRKb1_O_DZYhoAr4ofYWN_KOBsRzWR3G4qNUeFsi4,25 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU_and_scores/test_data_set_0/input_4.pb,sha256=S_ZLHHF52hnQCGoYFUDGzoXON6ejUj1_45vFLB8oRzE,27 +onnx/backend/test/data/node/test_nonmaxsuppression_suppress_by_IOU_and_scores/test_data_set_0/output_0.pb,sha256=6I0l4kmldzPrF5onX4EWG6aE2qEYmjhx2g0NhS6sr0A,74 +onnx/backend/test/data/node/test_nonmaxsuppression_two_batches/model.onnx,sha256=lFJHZUPBRCbUKh6iUMzOzuRTuujALHctWk8hNDzPz_o,372 +onnx/backend/test/data/node/test_nonmaxsuppression_two_batches/test_data_set_0/input_0.pb,sha256=3HF94qHNxjTM8WjQWgm2G3v0yWzH-uZMwkjNvixauog,210 +onnx/backend/test/data/node/test_nonmaxsuppression_two_batches/test_data_set_0/input_1.pb,sha256=H91iA7UY1O-Q6Bk7qNRv1g6Udhd4PQetKtp4KemkCCg,66 +onnx/backend/test/data/node/test_nonmaxsuppression_two_batches/test_data_set_0/input_2.pb,sha256=2P30Cd3fFGFSGa7DF8F9KCKvZGxbK5uNUhmU-k69NzE,42 +onnx/backend/test/data/node/test_nonmaxsuppression_two_batches/test_data_set_0/input_3.pb,sha256=B6qRKb1_O_DZYhoAr4ofYWN_KOBsRzWR3G4qNUeFsi4,25 +onnx/backend/test/data/node/test_nonmaxsuppression_two_batches/test_data_set_0/input_4.pb,sha256=udYwTTSV1VPje8RjiHGMu34WDMTRVKTSlblxW4Q8w6Q,27 +onnx/backend/test/data/node/test_nonmaxsuppression_two_batches/test_data_set_0/output_0.pb,sha256=8rjpUP7x4wsIEYYTzCWpUhTfdii3TzZu3wVcBJFwdKY,122 +onnx/backend/test/data/node/test_nonmaxsuppression_two_classes/model.onnx,sha256=7EWVtK6PCCwzWUvXtGlJY6RbqU-BQNJnaxGppt3y9AA,372 +onnx/backend/test/data/node/test_nonmaxsuppression_two_classes/test_data_set_0/input_0.pb,sha256=QcocbmgMfN9gR0hQmlBCkMN1MOqyzDtOyL0wDo3HH-s,113 +onnx/backend/test/data/node/test_nonmaxsuppression_two_classes/test_data_set_0/input_1.pb,sha256=txKTRX67rNUdqyDQKt-fzGR7NdzmdWON2-47x_otWBM,66 +onnx/backend/test/data/node/test_nonmaxsuppression_two_classes/test_data_set_0/input_2.pb,sha256=2P30Cd3fFGFSGa7DF8F9KCKvZGxbK5uNUhmU-k69NzE,42 +onnx/backend/test/data/node/test_nonmaxsuppression_two_classes/test_data_set_0/input_3.pb,sha256=B6qRKb1_O_DZYhoAr4ofYWN_KOBsRzWR3G4qNUeFsi4,25 +onnx/backend/test/data/node/test_nonmaxsuppression_two_classes/test_data_set_0/input_4.pb,sha256=udYwTTSV1VPje8RjiHGMu34WDMTRVKTSlblxW4Q8w6Q,27 +onnx/backend/test/data/node/test_nonmaxsuppression_two_classes/test_data_set_0/output_0.pb,sha256=0Xd_ql2omtRQrJWmryeLNA9btZZH6gbiR8HXP0EDDRA,122 +onnx/backend/test/data/node/test_nonzero_example/model.onnx,sha256=NX71cOVVAavMtXWDgcW3432l9-4BxiHnarj-LJq5fkA,131 +onnx/backend/test/data/node/test_nonzero_example/test_data_set_0/input_0.pb,sha256=ghkps-yp5CRRR8fRbC7fiqWG19bL00TcORlce2Dr8og,23 +onnx/backend/test/data/node/test_nonzero_example/test_data_set_0/output_0.pb,sha256=qoM-JaFSLmJRjNoDvsqY2dR0f6q64MPDvlJtjVR6grg,64 +onnx/backend/test/data/node/test_not_2d/model.onnx,sha256=ZmCCGeOUtVvFmdxC1oaRu2PkbBvnhHQ3Gai3PyrsoKo,96 
+onnx/backend/test/data/node/test_not_2d/test_data_set_0/input_0.pb,sha256=LjNp_gf5HfToURj-fOV-TL_10b7cTqoOj8RfPTNPYLQ,23 +onnx/backend/test/data/node/test_not_2d/test_data_set_0/output_0.pb,sha256=JEuquI41JHa3vt29a199NzMyPBW9WvAOkSSADkpkpiU,25 +onnx/backend/test/data/node/test_not_3d/model.onnx,sha256=--Dj2nqyBlRaVxLrtH5BZ-anek--8I4MBmVx6xD6QAo,104 +onnx/backend/test/data/node/test_not_3d/test_data_set_0/input_0.pb,sha256=TWHpMMyqdtQO1GlrLNWX631jO6986ZyG-R4tcfX_k9M,73 +onnx/backend/test/data/node/test_not_3d/test_data_set_0/output_0.pb,sha256=-oL9DNgUrH4pCs7vwUYL5uq1Las7wPU-F4ZJSVNExrY,75 +onnx/backend/test/data/node/test_not_4d/model.onnx,sha256=_GXaFJpy_nuIiMqj2PyWLjPP7FfBx6eaxaujJNpnGmM,112 +onnx/backend/test/data/node/test_not_4d/test_data_set_0/input_0.pb,sha256=iZjPxHloGhOKJNSczLBvGtYZN5Sen7TAs77gLl8-eJA,376 +onnx/backend/test/data/node/test_not_4d/test_data_set_0/output_0.pb,sha256=8PTjOyrS2vPHwwmPn8DXr1_MbVZjMzStaPuHfzenUTI,378 +onnx/backend/test/data/node/test_onehot_negative_indices/model.onnx,sha256=fjlyNhjpN3EFThxhnvvRr7R_IrOBnSZ1v47WdW2X3c0,188 +onnx/backend/test/data/node/test_onehot_negative_indices/test_data_set_0/input_0.pb,sha256=F1-tByU4VnMyQFdReEjjeChbk03-iM4AH8zk9QNM1QQ,39 +onnx/backend/test/data/node/test_onehot_negative_indices/test_data_set_0/input_1.pb,sha256=SniLudwpo_9h-MMMuoLXqe1Ulz9W9uSM28WqXjYKBB4,15 +onnx/backend/test/data/node/test_onehot_negative_indices/test_data_set_0/input_2.pb,sha256=N_K-xBM0o6Y0qLA8Ml_8Q69xVU7fEZBZDFDFtXN5gYo,22 +onnx/backend/test/data/node/test_onehot_negative_indices/test_data_set_0/output_0.pb,sha256=5HdC9_F-HsrRLzGONydCDeMC2U1U7qJAOglTi8kUsWE,131 +onnx/backend/test/data/node/test_onehot_with_axis/model.onnx,sha256=7Tn4gjOHr5C80bna6RBkHEvFdSKx5ucs4Mw29ScjCKI,189 +onnx/backend/test/data/node/test_onehot_with_axis/test_data_set_0/input_0.pb,sha256=qPjxHytD_tom84Yx4twm4SstHXXF6aMSkw1A0I1NzHs,33 +onnx/backend/test/data/node/test_onehot_with_axis/test_data_set_0/input_1.pb,sha256=SniLudwpo_9h-MMMuoLXqe1Ulz9W9uSM28WqXjYKBB4,15 +onnx/backend/test/data/node/test_onehot_with_axis/test_data_set_0/input_2.pb,sha256=N_K-xBM0o6Y0qLA8Ml_8Q69xVU7fEZBZDFDFtXN5gYo,22 +onnx/backend/test/data/node/test_onehot_with_axis/test_data_set_0/output_0.pb,sha256=bu2Cxu0sCAAENGCwmOdA4QRBlbkqJBMAxunzVkOUbIs,174 +onnx/backend/test/data/node/test_onehot_with_negative_axis/model.onnx,sha256=f8RCLQLUojpYObQA3yVz6ZEGA350Ak1TdU48FyZSrd0,207 +onnx/backend/test/data/node/test_onehot_with_negative_axis/test_data_set_0/input_0.pb,sha256=qPjxHytD_tom84Yx4twm4SstHXXF6aMSkw1A0I1NzHs,33 +onnx/backend/test/data/node/test_onehot_with_negative_axis/test_data_set_0/input_1.pb,sha256=SniLudwpo_9h-MMMuoLXqe1Ulz9W9uSM28WqXjYKBB4,15 +onnx/backend/test/data/node/test_onehot_with_negative_axis/test_data_set_0/input_2.pb,sha256=N_K-xBM0o6Y0qLA8Ml_8Q69xVU7fEZBZDFDFtXN5gYo,22 +onnx/backend/test/data/node/test_onehot_with_negative_axis/test_data_set_0/output_0.pb,sha256=bu2Cxu0sCAAENGCwmOdA4QRBlbkqJBMAxunzVkOUbIs,174 +onnx/backend/test/data/node/test_onehot_without_axis/model.onnx,sha256=U0MILFaATSHjw9zDIMB4sRTqS_roPxfMWY6J84vV80o,171 +onnx/backend/test/data/node/test_onehot_without_axis/test_data_set_0/input_0.pb,sha256=ta7hFCjVAb1WUU9sgiAov4CmflwgDNr3ak4YwDYUcxs,39 +onnx/backend/test/data/node/test_onehot_without_axis/test_data_set_0/input_1.pb,sha256=4Ww8eBVFbMYzzKNzz3vLhRTT7_zLFojaP213mTFSVvU,15 +onnx/backend/test/data/node/test_onehot_without_axis/test_data_set_0/input_2.pb,sha256=N73BjNA73IsU2QnQRSOqdYwn0AZlbwJonbgfKwmjLnU,22 
+onnx/backend/test/data/node/test_onehot_without_axis/test_data_set_0/output_0.pb,sha256=O8e-sVvv0ez-QO3vJ6e5qiVmVID9fwqfA1bgs3uHKyA,156 +onnx/backend/test/data/node/test_optional_get_element_optional_sequence/model.onnx,sha256=cKBCcL2XcDo1Y-HE02G7OEjeuqc_7V9tbNCP_4s00T8,174 +onnx/backend/test/data/node/test_optional_get_element_optional_sequence/test_data_set_0/input_0.pb,sha256=vpd8FwYbVh9wyMZ9xsWKs5AxADA98-eucJKMK8T-tU0,46 +onnx/backend/test/data/node/test_optional_get_element_optional_sequence/test_data_set_0/output_0.pb,sha256=PTgucfFItJi7etGLkHgc63wqJeAs5Fm_jZEdQGFHH_k,34 +onnx/backend/test/data/node/test_optional_get_element_optional_tensor/model.onnx,sha256=Kc4P3uTKFr35Ezt5zmkEDu8KFHxj3ipovRpfB7E8OR4,170 +onnx/backend/test/data/node/test_optional_get_element_optional_tensor/test_data_set_0/input_0.pb,sha256=A-AbnXPvbH-QEhigtVShIBP4Fxgyw6jbvckX2cSUAlM,42 +onnx/backend/test/data/node/test_optional_get_element_optional_tensor/test_data_set_0/output_0.pb,sha256=vSCHzMFSvur8t7ADW1BDQbERwPltOiCd-JuPcQ6-XcQ,30 +onnx/backend/test/data/node/test_optional_get_element_sequence/model.onnx,sha256=r2cz0m_Z6oUSQ3MpWM2huW1MfIB9tNN3qfUunUpkYm8,161 +onnx/backend/test/data/node/test_optional_get_element_sequence/test_data_set_0/input_0.pb,sha256=mw1O20tvt7b6NaD0nRT-YqH7HmJARdY9dK4rnHwQ8_E,42 +onnx/backend/test/data/node/test_optional_get_element_sequence/test_data_set_0/output_0.pb,sha256=PTgucfFItJi7etGLkHgc63wqJeAs5Fm_jZEdQGFHH_k,34 +onnx/backend/test/data/node/test_optional_get_element_tensor/model.onnx,sha256=smEO352xSf3_ms-9u2kyG5NKsJV_SBji5n1wP666qAk,157 +onnx/backend/test/data/node/test_optional_get_element_tensor/test_data_set_0/input_0.pb,sha256=uUsn8ZJZNXJMkBUOaeS4pl1SPQtNedl6dZNV_MGJE90,38 +onnx/backend/test/data/node/test_optional_get_element_tensor/test_data_set_0/output_0.pb,sha256=vSCHzMFSvur8t7ADW1BDQbERwPltOiCd-JuPcQ6-XcQ,30 +onnx/backend/test/data/node/test_optional_has_element_empty_no_input_name_optional_input/model.onnx,sha256=gCOrPv1m7gcQjjBim0-S8q0RFs1erBi31VkyHuT3Yl8,136 +onnx/backend/test/data/node/test_optional_has_element_empty_no_input_name_optional_input/test_data_set_0/output_0.pb,sha256=KPmYRG7Wt1DNktECBF5SN8KtQ0EKfo1s1gXtOI-Tw50,13 +onnx/backend/test/data/node/test_optional_has_element_empty_no_input_name_tensor_input/model.onnx,sha256=wUjCLI9sTzqKZV8slgHm50AdjHBPULNsO-UywuHZZl0,134 +onnx/backend/test/data/node/test_optional_has_element_empty_no_input_name_tensor_input/test_data_set_0/output_0.pb,sha256=KPmYRG7Wt1DNktECBF5SN8KtQ0EKfo1s1gXtOI-Tw50,13 +onnx/backend/test/data/node/test_optional_has_element_empty_no_input_optional_input/model.onnx,sha256=fcThUtkxSofwsh9bpW_1o_zI2QwbYX7e1RN4mx2GzqM,129 +onnx/backend/test/data/node/test_optional_has_element_empty_no_input_optional_input/test_data_set_0/output_0.pb,sha256=KPmYRG7Wt1DNktECBF5SN8KtQ0EKfo1s1gXtOI-Tw50,13 +onnx/backend/test/data/node/test_optional_has_element_empty_no_input_tensor_input/model.onnx,sha256=B8L4KqEAU6oYpjOcXpByxFtlNK7AznjTdImTR7vAbQo,127 +onnx/backend/test/data/node/test_optional_has_element_empty_no_input_tensor_input/test_data_set_0/output_0.pb,sha256=KPmYRG7Wt1DNktECBF5SN8KtQ0EKfo1s1gXtOI-Tw50,13 +onnx/backend/test/data/node/test_optional_has_element_empty_optional_input/model.onnx,sha256=lhf09ImdqqoFt2WX5kuR9OH1ihyT85AOhSjgZX_dQzo,167 +onnx/backend/test/data/node/test_optional_has_element_empty_optional_input/test_data_set_0/input_0.pb,sha256=wv94C5xbz5m4GuMdyxohS6Lqilkq4fNCzFygsTgGIPs,18 
+onnx/backend/test/data/node/test_optional_has_element_empty_optional_input/test_data_set_0/output_0.pb,sha256=KPmYRG7Wt1DNktECBF5SN8KtQ0EKfo1s1gXtOI-Tw50,13 +onnx/backend/test/data/node/test_optional_has_element_optional_input/model.onnx,sha256=oZsbFysfy2fzzLDLaRl5pp6yAKt_JBLQwYKKbCb_O5M,165 +onnx/backend/test/data/node/test_optional_has_element_optional_input/test_data_set_0/input_0.pb,sha256=A-AbnXPvbH-QEhigtVShIBP4Fxgyw6jbvckX2cSUAlM,42 +onnx/backend/test/data/node/test_optional_has_element_optional_input/test_data_set_0/output_0.pb,sha256=hpn8eiTjKgyiZryvhHYl3b_SKNz3D9ZMneyziZ0AkvA,13 +onnx/backend/test/data/node/test_optional_has_element_tensor_input/model.onnx,sha256=uU1Svb-zZhq-bhKKY5XCI1gDnmfKllnJOvDEK7JrfmA,163 +onnx/backend/test/data/node/test_optional_has_element_tensor_input/test_data_set_0/input_0.pb,sha256=A-AbnXPvbH-QEhigtVShIBP4Fxgyw6jbvckX2cSUAlM,42 +onnx/backend/test/data/node/test_optional_has_element_tensor_input/test_data_set_0/output_0.pb,sha256=hpn8eiTjKgyiZryvhHYl3b_SKNz3D9ZMneyziZ0AkvA,13 +onnx/backend/test/data/node/test_or2d/model.onnx,sha256=YxZyfgzueJXXO82yAk2d0oFy_YCthbGNgd1ZHuDZ7WU,115 +onnx/backend/test/data/node/test_or2d/test_data_set_0/input_0.pb,sha256=LjNp_gf5HfToURj-fOV-TL_10b7cTqoOj8RfPTNPYLQ,23 +onnx/backend/test/data/node/test_or2d/test_data_set_0/input_1.pb,sha256=goZmxegiEZPjRTXScUiQU9fpxPndF4EqAZy8oeToa8k,23 +onnx/backend/test/data/node/test_or2d/test_data_set_0/output_0.pb,sha256=5Xl0XiY-aaQiF0RTwPftqw-gxf2WK-W0mXlMNdMDSyc,24 +onnx/backend/test/data/node/test_or3d/model.onnx,sha256=14UsGEpmYYb9juxWEBxdx89196ZQU8cWRIawcP-mbWs,127 +onnx/backend/test/data/node/test_or3d/test_data_set_0/input_0.pb,sha256=Dr4k5dppNqEaWmFsMUXw_Pbp7QoEpqkXv4Bodqt7zMQ,73 +onnx/backend/test/data/node/test_or3d/test_data_set_0/input_1.pb,sha256=v5SpTzEGQijgxpt3GQdqFdpD-LCqlDgrPace75EL5pQ,73 +onnx/backend/test/data/node/test_or3d/test_data_set_0/output_0.pb,sha256=mw8NzywgY2NAFHAr4f4QQLIffghFyfSifzaeN457XRI,74 +onnx/backend/test/data/node/test_or4d/model.onnx,sha256=bKfqW6lqdxp4zZ8X7G6MD1eyfUCI2TS9qECPjystd3s,139 +onnx/backend/test/data/node/test_or4d/test_data_set_0/input_0.pb,sha256=sMKxU5gNQx6mv89ZOkmV0QCdEu9f8EhE-MZuoWNbcek,376 +onnx/backend/test/data/node/test_or4d/test_data_set_0/input_1.pb,sha256=9JaUvXVRf_NYTRwYWtPYsbSoKJDwOXPZuRBgFY7pYPc,376 +onnx/backend/test/data/node/test_or4d/test_data_set_0/output_0.pb,sha256=NWaSdDXxpWJ2G5_V1j1D4xBCLVAfA5AibswMzTatBs4,377 +onnx/backend/test/data/node/test_or_bcast3v1d/model.onnx,sha256=cXjsmeVsR0T2G5yMsihChNeQN_zpdqJ6i6IbPlJi5RA,127 +onnx/backend/test/data/node/test_or_bcast3v1d/test_data_set_0/input_0.pb,sha256=wF52hvLEXCfryTN0FvgWPKXSx3PPlpiaC_aAZ5IXCMc,73 +onnx/backend/test/data/node/test_or_bcast3v1d/test_data_set_0/input_1.pb,sha256=0cP0kHJ95VbmGsyJW3rlozPO38YQocSctc5buLas0ZI,14 +onnx/backend/test/data/node/test_or_bcast3v1d/test_data_set_0/output_0.pb,sha256=j0nTh8QRsYHOGVMoWh_46oB-6On3aBJH4SI24vIohRI,74 +onnx/backend/test/data/node/test_or_bcast3v2d/model.onnx,sha256=G9GQtIbSTfAFO3EFW38ioWu9y_2NkgU1Iav8Ci5HCrs,131 +onnx/backend/test/data/node/test_or_bcast3v2d/test_data_set_0/input_0.pb,sha256=2Hi5gZzCyk3SWtB8dhtS5tIMipISnPJGI1SIjx08Fe0,73 +onnx/backend/test/data/node/test_or_bcast3v2d/test_data_set_0/input_1.pb,sha256=J1neLsOqZHTvGdagBF8_3O6sQk1byuUIMABR0wJxASA,31 +onnx/backend/test/data/node/test_or_bcast3v2d/test_data_set_0/output_0.pb,sha256=Glafui2pjURlqmm1rfvld49F1orTOyvv3DaBtU3j1gI,74 +onnx/backend/test/data/node/test_or_bcast4v2d/model.onnx,sha256=TZJalmR1gazv8940_76Fk0UXbxLJLqVAKlZnbMATqKI,139 
+onnx/backend/test/data/node/test_or_bcast4v2d/test_data_set_0/input_0.pb,sha256=TG9PpNJBW-rhSdwYDpmwCMotWc8SOe2e2cLAoXaNQ8o,376 +onnx/backend/test/data/node/test_or_bcast4v2d/test_data_set_0/input_1.pb,sha256=D4JIlfVUzijC8rk152pkPsDWa33YJRlmutjMvkT-1FI,41 +onnx/backend/test/data/node/test_or_bcast4v2d/test_data_set_0/output_0.pb,sha256=3VcBGGnUIc16zDtVFdh1ecfxPo5Amdeloer8Gs6iGWc,377 +onnx/backend/test/data/node/test_or_bcast4v3d/model.onnx,sha256=fgL1jyS62RVe7hVUtiNc0VXR-Qi_Hd-D6QAhvFN54ns,143 +onnx/backend/test/data/node/test_or_bcast4v3d/test_data_set_0/input_0.pb,sha256=1gaunFCw_HO0ZAf_sAp9bfcwVRJW_aTJIwkv-2WYWMw,376 +onnx/backend/test/data/node/test_or_bcast4v3d/test_data_set_0/input_1.pb,sha256=26gLYNI9eQESqKzWzx1zrE8YDDCNIvfAE_0m_FoeT3E,133 +onnx/backend/test/data/node/test_or_bcast4v3d/test_data_set_0/output_0.pb,sha256=L4Mruier4Yg6xS5HZXDfBii0EK9xF13r_INAS00D8fU,377 +onnx/backend/test/data/node/test_or_bcast4v4d/model.onnx,sha256=0jt8W5Qyv_1nUR5K7fMmga8rLmjsjePDZvS7ZKtdnkc,147 +onnx/backend/test/data/node/test_or_bcast4v4d/test_data_set_0/input_0.pb,sha256=f63fGhPch5eTkAvMXCrJTC0FOIdhKnaSfJ8Ox9hnol0,39 +onnx/backend/test/data/node/test_or_bcast4v4d/test_data_set_0/input_1.pb,sha256=iCGDAU6H1-WHvqKKRCD44UWeluKsnFCWMpevbsJ5ASU,105 +onnx/backend/test/data/node/test_or_bcast4v4d/test_data_set_0/output_0.pb,sha256=FDGBOwzj2VIUJgfnXU22QetJ_GOkZCqrZ27em-O04Lk,377 +onnx/backend/test/data/node/test_pow/model.onnx,sha256=oSi3o33WPiiwEEtIqeL8xWz5h2VGhoieq2vfZxsTdkM,125 +onnx/backend/test/data/node/test_pow/test_data_set_0/input_0.pb,sha256=AY_VMGnKJBQrw98tz_qYFRZu6obuXC-dGQyJjKrKjKo,254 +onnx/backend/test/data/node/test_pow/test_data_set_0/input_1.pb,sha256=4E7JZEL-6RPD2ZoGlbe4UZXnssgawDGOHg15V0RhX58,254 +onnx/backend/test/data/node/test_pow/test_data_set_0/output_0.pb,sha256=y-34eZm7CPL6irwI7C6amp564eVmFsIb9OvVtgp0cjg,254 +onnx/backend/test/data/node/test_pow_bcast_array/model.onnx,sha256=aWkVJssCZgZJkRcz1r9097G5eEy7N-yy2FhQszIXYeQ,121 +onnx/backend/test/data/node/test_pow_bcast_array/test_data_set_0/input_0.pb,sha256=4pQ6jBxU8_tBNnaRVRgYgck68GhQp-YW_tL388GcCvE,35 +onnx/backend/test/data/node/test_pow_bcast_array/test_data_set_0/input_1.pb,sha256=KW9J7uTWmyfHYqYwYwZyv64IeAl6_iDrPha7DI-bhcU,21 +onnx/backend/test/data/node/test_pow_bcast_array/test_data_set_0/output_0.pb,sha256=oINJYm3arnswSBGkl8GBHl_T8_2yIhrVB53JwpiKAvE,35 +onnx/backend/test/data/node/test_pow_bcast_scalar/model.onnx,sha256=UuP9qM1PUFNvkdMvtnuBcHQ2-HIMkO6eRJ6v5JzOkBc,110 +onnx/backend/test/data/node/test_pow_bcast_scalar/test_data_set_0/input_0.pb,sha256=07NXD9M8ncP5OIBozL5eyrRmifhNqQKh-yXwmBAQ5_Y,21 +onnx/backend/test/data/node/test_pow_bcast_scalar/test_data_set_0/input_1.pb,sha256=ydsAlIsWhTc_J1CFdrtsGmByX5clFiagrVStpbLbEm4,11 +onnx/backend/test/data/node/test_pow_bcast_scalar/test_data_set_0/output_0.pb,sha256=KJlLjivIdBlVq1m1jgyIwPU5tSpNIjw-blrFrPmtr3Q,21 +onnx/backend/test/data/node/test_pow_example/model.onnx,sha256=W--GGh80r5ebmjKz0wJSx74FtnlQW26I5-rqJ3Pe_sk,109 +onnx/backend/test/data/node/test_pow_example/test_data_set_0/input_0.pb,sha256=07NXD9M8ncP5OIBozL5eyrRmifhNqQKh-yXwmBAQ5_Y,21 +onnx/backend/test/data/node/test_pow_example/test_data_set_0/input_1.pb,sha256=CRO596M4hKvMtegi6TFu8eNUs-prhD7vkphHojkzBj4,21 +onnx/backend/test/data/node/test_pow_example/test_data_set_0/output_0.pb,sha256=57LTWuvzfgrVUv-dX28zQm97yYNf09UNTo0WGGLE4Vo,21 +onnx/backend/test/data/node/test_pow_types_float32_int32/model.onnx,sha256=dxLTq6DJ-ekrMCQY4IxPqzB5oIDKNcEJDFKWNTk4iO8,121 
+onnx/backend/test/data/node/test_pow_types_float32_int32/test_data_set_0/input_0.pb,sha256=07NXD9M8ncP5OIBozL5eyrRmifhNqQKh-yXwmBAQ5_Y,21 +onnx/backend/test/data/node/test_pow_types_float32_int32/test_data_set_0/input_1.pb,sha256=r8BFGDyB4p4Qqn72TCCqllX4VYjNBD2WqlCAVxUuF10,21 +onnx/backend/test/data/node/test_pow_types_float32_int32/test_data_set_0/output_0.pb,sha256=57LTWuvzfgrVUv-dX28zQm97yYNf09UNTo0WGGLE4Vo,21 +onnx/backend/test/data/node/test_pow_types_float32_int64/model.onnx,sha256=bva03rFSbrJ18uJMKS5oBQTMI8XdxebRhodAICC0M7c,121 +onnx/backend/test/data/node/test_pow_types_float32_int64/test_data_set_0/input_0.pb,sha256=07NXD9M8ncP5OIBozL5eyrRmifhNqQKh-yXwmBAQ5_Y,21 +onnx/backend/test/data/node/test_pow_types_float32_int64/test_data_set_0/input_1.pb,sha256=-Mz9howslt6OFLvLPy_8aC6Z7H6lpX4ULsCU9rbuITQ,33 +onnx/backend/test/data/node/test_pow_types_float32_int64/test_data_set_0/output_0.pb,sha256=57LTWuvzfgrVUv-dX28zQm97yYNf09UNTo0WGGLE4Vo,21 +onnx/backend/test/data/node/test_pow_types_float32_uint32/model.onnx,sha256=8Pbwtz_fJDSXpX-bstlXsg6MCAanm_cXB4VcH2dYDVQ,122 +onnx/backend/test/data/node/test_pow_types_float32_uint32/test_data_set_0/input_0.pb,sha256=07NXD9M8ncP5OIBozL5eyrRmifhNqQKh-yXwmBAQ5_Y,21 +onnx/backend/test/data/node/test_pow_types_float32_uint32/test_data_set_0/input_1.pb,sha256=58O5vvDhR1HzOWUoqyt2e86myJ4FYDger-v8DT6IeR0,21 +onnx/backend/test/data/node/test_pow_types_float32_uint32/test_data_set_0/output_0.pb,sha256=57LTWuvzfgrVUv-dX28zQm97yYNf09UNTo0WGGLE4Vo,21 +onnx/backend/test/data/node/test_pow_types_float32_uint64/model.onnx,sha256=oX2b_ITd8r-Hl9OeBki0iVT5oNggIYhN_0AAHIUTiRA,122 +onnx/backend/test/data/node/test_pow_types_float32_uint64/test_data_set_0/input_0.pb,sha256=07NXD9M8ncP5OIBozL5eyrRmifhNqQKh-yXwmBAQ5_Y,21 +onnx/backend/test/data/node/test_pow_types_float32_uint64/test_data_set_0/input_1.pb,sha256=yY4W3jqfz1Qc7B9EKL41WxTKzhmiwQlgkd_3eJoJtQA,33 +onnx/backend/test/data/node/test_pow_types_float32_uint64/test_data_set_0/output_0.pb,sha256=57LTWuvzfgrVUv-dX28zQm97yYNf09UNTo0WGGLE4Vo,21 +onnx/backend/test/data/node/test_pow_types_int32_float32/model.onnx,sha256=lEyY8p-kFy2-Um7rIKeF2W6wSppZvfBrYuUdWS-KNEI,121 +onnx/backend/test/data/node/test_pow_types_int32_float32/test_data_set_0/input_0.pb,sha256=nXGXrHzTelDfYXqyN-ePUSAnKkGHHf4Em7tfQoCy8gI,21 +onnx/backend/test/data/node/test_pow_types_int32_float32/test_data_set_0/input_1.pb,sha256=CRO596M4hKvMtegi6TFu8eNUs-prhD7vkphHojkzBj4,21 +onnx/backend/test/data/node/test_pow_types_int32_float32/test_data_set_0/output_0.pb,sha256=5HGZJaBp_kTdE1qP63n0RHw17mLW6osJWWIKcdTAPRE,21 +onnx/backend/test/data/node/test_pow_types_int32_int32/model.onnx,sha256=b94RDUFctnJPBTWKCC3gXLaM152e7zkloWeyZpqGsnU,119 +onnx/backend/test/data/node/test_pow_types_int32_int32/test_data_set_0/input_0.pb,sha256=nXGXrHzTelDfYXqyN-ePUSAnKkGHHf4Em7tfQoCy8gI,21 +onnx/backend/test/data/node/test_pow_types_int32_int32/test_data_set_0/input_1.pb,sha256=r8BFGDyB4p4Qqn72TCCqllX4VYjNBD2WqlCAVxUuF10,21 +onnx/backend/test/data/node/test_pow_types_int32_int32/test_data_set_0/output_0.pb,sha256=5HGZJaBp_kTdE1qP63n0RHw17mLW6osJWWIKcdTAPRE,21 +onnx/backend/test/data/node/test_pow_types_int64_float32/model.onnx,sha256=h4Bky73HUDVZ3HfCJr8Bd-mmkMeQq-Lmktqp5NxZL-s,121 +onnx/backend/test/data/node/test_pow_types_int64_float32/test_data_set_0/input_0.pb,sha256=_cGeaGhZxcfOOmaCCsiTo_hNZRp2Zxo91TaoYiWtOAM,33 +onnx/backend/test/data/node/test_pow_types_int64_float32/test_data_set_0/input_1.pb,sha256=CRO596M4hKvMtegi6TFu8eNUs-prhD7vkphHojkzBj4,21 
+onnx/backend/test/data/node/test_pow_types_int64_float32/test_data_set_0/output_0.pb,sha256=zDXqkOFSIfyWuTMnQxcIRhTC-wGpB5rhBrjWeAOZdLw,33 +onnx/backend/test/data/node/test_pow_types_int64_int64/model.onnx,sha256=aobGdTeCPRQjcsm-0MQ55aszgjd3WW1DU5Po3uzsoKw,119 +onnx/backend/test/data/node/test_pow_types_int64_int64/test_data_set_0/input_0.pb,sha256=_cGeaGhZxcfOOmaCCsiTo_hNZRp2Zxo91TaoYiWtOAM,33 +onnx/backend/test/data/node/test_pow_types_int64_int64/test_data_set_0/input_1.pb,sha256=-Mz9howslt6OFLvLPy_8aC6Z7H6lpX4ULsCU9rbuITQ,33 +onnx/backend/test/data/node/test_pow_types_int64_int64/test_data_set_0/output_0.pb,sha256=zDXqkOFSIfyWuTMnQxcIRhTC-wGpB5rhBrjWeAOZdLw,33 +onnx/backend/test/data/node/test_prelu_broadcast/model.onnx,sha256=VOu6i8tiLpeQK6YWXdF7oJCXK-4WT3E0bDszBxRc4l4,139 +onnx/backend/test/data/node/test_prelu_broadcast/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_prelu_broadcast/test_data_set_0/input_1.pb,sha256=zCWXggDw6QArEHtYuiVS23cWS2G7lNjtq_XeeHy65sc,33 +onnx/backend/test/data/node/test_prelu_broadcast/test_data_set_0/output_0.pb,sha256=KmwLJ-2oKWwwgugpBrNYG2ynhsgvc_0m_vdW_NCbEFI,254 +onnx/backend/test/data/node/test_prelu_broadcast_expanded/model.onnx,sha256=3VpORuXsca8Ufl65oJO1bkyRLQZ-KJS3La0KlcSTQbU,676 +onnx/backend/test/data/node/test_prelu_broadcast_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_prelu_broadcast_expanded/test_data_set_0/input_1.pb,sha256=zCWXggDw6QArEHtYuiVS23cWS2G7lNjtq_XeeHy65sc,33 +onnx/backend/test/data/node/test_prelu_broadcast_expanded/test_data_set_0/output_0.pb,sha256=KmwLJ-2oKWwwgugpBrNYG2ynhsgvc_0m_vdW_NCbEFI,254 +onnx/backend/test/data/node/test_prelu_example/model.onnx,sha256=fTqBhgsJarObGlLYNycwzJD2NIggfMg4_pjC0LrZ_c8,145 +onnx/backend/test/data/node/test_prelu_example/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_prelu_example/test_data_set_0/input_1.pb,sha256=t8-Rs14nU1gM46Bqcj7DCLL8KQt3PwZNrP-n6Yxs050,258 +onnx/backend/test/data/node/test_prelu_example/test_data_set_0/output_0.pb,sha256=hkpum_lGz_jBnbvAIljQ9y5IzwU1AbXJRLXvIuVCK5I,254 +onnx/backend/test/data/node/test_prelu_example_expanded/model.onnx,sha256=Pu-_pG45_wdvyBPfSRxzgpHH-Ou7-y5XY7K7ls2Fh_4,665 +onnx/backend/test/data/node/test_prelu_example_expanded/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_prelu_example_expanded/test_data_set_0/input_1.pb,sha256=t8-Rs14nU1gM46Bqcj7DCLL8KQt3PwZNrP-n6Yxs050,258 +onnx/backend/test/data/node/test_prelu_example_expanded/test_data_set_0/output_0.pb,sha256=hkpum_lGz_jBnbvAIljQ9y5IzwU1AbXJRLXvIuVCK5I,254 +onnx/backend/test/data/node/test_qlinearconv/model.onnx,sha256=bqDrNQcDv3Qw4DbA2a6-uTWtkLlPxWm1Sk9EJa0uOKk,360 +onnx/backend/test/data/node/test_qlinearconv/test_data_set_0/input_0.pb,sha256=nixNRkyGN_KgVkWV5xeFSwW2P9g5MGBOe-fgB5gW744,64 +onnx/backend/test/data/node/test_qlinearconv/test_data_set_0/input_1.pb,sha256=a9DMFXaeuYAOi_FHwlb4SDFEqpxh_gwHrH06td0hjQ0,17 +onnx/backend/test/data/node/test_qlinearconv/test_data_set_0/input_2.pb,sha256=afKUomTgzPpltKVwfiOFEPSOezG6ntRvqW9enoddjJA,19 +onnx/backend/test/data/node/test_qlinearconv/test_data_set_0/input_3.pb,sha256=YDlA6BZ9CIGFwKl9IfXE7vxH2gKpIftKThESMEtEHgI,16 +onnx/backend/test/data/node/test_qlinearconv/test_data_set_0/input_4.pb,sha256=kdHz8BDgovMXRdFV-OaK5z36bbaWGT3-ZI0h6qw2uh0,19 
+onnx/backend/test/data/node/test_qlinearconv/test_data_set_0/input_5.pb,sha256=CQhc7l4YdSLie9L3sGF-eLE3KZIxewlTFTmvOmZRym0,21 +onnx/backend/test/data/node/test_qlinearconv/test_data_set_0/input_6.pb,sha256=T4bbtORqnLDgoXiJlaOQ2C-Rbwqp2nQ_t4VN6Rbc84Y,17 +onnx/backend/test/data/node/test_qlinearconv/test_data_set_0/input_7.pb,sha256=qrW9JA42dIlWU4LSxNQEZVx6ktF-KAhGf4gYgy-4bX4,19 +onnx/backend/test/data/node/test_qlinearconv/test_data_set_0/output_0.pb,sha256=fXjZHJuLHRtyTJIj46iMB6A-32OAXHixEdz5OWZ7t0E,64 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float16/model.onnx,sha256=YMtaZlgncQFarWKRaFS5GHxrmep7nShkNyCJSftAth8,372 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float16/test_data_set_0/input_0.pb,sha256=EW4KzwC4IDcjv5uG14lyLOx9ieD3E6OQZtnO4MAhlb4,19 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float16/test_data_set_0/input_1.pb,sha256=nnYTBKWGRnIW1s-p2ROJAwiL-Q_M_TS4m-kBAjuFaGQ,17 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float16/test_data_set_0/input_2.pb,sha256=NJ1z8xFynGtHNA3xluGHxNKI2mU5GMzAIylBdZbSfBQ,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float16/test_data_set_0/input_3.pb,sha256=3Su8-6njxvvsNh2-C2VxCrZM3hlJwOe61sBe77Uxbx4,23 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float16/test_data_set_0/input_4.pb,sha256=DhgPgyPecHsoTXjdgqQ9dUHJ0gm2K3tgCqnHKrtsRdE,17 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float16/test_data_set_0/input_5.pb,sha256=sszTS9AFBbYJkRJC3oCoh6AGx-2IVOprE3COEL-ItUg,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float16/test_data_set_0/input_6.pb,sha256=0LyvWUhWqNYVSVkJKOqXWcG3Sjz_wdXBmxGG9aCTtjI,17 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float16/test_data_set_0/input_7.pb,sha256=MgU6P2x2sZyaqZX7fIx-qw7Rq97TpHkcRbW0NmZMQdU,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float16/test_data_set_0/output_0.pb,sha256=mtY468bpbqh5ZEhi51CnOMOfwvbLlDvbTb-slw-2hfA,17 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float32/model.onnx,sha256=d57daBhbC2v11IXbHl2jYMBPq6m4se7lcvuurqxok20,372 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float32/test_data_set_0/input_0.pb,sha256=EW4KzwC4IDcjv5uG14lyLOx9ieD3E6OQZtnO4MAhlb4,19 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float32/test_data_set_0/input_1.pb,sha256=aVsxHksGQU9JCf5giP1I5HpY1f659W0A0y09s5oXTVQ,19 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float32/test_data_set_0/input_2.pb,sha256=NJ1z8xFynGtHNA3xluGHxNKI2mU5GMzAIylBdZbSfBQ,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float32/test_data_set_0/input_3.pb,sha256=3Su8-6njxvvsNh2-C2VxCrZM3hlJwOe61sBe77Uxbx4,23 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float32/test_data_set_0/input_4.pb,sha256=-XzmSr1oi7OP_jzKt5I8dd4ZiHXLfgttDVHG9h02Asc,19 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float32/test_data_set_0/input_5.pb,sha256=sszTS9AFBbYJkRJC3oCoh6AGx-2IVOprE3COEL-ItUg,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float32/test_data_set_0/input_6.pb,sha256=f9fGFxkJMhbFB3Jha4dcX2nTB0pIclfxri5tXYpZZwc,19 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float32/test_data_set_0/input_7.pb,sha256=MgU6P2x2sZyaqZX7fIx-qw7Rq97TpHkcRbW0NmZMQdU,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_int8_float32/test_data_set_0/output_0.pb,sha256=mtY468bpbqh5ZEhi51CnOMOfwvbLlDvbTb-slw-2hfA,17 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float16/model.onnx,sha256=eSmBLskSpRr71KchtmlSd-inYHC_UMky6kGwKFo9_Q8,373 
+onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float16/test_data_set_0/input_0.pb,sha256=ko9ZfhAcGp41Zcl0RgwHA7UB-6Iqkc0hqXnViR7c-2M,19 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float16/test_data_set_0/input_1.pb,sha256=nnYTBKWGRnIW1s-p2ROJAwiL-Q_M_TS4m-kBAjuFaGQ,17 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float16/test_data_set_0/input_2.pb,sha256=pPLPbQiq1ONgy164_MHBnJM4nH5E_tZbcoFV5sPOYUY,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float16/test_data_set_0/input_3.pb,sha256=oIvG3PVjz2QeQCzNzI9o9errRsMMNpLyCqVPNflcHAY,23 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float16/test_data_set_0/input_4.pb,sha256=DhgPgyPecHsoTXjdgqQ9dUHJ0gm2K3tgCqnHKrtsRdE,17 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float16/test_data_set_0/input_5.pb,sha256=CbDV3Ys31D0hYZkLMhnYjqA2TCsFoiAB6Osr8BXOleg,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float16/test_data_set_0/input_6.pb,sha256=0LyvWUhWqNYVSVkJKOqXWcG3Sjz_wdXBmxGG9aCTtjI,17 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float16/test_data_set_0/input_7.pb,sha256=ATdkTkCLK9NqW-0ctLP0ByOjC9HDFf6P3w_fAVQhiQc,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float16/test_data_set_0/output_0.pb,sha256=Fbb3q876qubUQlK88YFKv7lCDKrp0pAA5yddHKodoqc,17 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float32/model.onnx,sha256=-emyASzk9mYzpvEklornS-KJAwVgO-ZumIFM81iWm1s,373 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float32/test_data_set_0/input_0.pb,sha256=ko9ZfhAcGp41Zcl0RgwHA7UB-6Iqkc0hqXnViR7c-2M,19 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float32/test_data_set_0/input_1.pb,sha256=aVsxHksGQU9JCf5giP1I5HpY1f659W0A0y09s5oXTVQ,19 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float32/test_data_set_0/input_2.pb,sha256=pPLPbQiq1ONgy164_MHBnJM4nH5E_tZbcoFV5sPOYUY,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float32/test_data_set_0/input_3.pb,sha256=oIvG3PVjz2QeQCzNzI9o9errRsMMNpLyCqVPNflcHAY,23 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float32/test_data_set_0/input_4.pb,sha256=-XzmSr1oi7OP_jzKt5I8dd4ZiHXLfgttDVHG9h02Asc,19 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float32/test_data_set_0/input_5.pb,sha256=CbDV3Ys31D0hYZkLMhnYjqA2TCsFoiAB6Osr8BXOleg,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float32/test_data_set_0/input_6.pb,sha256=f9fGFxkJMhbFB3Jha4dcX2nTB0pIclfxri5tXYpZZwc,19 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float32/test_data_set_0/input_7.pb,sha256=ATdkTkCLK9NqW-0ctLP0ByOjC9HDFf6P3w_fAVQhiQc,21 +onnx/backend/test/data/node/test_qlinearmatmul_2D_uint8_float32/test_data_set_0/output_0.pb,sha256=Fbb3q876qubUQlK88YFKv7lCDKrp0pAA5yddHKodoqc,17 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float16/model.onnx,sha256=4CGBeH4fTxLxAlyrZ5QB4K8oEE6ELvaXTcCqOEx8u8Y,384 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float16/test_data_set_0/input_0.pb,sha256=oKrfJZoWsHvS6q36ryQqtu4sdYzHFxTYSYA1qPIXfTI,29 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float16/test_data_set_0/input_1.pb,sha256=nnYTBKWGRnIW1s-p2ROJAwiL-Q_M_TS4m-kBAjuFaGQ,17 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float16/test_data_set_0/input_2.pb,sha256=NJ1z8xFynGtHNA3xluGHxNKI2mU5GMzAIylBdZbSfBQ,21 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float16/test_data_set_0/input_3.pb,sha256=7-xca2r8N2Vyt3Qpje0wnsi65Uj65ieur9wJdQxMKj8,37 
+onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float16/test_data_set_0/input_4.pb,sha256=DhgPgyPecHsoTXjdgqQ9dUHJ0gm2K3tgCqnHKrtsRdE,17 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float16/test_data_set_0/input_5.pb,sha256=X3jdM38tfezX3Z13EbQ5utaQCL2WXxq20jkn0xl9OaU,21 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float16/test_data_set_0/input_6.pb,sha256=0LyvWUhWqNYVSVkJKOqXWcG3Sjz_wdXBmxGG9aCTtjI,17 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float16/test_data_set_0/input_7.pb,sha256=MgU6P2x2sZyaqZX7fIx-qw7Rq97TpHkcRbW0NmZMQdU,21 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float16/test_data_set_0/output_0.pb,sha256=5I1QoFoXuKoZKYZPttDanFz4LVNwlC3aY9JLcYe0pOg,25 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float32/model.onnx,sha256=8Jjd7cBRcCKVzU_JBD3bWNH3v5KthcxMa2NX6NZoe24,384 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float32/test_data_set_0/input_0.pb,sha256=oKrfJZoWsHvS6q36ryQqtu4sdYzHFxTYSYA1qPIXfTI,29 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float32/test_data_set_0/input_1.pb,sha256=aVsxHksGQU9JCf5giP1I5HpY1f659W0A0y09s5oXTVQ,19 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float32/test_data_set_0/input_2.pb,sha256=NJ1z8xFynGtHNA3xluGHxNKI2mU5GMzAIylBdZbSfBQ,21 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float32/test_data_set_0/input_3.pb,sha256=7-xca2r8N2Vyt3Qpje0wnsi65Uj65ieur9wJdQxMKj8,37 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float32/test_data_set_0/input_4.pb,sha256=-XzmSr1oi7OP_jzKt5I8dd4ZiHXLfgttDVHG9h02Asc,19 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float32/test_data_set_0/input_5.pb,sha256=X3jdM38tfezX3Z13EbQ5utaQCL2WXxq20jkn0xl9OaU,21 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float32/test_data_set_0/input_6.pb,sha256=f9fGFxkJMhbFB3Jha4dcX2nTB0pIclfxri5tXYpZZwc,19 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float32/test_data_set_0/input_7.pb,sha256=MgU6P2x2sZyaqZX7fIx-qw7Rq97TpHkcRbW0NmZMQdU,21 +onnx/backend/test/data/node/test_qlinearmatmul_3D_int8_float32/test_data_set_0/output_0.pb,sha256=tZD1KURmEyaxo2DV_2em8pBZV7jFz6AsgcJxjIWWZCU,25 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float16/model.onnx,sha256=3_zCQ_9VtBCOouPlz7Q5JQEpi8NTeJjysD-LoS49RM0,385 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float16/test_data_set_0/input_0.pb,sha256=g5g_gcA0GltOE1_lFzaVSua7MBYe98NlUfplZayMJtM,29 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float16/test_data_set_0/input_1.pb,sha256=nnYTBKWGRnIW1s-p2ROJAwiL-Q_M_TS4m-kBAjuFaGQ,17 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float16/test_data_set_0/input_2.pb,sha256=pPLPbQiq1ONgy164_MHBnJM4nH5E_tZbcoFV5sPOYUY,21 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float16/test_data_set_0/input_3.pb,sha256=aRoCSENX6yjHjLkmm5vXFbBpo9EbeGo9RxE94GPJHXQ,37 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float16/test_data_set_0/input_4.pb,sha256=DhgPgyPecHsoTXjdgqQ9dUHJ0gm2K3tgCqnHKrtsRdE,17 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float16/test_data_set_0/input_5.pb,sha256=CbDV3Ys31D0hYZkLMhnYjqA2TCsFoiAB6Osr8BXOleg,21 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float16/test_data_set_0/input_6.pb,sha256=0LyvWUhWqNYVSVkJKOqXWcG3Sjz_wdXBmxGG9aCTtjI,17 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float16/test_data_set_0/input_7.pb,sha256=ATdkTkCLK9NqW-0ctLP0ByOjC9HDFf6P3w_fAVQhiQc,21 
+onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float16/test_data_set_0/output_0.pb,sha256=rvU4UCTaVhbhfnNv1gl2oCkUQblLDZkbA4uRlSy0Le0,25 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float32/model.onnx,sha256=1wzomcDWQcy2GltYKKKgVEOn8wHH7T_sJJDJNo2vRXI,385 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float32/test_data_set_0/input_0.pb,sha256=g5g_gcA0GltOE1_lFzaVSua7MBYe98NlUfplZayMJtM,29 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float32/test_data_set_0/input_1.pb,sha256=aVsxHksGQU9JCf5giP1I5HpY1f659W0A0y09s5oXTVQ,19 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float32/test_data_set_0/input_2.pb,sha256=pPLPbQiq1ONgy164_MHBnJM4nH5E_tZbcoFV5sPOYUY,21 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float32/test_data_set_0/input_3.pb,sha256=aRoCSENX6yjHjLkmm5vXFbBpo9EbeGo9RxE94GPJHXQ,37 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float32/test_data_set_0/input_4.pb,sha256=-XzmSr1oi7OP_jzKt5I8dd4ZiHXLfgttDVHG9h02Asc,19 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float32/test_data_set_0/input_5.pb,sha256=CbDV3Ys31D0hYZkLMhnYjqA2TCsFoiAB6Osr8BXOleg,21 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float32/test_data_set_0/input_6.pb,sha256=f9fGFxkJMhbFB3Jha4dcX2nTB0pIclfxri5tXYpZZwc,19 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float32/test_data_set_0/input_7.pb,sha256=ATdkTkCLK9NqW-0ctLP0ByOjC9HDFf6P3w_fAVQhiQc,21 +onnx/backend/test/data/node/test_qlinearmatmul_3D_uint8_float32/test_data_set_0/output_0.pb,sha256=rvU4UCTaVhbhfnNv1gl2oCkUQblLDZkbA4uRlSy0Le0,25 +onnx/backend/test/data/node/test_quantizelinear/model.onnx,sha256=f85a4vmpKQIl098SjcgBLxjk0GyU8aPOhfEb56Nojr4,170 +onnx/backend/test/data/node/test_quantizelinear/test_data_set_0/input_0.pb,sha256=-WdUWtE3Gq6XQCRO6yOoTUcsaUMo-gEvqWT8B_Q2ax8,33 +onnx/backend/test/data/node/test_quantizelinear/test_data_set_0/input_1.pb,sha256=Zv7CydImWalnvbaS5VHQ6zTXvOUCohUym_ClUIDiLJ4,17 +onnx/backend/test/data/node/test_quantizelinear/test_data_set_0/input_2.pb,sha256=wPEVchEft31jMR00LL9m2McOaC1WYliDhjLWm34U2yA,19 +onnx/backend/test/data/node/test_quantizelinear/test_data_set_0/output_0.pb,sha256=tKPhseg-u9oNXCOVqz3GYbmoR7oSaEtKC0Y5KC4txus,15 +onnx/backend/test/data/node/test_quantizelinear_axis/model.onnx,sha256=J72_dgotTMs93gfFp3lFtqTMPeSCwKBvzUexY5CinAQ,207 +onnx/backend/test/data/node/test_quantizelinear_axis/test_data_set_0/input_0.pb,sha256=lOMq5eKWvXmYUXZijznYZGS8ZP7QGcLUOQgOwOc00zE,87 +onnx/backend/test/data/node/test_quantizelinear_axis/test_data_set_0/input_1.pb,sha256=32ZIocYXlTh3bTOfM1h_XZs3_EDpTAXIivA6ljFghzA,27 +onnx/backend/test/data/node/test_quantizelinear_axis/test_data_set_0/input_2.pb,sha256=Nm2IkguOQZ3UhRkLUIs3YMzqEG3ssgm0UKDB1bvBKKk,23 +onnx/backend/test/data/node/test_quantizelinear_axis/test_data_set_0/output_0.pb,sha256=qUJeQDCKjzRejnEfE6gC17CDuVDqA6q5MM1F555aFVA,33 +onnx/backend/test/data/node/test_quantizelinear_blocked_asymmetric/model.onnx,sha256=fNQwTnyFFn9X3Fb3eo6MJ5Rwdm-56Q4xk7GJOVJmt3w,245 +onnx/backend/test/data/node/test_quantizelinear_blocked_asymmetric/test_data_set_0/input_0.pb,sha256=y7Rhu8G-uM9JmVW66VI-VnYQbiuRE5sGV_Z6AQuyxkA,59 +onnx/backend/test/data/node/test_quantizelinear_blocked_asymmetric/test_data_set_0/input_1.pb,sha256=v8inhQKQhz6dS_q_JBaaUuy2xp49SRE1THZSwm-QDkA,41 +onnx/backend/test/data/node/test_quantizelinear_blocked_asymmetric/test_data_set_0/input_2.pb,sha256=1un1WVjefFvWOQnshf2LbMhpAQ2QxiXMunRcoT7vMow,28 
+onnx/backend/test/data/node/test_quantizelinear_blocked_asymmetric/test_data_set_0/output_0.pb,sha256=rElUPp_29s5O54rRjWy1dP0WYzZ6KxmJK0yJuHxE5k8,23 +onnx/backend/test/data/node/test_quantizelinear_blocked_symmetric/model.onnx,sha256=oJvvE0FYrqjUVrJ8FS_jwDygqQK1-q67aE17vDQPu8E,219 +onnx/backend/test/data/node/test_quantizelinear_blocked_symmetric/test_data_set_0/input_0.pb,sha256=arGehoBobNUspLhRtF3Ex9xKCa0F0y98oL6fRrQvA_s,59 +onnx/backend/test/data/node/test_quantizelinear_blocked_symmetric/test_data_set_0/input_1.pb,sha256=v8inhQKQhz6dS_q_JBaaUuy2xp49SRE1THZSwm-QDkA,41 +onnx/backend/test/data/node/test_quantizelinear_blocked_symmetric/test_data_set_0/output_0.pb,sha256=bycN13RnEpWSK9Kk8K3evE6KopdVTsEVeKRxRfyZ6n8,41 +onnx/backend/test/data/node/test_quantizelinear_e4m3fn/model.onnx,sha256=22nDQPV_pY3TRB34BlD_KeS8ow0yOzDZjTNotuWr18M,181 +onnx/backend/test/data/node/test_quantizelinear_e4m3fn/test_data_set_0/input_0.pb,sha256=RpINE2Cg2dhwMgzJm3ZxSLDmJ1q0XwI6WmIHcL_j6Yc,29 +onnx/backend/test/data/node/test_quantizelinear_e4m3fn/test_data_set_0/input_1.pb,sha256=Zv7CydImWalnvbaS5VHQ6zTXvOUCohUym_ClUIDiLJ4,17 +onnx/backend/test/data/node/test_quantizelinear_e4m3fn/test_data_set_0/input_2.pb,sha256=_7Vdv5427rLlWyXQDc1ya_9WSxSO8L4LZu_OHnm_FT8,21 +onnx/backend/test/data/node/test_quantizelinear_e4m3fn/test_data_set_0/output_0.pb,sha256=nNu5UvmZ8mpsh_54mZqigNgGwEN8yszynDbulB-hJ5c,14 +onnx/backend/test/data/node/test_quantizelinear_e5m2/model.onnx,sha256=L0yVbjM9lrwdvbAwh8mhf89aKtCJEyrgcxacdH5tcEM,179 +onnx/backend/test/data/node/test_quantizelinear_e5m2/test_data_set_0/input_0.pb,sha256=RpINE2Cg2dhwMgzJm3ZxSLDmJ1q0XwI6WmIHcL_j6Yc,29 +onnx/backend/test/data/node/test_quantizelinear_e5m2/test_data_set_0/input_1.pb,sha256=Zv7CydImWalnvbaS5VHQ6zTXvOUCohUym_ClUIDiLJ4,17 +onnx/backend/test/data/node/test_quantizelinear_e5m2/test_data_set_0/input_2.pb,sha256=trfI5bwzv3rDl1dqKlEM_RmOGYCac0PrHNjqYeGXxa0,21 +onnx/backend/test/data/node/test_quantizelinear_e5m2/test_data_set_0/output_0.pb,sha256=Bk-NH1ZeWwTusSN6X2cuqkFjbYJ1vDxhtJZ592E76fw,14 +onnx/backend/test/data/node/test_quantizelinear_int16/model.onnx,sha256=8OIsIPruUoe4aZC2anHuM4n85GsQrL2BQYfQbYOYZJo,176 +onnx/backend/test/data/node/test_quantizelinear_int16/test_data_set_0/input_0.pb,sha256=VmE8fR23t887aluLk_qjxKg-S07NoLxCSje5xyEL5NI,73 +onnx/backend/test/data/node/test_quantizelinear_int16/test_data_set_0/input_1.pb,sha256=Zv7CydImWalnvbaS5VHQ6zTXvOUCohUym_ClUIDiLJ4,17 +onnx/backend/test/data/node/test_quantizelinear_int16/test_data_set_0/input_2.pb,sha256=IW96bAqV6F71uF1t7ICNMPFMR_nVLfIPA7W1KdkU4B4,20 +onnx/backend/test/data/node/test_quantizelinear_int16/test_data_set_0/output_0.pb,sha256=fO4cGgMJoTMHPXyb7Zk8-8od4K7pei-IdioLjClPpmM,41 +onnx/backend/test/data/node/test_quantizelinear_int4/model.onnx,sha256=_W9CJtKqLcm3zntWa30qryL1IGODqC8Lt-W-r4fMSR0,204 +onnx/backend/test/data/node/test_quantizelinear_int4/test_data_set_0/input_0.pb,sha256=Lg7BLkDJVhKNTmTszgJp0eK_i1afP0XARGAXTfNFdeQ,59 +onnx/backend/test/data/node/test_quantizelinear_int4/test_data_set_0/input_1.pb,sha256=2Pe2lcrRWP5oT1EosWg6ZOv0VcNUIPd__rMnNp0couk,27 +onnx/backend/test/data/node/test_quantizelinear_int4/test_data_set_0/input_2.pb,sha256=1d3dNu_2t0cVW-a0345YfmGWH3i1K_8DOgi_jg-eIdA,22 +onnx/backend/test/data/node/test_quantizelinear_int4/test_data_set_0/output_0.pb,sha256=y06PaLypD_0xbr5-dsEIz-izN4v0u_LvdzH-IedZ_W0,26 +onnx/backend/test/data/node/test_quantizelinear_uint16/model.onnx,sha256=Bzxw9Zgnq1uix9-HBgh_FLVkNfMG8X9RfD2iiDdOxOI,177 
+onnx/backend/test/data/node/test_quantizelinear_uint16/test_data_set_0/input_0.pb,sha256=HBChWZzpiiAckJ1Lx-CShRBJIZz9e-vcIp8no74kWhI,57 +onnx/backend/test/data/node/test_quantizelinear_uint16/test_data_set_0/input_1.pb,sha256=Zv7CydImWalnvbaS5VHQ6zTXvOUCohUym_ClUIDiLJ4,17 +onnx/backend/test/data/node/test_quantizelinear_uint16/test_data_set_0/input_2.pb,sha256=jzinJ02Qb3DPCHtpjg5UfGdMvuKAiNXwqDBpDUaDXVY,20 +onnx/backend/test/data/node/test_quantizelinear_uint16/test_data_set_0/output_0.pb,sha256=MbBTwcF-NDF361Rl6ouA8W4Nbyjc9TMYrLFJnhRAAGk,33 +onnx/backend/test/data/node/test_quantizelinear_uint4/model.onnx,sha256=fw0fba_Z4vNck23gKcLSd08zQkpEXxDA5uZmtiTkF_s,205 +onnx/backend/test/data/node/test_quantizelinear_uint4/test_data_set_0/input_0.pb,sha256=Lg7BLkDJVhKNTmTszgJp0eK_i1afP0XARGAXTfNFdeQ,59 +onnx/backend/test/data/node/test_quantizelinear_uint4/test_data_set_0/input_1.pb,sha256=2Pe2lcrRWP5oT1EosWg6ZOv0VcNUIPd__rMnNp0couk,27 +onnx/backend/test/data/node/test_quantizelinear_uint4/test_data_set_0/input_2.pb,sha256=iIP2U9MJwQa9ghUcuN8tpW91KGfsrp1Q8akf22lwe2M,22 +onnx/backend/test/data/node/test_quantizelinear_uint4/test_data_set_0/output_0.pb,sha256=8q530wXE297IpPg4EiJTOU3f4gDmaAuZgtD9-UT2vyM,18 +onnx/backend/test/data/node/test_range_float_type_positive_delta/model.onnx,sha256=yDJm47JALZVJ9S2sTGlShf_fJRLQNqF-BYb_fTfohlo,174 +onnx/backend/test/data/node/test_range_float_type_positive_delta/test_data_set_0/input_0.pb,sha256=PnwKkhwnmd5quiS20ZgN5aEPgWHukh4Z1L_5tQBqFMg,15 +onnx/backend/test/data/node/test_range_float_type_positive_delta/test_data_set_0/input_1.pb,sha256=OT2i3PvMIECBJbEVaUsUV8vy3vq8W619CaqivcgRoFg,15 +onnx/backend/test/data/node/test_range_float_type_positive_delta/test_data_set_0/input_2.pb,sha256=_Azn8KSvchMQKs1IHINUFQPQxYbtW9YaipLS2VsJGOE,15 +onnx/backend/test/data/node/test_range_float_type_positive_delta/test_data_set_0/output_0.pb,sha256=CWcrRK3j1PGFEpIxuIl7JKnveAGbOMams_iDdcqFhJA,22 +onnx/backend/test/data/node/test_range_float_type_positive_delta_expanded/model.onnx,sha256=QCdczSdUJTvbo5-AyJr4JX4AG2mBuEnJrw9D63HYL3w,2647 +onnx/backend/test/data/node/test_range_float_type_positive_delta_expanded/test_data_set_0/input_0.pb,sha256=PnwKkhwnmd5quiS20ZgN5aEPgWHukh4Z1L_5tQBqFMg,15 +onnx/backend/test/data/node/test_range_float_type_positive_delta_expanded/test_data_set_0/input_1.pb,sha256=OT2i3PvMIECBJbEVaUsUV8vy3vq8W619CaqivcgRoFg,15 +onnx/backend/test/data/node/test_range_float_type_positive_delta_expanded/test_data_set_0/input_2.pb,sha256=_Azn8KSvchMQKs1IHINUFQPQxYbtW9YaipLS2VsJGOE,15 +onnx/backend/test/data/node/test_range_float_type_positive_delta_expanded/test_data_set_0/output_0.pb,sha256=CWcrRK3j1PGFEpIxuIl7JKnveAGbOMams_iDdcqFhJA,22 +onnx/backend/test/data/node/test_range_int32_type_negative_delta/model.onnx,sha256=OL5MqVgBtEzjYRwqakzjOZwztFjvpzyH94biMkHRm78,174 +onnx/backend/test/data/node/test_range_int32_type_negative_delta/test_data_set_0/input_0.pb,sha256=IcldFo4nLQkGvXdxZT-tL5zhETapZc_ycbchk-cBlP8,15 +onnx/backend/test/data/node/test_range_int32_type_negative_delta/test_data_set_0/input_1.pb,sha256=uNYgLVHrZEoHuuPTegF9h62BtRk4kWwhnCcH08WlTO4,15 +onnx/backend/test/data/node/test_range_int32_type_negative_delta/test_data_set_0/input_2.pb,sha256=85Fwi4unXUNic9TUurAeph_M0WclGePlXKxTJkarxr8,15 +onnx/backend/test/data/node/test_range_int32_type_negative_delta/test_data_set_0/output_0.pb,sha256=Km4G9yM-_Q369BPQXgij7_N3dWiC05nccMJvbVkCwOY,22 
+onnx/backend/test/data/node/test_range_int32_type_negative_delta_expanded/model.onnx,sha256=TwYliCls4W6m3UNvAXO9aWw9kCYfw0rY1lcSPrJLC0Y,2647 +onnx/backend/test/data/node/test_range_int32_type_negative_delta_expanded/test_data_set_0/input_0.pb,sha256=IcldFo4nLQkGvXdxZT-tL5zhETapZc_ycbchk-cBlP8,15 +onnx/backend/test/data/node/test_range_int32_type_negative_delta_expanded/test_data_set_0/input_1.pb,sha256=uNYgLVHrZEoHuuPTegF9h62BtRk4kWwhnCcH08WlTO4,15 +onnx/backend/test/data/node/test_range_int32_type_negative_delta_expanded/test_data_set_0/input_2.pb,sha256=85Fwi4unXUNic9TUurAeph_M0WclGePlXKxTJkarxr8,15 +onnx/backend/test/data/node/test_range_int32_type_negative_delta_expanded/test_data_set_0/output_0.pb,sha256=Km4G9yM-_Q369BPQXgij7_N3dWiC05nccMJvbVkCwOY,22 +onnx/backend/test/data/node/test_reciprocal/model.onnx,sha256=9cYhyYrtsZnNsprI4Lv2Mo4iKGoD2D-2tKAXYkZ21fc,111 +onnx/backend/test/data/node/test_reciprocal/test_data_set_0/input_0.pb,sha256=GgRN6i1Z1pXIrsvfZWkp52hvnZToD0J1d6uAsH-ysOw,254 +onnx/backend/test/data/node/test_reciprocal/test_data_set_0/output_0.pb,sha256=Iw7Mqud4Obp2fRS-HaenQ1G7QoOphufczelvmamQGJY,254 +onnx/backend/test/data/node/test_reciprocal_example/model.onnx,sha256=zqyZFVq34gf2pggBHznhwsBXrlELzUaUou3suG758Ew,103 +onnx/backend/test/data/node/test_reciprocal_example/test_data_set_0/input_0.pb,sha256=hIDv23oySvd30UHcdMkH6btwYls47z-2HWS1hMB2wdg,17 +onnx/backend/test/data/node/test_reciprocal_example/test_data_set_0/output_0.pb,sha256=ep0bNR1nZSAgCmdKAEIQP7ZNm5_nVb12ngt6I-yLdzo,17 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_example/model.onnx,sha256=ohzqkukts8S1Ekf20SpX5OQEMvQWTUZ5Z3-2iSfLh90,200 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=-tM_bqHqK-65eWP1RbkvxizF8zO7aiimwgb1H-034CM,23 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_example_expanded/model.onnx,sha256=ttJdqNR1C12VkcacAyILL1AX-K5HEYcTINsN2aKF7Nw,385 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=-tM_bqHqK-65eWP1RbkvxizF8zO7aiimwgb1H-034CM,23 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_random/model.onnx,sha256=Z4DTNgCI8ewEd2WZCu1gIgzJ8TOGXYj40T8fpAB-E2E,199 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=3IYq1wIPmuGmuhhsnqmPmvnaA3qpKCI5e2wRanUn-EI,23 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_random_expanded/model.onnx,sha256=3RfBSyOsfEoWya4S8zdLwRBeKYrw3P9-pR52bMMbB5c,382 
+onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_l1_default_axes_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=3IYq1wIPmuGmuhhsnqmPmvnaA3qpKCI5e2wRanUn-EI,23 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_example/model.onnx,sha256=RPmBjeHx3iQ_RPcKvTxKCE6bveoJrtMXs_32KKOUVTE,190 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_example/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_example/test_data_set_0/output_0.pb,sha256=JC-ByfOkizIkgJ3nwdBl673paZR_4rtYThjJ7UCL2Bs,41 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_example_expanded/model.onnx,sha256=k4g1LJhbvidKtD_1QgBwEVd0EB-HfFOUM5MXwWk7PS4,363 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=JC-ByfOkizIkgJ3nwdBl673paZR_4rtYThjJ7UCL2Bs,41 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_random/model.onnx,sha256=2MJdQR4vCOCKHEKWhYRnZvxFfS_EoYcBmzZV9YTc--8,189 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_random/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_random/test_data_set_0/output_0.pb,sha256=boLxo_tKTY-Elb4ILMYTWLBXdqS8I2dDPXrh7wlJYdE,41 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_random_expanded/model.onnx,sha256=64repJxPz_8w13hBcF4miv8kKtiQBa9KCOGelXcHemk,360 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l1_do_not_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=boLxo_tKTY-Elb4ILMYTWLBXdqS8I2dDPXrh7wlJYdE,41 +onnx/backend/test/data/node/test_reduce_l1_empty_set/model.onnx,sha256=X3t5PMvEjg1SyM5zxLLCchMnO2mlypzYA41BlLGXg78,180 +onnx/backend/test/data/node/test_reduce_l1_empty_set/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_l1_empty_set/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_l1_empty_set/test_data_set_0/output_0.pb,sha256=dkauAAxfP09SxG8x1uXFNv8Ndb1oDMAKAb8GRYfbrG0,51 +onnx/backend/test/data/node/test_reduce_l1_empty_set_expanded/model.onnx,sha256=DbWrAKdTB7OoI-cR8yK3Tx3c1uhnKNCqT2-BAnOSeIU,325 
+onnx/backend/test/data/node/test_reduce_l1_empty_set_expanded/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_l1_empty_set_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_l1_empty_set_expanded/test_data_set_0/output_0.pb,sha256=dkauAAxfP09SxG8x1uXFNv8Ndb1oDMAKAb8GRYfbrG0,51 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_example/model.onnx,sha256=8tPPR4vedPEERxpY1rTpUT76lMyxWjvum1LptsNgYKQ,188 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_example/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_example/test_data_set_0/output_0.pb,sha256=dJeaCpTSWSt6LwkQwBqvad3ot7QYb173M1rkBSqLXzY,43 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_example_expanded/model.onnx,sha256=UIzfR9nHpxRp8vQD4-FdQk1x9_EFbL_zMatXqYbgcT4,349 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_example_expanded/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_example_expanded/test_data_set_0/output_0.pb,sha256=dJeaCpTSWSt6LwkQwBqvad3ot7QYb173M1rkBSqLXzY,43 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_random/model.onnx,sha256=vMPnjNZPytsdjAWg_-FIT_LynV0Fz_mPTVr6w4OmFjQ,187 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_random/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_random/test_data_set_0/output_0.pb,sha256=GLZKMWiKlhBmVVM0kbK38-GloFGIoR7eFzqiQlIojqA,43 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_random_expanded/model.onnx,sha256=0vF2lI0lLQfv3f8ErJiKhVx8g2awf64E4KJtczFs6yw,346 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_random_expanded/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l1_keep_dims_random_expanded/test_data_set_0/output_0.pb,sha256=GLZKMWiKlhBmVVM0kbK38-GloFGIoR7eFzqiQlIojqA,43 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_example/model.onnx,sha256=v0bGIRG0HAaaWr8yKBWnfW36snQJuKgB-bgjx_Wz9no,202 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_example/test_data_set_0/input_1.pb,sha256=TQcMG4GWMtXqeLfE_sX7GRTpBRPd7PNq1CmUW4rnTh8,20 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_example/test_data_set_0/output_0.pb,sha256=dJeaCpTSWSt6LwkQwBqvad3ot7QYb173M1rkBSqLXzY,43 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_example_expanded/model.onnx,sha256=yEu6LJV-6EvuGZiIqt2rVS0OpWTMAwE8a1BZhMt0Zyo,392 
+onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_example_expanded/test_data_set_0/input_1.pb,sha256=TQcMG4GWMtXqeLfE_sX7GRTpBRPd7PNq1CmUW4rnTh8,20 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_example_expanded/test_data_set_0/output_0.pb,sha256=dJeaCpTSWSt6LwkQwBqvad3ot7QYb173M1rkBSqLXzY,43 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_random/model.onnx,sha256=c2tLsQtNx4nsU6LOI_6kkmG6HkA8gHBO8EGmBcvnXIY,201 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_random/test_data_set_0/input_1.pb,sha256=TQcMG4GWMtXqeLfE_sX7GRTpBRPd7PNq1CmUW4rnTh8,20 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_random/test_data_set_0/output_0.pb,sha256=GLZKMWiKlhBmVVM0kbK38-GloFGIoR7eFzqiQlIojqA,43 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_random_expanded/model.onnx,sha256=gQtFJjAYWOPM9aHZzMWZrSksp_5-r4irB_7uj6SLmhU,389 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_random_expanded/test_data_set_0/input_1.pb,sha256=TQcMG4GWMtXqeLfE_sX7GRTpBRPd7PNq1CmUW4rnTh8,20 +onnx/backend/test/data/node/test_reduce_l1_negative_axes_keep_dims_random_expanded/test_data_set_0/output_0.pb,sha256=GLZKMWiKlhBmVVM0kbK38-GloFGIoR7eFzqiQlIojqA,43 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_example/model.onnx,sha256=x3If-zJSSwsskknM8IMaAc-Q1HlsDhAVXNsVuGXxxmk,200 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=1nRvbw4k8gX2Lm2fHXH02jiiHY6FJ_bDDg0ZGJ14r08,23 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_example_expanded/model.onnx,sha256=y7da8FPFP4TiXaD3Mp2vlP_aOpGaU6PlqrVjOiULGoc,951 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=1nRvbw4k8gX2Lm2fHXH02jiiHY6FJ_bDDg0ZGJ14r08,23 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_random/model.onnx,sha256=4bAdBwk_5DWKjJezOCbPeyRYwzguBTYveoYAWqep2nA,199 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 
+onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=PIATHW_5LGj0-_ZFZ4z8TpxbqFnMZNPE0ugw_WZjiuM,23 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_random_expanded/model.onnx,sha256=QqnCJAyaLKEUiZ7fqFmm3pCWZIoRVtjeHEMbly8vlgI,942 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_l2_default_axes_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=PIATHW_5LGj0-_ZFZ4z8TpxbqFnMZNPE0ugw_WZjiuM,23 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_example/model.onnx,sha256=VXrFAN0MVP4cc2Bo7NZ3334EsiiV3YTy_zJbWRCn0FM,190 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_example/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_example/test_data_set_0/output_0.pb,sha256=c1D0KDheSLTWalNQZaabVbJ79Rr_RID0w21TeOewaBM,41 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_example_expanded/model.onnx,sha256=pCmGC5F9uaFhRXuOdyXdn3Ebxnk6osLn35P-rtOb2KM,893 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=c1D0KDheSLTWalNQZaabVbJ79Rr_RID0w21TeOewaBM,41 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_random/model.onnx,sha256=CvpFw-JYy4MzUPdpdx4RsFNsFQE8z2_ZswiBS_C1HaA,189 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_random/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_random/test_data_set_0/output_0.pb,sha256=JOKjva3pUba9oCjoOdZ5D5cn8DmqUxB4ukD3fJeSbSk,41 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_random_expanded/model.onnx,sha256=vfO6qCnlDHc6SIkcNupjqryhQNvrDVntLwWQPGep3W0,884 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l2_do_not_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=JOKjva3pUba9oCjoOdZ5D5cn8DmqUxB4ukD3fJeSbSk,41 +onnx/backend/test/data/node/test_reduce_l2_empty_set/model.onnx,sha256=bx1dkq_nAG4aOg4tdmvBohm0JY7SyNE2YKM1bWWIhuw,180 +onnx/backend/test/data/node/test_reduce_l2_empty_set/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 
+onnx/backend/test/data/node/test_reduce_l2_empty_set/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_l2_empty_set/test_data_set_0/output_0.pb,sha256=dkauAAxfP09SxG8x1uXFNv8Ndb1oDMAKAb8GRYfbrG0,51 +onnx/backend/test/data/node/test_reduce_l2_empty_set_expanded/model.onnx,sha256=LTg4WIPzxD8I35meif6I_kDj5fVA7TrOfPZhFTqv4xM,771 +onnx/backend/test/data/node/test_reduce_l2_empty_set_expanded/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_l2_empty_set_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_l2_empty_set_expanded/test_data_set_0/output_0.pb,sha256=dkauAAxfP09SxG8x1uXFNv8Ndb1oDMAKAb8GRYfbrG0,51 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_example/model.onnx,sha256=z66shsNj20DpEmucNL9xyFGHMMCNhxIFGt5H0gOBNYk,188 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_example/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_example/test_data_set_0/output_0.pb,sha256=jEsoUMRhG4OeYC5KTm_HKvd8ZN1iEJzVUEOlNq3zsow,43 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_example_expanded/model.onnx,sha256=Cv4-IzsbBcliMyx6QsJckfp2CeUFTJSo77QmoHXPhrI,843 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_example_expanded/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_example_expanded/test_data_set_0/output_0.pb,sha256=jEsoUMRhG4OeYC5KTm_HKvd8ZN1iEJzVUEOlNq3zsow,43 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_random/model.onnx,sha256=mUGEy6kiSHSdFR1SPWYA8uUyKQkNwYKS2lO6wy0WgqY,187 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_random/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_random/test_data_set_0/output_0.pb,sha256=rPsbYUQn6_Cmp7zcRSMK4aDCf4R0jlttuYSLZj9JV6Y,43 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_random_expanded/model.onnx,sha256=oKYJ6egnCfRCEP2A7UY7BY6YJdfCdIyU4FY07ZHtIjA,834 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_random_expanded/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_l2_keep_dims_random_expanded/test_data_set_0/output_0.pb,sha256=rPsbYUQn6_Cmp7zcRSMK4aDCf4R0jlttuYSLZj9JV6Y,43 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_example/model.onnx,sha256=pjqo4Qrdy1yBnQntfi1AWBb0mKHhj08HFyvBAk8d4m4,202 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 
+onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_example/test_data_set_0/input_1.pb,sha256=TQcMG4GWMtXqeLfE_sX7GRTpBRPd7PNq1CmUW4rnTh8,20 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_example/test_data_set_0/output_0.pb,sha256=jEsoUMRhG4OeYC5KTm_HKvd8ZN1iEJzVUEOlNq3zsow,43 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_example_expanded/model.onnx,sha256=TAgSxvizyBfPDSFzEZO2HR09ggQb3IBptXJDEiubn04,969 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_example_expanded/test_data_set_0/input_1.pb,sha256=TQcMG4GWMtXqeLfE_sX7GRTpBRPd7PNq1CmUW4rnTh8,20 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_example_expanded/test_data_set_0/output_0.pb,sha256=jEsoUMRhG4OeYC5KTm_HKvd8ZN1iEJzVUEOlNq3zsow,43 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_random/model.onnx,sha256=_SnWGbphTilYxGrCD0L2vsViRqONP1Wc3UiXvYCvg4Y,201 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_random/test_data_set_0/input_1.pb,sha256=TQcMG4GWMtXqeLfE_sX7GRTpBRPd7PNq1CmUW4rnTh8,20 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_random/test_data_set_0/output_0.pb,sha256=rPsbYUQn6_Cmp7zcRSMK4aDCf4R0jlttuYSLZj9JV6Y,43 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_random_expanded/model.onnx,sha256=SS_yV_VfVlGoNsYVTtNDm7I9nbNsnSwx8mmOTDRu9Bg,960 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_random_expanded/test_data_set_0/input_1.pb,sha256=TQcMG4GWMtXqeLfE_sX7GRTpBRPd7PNq1CmUW4rnTh8,20 +onnx/backend/test/data/node/test_reduce_l2_negative_axes_keep_dims_random_expanded/test_data_set_0/output_0.pb,sha256=rPsbYUQn6_Cmp7zcRSMK4aDCf4R0jlttuYSLZj9JV6Y,43 +onnx/backend/test/data/node/test_reduce_log_sum_asc_axes/model.onnx,sha256=08cgOZBs0l3W00v9s8wZpTPV3rOcIH-j5QRrsP05ZN4,180 +onnx/backend/test/data/node/test_reduce_log_sum_asc_axes/test_data_set_0/input_0.pb,sha256=hiOZW6YmCAGEqJ0ELt4OVjMm3ncS3BqXQ9ngd-FPcN8,257 +onnx/backend/test/data/node/test_reduce_log_sum_asc_axes/test_data_set_0/input_1.pb,sha256=3uvlJ_2xYPsfOzK-E0Fz2WR-a1Yvmf3r1G6DZPUQ8v0,28 +onnx/backend/test/data/node/test_reduce_log_sum_asc_axes/test_data_set_0/output_0.pb,sha256=TtThcW8lQGZNcOpVgIggfFRZhLJ-dEq7i2HJ7P--ZQU,35 +onnx/backend/test/data/node/test_reduce_log_sum_asc_axes_expanded/model.onnx,sha256=8awpV0QZ2b9diWVxOb3f6hIEeHsO4EX0Ef5LPseXuwI,343 +onnx/backend/test/data/node/test_reduce_log_sum_asc_axes_expanded/test_data_set_0/input_0.pb,sha256=hiOZW6YmCAGEqJ0ELt4OVjMm3ncS3BqXQ9ngd-FPcN8,257 +onnx/backend/test/data/node/test_reduce_log_sum_asc_axes_expanded/test_data_set_0/input_1.pb,sha256=3uvlJ_2xYPsfOzK-E0Fz2WR-a1Yvmf3r1G6DZPUQ8v0,28 +onnx/backend/test/data/node/test_reduce_log_sum_asc_axes_expanded/test_data_set_0/output_0.pb,sha256=TtThcW8lQGZNcOpVgIggfFRZhLJ-dEq7i2HJ7P--ZQU,35 +onnx/backend/test/data/node/test_reduce_log_sum_default/model.onnx,sha256=qbcysYmQ32u_zU_D4iArr69SAiwEAtZcY2jQBLOF150,170 
+onnx/backend/test/data/node/test_reduce_log_sum_default/test_data_set_0/input_0.pb,sha256=EWB21V0Qg_0r93hTXZxBvBlxpp7ho0W9NhfaNQHoyHk,257 +onnx/backend/test/data/node/test_reduce_log_sum_default/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_log_sum_default/test_data_set_0/output_0.pb,sha256=VX5i5qOEuFQmlsL04_Mi021aVGoCe4VbAhD1x_xXErc,23 +onnx/backend/test/data/node/test_reduce_log_sum_default_expanded/model.onnx,sha256=v8YwEvh9UQoFyTt5FyJWC2Si_ElcETHA1aRkg-vrFu4,348 +onnx/backend/test/data/node/test_reduce_log_sum_default_expanded/test_data_set_0/input_0.pb,sha256=EWB21V0Qg_0r93hTXZxBvBlxpp7ho0W9NhfaNQHoyHk,257 +onnx/backend/test/data/node/test_reduce_log_sum_default_expanded/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_log_sum_default_expanded/test_data_set_0/output_0.pb,sha256=VX5i5qOEuFQmlsL04_Mi021aVGoCe4VbAhD1x_xXErc,23 +onnx/backend/test/data/node/test_reduce_log_sum_desc_axes/model.onnx,sha256=cJkZ61Jw2xic321dMt_B96LpK8tAT8dQE7Z3Ugm_9wk,181 +onnx/backend/test/data/node/test_reduce_log_sum_desc_axes/test_data_set_0/input_0.pb,sha256=EWB21V0Qg_0r93hTXZxBvBlxpp7ho0W9NhfaNQHoyHk,257 +onnx/backend/test/data/node/test_reduce_log_sum_desc_axes/test_data_set_0/input_1.pb,sha256=b9vD-lAvbO5WdOMBewE0sjS9vcl6_4iAPW_FFXA_LLc,28 +onnx/backend/test/data/node/test_reduce_log_sum_desc_axes/test_data_set_0/output_0.pb,sha256=q6wBAMfbpLLolGYAiBt9QC9EEED4D5XYzGF1tSQQgLg,27 +onnx/backend/test/data/node/test_reduce_log_sum_desc_axes_expanded/model.onnx,sha256=of_IuZWjgVPNkM-fv_G4AR5eyrmU7oN8dLonOuoCVqw,346 +onnx/backend/test/data/node/test_reduce_log_sum_desc_axes_expanded/test_data_set_0/input_0.pb,sha256=EWB21V0Qg_0r93hTXZxBvBlxpp7ho0W9NhfaNQHoyHk,257 +onnx/backend/test/data/node/test_reduce_log_sum_desc_axes_expanded/test_data_set_0/input_1.pb,sha256=b9vD-lAvbO5WdOMBewE0sjS9vcl6_4iAPW_FFXA_LLc,28 +onnx/backend/test/data/node/test_reduce_log_sum_desc_axes_expanded/test_data_set_0/output_0.pb,sha256=q6wBAMfbpLLolGYAiBt9QC9EEED4D5XYzGF1tSQQgLg,27 +onnx/backend/test/data/node/test_reduce_log_sum_empty_set/model.onnx,sha256=jXiLk-jqzZ2E7d9Wvtm76r8ATvuVzqtsk-1R8kgCnNE,189 +onnx/backend/test/data/node/test_reduce_log_sum_empty_set/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_log_sum_empty_set/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_empty_set/test_data_set_0/output_0.pb,sha256=NpmcJW0FZ2Uts6tVgBdKtSslkmnlqV43198R71w-KJ0,51 +onnx/backend/test/data/node/test_reduce_log_sum_empty_set_expanded/model.onnx,sha256=mYzxmXNlGeKktZoyOK4xp9i92EKvR0w_f2zYX0oK3Z8,354 +onnx/backend/test/data/node/test_reduce_log_sum_empty_set_expanded/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_log_sum_empty_set_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_empty_set_expanded/test_data_set_0/output_0.pb,sha256=NpmcJW0FZ2Uts6tVgBdKtSslkmnlqV43198R71w-KJ0,51 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_example/model.onnx,sha256=-LSKr_3WMkVC8ft7_prQsvhatLidnsA4tO3X4QPo1nQ,216 
+onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=M3tV_VhTkcs-tg8qRMjNbRFaieo0qw8nITJY39Tws_A,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=8-s0mM_b-21xuh9gCxveN-F_DCnGgiCmS24Mz2sJGRM,27 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_example_expanded/model.onnx,sha256=WGnECbU7Pcxyzo3EcOi-GH4DAd67pG9lRRpm_6mzaH4,1092 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=M3tV_VhTkcs-tg8qRMjNbRFaieo0qw8nITJY39Tws_A,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=8-s0mM_b-21xuh9gCxveN-F_DCnGgiCmS24Mz2sJGRM,27 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_random/model.onnx,sha256=9-89fYqu5IADjGEbbqzQIF2QdUCOQ5RJQsQk-Y7TeYg,215 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=VTaqgVd8jmvGxs2np_0IVZjhHMhxbltktKMVZ7oKQSw,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=Q17PU4QR2ke-Ms3Jbvm8YwIVODqct6D8iOBWx1lXHUk,27 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_random_expanded/model.onnx,sha256=Jsqj4H52aLk8mS7O2tA3IIOAKqRJV5SpHnBpGkWlAxU,1083 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=VTaqgVd8jmvGxs2np_0IVZjhHMhxbltktKMVZ7oKQSw,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_log_sum_exp_default_axes_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=Q17PU4QR2ke-Ms3Jbvm8YwIVODqct6D8iOBWx1lXHUk,27 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_example/model.onnx,sha256=evJeQt8UxeVRdg5hhuhA372TwI4KC85TooGPm2pmGMk,206 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_example/test_data_set_0/input_0.pb,sha256=M3tV_VhTkcs-tg8qRMjNbRFaieo0qw8nITJY39Tws_A,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_example/test_data_set_0/output_0.pb,sha256=jLIBDVFsMai84zPcahQDicWPNPPjc3IHy3sJUKKjlHI,65 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_example_expanded/model.onnx,sha256=H60GUj17DDSlhCl--rc3GuPdkZZvZfXK99Jb9PCWkw4,1033 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=M3tV_VhTkcs-tg8qRMjNbRFaieo0qw8nITJY39Tws_A,112 
+onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=jLIBDVFsMai84zPcahQDicWPNPPjc3IHy3sJUKKjlHI,65 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_random/model.onnx,sha256=7S4gYBITpFk_DyMhCkrv0PXNBWhZi3lUnFQbEas3ZdI,205 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_random/test_data_set_0/input_0.pb,sha256=VTaqgVd8jmvGxs2np_0IVZjhHMhxbltktKMVZ7oKQSw,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_random/test_data_set_0/output_0.pb,sha256=nrLY3WzltnAOteYqGXZ67Wf_STNt6egqdDeCw8ppDTo,65 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_random_expanded/model.onnx,sha256=rsN-mMp8BP-inB1SYEmVKDOtvJ-lpEbyQgmz1_EBmn8,1024 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=VTaqgVd8jmvGxs2np_0IVZjhHMhxbltktKMVZ7oKQSw,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=nrLY3WzltnAOteYqGXZ67Wf_STNt6egqdDeCw8ppDTo,65 +onnx/backend/test/data/node/test_reduce_log_sum_exp_empty_set/model.onnx,sha256=dGL0JVccvsmUNguPuRytf2OcDK23X1cr96nIsZXClVM,196 +onnx/backend/test/data/node/test_reduce_log_sum_exp_empty_set/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_log_sum_exp_empty_set/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_empty_set/test_data_set_0/output_0.pb,sha256=NpmcJW0FZ2Uts6tVgBdKtSslkmnlqV43198R71w-KJ0,51 +onnx/backend/test/data/node/test_reduce_log_sum_exp_empty_set_expanded/model.onnx,sha256=0_dfhhLnD0VsGbc-EUt4QFTws7ceuIWGO0-WmVAwyJA,911 +onnx/backend/test/data/node/test_reduce_log_sum_exp_empty_set_expanded/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_log_sum_exp_empty_set_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_empty_set_expanded/test_data_set_0/output_0.pb,sha256=NpmcJW0FZ2Uts6tVgBdKtSslkmnlqV43198R71w-KJ0,51 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_example/model.onnx,sha256=TrGMChcDC10sgL-egbi-a_I1oqVT-m5t3rd26n79apA,203 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_example/test_data_set_0/input_0.pb,sha256=M3tV_VhTkcs-tg8qRMjNbRFaieo0qw8nITJY39Tws_A,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_example/test_data_set_0/output_0.pb,sha256=DJQqS-8gXhP8nKgBqwU9pn53ssTbxinuUpr6DbULG9o,67 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_example_expanded/model.onnx,sha256=amoKiR4Clugn99nU0EaQg1nRoABNrU5_ZlY6MbzJ0KY,974 
+onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=M3tV_VhTkcs-tg8qRMjNbRFaieo0qw8nITJY39Tws_A,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=DJQqS-8gXhP8nKgBqwU9pn53ssTbxinuUpr6DbULG9o,67 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_random/model.onnx,sha256=YlDp2phOYKOpNx4anOtWubW2l6ZX5fDQgpAW0Rxu8wo,202 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_random/test_data_set_0/input_0.pb,sha256=VTaqgVd8jmvGxs2np_0IVZjhHMhxbltktKMVZ7oKQSw,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_random/test_data_set_0/output_0.pb,sha256=UAn7YoW1uByaGaT4UlyWJYbJ4-soYhmvWGF1vkUrLTc,67 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_random_expanded/model.onnx,sha256=QBma0e8Egn1iqejH0h3jNXjqieYy956imeffXg9xbEI,965 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=VTaqgVd8jmvGxs2np_0IVZjhHMhxbltktKMVZ7oKQSw,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=UAn7YoW1uByaGaT4UlyWJYbJ4-soYhmvWGF1vkUrLTc,67 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_example/model.onnx,sha256=mTDZeOW7N99CyYBbe_MquaYlz_4PdDJndCJRmQ1-dbI,217 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=M3tV_VhTkcs-tg8qRMjNbRFaieo0qw8nITJY39Tws_A,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=DJQqS-8gXhP8nKgBqwU9pn53ssTbxinuUpr6DbULG9o,67 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded/model.onnx,sha256=KIK93yWXnjSxoOwzEBW7UqYBjzvJXlASuOQVAWcNn2Q,1101 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=M3tV_VhTkcs-tg8qRMjNbRFaieo0qw8nITJY39Tws_A,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=DJQqS-8gXhP8nKgBqwU9pn53ssTbxinuUpr6DbULG9o,67 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_random/model.onnx,sha256=ua6HYd_IG6l-E3FdKTNSFGu5HkiFwxLP1GknDq1w6Bw,216 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=VTaqgVd8jmvGxs2np_0IVZjhHMhxbltktKMVZ7oKQSw,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 
+onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=UAn7YoW1uByaGaT4UlyWJYbJ4-soYhmvWGF1vkUrLTc,67 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded/model.onnx,sha256=jTmg7nQJOHX8Q4NcyKghDudjv3ZXqLKhpoEYbS7C4qo,1092 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=VTaqgVd8jmvGxs2np_0IVZjhHMhxbltktKMVZ7oKQSw,112 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=UAn7YoW1uByaGaT4UlyWJYbJ4-soYhmvWGF1vkUrLTc,67 +onnx/backend/test/data/node/test_reduce_log_sum_negative_axes/model.onnx,sha256=2r5RqRgA1ZB4SAlPBS-zvLiOi2_ZtK5JxeEymrQEbTk,176 +onnx/backend/test/data/node/test_reduce_log_sum_negative_axes/test_data_set_0/input_0.pb,sha256=EWB21V0Qg_0r93hTXZxBvBlxpp7ho0W9NhfaNQHoyHk,257 +onnx/backend/test/data/node/test_reduce_log_sum_negative_axes/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_log_sum_negative_axes/test_data_set_0/output_0.pb,sha256=6Yx7ZZqxOko4EPFziIZlhqwlJqLZz77lIAR6lkQ5Vpg,79 +onnx/backend/test/data/node/test_reduce_log_sum_negative_axes_expanded/model.onnx,sha256=HrCHh4D9-SccREbNVr0KKOxTXJYknt_bqZThp3M9DOU,366 +onnx/backend/test/data/node/test_reduce_log_sum_negative_axes_expanded/test_data_set_0/input_0.pb,sha256=EWB21V0Qg_0r93hTXZxBvBlxpp7ho0W9NhfaNQHoyHk,257 +onnx/backend/test/data/node/test_reduce_log_sum_negative_axes_expanded/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_log_sum_negative_axes_expanded/test_data_set_0/output_0.pb,sha256=6Yx7ZZqxOko4EPFziIZlhqwlJqLZz77lIAR6lkQ5Vpg,79 +onnx/backend/test/data/node/test_reduce_max_bool_inputs/model.onnx,sha256=oxWpcTS9-gG5EkW2xfDZjjsa3kAUUVPuayj8_NDE_Ts,176 +onnx/backend/test/data/node/test_reduce_max_bool_inputs/test_data_set_0/input_0.pb,sha256=ouIG7Bj8ok8jytOSP6d7w90Kt_o1CcnM6U9SabN2X6w,22 +onnx/backend/test/data/node/test_reduce_max_bool_inputs/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_max_bool_inputs/test_data_set_0/output_0.pb,sha256=yHrgLqFsr7-DB7lKf0H5urDhCkMMFmmQZ9yRGfH3cWg,21 +onnx/backend/test/data/node/test_reduce_max_default_axes_keepdim_example/model.onnx,sha256=vE5drf-iydiwgV7A9GR7LcNtlAcOb4i389revGuEYjs,175 +onnx/backend/test/data/node/test_reduce_max_default_axes_keepdim_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_max_default_axes_keepdim_example/test_data_set_0/output_0.pb,sha256=Bb3KrZse37as3WFj056V_ClYfrCyJjnt2DnhrGk94j8,23 +onnx/backend/test/data/node/test_reduce_max_default_axes_keepdims_random/model.onnx,sha256=vanvh0gwKcMdDQIPsT3BArVb2Mg-XoqL4fY4Ng_mQO8,175 +onnx/backend/test/data/node/test_reduce_max_default_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_max_default_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=ozqWkbR-YL-E8NoR8OqPUH6OkLXjb-vzUIeOxUHkbBA,23 
+onnx/backend/test/data/node/test_reduce_max_do_not_keepdims_example/model.onnx,sha256=NFoSyvIEuAsNSntvGbAShr9g9qjoGvWjrTTmeAeMliA,192 +onnx/backend/test/data/node/test_reduce_max_do_not_keepdims_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_max_do_not_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_max_do_not_keepdims_example/test_data_set_0/output_0.pb,sha256=HZffK_Pkm02avtHvOv7mhM-E3LY-MijOAq1-YgGuDV0,41 +onnx/backend/test/data/node/test_reduce_max_do_not_keepdims_random/model.onnx,sha256=c7rXRCGW7S7y1GaEovkkfCKSBDVJhmM8DqamGq8liGc,191 +onnx/backend/test/data/node/test_reduce_max_do_not_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_max_do_not_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_max_do_not_keepdims_random/test_data_set_0/output_0.pb,sha256=EC7d1JJPDs9E3Pcqp3FMyYRM_Ydclfwe8J7KTg0Ae7Y,41 +onnx/backend/test/data/node/test_reduce_max_keepdims_example/model.onnx,sha256=feVjIwASiT48qIHHEiGzGXmS_FLc2LgHWI-uVXYIQAo,189 +onnx/backend/test/data/node/test_reduce_max_keepdims_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_max_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_max_keepdims_example/test_data_set_0/output_0.pb,sha256=Y7JjZDTAji1tTbKuUwgzc6DaSNOnMOpmFJsTekO2MzQ,43 +onnx/backend/test/data/node/test_reduce_max_keepdims_random/model.onnx,sha256=cLMY5hW2k6QjxYHquRfmDO9a0hopP47I3ODrk27xKK4,188 +onnx/backend/test/data/node/test_reduce_max_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_max_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_max_keepdims_random/test_data_set_0/output_0.pb,sha256=U3UiOpILvTohrJ7lDcmCm2IiF8wccl8q9MTyLtVBJ28,43 +onnx/backend/test/data/node/test_reduce_max_negative_axes_keepdims_example/model.onnx,sha256=NEq1WRSDW6dkrjrbU6LQBu2ingjwlNZ0XWd2H7k2x0k,203 +onnx/backend/test/data/node/test_reduce_max_negative_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_max_negative_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_max_negative_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=Y7JjZDTAji1tTbKuUwgzc6DaSNOnMOpmFJsTekO2MzQ,43 +onnx/backend/test/data/node/test_reduce_max_negative_axes_keepdims_random/model.onnx,sha256=YZMSj660Mh4kI73f3IDKmfokYk2EnhwdqEzmdaC5W8M,202 +onnx/backend/test/data/node/test_reduce_max_negative_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_max_negative_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_max_negative_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=U3UiOpILvTohrJ7lDcmCm2IiF8wccl8q9MTyLtVBJ28,43 
+onnx/backend/test/data/node/test_reduce_mean_default_axes_keepdims_example/model.onnx,sha256=0rdbzI9NKpScLSUZYf6psg5BpGqAWCWX7R3DbJB3rUw,204 +onnx/backend/test/data/node/test_reduce_mean_default_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_mean_default_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_mean_default_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=7ibSI6LwyCIMwafplWVc4TDIpFpsp_gp9a7jPZQvSiM,23 +onnx/backend/test/data/node/test_reduce_mean_default_axes_keepdims_random/model.onnx,sha256=QuLvz4AXd5SwGI_LYyRq9MjzuwW9bBYuDRTzxZIKEHc,203 +onnx/backend/test/data/node/test_reduce_mean_default_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_mean_default_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_mean_default_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=F1Wj0GTUnCJNo9yidS3ralMUw5yeg62NF2QM4q6lkhI,23 +onnx/backend/test/data/node/test_reduce_mean_do_not_keepdims_example/model.onnx,sha256=hm6mmXP-WpLZSSQpFWvhoTv4TF1C7sz1wDeR9_r2imM,194 +onnx/backend/test/data/node/test_reduce_mean_do_not_keepdims_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_mean_do_not_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_mean_do_not_keepdims_example/test_data_set_0/output_0.pb,sha256=hnISZrOPRTiH5iggcwleJTg6bX6m-U_oEnF8oQuZqxs,41 +onnx/backend/test/data/node/test_reduce_mean_do_not_keepdims_random/model.onnx,sha256=_unWlCItn4pGX1rQ65kNpvv7LVnhftErr4MhWdBHcPI,193 +onnx/backend/test/data/node/test_reduce_mean_do_not_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_mean_do_not_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_mean_do_not_keepdims_random/test_data_set_0/output_0.pb,sha256=kpivv-z4wkCcK-I8shs26UmpmKG6Ytshz2EvgjiwKEw,41 +onnx/backend/test/data/node/test_reduce_mean_keepdims_example/model.onnx,sha256=SLeuMLRrJpaL1whWfjMX2H4vdQcSjVE1j8p_Is2YZxQ,191 +onnx/backend/test/data/node/test_reduce_mean_keepdims_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_mean_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_mean_keepdims_example/test_data_set_0/output_0.pb,sha256=TtXp182U6LiKAlxhCCqkrszUxdjEEvqx9eOk9Znqgk8,43 +onnx/backend/test/data/node/test_reduce_mean_keepdims_random/model.onnx,sha256=2qgVPNkQEUXIIv6XAyHoYtY_NmgTHxu2HNgWD1YVWZc,190 +onnx/backend/test/data/node/test_reduce_mean_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_mean_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_mean_keepdims_random/test_data_set_0/output_0.pb,sha256=wcLIggmeEyvBw7Fc2_7q0gHHnCiJKgrN1bqGxV-R6vw,43 
+onnx/backend/test/data/node/test_reduce_mean_negative_axes_keepdims_example/model.onnx,sha256=Z-hc_RMz3VufOXQULmwK5pnC9MhotEybQrNOG12VSbc,205 +onnx/backend/test/data/node/test_reduce_mean_negative_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_mean_negative_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_mean_negative_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=TtXp182U6LiKAlxhCCqkrszUxdjEEvqx9eOk9Znqgk8,43 +onnx/backend/test/data/node/test_reduce_mean_negative_axes_keepdims_random/model.onnx,sha256=m2HDJeknLP2M0F3usH0iOb44KJcVyPBXlrQlBei-YDc,204 +onnx/backend/test/data/node/test_reduce_mean_negative_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_mean_negative_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_mean_negative_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=wcLIggmeEyvBw7Fc2_7q0gHHnCiJKgrN1bqGxV-R6vw,43 +onnx/backend/test/data/node/test_reduce_min_bool_inputs/model.onnx,sha256=QLEBYG3Cmf7D_JNYX-b8MWZs4O3dQ-GJ9MmdT93Ts-Q,176 +onnx/backend/test/data/node/test_reduce_min_bool_inputs/test_data_set_0/input_0.pb,sha256=ouIG7Bj8ok8jytOSP6d7w90Kt_o1CcnM6U9SabN2X6w,22 +onnx/backend/test/data/node/test_reduce_min_bool_inputs/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_min_bool_inputs/test_data_set_0/output_0.pb,sha256=lPV-pYsggeQAp36hh6TrLqcK9RKBzUmS1igMp-KJMBc,21 +onnx/backend/test/data/node/test_reduce_min_default_axes_keepdims_example/model.onnx,sha256=M_DtcG5iqcqzc98s0DSgqY-N0qRPMqQuw7Wh_FqkfpQ,176 +onnx/backend/test/data/node/test_reduce_min_default_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_min_default_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=G2XC5D9PzsK5GpL4ZHlTK5OxYFrf70ozEzcBkItwsaA,23 +onnx/backend/test/data/node/test_reduce_min_default_axes_keepdims_random/model.onnx,sha256=QLKKrC-5Eujvt-EvyH9WFQTM4_6af5rtmEdX5dghe_c,175 +onnx/backend/test/data/node/test_reduce_min_default_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_min_default_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=fCFjoh4xFCI0bizOVGFspYggfTiFgAas8k1Mb9R46vI,23 +onnx/backend/test/data/node/test_reduce_min_do_not_keepdims_example/model.onnx,sha256=uhZrDdQHcU_BN8oFyZTihiTSuKbIxGTJQDXLQND9Dig,192 +onnx/backend/test/data/node/test_reduce_min_do_not_keepdims_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_min_do_not_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_min_do_not_keepdims_example/test_data_set_0/output_0.pb,sha256=Xu7cTpxA2M0y065t3hqWb0R-MmowoXfqGwu0JwdVU4I,41 +onnx/backend/test/data/node/test_reduce_min_do_not_keepdims_random/model.onnx,sha256=ay7d2CF3QEKEdFoAk1aJ0zVq5qr_h6XmzZ-Uy7s_fxM,191 +onnx/backend/test/data/node/test_reduce_min_do_not_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 
+onnx/backend/test/data/node/test_reduce_min_do_not_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_min_do_not_keepdims_random/test_data_set_0/output_0.pb,sha256=Wh_Dx6lJN0QUmIuWRDP29XGjpEw9R9Tlz2gzQT_XzUU,41 +onnx/backend/test/data/node/test_reduce_min_empty_set/model.onnx,sha256=feU3zn_3mTzQQuJnhMKqRSS3qJqLnA7eF_GQ-rDoxpI,182 +onnx/backend/test/data/node/test_reduce_min_empty_set/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_min_empty_set/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_min_empty_set/test_data_set_0/output_0.pb,sha256=eIU1KG5xXZPe-cHrsnr3fs53PwScOiUMJf0BvHmw830,51 +onnx/backend/test/data/node/test_reduce_min_keepdims_example/model.onnx,sha256=PXL2jvwWdNRFdDTRuPzPtjLvQGO6CA_abXa-oPMtI2s,189 +onnx/backend/test/data/node/test_reduce_min_keepdims_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_min_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_min_keepdims_example/test_data_set_0/output_0.pb,sha256=NU4b8D9taBjUcD7UZpFmlgF6rWEuaX175z8RcR5RV1Y,43 +onnx/backend/test/data/node/test_reduce_min_keepdims_random/model.onnx,sha256=vFPC039OYaXjgPtoRF3dq5n7GKwaRJPI1Ql38xqE2Hc,188 +onnx/backend/test/data/node/test_reduce_min_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_min_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_min_keepdims_random/test_data_set_0/output_0.pb,sha256=mMPCR1Esnn6Ww5w3SHz28klgLCsH3gnvJVLefjQmJMk,43 +onnx/backend/test/data/node/test_reduce_min_negative_axes_keepdims_example/model.onnx,sha256=6Wcrg0issxxFNsBz_rGXOvKbXUyGU-n72hJJN61-2CI,203 +onnx/backend/test/data/node/test_reduce_min_negative_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=a01KOwB3K7t7I9cZN-YI2RWzydygLWYRU-jYD3KV0b4,64 +onnx/backend/test/data/node/test_reduce_min_negative_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_min_negative_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=NU4b8D9taBjUcD7UZpFmlgF6rWEuaX175z8RcR5RV1Y,43 +onnx/backend/test/data/node/test_reduce_min_negative_axes_keepdims_random/model.onnx,sha256=mAzQANQL7yGJEy7ea19UF7emkKR-DE01KeuoqCHsdQg,202 +onnx/backend/test/data/node/test_reduce_min_negative_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_min_negative_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_min_negative_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=mMPCR1Esnn6Ww5w3SHz28klgLCsH3gnvJVLefjQmJMk,43 +onnx/backend/test/data/node/test_reduce_prod_default_axes_keepdims_example/model.onnx,sha256=3QaNyMuspKVWgPGbu8UyFpN5KDA0HDFWjv1MfjOx2EU,178 +onnx/backend/test/data/node/test_reduce_prod_default_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 
+onnx/backend/test/data/node/test_reduce_prod_default_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=L8bDbzhMnsmauRTvBHYtKfDGUrj8Mdx-crYz4NARWz4,23 +onnx/backend/test/data/node/test_reduce_prod_default_axes_keepdims_random/model.onnx,sha256=NyD7Og13fGa5kHZILIXFTyBJOSeeDfXAj3_8fYbvGlc,177 +onnx/backend/test/data/node/test_reduce_prod_default_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_prod_default_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=TOI_r_i3le4r-O5Z1HZerCe0ZmjJ8KOtsHw2nY_Utas,23 +onnx/backend/test/data/node/test_reduce_prod_do_not_keepdims_example/model.onnx,sha256=Nech7zzE0avGRtbwgjk4rgQmC-ETA8Vlw190ScCnqc8,194 +onnx/backend/test/data/node/test_reduce_prod_do_not_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_prod_do_not_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_prod_do_not_keepdims_example/test_data_set_0/output_0.pb,sha256=mNWtHVOjuoK-vYvbrudn6UlFXq2_GLE6SHlutihvD5k,41 +onnx/backend/test/data/node/test_reduce_prod_do_not_keepdims_random/model.onnx,sha256=3oeBxWFvIGk1KI4AtIqnKGfDCR1BsxwXeL5Oab3mFKE,193 +onnx/backend/test/data/node/test_reduce_prod_do_not_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_prod_do_not_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_prod_do_not_keepdims_random/test_data_set_0/output_0.pb,sha256=fvB-o3vzCwkRPNYZVs6InuQtFic0cQ__H0AXKP50rmo,41 +onnx/backend/test/data/node/test_reduce_prod_empty_set/model.onnx,sha256=KTMntXHTNW7Y_rsnn7UJEI45K5CxVr-lw-XlrNgBJgQ,184 +onnx/backend/test/data/node/test_reduce_prod_empty_set/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_prod_empty_set/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_prod_empty_set/test_data_set_0/output_0.pb,sha256=QSjeQjt3JdGgRAGg5-RM203ryR97LsLN1OasGjZsU78,51 +onnx/backend/test/data/node/test_reduce_prod_keepdims_example/model.onnx,sha256=4kJTiebaZg-DH2l-PwkadFWRrS_qRv6g6wlGHd6xmo8,191 +onnx/backend/test/data/node/test_reduce_prod_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_prod_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_prod_keepdims_example/test_data_set_0/output_0.pb,sha256=7ok4vwYSfLvbWG8irRUQBo3WGLl3s_rgJXemrrJRqyw,43 +onnx/backend/test/data/node/test_reduce_prod_keepdims_random/model.onnx,sha256=FG6mPY0kRbr-bPsuhnL8YEQ7CqhltGMk34CeyL3Melo,190 +onnx/backend/test/data/node/test_reduce_prod_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_prod_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_prod_keepdims_random/test_data_set_0/output_0.pb,sha256=1ila67umCRwEUTp6trOZSoLm3kfR0zVDxb20FWYyRa0,43 
+onnx/backend/test/data/node/test_reduce_prod_negative_axes_keepdims_example/model.onnx,sha256=H6lGw_WyHA59zv-lv2lHVTaCGqHgaR_o8-fIzuoyM3Q,205 +onnx/backend/test/data/node/test_reduce_prod_negative_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_prod_negative_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_prod_negative_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=7ok4vwYSfLvbWG8irRUQBo3WGLl3s_rgJXemrrJRqyw,43 +onnx/backend/test/data/node/test_reduce_prod_negative_axes_keepdims_random/model.onnx,sha256=v57sz16zLEhnsl-RVPyy7VG2D-oE7tSZprK6CUQCgdM,204 +onnx/backend/test/data/node/test_reduce_prod_negative_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_prod_negative_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_prod_negative_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=1ila67umCRwEUTp6trOZSoLm3kfR0zVDxb20FWYyRa0,43 +onnx/backend/test/data/node/test_reduce_sum_default_axes_keepdims_example/model.onnx,sha256=mZmJR-3SFDQ--6G0CwQ6IGgLrtTA5b-y71gMuYmBQkI,202 +onnx/backend/test/data/node/test_reduce_sum_default_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_default_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_sum_default_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=-tM_bqHqK-65eWP1RbkvxizF8zO7aiimwgb1H-034CM,23 +onnx/backend/test/data/node/test_reduce_sum_default_axes_keepdims_random/model.onnx,sha256=pJt_pNmAgxZaTmbxZ5b3hdtClp2QkSL01LaW9dh_2bA,201 +onnx/backend/test/data/node/test_reduce_sum_default_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_default_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_sum_default_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=gP6xq40fhaM6-e7UNw1HN64G9nmjUBWgSvnzS5itHHU,23 +onnx/backend/test/data/node/test_reduce_sum_do_not_keepdims_example/model.onnx,sha256=fidSdw3yQJhcR7P810Ln9DAy913jRkLjVFAu9Btcs2E,192 +onnx/backend/test/data/node/test_reduce_sum_do_not_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_do_not_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_do_not_keepdims_example/test_data_set_0/output_0.pb,sha256=rUr7bDeYdFfv8ODlJx3VxZCObcOQawo6PvpwwxxVyVk,41 +onnx/backend/test/data/node/test_reduce_sum_do_not_keepdims_random/model.onnx,sha256=jfI2GTWwCBA7kYoQX3ltR7riRcXzCgAlFeoPExOcq6k,191 +onnx/backend/test/data/node/test_reduce_sum_do_not_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_do_not_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 
+onnx/backend/test/data/node/test_reduce_sum_do_not_keepdims_random/test_data_set_0/output_0.pb,sha256=Epgb6VirPLNuYKYEeE3PT2w4qkKao1u3CNpZaiHnocY,41 +onnx/backend/test/data/node/test_reduce_sum_empty_axes_input_noop_example/model.onnx,sha256=Le5t6Bv1fb7z-_UeTOsYkbt3SrjnaPb6jhP9dSki1Bg,231 +onnx/backend/test/data/node/test_reduce_sum_empty_axes_input_noop_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_empty_axes_input_noop_example/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_sum_empty_axes_input_noop_example/test_data_set_0/output_0.pb,sha256=hxZ8kmFTn4aU_veDr-LP4a6v4IilChYy9YIOoEVHfv8,67 +onnx/backend/test/data/node/test_reduce_sum_empty_set/model.onnx,sha256=2vpXb6j53VqjPrjUtjUDP4FYGDt3-c0gton55tzIuxk,182 +onnx/backend/test/data/node/test_reduce_sum_empty_set/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_sum_empty_set/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_empty_set/test_data_set_0/output_0.pb,sha256=dkauAAxfP09SxG8x1uXFNv8Ndb1oDMAKAb8GRYfbrG0,51 +onnx/backend/test/data/node/test_reduce_sum_empty_set_non_reduced_axis_zero/model.onnx,sha256=EcEPNTx8l1R49kTe7K2pbyxppf_HkGNpIqAqzBfB4_4,204 +onnx/backend/test/data/node/test_reduce_sum_empty_set_non_reduced_axis_zero/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_sum_empty_set_non_reduced_axis_zero/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_reduce_sum_empty_set_non_reduced_axis_zero/test_data_set_0/output_0.pb,sha256=HZVVt0c5MbTlwO5vbPsnvEiPwTFHjMFsTMMFj4fYKZ4,19 +onnx/backend/test/data/node/test_reduce_sum_keepdims_example/model.onnx,sha256=_atfvm_z1owilbBcO4adKbxvsFAcYALNlKXooizYsdo,189 +onnx/backend/test/data/node/test_reduce_sum_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_keepdims_example/test_data_set_0/output_0.pb,sha256=-0ot9rCkyWkDhV7chj_oxYHNFryuggjVddq8z4qWjuY,43 +onnx/backend/test/data/node/test_reduce_sum_keepdims_random/model.onnx,sha256=gqfy3V7eSTxiQ6UTgXo0olNcckZHUIVCMwcA66HzVAE,188 +onnx/backend/test/data/node/test_reduce_sum_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_keepdims_random/test_data_set_0/output_0.pb,sha256=88XdkhxYXPjaDDn84q-yBWICKN6ikXDbVH5RuCm3EKE,43 +onnx/backend/test/data/node/test_reduce_sum_negative_axes_keepdims_example/model.onnx,sha256=vJkjTGTKvBUSYRnYDHfgxFcRK6bpa3pnZ5zv72-JPGg,203 +onnx/backend/test/data/node/test_reduce_sum_negative_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_negative_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 
+onnx/backend/test/data/node/test_reduce_sum_negative_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=-0ot9rCkyWkDhV7chj_oxYHNFryuggjVddq8z4qWjuY,43 +onnx/backend/test/data/node/test_reduce_sum_negative_axes_keepdims_random/model.onnx,sha256=-CMn2Mn2eWhFXZQiqj_CIu9RFb7a4aRudqjF5czzlgA,231 +onnx/backend/test/data/node/test_reduce_sum_negative_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_negative_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_sum_negative_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=cQ0Q_SVFF2KsKqm4m_tH1qSv6sSiPriE2i1T8Ktnc_4,67 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_example/model.onnx,sha256=G2bxijVRgZn1JZJt3QGbNnDsx6oepsfIYamaUD3ZnxU,215 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=HZLley9Hbzlh5in3jlFTDghtNaheN7e3B8SBgGb3c-Q,23 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_example_expanded/model.onnx,sha256=YgsggPp3fCEIwJg8g4jFgLdE8B63jvYOY6-9e-G3O4s,436 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=HZLley9Hbzlh5in3jlFTDghtNaheN7e3B8SBgGb3c-Q,23 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_random/model.onnx,sha256=T5_7RGmpImy2WrH7D4FS1DbeK1RDqcoBhe5lXso9eFU,214 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=NbVoWb_YuZBVOJxwwGasGxSa91PqRFyopG8pwnsRrZo,23 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_random_expanded/model.onnx,sha256=ebE1RD68Ez28CPZdtFz8FpSt2dJWG4d1UDMpclNjjdk,433 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=lC3r53K5AzukLQcG3fTzO87xOCgaU4mtW52qktRUPUk,12 +onnx/backend/test/data/node/test_reduce_sum_square_default_axes_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=NbVoWb_YuZBVOJxwwGasGxSa91PqRFyopG8pwnsRrZo,23 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_example/model.onnx,sha256=WkoP-bgJbWSEOUCAEdtp5W3QII3v8-Lu3JZPPJCQyW8,205 
+onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_example/test_data_set_0/output_0.pb,sha256=4STmxX1bz3tAUzg68411nc4sQSNZgAaUuYt_onSMO-c,41 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_example_expanded/model.onnx,sha256=j8K5jM9xaH8IFMLK1vgVR_CUSpxG8UKiWfstNYwEiI4,414 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=4STmxX1bz3tAUzg68411nc4sQSNZgAaUuYt_onSMO-c,41 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_random/model.onnx,sha256=bI3zRIns4YaHcHYe99iaPR-9Tyeo-1V-KRYYIFFtP8s,204 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_random/test_data_set_0/output_0.pb,sha256=Kt1q2dw9kUxmKY4qFgAWAqeJZUagnKjwuytTKfpNlts,41 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_random_expanded/model.onnx,sha256=g5FV8FqvL8og11KAIsxB-aD9QOeZ_HySCk0HtJGawSg,411 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_square_do_not_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=Kt1q2dw9kUxmKY4qFgAWAqeJZUagnKjwuytTKfpNlts,41 +onnx/backend/test/data/node/test_reduce_sum_square_empty_set/model.onnx,sha256=-g26jAEVh7DMfc5VHp_sP4V3nVHbkgoZvU1J6lCJtMU,195 +onnx/backend/test/data/node/test_reduce_sum_square_empty_set/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_sum_square_empty_set/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_square_empty_set/test_data_set_0/output_0.pb,sha256=dkauAAxfP09SxG8x1uXFNv8Ndb1oDMAKAb8GRYfbrG0,51 +onnx/backend/test/data/node/test_reduce_sum_square_empty_set_expanded/model.onnx,sha256=EBVyVKGrHd3Zh_crLovMWGtfGUP9khB_ciPqKXAQ6F8,375 +onnx/backend/test/data/node/test_reduce_sum_square_empty_set_expanded/test_data_set_0/input_0.pb,sha256=diEJAtHd_b95FEPFWBBoLLmxB42c3-1U15UBg9rdUuw,16 +onnx/backend/test/data/node/test_reduce_sum_square_empty_set_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_square_empty_set_expanded/test_data_set_0/output_0.pb,sha256=dkauAAxfP09SxG8x1uXFNv8Ndb1oDMAKAb8GRYfbrG0,51 
+onnx/backend/test/data/node/test_reduce_sum_square_keepdims_example/model.onnx,sha256=7HagD67ewDY4s8-74ByYvNF_IKR7parPtsg4XxhQ0sI,202 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_example/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_example/test_data_set_0/output_0.pb,sha256=Cq_UDeh4h6nFLBgZBAWRe4VLZgIhclppB9m_jRlZrEo,43 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_example_expanded/model.onnx,sha256=YoaYN32uRPsLreNfvjCxc_KX_Jlem3oWme5y7aHScPQ,397 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=Cq_UDeh4h6nFLBgZBAWRe4VLZgIhclppB9m_jRlZrEo,43 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_random/model.onnx,sha256=GkzGXsE3RHJ7M4rnwI--bjcW4sXRzpu6wx44mNDsKKM,201 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_random/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_random/test_data_set_0/output_0.pb,sha256=T-ud1oynbPrU6k8obUDvYqxQoCsq0N-S_8SJKxthuBk,43 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_random_expanded/model.onnx,sha256=78ReP2FSJchrbXYkXWug9ZJL6Y730PQSUQAKVjXI94U,394 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_reduce_sum_square_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=T-ud1oynbPrU6k8obUDvYqxQoCsq0N-S_8SJKxthuBk,43 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_example/model.onnx,sha256=Yxeca3t4PmMBNd8A6SAYpLBOatqlLb1_xMJvfpvd9Fw,216 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_example/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_example/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_example/test_data_set_0/output_0.pb,sha256=Cq_UDeh4h6nFLBgZBAWRe4VLZgIhclppB9m_jRlZrEo,43 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_example_expanded/model.onnx,sha256=3C_RaXrAagqlPLawsYFg-ft6SaeCIhLt1HsCQrgHC5k,439 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_example_expanded/test_data_set_0/input_0.pb,sha256=PwPZEPZFRN46ucTDA7WJdQ6DZGzo_FrlkXVuDLJ8k7k,64 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_example_expanded/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 
+onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_example_expanded/test_data_set_0/output_0.pb,sha256=Cq_UDeh4h6nFLBgZBAWRe4VLZgIhclppB9m_jRlZrEo,43 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_random/model.onnx,sha256=26kPF5Om3gzxIpg2VJ8IfFcKVvUtj36jY15NHrl0PkU,215 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_random/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_random/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_random/test_data_set_0/output_0.pb,sha256=T-ud1oynbPrU6k8obUDvYqxQoCsq0N-S_8SJKxthuBk,43 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_random_expanded/model.onnx,sha256=RhZS-1PyLcRMXwouw2c0XsXI297fSGyEtLo_cKcyQT4,436 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_random_expanded/test_data_set_0/input_0.pb,sha256=8pEDHKN1HYnY26qgXHG5QbG7pHwvEqss87VIYK8DU2Y,64 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_random_expanded/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_reduce_sum_square_negative_axes_keepdims_random_expanded/test_data_set_0/output_0.pb,sha256=T-ud1oynbPrU6k8obUDvYqxQoCsq0N-S_8SJKxthuBk,43 +onnx/backend/test/data/node/test_reflect_pad/model.onnx,sha256=M0A7hHGT9fBLQ-sdLme9nGJ6qqV7LcbH4YaJS8F4zJY,160 +onnx/backend/test/data/node/test_reflect_pad/test_data_set_0/input_0.pb,sha256=TuEz1ptAgJa6Qzh6nQx_rsYU5BVmA4qgVWNRPM7AEkU,256 +onnx/backend/test/data/node/test_reflect_pad/test_data_set_0/input_1.pb,sha256=8s4uRhNf19t1kZ-BWDxuitcnY9MeHgkG6w6i1Sghnz0,76 +onnx/backend/test/data/node/test_reflect_pad/test_data_set_0/output_0.pb,sha256=vRjclkAiKB0nuRzVWk4GeIKCHBxR-QZilgYP5DlBVr4,520 +onnx/backend/test/data/node/test_regex_full_match_basic/model.onnx,sha256=yCly5Cs07OW-682KHYYoWZPnJPhKn7NIcYhrkzsmLTc,148 +onnx/backend/test/data/node/test_regex_full_match_basic/test_data_set_0/input_0.pb,sha256=1fO9Vj8-f9A9v_fbA6lHnui0wxRWivm9u4Ofx4bWFMk,56 +onnx/backend/test/data/node/test_regex_full_match_basic/test_data_set_0/output_0.pb,sha256=_lhc3O_b74C03Oht1i8tnibUls64f1ROvyVBDQPO2Os,12 +onnx/backend/test/data/node/test_regex_full_match_email_domain/model.onnx,sha256=LjnPd5qIqTHB87k-0a13GFl_SE-QmAO9WRZNBzVOk0s,187 +onnx/backend/test/data/node/test_regex_full_match_email_domain/test_data_set_0/input_0.pb,sha256=u1NPTw32IImtF95uzUqq4GHK3RoDkr3FoVJm6UMsgCY,80 +onnx/backend/test/data/node/test_regex_full_match_email_domain/test_data_set_0/output_0.pb,sha256=xE17dM4PaT3OoJNUzMKqTicCcEsaku7OGMrpt-6A6XI,15 +onnx/backend/test/data/node/test_regex_full_match_empty/model.onnx,sha256=F0VGRCfhQRpZs-9_aYRqldQdSXWaNuQHd0GhPuXKQzg,180 +onnx/backend/test/data/node/test_regex_full_match_empty/test_data_set_0/input_0.pb,sha256=_oHGYv3MwEVLQZORNyWn0P7JLcItELdIsVNG3t_B7qM,9 +onnx/backend/test/data/node/test_regex_full_match_empty/test_data_set_0/output_0.pb,sha256=1k5BGRIQy5gADr5-ANfqJ5wHmJ-X3M2lEVtj_GZhiQU,11 +onnx/backend/test/data/node/test_relu/model.onnx,sha256=hzHJyDRTlaWNI-jSCxr_WqNHCOSCwGYwlc2URQeJoss,99 +onnx/backend/test/data/node/test_relu/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_relu/test_data_set_0/output_0.pb,sha256=SqNj4MYf1_XozMtqbVv31LmL5bj5_lZV-tOJ9tLs8Ik,254 
+onnx/backend/test/data/node/test_relu_expanded_ver18/model.onnx,sha256=KMFio-_6x2DXs7Bs_EyGD11Jy3eR-BfL3LPM9TW8Rpo,335 +onnx/backend/test/data/node/test_relu_expanded_ver18/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_relu_expanded_ver18/test_data_set_0/output_0.pb,sha256=SqNj4MYf1_XozMtqbVv31LmL5bj5_lZV-tOJ9tLs8Ik,254 +onnx/backend/test/data/node/test_reshape_allowzero_reordered/model.onnx,sha256=05HMLGU0sIRbDBNTpaSDSA8skZtJeuLbhP1utaMpccM,192 +onnx/backend/test/data/node/test_reshape_allowzero_reordered/test_data_set_0/input_0.pb,sha256=39jbCpQe08mPS4dsbK-BTXXytqtNJ2iXlQ87ZOq2p_E,16 +onnx/backend/test/data/node/test_reshape_allowzero_reordered/test_data_set_0/input_1.pb,sha256=R5lCYdXFlSEYA9XjPU_Fjhx54__PbgfONnz-Qdy3NGo,37 +onnx/backend/test/data/node/test_reshape_allowzero_reordered/test_data_set_0/output_0.pb,sha256=5DBO19lQ938nrN3x2OoaFb2OR5PxUJ0gS8dSub3omIE,20 +onnx/backend/test/data/node/test_reshape_extended_dims/model.onnx,sha256=sCp3OKT0a5eFA51abp__7HaVJKi2_n39huIJXCvQ8Qg,172 +onnx/backend/test/data/node/test_reshape_extended_dims/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_reshape_extended_dims/test_data_set_0/input_1.pb,sha256=WagSbqYw6WuUYvL67CYqSzbZnVaoUz9MELD3BuUy8b4,45 +onnx/backend/test/data/node/test_reshape_extended_dims/test_data_set_0/output_0.pb,sha256=IDzjEzlEsBbvtMbnKI40MRU6IcXAR1W-gqBHFiXjLJM,118 +onnx/backend/test/data/node/test_reshape_negative_dim/model.onnx,sha256=-HxN0nIlNHKd-CNOMrHzDEBwKVLj5HU1alv3_ZfYwAo,167 +onnx/backend/test/data/node/test_reshape_negative_dim/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_reshape_negative_dim/test_data_set_0/input_1.pb,sha256=_45XuV5dYHVVu7Rx1FGNnvotQy31w4eANzXAA3bGEdE,37 +onnx/backend/test/data/node/test_reshape_negative_dim/test_data_set_0/output_0.pb,sha256=h1F8s8Hz2FQZbHF9HSMXEHzVHS1_RFukvMhdN4ouMns,116 +onnx/backend/test/data/node/test_reshape_negative_extended_dims/model.onnx,sha256=spcii87BK6Z5Xw20CQ2pTTJaAEpVkBQ_4U1WqtBiWac,181 +onnx/backend/test/data/node/test_reshape_negative_extended_dims/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_reshape_negative_extended_dims/test_data_set_0/input_1.pb,sha256=iuyo7JbFDNgl5IVKR0CPdvTCJjQfguS1qvi1qBtMvgY,45 +onnx/backend/test/data/node/test_reshape_negative_extended_dims/test_data_set_0/output_0.pb,sha256=ZEJUohoXCbDgCdfxDVGfmH90AUhuNfiuq8pHSx7qi1M,118 +onnx/backend/test/data/node/test_reshape_one_dim/model.onnx,sha256=_m8SutBmTPXqHwfufeaQtpzYXHlKXOYSBNNktCZ5_w0,154 +onnx/backend/test/data/node/test_reshape_one_dim/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_reshape_one_dim/test_data_set_0/input_1.pb,sha256=FnE_Y1xn7VcaF4iHxUcnwkMCqpumPFvsjsIvG7gZE8Y,21 +onnx/backend/test/data/node/test_reshape_one_dim/test_data_set_0/output_0.pb,sha256=OW1VHWrW-p-vlPhvSXpGxaAnh794jpWNM46uGjVODRY,112 +onnx/backend/test/data/node/test_reshape_reduced_dims/model.onnx,sha256=vSjIXEJBCHYugVqCWaZCMAv_PTAnBblLKcw-jZhTKHo,163 +onnx/backend/test/data/node/test_reshape_reduced_dims/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_reshape_reduced_dims/test_data_set_0/input_1.pb,sha256=ouJ6_oGL8_HBKEg7fUJpxVLNhpa96Ql0goSY7PqqYz8,29 
+onnx/backend/test/data/node/test_reshape_reduced_dims/test_data_set_0/output_0.pb,sha256=X0SC5DZwUl-yurzky8NKAXMQ94Z8WqR5ctunKFK1gIk,114 +onnx/backend/test/data/node/test_reshape_reordered_all_dims/model.onnx,sha256=oVcMkZYKfgh4hxhak_u0OxvQfPnKodzAtq7cIjJqtwQ,173 +onnx/backend/test/data/node/test_reshape_reordered_all_dims/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_reshape_reordered_all_dims/test_data_set_0/input_1.pb,sha256=WuXoFZwaDusBjM6stvBuiE2-ueq2hqpxw1IBgbDzXtw,37 +onnx/backend/test/data/node/test_reshape_reordered_all_dims/test_data_set_0/output_0.pb,sha256=F3qOrvE7h1CQDCR8WwQFjdGHVYugNnpU2wa104F5rBw,116 +onnx/backend/test/data/node/test_reshape_reordered_last_dims/model.onnx,sha256=vEJwrZYPQZHn4lEZOW3k58_zSwmFehcE14tsuJlqyJg,174 +onnx/backend/test/data/node/test_reshape_reordered_last_dims/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_reshape_reordered_last_dims/test_data_set_0/input_1.pb,sha256=g3yoG_7MeWVeyjtusuY4oWwwveQIuhkh27RQzhIr1HU,37 +onnx/backend/test/data/node/test_reshape_reordered_last_dims/test_data_set_0/output_0.pb,sha256=J5J0ckD6rfn_-S4k1g8mo4qexsZdcE9w7kh_MMB-91k,116 +onnx/backend/test/data/node/test_reshape_zero_and_negative_dim/model.onnx,sha256=WQ5YYcsJM-387a9nyv3gV5FEN-ckPGHcLtQ6QN6pOsk,180 +onnx/backend/test/data/node/test_reshape_zero_and_negative_dim/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_reshape_zero_and_negative_dim/test_data_set_0/input_1.pb,sha256=fGG-r1bYO3tPdrYbBsKgm8rNOTpMVc43zP5kzaWsOc4,45 +onnx/backend/test/data/node/test_reshape_zero_and_negative_dim/test_data_set_0/output_0.pb,sha256=WpdkH70wR430ATRzOjP79tz4aqDju2-ZGEjmriNmZtw,118 +onnx/backend/test/data/node/test_reshape_zero_dim/model.onnx,sha256=xJkqQUAsEdjCrXQynbMWF8w4GSqvpGoFhiQ-FDORIN0,167 +onnx/backend/test/data/node/test_reshape_zero_dim/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_reshape_zero_dim/test_data_set_0/input_1.pb,sha256=3LQjg-0h7af-IHdpcviFQ7n-7bI2IX04SBTcYt7KGYw,45 +onnx/backend/test/data/node/test_reshape_zero_dim/test_data_set_0/output_0.pb,sha256=OMGl3G0rnNPISyblKGPC3m3crj7IOfBg606ZZ16iUhU,118 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic/model.onnx,sha256=NvGAYLAArD0c10T1O5h7aaRuo0Q1N0ahZydZETAIdCg,186 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic/test_data_set_0/input_1.pb,sha256=xPbOKT20LLRRzkK1NtGYUZK7K7jMcf7SR8v48R7Uaqo,30 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic/test_data_set_0/output_0.pb,sha256=ZGnKaDrIFCTD_3Yb67GNTX-IVokI9Yr0bh9sWg6us-A,51 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic_A_n0p5_exclude_outside/model.onnx,sha256=_ptXRcW2a1pg6gZDFiIp2ltFvXCRIbrr6ANMwicXsU0,258 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic_A_n0p5_exclude_outside/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic_A_n0p5_exclude_outside/test_data_set_0/input_1.pb,sha256=xPbOKT20LLRRzkK1NtGYUZK7K7jMcf7SR8v48R7Uaqo,30 
+onnx/backend/test/data/node/test_resize_downsample_scales_cubic_A_n0p5_exclude_outside/test_data_set_0/output_0.pb,sha256=loFVIECPdfM4Elw8zNFWIE6JQTefZRc362LdOfJBgPA,51 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic_align_corners/model.onnx,sha256=H6KLpWckHrepT_R1YhD5UzEKXFtCOvzpQBFzkO10PRs,252 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic_align_corners/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic_align_corners/test_data_set_0/input_1.pb,sha256=xPbOKT20LLRRzkK1NtGYUZK7K7jMcf7SR8v48R7Uaqo,30 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic_align_corners/test_data_set_0/output_0.pb,sha256=2cgJqsQF1PGm1LLvvc9Dc5u6_YD8DHxfSgkKNV0gQtk,51 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic_antialias/model.onnx,sha256=ofXD7K2I90DevflmIHO8Ht0p7SutmVeCu1GFfXfRXFM,214 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic_antialias/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic_antialias/test_data_set_0/input_1.pb,sha256=_MYqurm3S5Sgvkddd59VRpxNIVlImSmTr8O0q7TgMZc,30 +onnx/backend/test/data/node/test_resize_downsample_scales_cubic_antialias/test_data_set_0/output_0.pb,sha256=3RkVD_dvwVMDIMuTFJpRMSnAJZldXEW7WZL2Xy0xj2c,31 +onnx/backend/test/data/node/test_resize_downsample_scales_linear/model.onnx,sha256=fTKB9Mr2RLtnUE-RzyOAC2ijKeCvKvh6CrBlOIHq_SQ,188 +onnx/backend/test/data/node/test_resize_downsample_scales_linear/test_data_set_0/input_0.pb,sha256=nioMVoUI5wFpgq-Sa9rYpJCbdtubhwaRAM1ym7zDzAQ,47 +onnx/backend/test/data/node/test_resize_downsample_scales_linear/test_data_set_0/input_1.pb,sha256=_MYqurm3S5Sgvkddd59VRpxNIVlImSmTr8O0q7TgMZc,30 +onnx/backend/test/data/node/test_resize_downsample_scales_linear/test_data_set_0/output_0.pb,sha256=uwev2yGwG2Wa5GUD8WPsgHctU1XZJ2Q1Rf3owfZ_XHg,23 +onnx/backend/test/data/node/test_resize_downsample_scales_linear_align_corners/model.onnx,sha256=tk8tGvNP6KtfR7UMFW9f3432SLvmf9pajyuugP3BiTY,254 +onnx/backend/test/data/node/test_resize_downsample_scales_linear_align_corners/test_data_set_0/input_0.pb,sha256=nioMVoUI5wFpgq-Sa9rYpJCbdtubhwaRAM1ym7zDzAQ,47 +onnx/backend/test/data/node/test_resize_downsample_scales_linear_align_corners/test_data_set_0/input_1.pb,sha256=_MYqurm3S5Sgvkddd59VRpxNIVlImSmTr8O0q7TgMZc,30 +onnx/backend/test/data/node/test_resize_downsample_scales_linear_align_corners/test_data_set_0/output_0.pb,sha256=dqsdDkmbIHzYaAA5V1gXM9ft7EKPO8Y-qCfxD8SjjBU,23 +onnx/backend/test/data/node/test_resize_downsample_scales_linear_antialias/model.onnx,sha256=3Nj5SptzieyuU03tjT2Ner6GmjhEOVmWxP3lbT51Tn0,216 +onnx/backend/test/data/node/test_resize_downsample_scales_linear_antialias/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_downsample_scales_linear_antialias/test_data_set_0/input_1.pb,sha256=_MYqurm3S5Sgvkddd59VRpxNIVlImSmTr8O0q7TgMZc,30 +onnx/backend/test/data/node/test_resize_downsample_scales_linear_antialias/test_data_set_0/output_0.pb,sha256=1_-7ZdcqfP_WMa26f4W3pwWFk__1wz_OV-GK-HZe9Hk,31 +onnx/backend/test/data/node/test_resize_downsample_scales_linear_half_pixel_symmetric/model.onnx,sha256=YcAcJ63zezuxLrBF3Vgsl1TeHMU2Ogd3nLiU7g4v1Mw,268 +onnx/backend/test/data/node/test_resize_downsample_scales_linear_half_pixel_symmetric/test_data_set_0/input_0.pb,sha256=Rdy6Z1wI0QCS9oqH-9p6yUHvF-avjYhyozZYTQRcSyY,31 
+onnx/backend/test/data/node/test_resize_downsample_scales_linear_half_pixel_symmetric/test_data_set_0/input_1.pb,sha256=Pv1EkH_ZwWOQBY6LxFlPlTh5YDAg7UkXgQyPbkfGCTE,30 +onnx/backend/test/data/node/test_resize_downsample_scales_linear_half_pixel_symmetric/test_data_set_0/output_0.pb,sha256=7weLx1gkYyPrWb_hlbICvh1nnkQTdCCn1usS_9sgXoc,23 +onnx/backend/test/data/node/test_resize_downsample_scales_nearest/model.onnx,sha256=1U9urxLLRINgNwIM00veMP9fErOY5QRG5HW6gp9-kyk,190 +onnx/backend/test/data/node/test_resize_downsample_scales_nearest/test_data_set_0/input_0.pb,sha256=nioMVoUI5wFpgq-Sa9rYpJCbdtubhwaRAM1ym7zDzAQ,47 +onnx/backend/test/data/node/test_resize_downsample_scales_nearest/test_data_set_0/input_1.pb,sha256=_MYqurm3S5Sgvkddd59VRpxNIVlImSmTr8O0q7TgMZc,30 +onnx/backend/test/data/node/test_resize_downsample_scales_nearest/test_data_set_0/output_0.pb,sha256=Yjo_9cj_fib7n4VQV4PsQKY-y5v4-quR63M-EkCrzT0,23 +onnx/backend/test/data/node/test_resize_downsample_sizes_cubic/model.onnx,sha256=6a46eIOdy_Qs4SxCR7ykEV9VTZVM4Og3pqd2DCtDjEM,185 +onnx/backend/test/data/node/test_resize_downsample_sizes_cubic/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_downsample_sizes_cubic/test_data_set_0/input_1.pb,sha256=ndCok6I6o-QU94zqs5ITMbBNfV2gqQXoarYmY5fVKn4,45 +onnx/backend/test/data/node/test_resize_downsample_sizes_cubic/test_data_set_0/output_0.pb,sha256=PiAS9GWaz3sX0Gf6Ln5Psc_9dku3ayUWL5hHq9vlzg0,51 +onnx/backend/test/data/node/test_resize_downsample_sizes_cubic_antialias/model.onnx,sha256=4C6ccjEJ_YoSijXTgxAnK0FhhksGC2Iv4_DED-R_3pw,213 +onnx/backend/test/data/node/test_resize_downsample_sizes_cubic_antialias/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_downsample_sizes_cubic_antialias/test_data_set_0/input_1.pb,sha256=ndCok6I6o-QU94zqs5ITMbBNfV2gqQXoarYmY5fVKn4,45 +onnx/backend/test/data/node/test_resize_downsample_sizes_cubic_antialias/test_data_set_0/output_0.pb,sha256=FQ3KdwTmOlBZYLC7nxQY_fHxEEzwuIxommP9ItoqBUg,51 +onnx/backend/test/data/node/test_resize_downsample_sizes_linear_antialias/model.onnx,sha256=rGpyoONA3briRF0UkgzM87GAVUswPHrGZ_D9U0K1IFs,215 +onnx/backend/test/data/node/test_resize_downsample_sizes_linear_antialias/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_downsample_sizes_linear_antialias/test_data_set_0/input_1.pb,sha256=ndCok6I6o-QU94zqs5ITMbBNfV2gqQXoarYmY5fVKn4,45 +onnx/backend/test/data/node/test_resize_downsample_sizes_linear_antialias/test_data_set_0/output_0.pb,sha256=4k2fzRJDvP4TITb-F963MxhGFoSpmv4Gcm1lMFsOqfM,51 +onnx/backend/test/data/node/test_resize_downsample_sizes_linear_pytorch_half_pixel/model.onnx,sha256=VAAd9-jPn5WuhFovIXkyU76g8UAn2Wg0r06aNIiyjxI,263 +onnx/backend/test/data/node/test_resize_downsample_sizes_linear_pytorch_half_pixel/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_downsample_sizes_linear_pytorch_half_pixel/test_data_set_0/input_1.pb,sha256=37sn5EOXGUhf2Y-K3TqAz2Ass_BYayom8mdmnc2h1GU,45 +onnx/backend/test/data/node/test_resize_downsample_sizes_linear_pytorch_half_pixel/test_data_set_0/output_0.pb,sha256=iKqRlP_EQPX8v6QT7KLRSmrzOZZLemzCdmj8tuVgPXo,27 +onnx/backend/test/data/node/test_resize_downsample_sizes_nearest/model.onnx,sha256=StlILIeH-jdXqhApX2vrq_z1APeoJPCOA-35PsZ8gA8,189 
+onnx/backend/test/data/node/test_resize_downsample_sizes_nearest/test_data_set_0/input_0.pb,sha256=nioMVoUI5wFpgq-Sa9rYpJCbdtubhwaRAM1ym7zDzAQ,47 +onnx/backend/test/data/node/test_resize_downsample_sizes_nearest/test_data_set_0/input_1.pb,sha256=EWQ2LRMYiDUHb77tpBRK0ytYOpI4T28MtPEOhezLW9E,45 +onnx/backend/test/data/node/test_resize_downsample_sizes_nearest/test_data_set_0/output_0.pb,sha256=aBoGDHQgxT27m_eQuySjStIVzhxkJMLWH0A55bFbQ5g,27 +onnx/backend/test/data/node/test_resize_downsample_sizes_nearest_not_larger/model.onnx,sha256=eK6kffmxQ8WVtiuCHLZ089AYyS125sPEDrPscMxc6zY,258 +onnx/backend/test/data/node/test_resize_downsample_sizes_nearest_not_larger/test_data_set_0/input_0.pb,sha256=nioMVoUI5wFpgq-Sa9rYpJCbdtubhwaRAM1ym7zDzAQ,47 +onnx/backend/test/data/node/test_resize_downsample_sizes_nearest_not_larger/test_data_set_0/input_1.pb,sha256=Ee8uWnujoqYe5BozCljVH-jTkbA4JDUotF8GxhXEZcw,29 +onnx/backend/test/data/node/test_resize_downsample_sizes_nearest_not_larger/test_data_set_0/output_0.pb,sha256=Yjo_9cj_fib7n4VQV4PsQKY-y5v4-quR63M-EkCrzT0,23 +onnx/backend/test/data/node/test_resize_downsample_sizes_nearest_not_smaller/model.onnx,sha256=d8DDUXajzkKcuKIznrnN9NRhDlHs2VPMiTFH582mm6I,260 +onnx/backend/test/data/node/test_resize_downsample_sizes_nearest_not_smaller/test_data_set_0/input_0.pb,sha256=nioMVoUI5wFpgq-Sa9rYpJCbdtubhwaRAM1ym7zDzAQ,47 +onnx/backend/test/data/node/test_resize_downsample_sizes_nearest_not_smaller/test_data_set_0/input_1.pb,sha256=Ee8uWnujoqYe5BozCljVH-jTkbA4JDUotF8GxhXEZcw,29 +onnx/backend/test/data/node/test_resize_downsample_sizes_nearest_not_smaller/test_data_set_0/output_0.pb,sha256=nAc-MyWd9nHy3TljFTqiL0XzUFYlioeLyRGpZB2Jvcg,39 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize/model.onnx,sha256=O-2zQZRkhoy2_cGlaqxqmuwF0lhUP2kWArr3n21yryY,293 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize/test_data_set_0/input_1.pb,sha256=jl82DpC9SOeZy-jQNk9BvM6nVoa-AgSa4ZtTT7709dc,43 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize/test_data_set_0/input_2.pb,sha256=ndCok6I6o-QU94zqs5ITMbBNfV2gqQXoarYmY5fVKn4,45 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize/test_data_set_0/output_0.pb,sha256=-7Q9n5yoaXsRRZsDvOtF_c7_YynJmUkeIQgW4rp-zKg,51 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize_axes_2_3/model.onnx,sha256=_2wYb0fspjXsFxSYql6wFFoiYH5J6ERjjUySWuCyDgI,285 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize_axes_2_3/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize_axes_2_3/test_data_set_0/input_1.pb,sha256=L1L2x0-QpyRLeyyjk73mjV7FdzUEAdNemEV0WbcZZWk,27 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize_axes_2_3/test_data_set_0/input_2.pb,sha256=vAIzVU8L_NauQljYoNRgwEqyRXBRLAdxNa_3HqjbY1E,29 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize_axes_2_3/test_data_set_0/output_0.pb,sha256=WpJZbrC4oUGdfU2pRJ6Z34aitFfYz_OpfxWpEoEouiw,51 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize_axes_3_2/model.onnx,sha256=syz90aTDAXM38gNXnKbP5XprnVCIpl4IdhAT-knvE5o,285 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize_axes_3_2/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize_axes_3_2/test_data_set_0/input_1.pb,sha256=tAApf3H0iSwfk8uTYnN5NBF8goYF8jHTlgp3mRG7wGo,27 
+onnx/backend/test/data/node/test_resize_tf_crop_and_resize_axes_3_2/test_data_set_0/input_2.pb,sha256=vAIzVU8L_NauQljYoNRgwEqyRXBRLAdxNa_3HqjbY1E,29 +onnx/backend/test/data/node/test_resize_tf_crop_and_resize_axes_3_2/test_data_set_0/output_0.pb,sha256=WpJZbrC4oUGdfU2pRJ6Z34aitFfYz_OpfxWpEoEouiw,51 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic/model.onnx,sha256=QCvkupq4I7UtNd64GHm4u1q4sFe2Zdh_ma1EMZ3I9N8,184 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic/test_data_set_0/input_1.pb,sha256=w8rot1dlQO8XzaRmza_A35JMaO-AnoOi9EtykBJy6kI,30 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic/test_data_set_0/output_0.pb,sha256=PkSU-XfoDiJLcyLNeQLkN2A9yyc9hXq4v8nm3UcPKq4,272 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_A_n0p5_exclude_outside/model.onnx,sha256=6mHAIPWMNAum--EwaHvxkkk8c6Dw5IECA-hnxGk7a2w,256 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_A_n0p5_exclude_outside/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_A_n0p5_exclude_outside/test_data_set_0/input_1.pb,sha256=w8rot1dlQO8XzaRmza_A35JMaO-AnoOi9EtykBJy6kI,30 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_A_n0p5_exclude_outside/test_data_set_0/output_0.pb,sha256=0Ir5vu-hpsCeNPtx-nUkliiu-Vgm06Vga2TF5lryblA,272 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_align_corners/model.onnx,sha256=HCQFmu-zRswTfKUSrVJgMqqbqXFvpU6tboIabJfKzF0,250 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_align_corners/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_align_corners/test_data_set_0/input_1.pb,sha256=w8rot1dlQO8XzaRmza_A35JMaO-AnoOi9EtykBJy6kI,30 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_align_corners/test_data_set_0/output_0.pb,sha256=AQz_0D5lp8mVlVoHV99xYl1kRmT-i3ZSaIM-2oAmHrs,272 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_asymmetric/model.onnx,sha256=KR4j7ABjJ_jI_hsrjX8JtttiB8Dwbc8A2b4jEfuR1Jc,244 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_asymmetric/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_asymmetric/test_data_set_0/input_1.pb,sha256=w8rot1dlQO8XzaRmza_A35JMaO-AnoOi9EtykBJy6kI,30 +onnx/backend/test/data/node/test_resize_upsample_scales_cubic_asymmetric/test_data_set_0/output_0.pb,sha256=QmRUXe9rBd0mYIQPWDFpFGFADYvQFVySIweV2tFBvNE,272 +onnx/backend/test/data/node/test_resize_upsample_scales_linear/model.onnx,sha256=mEZo_9rq5fLFehb5FBaV3bKnFUBovWAzdhzhbqdqEqk,186 +onnx/backend/test/data/node/test_resize_upsample_scales_linear/test_data_set_0/input_0.pb,sha256=mLXavIMpEU-t1w77qkVcBljM9nIegS0-j17fhrivmno,31 +onnx/backend/test/data/node/test_resize_upsample_scales_linear/test_data_set_0/input_1.pb,sha256=w8rot1dlQO8XzaRmza_A35JMaO-AnoOi9EtykBJy6kI,30 +onnx/backend/test/data/node/test_resize_upsample_scales_linear/test_data_set_0/output_0.pb,sha256=fAWEWdC3o33m0DECLq7pDzMgk2P9M_AUYpA6oJmR6cE,79 +onnx/backend/test/data/node/test_resize_upsample_scales_linear_align_corners/model.onnx,sha256=Ai1_ehB6rOGR1GQse3CvLwJEB9YCv7kiLWebSYLJYnk,252 
+onnx/backend/test/data/node/test_resize_upsample_scales_linear_align_corners/test_data_set_0/input_0.pb,sha256=mLXavIMpEU-t1w77qkVcBljM9nIegS0-j17fhrivmno,31 +onnx/backend/test/data/node/test_resize_upsample_scales_linear_align_corners/test_data_set_0/input_1.pb,sha256=w8rot1dlQO8XzaRmza_A35JMaO-AnoOi9EtykBJy6kI,30 +onnx/backend/test/data/node/test_resize_upsample_scales_linear_align_corners/test_data_set_0/output_0.pb,sha256=7Bpwq1ECSX1fKctUVbWV7mStOAMzyI6P5euZ310sOxQ,79 +onnx/backend/test/data/node/test_resize_upsample_scales_linear_half_pixel_symmetric/model.onnx,sha256=WlGAguzzIkWkkAo6ZfAdQKnSHXL50WIxJ1Zhr0oVw2I,266 +onnx/backend/test/data/node/test_resize_upsample_scales_linear_half_pixel_symmetric/test_data_set_0/input_0.pb,sha256=mLXavIMpEU-t1w77qkVcBljM9nIegS0-j17fhrivmno,31 +onnx/backend/test/data/node/test_resize_upsample_scales_linear_half_pixel_symmetric/test_data_set_0/input_1.pb,sha256=YuaMw9nlwphEIBDxyW46Wap5f3nI5zPV7UYjoA72gmQ,30 +onnx/backend/test/data/node/test_resize_upsample_scales_linear_half_pixel_symmetric/test_data_set_0/output_0.pb,sha256=e7ETAXX5wHi0EZN5mUMvRnkOxNAyUgy6trbeSN7uPvQ,95 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest/model.onnx,sha256=UQqEcTdiIlMdbcPIRMa1I5yM-Qa4RUC7AJelajIyXlE,188 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest/test_data_set_0/input_0.pb,sha256=mLXavIMpEU-t1w77qkVcBljM9nIegS0-j17fhrivmno,31 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest/test_data_set_0/input_1.pb,sha256=3a1OyYYNdnWsgKm8R9EJ4Ja-reMSXI0q04mtA4aFGfo,30 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest/test_data_set_0/output_0.pb,sha256=sqObjzJsZ_nVStepjZKmx3dOTVd9cGqBu3Uw2w6tYFg,111 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest_axes_2_3/model.onnx,sha256=UQRCvBkSGiUgcqs8e3NwS0QUfkpF98phMycKdiYKCOc,212 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest_axes_2_3/test_data_set_0/input_0.pb,sha256=mLXavIMpEU-t1w77qkVcBljM9nIegS0-j17fhrivmno,31 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest_axes_2_3/test_data_set_0/input_1.pb,sha256=eYyQwyy4YgQ7gyB7cQ3EFSsVJypSRA2sAIsgdo6QPqc,22 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest_axes_2_3/test_data_set_0/output_0.pb,sha256=sqObjzJsZ_nVStepjZKmx3dOTVd9cGqBu3Uw2w6tYFg,111 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest_axes_3_2/model.onnx,sha256=N28Zs0qhbIprNOqoC7UKiuHeSpsTIZb8DnTfNHD1sLk,212 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest_axes_3_2/test_data_set_0/input_0.pb,sha256=mLXavIMpEU-t1w77qkVcBljM9nIegS0-j17fhrivmno,31 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest_axes_3_2/test_data_set_0/input_1.pb,sha256=mmDREgvzTwtEpDJvveHfvSZRixUEC3EI5pPND3xyE7s,22 +onnx/backend/test/data/node/test_resize_upsample_scales_nearest_axes_3_2/test_data_set_0/output_0.pb,sha256=sqObjzJsZ_nVStepjZKmx3dOTVd9cGqBu3Uw2w6tYFg,111 +onnx/backend/test/data/node/test_resize_upsample_sizes_cubic/model.onnx,sha256=RIKO4GXHzXsw2M5N1YgKGrWkMVqZwvMbs1er6OpPCDs,183 +onnx/backend/test/data/node/test_resize_upsample_sizes_cubic/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_upsample_sizes_cubic/test_data_set_0/input_1.pb,sha256=nZ1sl7gz-g_V2hXQY5LuvuGaUGoBw8yTWrHeSJMcosg,45 +onnx/backend/test/data/node/test_resize_upsample_sizes_cubic/test_data_set_0/output_0.pb,sha256=EAIMP98RAbzoToZHuzVJfftxaZYNj-guraY2RAfAc9Y,376 
+onnx/backend/test/data/node/test_resize_upsample_sizes_nearest/model.onnx,sha256=6-rxY2SQvDXSdskmMVaMM18HbJfpurkKmvbnI_6n9lI,187 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest/test_data_set_0/input_0.pb,sha256=mLXavIMpEU-t1w77qkVcBljM9nIegS0-j17fhrivmno,31 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest/test_data_set_0/input_1.pb,sha256=1vs5j9PloFBBBanpvHk9w3ue44PVRJ1eYMbKK18UxOs,45 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest/test_data_set_0/output_0.pb,sha256=j2BVQHsxg5m9FtJaMV_08Y721i4ReIh15UcV_iWXsgA,240 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_axes_2_3/model.onnx,sha256=mf61AtWMlncmh64F_VcEHZgN5R65F3gcLtugKrNVWxY,211 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_axes_2_3/test_data_set_0/input_0.pb,sha256=mLXavIMpEU-t1w77qkVcBljM9nIegS0-j17fhrivmno,31 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_axes_2_3/test_data_set_0/input_1.pb,sha256=bEY07sadyrUqd56CgpL2kpJOof8lkigI8NAOAw40NyQ,29 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_axes_2_3/test_data_set_0/output_0.pb,sha256=j2BVQHsxg5m9FtJaMV_08Y721i4ReIh15UcV_iWXsgA,240 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_axes_3_2/model.onnx,sha256=sXWUKH1vVFpC8ZJXdWE1Sunu6Ts3hkVax_wWRNeeujA,211 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_axes_3_2/test_data_set_0/input_0.pb,sha256=mLXavIMpEU-t1w77qkVcBljM9nIegS0-j17fhrivmno,31 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_axes_3_2/test_data_set_0/input_1.pb,sha256=lOngZJhTLdAR-LZ3kvPtpq3ScjST_SQb8On7xErMt5w,29 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_axes_3_2/test_data_set_0/output_0.pb,sha256=j2BVQHsxg5m9FtJaMV_08Y721i4ReIh15UcV_iWXsgA,240 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_ceil_half_pixel/model.onnx,sha256=4KFn62-_PaftnFvGvGKF0dj2EDdTyyHL1W_Ttaajtz0,277 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_ceil_half_pixel/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_ceil_half_pixel/test_data_set_0/input_1.pb,sha256=TQJhADxzQwfIkkc2pen1_wFd2i4A0mEwyy0xjW-97n0,45 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_ceil_half_pixel/test_data_set_0/output_0.pb,sha256=3eAyujmk4k2eAAy3do3PVMqPf1luKx6ThRenNqxaA3I,272 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_floor_align_corners/model.onnx,sha256=4NEEGSuIpI2t0R7pzQcaQFlJezciMfzLBPn9e0Rwsdk,285 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_floor_align_corners/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_floor_align_corners/test_data_set_0/input_1.pb,sha256=TQJhADxzQwfIkkc2pen1_wFd2i4A0mEwyy0xjW-97n0,45 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_floor_align_corners/test_data_set_0/output_0.pb,sha256=sFAaFbl0e1cItIPWKmEoGnPpo2FsJFYi1U_Wq4Ugh2I,272 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_not_larger/model.onnx,sha256=3ffqPpmk_CBgwSqt0m_4hxKlF67yac4BL9auePr2jLo,257 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_not_larger/test_data_set_0/input_0.pb,sha256=mLXavIMpEU-t1w77qkVcBljM9nIegS0-j17fhrivmno,31 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_not_larger/test_data_set_0/input_1.pb,sha256=bEY07sadyrUqd56CgpL2kpJOof8lkigI8NAOAw40NyQ,29 
+onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_not_larger/test_data_set_0/output_0.pb,sha256=8clDIxBXkyuHVQ323HmQekDO90VrPFq47TfOm87Njkk,272 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_round_prefer_ceil_asymmetric/model.onnx,sha256=N20l0fhsa8cnQ9SdtwuvD1nO5NknKOe_GsNjBvhia3M,304 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_round_prefer_ceil_asymmetric/test_data_set_0/input_0.pb,sha256=MEKMWYHSTOeX7aAtTuIDlSJhbhXnd5W9OLBmeaNaDPE,79 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_round_prefer_ceil_asymmetric/test_data_set_0/input_1.pb,sha256=TQJhADxzQwfIkkc2pen1_wFd2i4A0mEwyy0xjW-97n0,45 +onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_round_prefer_ceil_asymmetric/test_data_set_0/output_0.pb,sha256=3eAyujmk4k2eAAy3do3PVMqPf1luKx6ThRenNqxaA3I,272 +onnx/backend/test/data/node/test_reversesequence_batch/model.onnx,sha256=a3VXmvzlI1t3mIA2hPjhmYHL8fri4xP1_u1ZI2uG1Q0,201 +onnx/backend/test/data/node/test_reversesequence_batch/test_data_set_0/input_0.pb,sha256=SkEHD0rsxMgSTW_tjWL00vAdiYAszJiX1Ksx5YSBdK0,75 +onnx/backend/test/data/node/test_reversesequence_batch/test_data_set_0/input_1.pb,sha256=Wt9-cJ901jGUNQSJpk5jrUaamSkE48Z9G1veqRrgm8U,53 +onnx/backend/test/data/node/test_reversesequence_batch/test_data_set_0/output_0.pb,sha256=X6YMiU-O_e2z3CYNrjkrVro88Zp9gYpoMd3iIeQPQ3E,75 +onnx/backend/test/data/node/test_reversesequence_time/model.onnx,sha256=wBxZXx0gVDsbH_UOz8PVsQseF_R3ghgk9fCDrov0lNM,200 +onnx/backend/test/data/node/test_reversesequence_time/test_data_set_0/input_0.pb,sha256=QhzuL3tyAa1nIihKr6jZUzHkfu4GTyPlq0GErxTiLgI,75 +onnx/backend/test/data/node/test_reversesequence_time/test_data_set_0/input_1.pb,sha256=Cq6df1JM6R0nUUd50ZJO7lz1h-jhb2YzMjuCxeVVIpI,53 +onnx/backend/test/data/node/test_reversesequence_time/test_data_set_0/output_0.pb,sha256=E74my-HnbjLGI__m_D9lTIR8sU4qlEEDW7AagYb5d8E,75 +onnx/backend/test/data/node/test_rnn_seq_length/model.onnx,sha256=XVN6KM9qoPoLCSpS-LC5Uif_k2fTmGylbZbPBhVMPX8,215 +onnx/backend/test/data/node/test_rnn_seq_length/test_data_set_0/input_0.pb,sha256=fFR4OIXWBj5wmFEuZbfl7GOhhRaSJv9tEZM8VbjQ72Q,85 +onnx/backend/test/data/node/test_rnn_seq_length/test_data_set_0/input_1.pb,sha256=ET6TMaay9jqrVqbna-Wnsfz97AB1cb3OoixzzqcB9SE,73 +onnx/backend/test/data/node/test_rnn_seq_length/test_data_set_0/input_2.pb,sha256=8OukqSSCHVpI2UW8Qhn4j9Y8PmzUUWI2y2Qi7j-l6EM,113 +onnx/backend/test/data/node/test_rnn_seq_length/test_data_set_0/input_3.pb,sha256=YPIH8tZG0ntqRQMb2MWtwnrFHQBCYUBq9n-MBS8rYck,51 +onnx/backend/test/data/node/test_rnn_seq_length/test_data_set_0/output_0.pb,sha256=J__yfJ1bCTGPdlxsojDRQtQOq4aNWQq_os7EVw4DTag,75 +onnx/backend/test/data/node/test_roialign_aligned_false/model.onnx,sha256=8C_p0IzC5tpFL3jzziJF9mIyZXKTvy3ML9GjhYmFHgA,352 +onnx/backend/test/data/node/test_roialign_aligned_false/test_data_set_0/input_0.pb,sha256=MAl6mj4EdjRRLLTTrMR3QypOOeqBR_zfDSrKfI5ZOIA,416 +onnx/backend/test/data/node/test_roialign_aligned_false/test_data_set_0/input_1.pb,sha256=0dELrNhfMRb954VgWZmrlXZ9foqomY7BBxzVO39h3F8,62 +onnx/backend/test/data/node/test_roialign_aligned_false/test_data_set_0/input_2.pb,sha256=KBLspuA99QkABElyuFbkwAASK19MfsWnSyuLyOuuhlM,45 +onnx/backend/test/data/node/test_roialign_aligned_false/test_data_set_0/output_0.pb,sha256=pBkN2eAHpzdKOrULgdFPb1vBUZYLEkshdoLCpFRszj0,316 +onnx/backend/test/data/node/test_roialign_aligned_true/model.onnx,sha256=8TfQUVxJaPzuFyO3vePxwCS-uoGGTMWt1bfecpCacK4,344 
+onnx/backend/test/data/node/test_roialign_aligned_true/test_data_set_0/input_0.pb,sha256=MAl6mj4EdjRRLLTTrMR3QypOOeqBR_zfDSrKfI5ZOIA,416 +onnx/backend/test/data/node/test_roialign_aligned_true/test_data_set_0/input_1.pb,sha256=0dELrNhfMRb954VgWZmrlXZ9foqomY7BBxzVO39h3F8,62 +onnx/backend/test/data/node/test_roialign_aligned_true/test_data_set_0/input_2.pb,sha256=KBLspuA99QkABElyuFbkwAASK19MfsWnSyuLyOuuhlM,45 +onnx/backend/test/data/node/test_roialign_aligned_true/test_data_set_0/output_0.pb,sha256=fJDZDLjphgnhrq0aWFk-lPD_rGXn8507MnHb4SivShk,316 +onnx/backend/test/data/node/test_roialign_mode_max/model.onnx,sha256=qA9lxKwQjJyeCeuPXT-ghlWh3eeAHrv9WbYGRf6zIC8,363 +onnx/backend/test/data/node/test_roialign_mode_max/test_data_set_0/input_0.pb,sha256=MAl6mj4EdjRRLLTTrMR3QypOOeqBR_zfDSrKfI5ZOIA,416 +onnx/backend/test/data/node/test_roialign_mode_max/test_data_set_0/input_1.pb,sha256=0dELrNhfMRb954VgWZmrlXZ9foqomY7BBxzVO39h3F8,62 +onnx/backend/test/data/node/test_roialign_mode_max/test_data_set_0/input_2.pb,sha256=KBLspuA99QkABElyuFbkwAASK19MfsWnSyuLyOuuhlM,45 +onnx/backend/test/data/node/test_roialign_mode_max/test_data_set_0/output_0.pb,sha256=TlRBT6bTd12HEPhEY2vPxZQBJiTCRcgxHzeyBj42FE4,316 +onnx/backend/test/data/node/test_round/model.onnx,sha256=0_cMKyq3iOq-UN-KzMuy5pMzwSzaArLwpXEGTYI9Kkk,85 +onnx/backend/test/data/node/test_round/test_data_set_0/input_0.pb,sha256=cHlnrY3Uk-RbEkYhAli-6AAm8OTVLrXCadT3V-VzqNw,69 +onnx/backend/test/data/node/test_round/test_data_set_0/output_0.pb,sha256=ZsG9a3LCPTA8lsuKatNZHi17ZVfdZb_L-nBR-4mezCI,69 +onnx/backend/test/data/node/test_scan9_sum/model.onnx,sha256=xhD2OrUhOs2VRKs6i0A8zGY4cRF7JoXuzLcr_HiazMA,350 +onnx/backend/test/data/node/test_scan9_sum/test_data_set_0/input_0.pb,sha256=BnhbN_OTtCNX4M6io3UpaIpvxLzrzErNVilo4coaHGM,23 +onnx/backend/test/data/node/test_scan9_sum/test_data_set_0/input_1.pb,sha256=t5reF7GY_oHbl7Vv4kVs1SoxvwpCDuisZdvKyL_FV-s,35 +onnx/backend/test/data/node/test_scan9_sum/test_data_set_0/output_0.pb,sha256=1sTPVuO9wgsKDIg1gHRzhl3ErtQ1AkzI8laUM5YnrsI,17 +onnx/backend/test/data/node/test_scan9_sum/test_data_set_0/output_1.pb,sha256=HL309oJqNALe24vCksVqGrZ85esldvFHCW6BJejssMw,35 +onnx/backend/test/data/node/test_scan_sum/model.onnx,sha256=kx8UnHYF_CHpQ6CMgieEJsQM1w9FuxdOFP70-ekGUfs,367 +onnx/backend/test/data/node/test_scan_sum/test_data_set_0/input_0.pb,sha256=hft6PG5mqQ8I6v75s5v-STuBVSUl7jLRHCIYXQvvY0g,25 +onnx/backend/test/data/node/test_scan_sum/test_data_set_0/input_1.pb,sha256=AYFDqPhKTq-30FDGtMNfVnNc7S63t8gevJZ5mCzDIuw,37 +onnx/backend/test/data/node/test_scan_sum/test_data_set_0/output_0.pb,sha256=ATpWiBkuoiOqvU3Ad2myqm3WbAGmyrxfQD-VAfugDM4,19 +onnx/backend/test/data/node/test_scan_sum/test_data_set_0/output_1.pb,sha256=LHi4F8MFmjXkX1oYEtIO2SOeEooNoaw2uD1woEvXUi4,37 +onnx/backend/test/data/node/test_scatter_elements_with_axis/model.onnx,sha256=WK7CTrPZTogMaBWL4loizfcx-n60FSOvqF_MFwgOZ0o,216 +onnx/backend/test/data/node/test_scatter_elements_with_axis/test_data_set_0/input_0.pb,sha256=FNuWkFG-4ZlMWvn8oE62jKzYt8GdCRtfDkBYNgeUSmI,34 +onnx/backend/test/data/node/test_scatter_elements_with_axis/test_data_set_0/input_1.pb,sha256=yxAkaBm3PWG9VJ8QL9zIHyP9dhDU0NmwftHGiVWTbmQ,33 +onnx/backend/test/data/node/test_scatter_elements_with_axis/test_data_set_0/input_2.pb,sha256=JDlgg9wBEw7Afkr4_suXRgyDMXlDVfbgBJOPnls2jao,25 +onnx/backend/test/data/node/test_scatter_elements_with_axis/test_data_set_0/output_0.pb,sha256=JcgSylIhYN2e8u823w30DFfY3PA8sLsLxUDtn6dGNxw,31 
+onnx/backend/test/data/node/test_scatter_elements_with_duplicate_indices/model.onnx,sha256=zH2xipnpOkq7FoVC7cDl8imEpz-5zW72QybRS9EgoMw,250 +onnx/backend/test/data/node/test_scatter_elements_with_duplicate_indices/test_data_set_0/input_0.pb,sha256=FNuWkFG-4ZlMWvn8oE62jKzYt8GdCRtfDkBYNgeUSmI,34 +onnx/backend/test/data/node/test_scatter_elements_with_duplicate_indices/test_data_set_0/input_1.pb,sha256=LtXsziTJhM9eu2tV2pF68OB993sF-YaVhWI0izz49TE,33 +onnx/backend/test/data/node/test_scatter_elements_with_duplicate_indices/test_data_set_0/input_2.pb,sha256=JDlgg9wBEw7Afkr4_suXRgyDMXlDVfbgBJOPnls2jao,25 +onnx/backend/test/data/node/test_scatter_elements_with_duplicate_indices/test_data_set_0/output_0.pb,sha256=DjROkWNEJO-bgVoEdccRipm7A57FFhCf5hpJbikVJYk,31 +onnx/backend/test/data/node/test_scatter_elements_with_negative_indices/model.onnx,sha256=NmexRxrBS5OI5SNRqpyS67zwEpZpnczkzobgHpRFke8,228 +onnx/backend/test/data/node/test_scatter_elements_with_negative_indices/test_data_set_0/input_0.pb,sha256=FNuWkFG-4ZlMWvn8oE62jKzYt8GdCRtfDkBYNgeUSmI,34 +onnx/backend/test/data/node/test_scatter_elements_with_negative_indices/test_data_set_0/input_1.pb,sha256=q0Fin1pAQYBdnSVWMB4WrxdLIAqsz7cEIXK1wpL7HU4,33 +onnx/backend/test/data/node/test_scatter_elements_with_negative_indices/test_data_set_0/input_2.pb,sha256=JDlgg9wBEw7Afkr4_suXRgyDMXlDVfbgBJOPnls2jao,25 +onnx/backend/test/data/node/test_scatter_elements_with_negative_indices/test_data_set_0/output_0.pb,sha256=8VeVc3XRkIkfpRbJcVpn2e1uRLVVYuqOH4P1iLBaecE,31 +onnx/backend/test/data/node/test_scatter_elements_with_reduction_max/model.onnx,sha256=4LrnmkHzDZ3YI0LeVoU5GVjge3pUxV5gAjfXeghreb4,246 +onnx/backend/test/data/node/test_scatter_elements_with_reduction_max/test_data_set_0/input_0.pb,sha256=FNuWkFG-4ZlMWvn8oE62jKzYt8GdCRtfDkBYNgeUSmI,34 +onnx/backend/test/data/node/test_scatter_elements_with_reduction_max/test_data_set_0/input_1.pb,sha256=LtXsziTJhM9eu2tV2pF68OB993sF-YaVhWI0izz49TE,33 +onnx/backend/test/data/node/test_scatter_elements_with_reduction_max/test_data_set_0/input_2.pb,sha256=JDlgg9wBEw7Afkr4_suXRgyDMXlDVfbgBJOPnls2jao,25 +onnx/backend/test/data/node/test_scatter_elements_with_reduction_max/test_data_set_0/output_0.pb,sha256=WBm964VPNdWt89-yahLjj-RvVplFLut4ZXE0dfnAUuM,31 +onnx/backend/test/data/node/test_scatter_elements_with_reduction_min/model.onnx,sha256=0vsDUI5GdeNGWs9SnbEn8Z5XYbneMDWgNTMATmhNkts,246 +onnx/backend/test/data/node/test_scatter_elements_with_reduction_min/test_data_set_0/input_0.pb,sha256=FNuWkFG-4ZlMWvn8oE62jKzYt8GdCRtfDkBYNgeUSmI,34 +onnx/backend/test/data/node/test_scatter_elements_with_reduction_min/test_data_set_0/input_1.pb,sha256=LtXsziTJhM9eu2tV2pF68OB993sF-YaVhWI0izz49TE,33 +onnx/backend/test/data/node/test_scatter_elements_with_reduction_min/test_data_set_0/input_2.pb,sha256=JDlgg9wBEw7Afkr4_suXRgyDMXlDVfbgBJOPnls2jao,25 +onnx/backend/test/data/node/test_scatter_elements_with_reduction_min/test_data_set_0/output_0.pb,sha256=WADdYmrlyUDxTlC66OZ4XSg_-wQyGi9T4zlov7eXuVA,31 +onnx/backend/test/data/node/test_scatter_elements_without_axis/model.onnx,sha256=5HX0on3L5s_YHj50f0VGynXETPH7KT5qPkvhRgMB34Q,206 +onnx/backend/test/data/node/test_scatter_elements_without_axis/test_data_set_0/input_0.pb,sha256=DiaJ-ROL20bBztx5Xg3H7swSLlKyglCXlmncvXA-HPM,50 +onnx/backend/test/data/node/test_scatter_elements_without_axis/test_data_set_0/input_1.pb,sha256=WXUWKy1f1Dr3A5TpL6asvPHu54JotHi6BQUtGRE7JOU,65 
+onnx/backend/test/data/node/test_scatter_elements_without_axis/test_data_set_0/input_2.pb,sha256=FB191eliy4ig-YiffzExk2oVes_SQmX9mHmi6f9Hfh0,41 +onnx/backend/test/data/node/test_scatter_elements_without_axis/test_data_set_0/output_0.pb,sha256=LNLuWCnaX_nnESo8S2kfkcdAP2b_Prx7HAGSQu1nZGo,47 +onnx/backend/test/data/node/test_scatter_with_axis/model.onnx,sha256=mCeJdeuAWeQ2cIfU14Z4Ak4vnlKz1CmVH4cLSsFn1Jc,199 +onnx/backend/test/data/node/test_scatter_with_axis/test_data_set_0/input_0.pb,sha256=FNuWkFG-4ZlMWvn8oE62jKzYt8GdCRtfDkBYNgeUSmI,34 +onnx/backend/test/data/node/test_scatter_with_axis/test_data_set_0/input_1.pb,sha256=yxAkaBm3PWG9VJ8QL9zIHyP9dhDU0NmwftHGiVWTbmQ,33 +onnx/backend/test/data/node/test_scatter_with_axis/test_data_set_0/input_2.pb,sha256=JDlgg9wBEw7Afkr4_suXRgyDMXlDVfbgBJOPnls2jao,25 +onnx/backend/test/data/node/test_scatter_with_axis/test_data_set_0/output_0.pb,sha256=JcgSylIhYN2e8u823w30DFfY3PA8sLsLxUDtn6dGNxw,31 +onnx/backend/test/data/node/test_scatter_without_axis/model.onnx,sha256=3PIZOZHIxXEr-zqvEWLoqwtdYL-u86zgSDXLv_w1CX4,189 +onnx/backend/test/data/node/test_scatter_without_axis/test_data_set_0/input_0.pb,sha256=DiaJ-ROL20bBztx5Xg3H7swSLlKyglCXlmncvXA-HPM,50 +onnx/backend/test/data/node/test_scatter_without_axis/test_data_set_0/input_1.pb,sha256=WXUWKy1f1Dr3A5TpL6asvPHu54JotHi6BQUtGRE7JOU,65 +onnx/backend/test/data/node/test_scatter_without_axis/test_data_set_0/input_2.pb,sha256=FB191eliy4ig-YiffzExk2oVes_SQmX9mHmi6f9Hfh0,41 +onnx/backend/test/data/node/test_scatter_without_axis/test_data_set_0/output_0.pb,sha256=LNLuWCnaX_nnESo8S2kfkcdAP2b_Prx7HAGSQu1nZGo,47 +onnx/backend/test/data/node/test_scatternd/model.onnx,sha256=bivVceLLPAvDCX2F-7S7g_5V1KP_jTWUKBb1I2EHJVk,192 +onnx/backend/test/data/node/test_scatternd/test_data_set_0/input_0.pb,sha256=mKV4k_mYJEXu-r1W90bshbQV9QMgwkHwf9xQ76Rngkw,273 +onnx/backend/test/data/node/test_scatternd/test_data_set_0/input_1.pb,sha256=qUcsgYavSojoDVEkWYR3mzbddMcAMBPdRT4-G56dq1A,33 +onnx/backend/test/data/node/test_scatternd/test_data_set_0/input_2.pb,sha256=RShGkR2PNYKvBsBx83gqCgu6B6x3ly1wRWNp1JFTHH0,148 +onnx/backend/test/data/node/test_scatternd/test_data_set_0/output_0.pb,sha256=A6LOHrpDiF3e-qhcVGjGViue1VwB_INvC3ybI-jpnVU,270 +onnx/backend/test/data/node/test_scatternd_add/model.onnx,sha256=pYn7qepwWYvAL_DUNkJ_frU8pMhPl_hWs1xJXhPpCbQ,217 +onnx/backend/test/data/node/test_scatternd_add/test_data_set_0/input_0.pb,sha256=mKV4k_mYJEXu-r1W90bshbQV9QMgwkHwf9xQ76Rngkw,273 +onnx/backend/test/data/node/test_scatternd_add/test_data_set_0/input_1.pb,sha256=yUrlT5iCIq0_2sUQNf_ql7v3LIeB4ZdHnA_kjiY6qE4,33 +onnx/backend/test/data/node/test_scatternd_add/test_data_set_0/input_2.pb,sha256=RShGkR2PNYKvBsBx83gqCgu6B6x3ly1wRWNp1JFTHH0,148 +onnx/backend/test/data/node/test_scatternd_add/test_data_set_0/output_0.pb,sha256=Pg4VIAYXcs47zRkEaqMYymr8C5m7po7X2ru2phvlYow,270 +onnx/backend/test/data/node/test_scatternd_max/model.onnx,sha256=SFfA1SCU5HdrgvqZhqwbCFhbtT0S4-jAidV-Q6GHYrc,217 +onnx/backend/test/data/node/test_scatternd_max/test_data_set_0/input_0.pb,sha256=mKV4k_mYJEXu-r1W90bshbQV9QMgwkHwf9xQ76Rngkw,273 +onnx/backend/test/data/node/test_scatternd_max/test_data_set_0/input_1.pb,sha256=yUrlT5iCIq0_2sUQNf_ql7v3LIeB4ZdHnA_kjiY6qE4,33 +onnx/backend/test/data/node/test_scatternd_max/test_data_set_0/input_2.pb,sha256=RShGkR2PNYKvBsBx83gqCgu6B6x3ly1wRWNp1JFTHH0,148 +onnx/backend/test/data/node/test_scatternd_max/test_data_set_0/output_0.pb,sha256=H01S0BHgoas9UovgsbP5Z6NrAjVkCQuZQQFdRv2GsZ4,270 
+onnx/backend/test/data/node/test_scatternd_min/model.onnx,sha256=YhCygCjRYWyVCBFo5iaYsG-pGLn2HK0wgTIP4bnLT_M,217 +onnx/backend/test/data/node/test_scatternd_min/test_data_set_0/input_0.pb,sha256=mKV4k_mYJEXu-r1W90bshbQV9QMgwkHwf9xQ76Rngkw,273 +onnx/backend/test/data/node/test_scatternd_min/test_data_set_0/input_1.pb,sha256=yUrlT5iCIq0_2sUQNf_ql7v3LIeB4ZdHnA_kjiY6qE4,33 +onnx/backend/test/data/node/test_scatternd_min/test_data_set_0/input_2.pb,sha256=RShGkR2PNYKvBsBx83gqCgu6B6x3ly1wRWNp1JFTHH0,148 +onnx/backend/test/data/node/test_scatternd_min/test_data_set_0/output_0.pb,sha256=bxnp_Su4TiWAgetLLCEZyk0yunmaFzrwlozwz9H8iaw,270 +onnx/backend/test/data/node/test_scatternd_multiply/model.onnx,sha256=4_A5H04c4XRoD50uMfujmSqh_Gq2fmfB3NRx4s7Y8Uc,222 +onnx/backend/test/data/node/test_scatternd_multiply/test_data_set_0/input_0.pb,sha256=mKV4k_mYJEXu-r1W90bshbQV9QMgwkHwf9xQ76Rngkw,273 +onnx/backend/test/data/node/test_scatternd_multiply/test_data_set_0/input_1.pb,sha256=yUrlT5iCIq0_2sUQNf_ql7v3LIeB4ZdHnA_kjiY6qE4,33 +onnx/backend/test/data/node/test_scatternd_multiply/test_data_set_0/input_2.pb,sha256=RShGkR2PNYKvBsBx83gqCgu6B6x3ly1wRWNp1JFTHH0,148 +onnx/backend/test/data/node/test_scatternd_multiply/test_data_set_0/output_0.pb,sha256=M6TZol_avnQUDu8YK3eANT0kknICIs65yucp2T0rXfU,270 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii/model.onnx,sha256=4DjLogkrqhacIeo9bDuxaXEiMj2DiDf63gIqEnmOAVI,231 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii/test_data_set_0/input_0.pb,sha256=qupP19AFkVGsd5p_IG3o67eqQmCi4VOmIPPH2HatK8E,374 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii/test_data_set_0/input_1.pb,sha256=B1l-vlt8ntQR6mMtlxFwTx5L-JRc48LxU5URv95CBwg,156 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii/test_data_set_0/input_2.pb,sha256=S4-FwQAxtH_avU6Egu10Eg7k2WkcisObX8kZuTMU1HE,29 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii/test_data_set_0/output_0.pb,sha256=T1FHwvvDgmOgW7Wcdac4m7Ox5leiRh7vEpBMM_acaaQ,11 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_expanded/model.onnx,sha256=ePNl_XFo8EbbDr1TohiYMiSTY27lWaRUA4_IJwaNPGg,1672 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_expanded/test_data_set_0/input_0.pb,sha256=qupP19AFkVGsd5p_IG3o67eqQmCi4VOmIPPH2HatK8E,374 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_expanded/test_data_set_0/input_1.pb,sha256=B1l-vlt8ntQR6mMtlxFwTx5L-JRc48LxU5URv95CBwg,156 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_expanded/test_data_set_0/input_2.pb,sha256=S4-FwQAxtH_avU6Egu10Eg7k2WkcisObX8kZuTMU1HE,29 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_expanded/test_data_set_0/output_0.pb,sha256=T1FHwvvDgmOgW7Wcdac4m7Ox5leiRh7vEpBMM_acaaQ,11 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob/model.onnx,sha256=qPccBQaYpxSGbNs26guvmm0OVSGVuLseoBdqhQkDupk,282 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob/test_data_set_0/input_0.pb,sha256=qupP19AFkVGsd5p_IG3o67eqQmCi4VOmIPPH2HatK8E,374 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob/test_data_set_0/input_1.pb,sha256=B1l-vlt8ntQR6mMtlxFwTx5L-JRc48LxU5URv95CBwg,156 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob/test_data_set_0/input_2.pb,sha256=S4-FwQAxtH_avU6Egu10Eg7k2WkcisObX8kZuTMU1HE,29 
+onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob/test_data_set_0/output_0.pb,sha256=T1FHwvvDgmOgW7Wcdac4m7Ox5leiRh7vEpBMM_acaaQ,11 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob/test_data_set_0/output_1.pb,sha256=OVOOloRk_LlopMXvzfRiT17U7xyjZZqdiZtrcdp54gg,381 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob_expanded/model.onnx,sha256=AwsPBxuS30rFBhQkQTpFjzIICdDK5WR2BKxRlnHweUo,1959 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob_expanded/test_data_set_0/input_0.pb,sha256=qupP19AFkVGsd5p_IG3o67eqQmCi4VOmIPPH2HatK8E,374 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob_expanded/test_data_set_0/input_1.pb,sha256=B1l-vlt8ntQR6mMtlxFwTx5L-JRc48LxU5URv95CBwg,156 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob_expanded/test_data_set_0/input_2.pb,sha256=S4-FwQAxtH_avU6Egu10Eg7k2WkcisObX8kZuTMU1HE,29 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob_expanded/test_data_set_0/output_0.pb,sha256=T1FHwvvDgmOgW7Wcdac4m7Ox5leiRh7vEpBMM_acaaQ,11 +onnx/backend/test/data/node/test_sce_NCd1_mean_weight_negative_ii_log_prob_expanded/test_data_set_0/output_1.pb,sha256=OVOOloRk_LlopMXvzfRiT17U7xyjZZqdiZtrcdp54gg,381 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii/model.onnx,sha256=MkIxoow4nc3DSQ6QRUyXxgMXTGo4pnaGmVlsr_jFTPc,250 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii/test_data_set_0/input_0.pb,sha256=Oj0wAqZWCkppXwBrkc88a6tsGSuIPPixSSKyu-0iAFI,10818 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii/test_data_set_0/input_1.pb,sha256=4RPKO8gfYFkdhvfs-BUDko5KhATj_U0MJsqius0TDBI,4336 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii/test_data_set_0/output_0.pb,sha256=la3r1KC4cnxtWqQmIjhvr5g6d51CtvveNgyfB9dH7ow,2176 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_expanded/model.onnx,sha256=b9f_7RDCB-p-c4xXii_LzXjE9b9BFG11eI1th4gN2rQ,1789 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_expanded/test_data_set_0/input_0.pb,sha256=Oj0wAqZWCkppXwBrkc88a6tsGSuIPPixSSKyu-0iAFI,10818 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_expanded/test_data_set_0/input_1.pb,sha256=4RPKO8gfYFkdhvfs-BUDko5KhATj_U0MJsqius0TDBI,4336 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_expanded/test_data_set_0/output_0.pb,sha256=la3r1KC4cnxtWqQmIjhvr5g6d51CtvveNgyfB9dH7ow,2176 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob/model.onnx,sha256=3dTRTlrZTBHv1JlBIrcvxSxLP4TUTkV5y_NM_2_6_Dw,309 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob/test_data_set_0/input_0.pb,sha256=Oj0wAqZWCkppXwBrkc88a6tsGSuIPPixSSKyu-0iAFI,10818 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob/test_data_set_0/input_1.pb,sha256=4RPKO8gfYFkdhvfs-BUDko5KhATj_U0MJsqius0TDBI,4336 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob/test_data_set_0/output_0.pb,sha256=la3r1KC4cnxtWqQmIjhvr5g6d51CtvveNgyfB9dH7ow,2176 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob/test_data_set_0/output_1.pb,sha256=WwTHHroM-9FFzxR9_T4rNpxcDfim9TWe7gZSOyW9dmo,10825 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_expanded/model.onnx,sha256=77p7YOdBc-XatsOyn3QWF8kD4Zdj1aW0GOMNJkbl5Ag,2091 
+onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_expanded/test_data_set_0/input_0.pb,sha256=Oj0wAqZWCkppXwBrkc88a6tsGSuIPPixSSKyu-0iAFI,10818 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_expanded/test_data_set_0/input_1.pb,sha256=4RPKO8gfYFkdhvfs-BUDko5KhATj_U0MJsqius0TDBI,4336 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_expanded/test_data_set_0/output_0.pb,sha256=la3r1KC4cnxtWqQmIjhvr5g6d51CtvveNgyfB9dH7ow,2176 +onnx/backend/test/data/node/test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_expanded/test_data_set_0/output_1.pb,sha256=WwTHHroM-9FFzxR9_T4rNpxcDfim9TWe7gZSOyW9dmo,10825 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii/model.onnx,sha256=6CST92qbIJai61Z17kv2qWUliCv-4jNORS8i5bswDl0,212 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii/test_data_set_0/input_1.pb,sha256=ECSYPd0TVGdd40h-c438gJx2ydi3K7_5RBP4JFP4my4,33 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii/test_data_set_0/input_2.pb,sha256=lv65FooWlI3jBxoDOoZFOvy7S8opPsYwNL5tiia8U3U,29 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii/test_data_set_0/output_0.pb,sha256=3aYW5UER_i5SRrjYt-SDD8VMa-GvcwrTG9Y9Qc-5CYk,11 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_expanded/model.onnx,sha256=j8rQDC-MWlFg87VAcZfCrhS-lslAMKA7iW7AzyDfaGE,1639 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_expanded/test_data_set_0/input_1.pb,sha256=ECSYPd0TVGdd40h-c438gJx2ydi3K7_5RBP4JFP4my4,33 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_expanded/test_data_set_0/input_2.pb,sha256=lv65FooWlI3jBxoDOoZFOvy7S8opPsYwNL5tiia8U3U,29 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_expanded/test_data_set_0/output_0.pb,sha256=3aYW5UER_i5SRrjYt-SDD8VMa-GvcwrTG9Y9Qc-5CYk,11 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob/model.onnx,sha256=y8kNNJ56nd8PSzLYECLjEk8XwCDpSWdysau225CSpNI,259 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob/test_data_set_0/input_1.pb,sha256=ECSYPd0TVGdd40h-c438gJx2ydi3K7_5RBP4JFP4my4,33 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob/test_data_set_0/input_2.pb,sha256=lv65FooWlI3jBxoDOoZFOvy7S8opPsYwNL5tiia8U3U,29 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob/test_data_set_0/output_0.pb,sha256=3aYW5UER_i5SRrjYt-SDD8VMa-GvcwrTG9Y9Qc-5CYk,11 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_expanded/model.onnx,sha256=FtGQ7KjUknKScU3L1SUUAIS4gpaqr30ixmBUQ6-REc0,1921 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 
+onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_expanded/test_data_set_0/input_1.pb,sha256=ECSYPd0TVGdd40h-c438gJx2ydi3K7_5RBP4JFP4my4,33 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_expanded/test_data_set_0/input_2.pb,sha256=lv65FooWlI3jBxoDOoZFOvy7S8opPsYwNL5tiia8U3U,29 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_expanded/test_data_set_0/output_0.pb,sha256=3aYW5UER_i5SRrjYt-SDD8VMa-GvcwrTG9Y9Qc-5CYk,11 +onnx/backend/test/data/node/test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_expanded/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight/model.onnx,sha256=15CKZ6DFK9pgqZwiJaYE8xGF3rBGiYCjA46GWKXt9UE,229 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight/test_data_set_0/input_0.pb,sha256=4ra_fFZovmMBj-YqaUnvA-maMuOR9tgeNMc5Db_5fk8,129623 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight/test_data_set_0/input_1.pb,sha256=ZB-f_6ZfwtvBYdYj1AmZYVZkOrWazb_X1yow65GR1sQ,51861 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight/test_data_set_0/input_2.pb,sha256=7c-ar0sMCQdJWPdtJ0UpaFamw1FPHpEbrzYZq0LmkLM,29 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight/test_data_set_0/output_0.pb,sha256=8TWjevDFjBey-IIoPfPMfeoeTsioE4dSFepfqkKERe4,11 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_expanded/model.onnx,sha256=a5I16sJlQjIhADbyISsEp2VJu6iNrYzHTWlcQM5f6As,1614 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_expanded/test_data_set_0/input_0.pb,sha256=4ra_fFZovmMBj-YqaUnvA-maMuOR9tgeNMc5Db_5fk8,129623 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_expanded/test_data_set_0/input_1.pb,sha256=ZB-f_6ZfwtvBYdYj1AmZYVZkOrWazb_X1yow65GR1sQ,51861 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_expanded/test_data_set_0/input_2.pb,sha256=7c-ar0sMCQdJWPdtJ0UpaFamw1FPHpEbrzYZq0LmkLM,29 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_expanded/test_data_set_0/output_0.pb,sha256=8TWjevDFjBey-IIoPfPMfeoeTsioE4dSFepfqkKERe4,11 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob/model.onnx,sha256=YY5-0tZwDx90tQD82OBpjTbmRqdG_OKVunHsjH6mhfM,296 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob/test_data_set_0/input_0.pb,sha256=4ra_fFZovmMBj-YqaUnvA-maMuOR9tgeNMc5Db_5fk8,129623 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob/test_data_set_0/input_1.pb,sha256=ZB-f_6ZfwtvBYdYj1AmZYVZkOrWazb_X1yow65GR1sQ,51861 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob/test_data_set_0/input_2.pb,sha256=7c-ar0sMCQdJWPdtJ0UpaFamw1FPHpEbrzYZq0LmkLM,29 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob/test_data_set_0/output_0.pb,sha256=8TWjevDFjBey-IIoPfPMfeoeTsioE4dSFepfqkKERe4,11 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob/test_data_set_0/output_1.pb,sha256=XSkwM4eCCjrPXkQeTjFxrP1R3X8PGrHLj1UaChCsHFk,129630 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob_expanded/model.onnx,sha256=09dt3pvbvWp5yUKOS65ubmHCevNtJ21KbCjeUPF8rvo,1913 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob_expanded/test_data_set_0/input_0.pb,sha256=4ra_fFZovmMBj-YqaUnvA-maMuOR9tgeNMc5Db_5fk8,129623 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob_expanded/test_data_set_0/input_1.pb,sha256=ZB-f_6ZfwtvBYdYj1AmZYVZkOrWazb_X1yow65GR1sQ,51861 
+onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob_expanded/test_data_set_0/input_2.pb,sha256=7c-ar0sMCQdJWPdtJ0UpaFamw1FPHpEbrzYZq0LmkLM,29 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob_expanded/test_data_set_0/output_0.pb,sha256=8TWjevDFjBey-IIoPfPMfeoeTsioE4dSFepfqkKERe4,11 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_mean_weight_log_prob_expanded/test_data_set_0/output_1.pb,sha256=XSkwM4eCCjrPXkQeTjFxrP1R3X8PGrHLj1UaChCsHFk,129630 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight/model.onnx,sha256=ULvWGcrcdSA1N_SUhkV1a6n2qI3MTPLeqWhZdqgwUz4,236 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight/test_data_set_0/input_0.pb,sha256=4ra_fFZovmMBj-YqaUnvA-maMuOR9tgeNMc5Db_5fk8,129623 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight/test_data_set_0/input_1.pb,sha256=ZB-f_6ZfwtvBYdYj1AmZYVZkOrWazb_X1yow65GR1sQ,51861 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight/test_data_set_0/output_0.pb,sha256=QG6n-tNdHkQgVv67CaAhmakmLGNLAPn0d_3F0ZQcLhs,25941 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_expanded/model.onnx,sha256=x91VgaxENlIQOA2y-v5mUc2FFt5VjHG8Gu4KDmrUAyQ,1663 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_expanded/test_data_set_0/input_0.pb,sha256=4ra_fFZovmMBj-YqaUnvA-maMuOR9tgeNMc5Db_5fk8,129623 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_expanded/test_data_set_0/input_1.pb,sha256=ZB-f_6ZfwtvBYdYj1AmZYVZkOrWazb_X1yow65GR1sQ,51861 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_expanded/test_data_set_0/output_0.pb,sha256=QG6n-tNdHkQgVv67CaAhmakmLGNLAPn0d_3F0ZQcLhs,25941 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_log_prob/model.onnx,sha256=kuUxbSd2Kk-OVlzmBwsJPV2hJYpFtd0IJXqvKo4bffw,303 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_log_prob/test_data_set_0/input_0.pb,sha256=4ra_fFZovmMBj-YqaUnvA-maMuOR9tgeNMc5Db_5fk8,129623 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_log_prob/test_data_set_0/input_1.pb,sha256=ZB-f_6ZfwtvBYdYj1AmZYVZkOrWazb_X1yow65GR1sQ,51861 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_log_prob/test_data_set_0/output_0.pb,sha256=QG6n-tNdHkQgVv67CaAhmakmLGNLAPn0d_3F0ZQcLhs,25941 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_log_prob/test_data_set_0/output_1.pb,sha256=XSkwM4eCCjrPXkQeTjFxrP1R3X8PGrHLj1UaChCsHFk,129630 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_expanded/model.onnx,sha256=2tcnV4ot7YtPLDc2hrj134D1hYL78w3GK6ST4G-DkiE,1965 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_expanded/test_data_set_0/input_0.pb,sha256=4ra_fFZovmMBj-YqaUnvA-maMuOR9tgeNMc5Db_5fk8,129623 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_expanded/test_data_set_0/input_1.pb,sha256=ZB-f_6ZfwtvBYdYj1AmZYVZkOrWazb_X1yow65GR1sQ,51861 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_expanded/test_data_set_0/output_0.pb,sha256=QG6n-tNdHkQgVv67CaAhmakmLGNLAPn0d_3F0ZQcLhs,25941 +onnx/backend/test/data/node/test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_expanded/test_data_set_0/output_1.pb,sha256=XSkwM4eCCjrPXkQeTjFxrP1R3X8PGrHLj1UaChCsHFk,129630 +onnx/backend/test/data/node/test_sce_mean/model.onnx,sha256=3-aUSOHOXSSfPlQt8uevWOY0JebH_Kfy4U23ceBqJMY,148 +onnx/backend/test/data/node/test_sce_mean/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 
+onnx/backend/test/data/node/test_sce_mean/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_mean/test_data_set_0/output_0.pb,sha256=ukRi1Wf8MHKfHhzBXTTjokAYLg59piNsWW1WCJoZKbQ,11 +onnx/backend/test/data/node/test_sce_mean_3d/model.onnx,sha256=b5f8KkCsoJ4L67xiLvpfytT09dUUaLEcgfZ5ROroihw,160 +onnx/backend/test/data/node/test_sce_mean_3d/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_3d/test_data_set_0/input_1.pb,sha256=kNoEKy3hlfUjU19ueKYQpkIk-4x7sch0y0l7QJwS_fE,59 +onnx/backend/test/data/node/test_sce_mean_3d/test_data_set_0/output_0.pb,sha256=miNyQyEMweya7bpEMp1TzAzXJzFp6xF-tqsM_YwAgYs,11 +onnx/backend/test/data/node/test_sce_mean_3d_expanded/model.onnx,sha256=q-u73xENPehluTdlJWM5_LrRkAcNfHTrB9OHxLcaTIw,1305 +onnx/backend/test/data/node/test_sce_mean_3d_expanded/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_3d_expanded/test_data_set_0/input_1.pb,sha256=kNoEKy3hlfUjU19ueKYQpkIk-4x7sch0y0l7QJwS_fE,59 +onnx/backend/test/data/node/test_sce_mean_3d_expanded/test_data_set_0/output_0.pb,sha256=miNyQyEMweya7bpEMp1TzAzXJzFp6xF-tqsM_YwAgYs,11 +onnx/backend/test/data/node/test_sce_mean_3d_log_prob/model.onnx,sha256=N0mTSdtEJf0VC6pr_80Z-iH6lZKHqyoFLABQN1kInU4,211 +onnx/backend/test/data/node/test_sce_mean_3d_log_prob/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_3d_log_prob/test_data_set_0/input_1.pb,sha256=kNoEKy3hlfUjU19ueKYQpkIk-4x7sch0y0l7QJwS_fE,59 +onnx/backend/test/data/node/test_sce_mean_3d_log_prob/test_data_set_0/output_0.pb,sha256=miNyQyEMweya7bpEMp1TzAzXJzFp6xF-tqsM_YwAgYs,11 +onnx/backend/test/data/node/test_sce_mean_3d_log_prob/test_data_set_0/output_1.pb,sha256=kvOCYAwJY6k1Dxx1Zl2374xcARw1xSaVCGAyNt7GDto,140 +onnx/backend/test/data/node/test_sce_mean_3d_log_prob_expanded/model.onnx,sha256=Pw8adJBkNBaFPyqtq5nf0C6e75H7tSROEtcmDhevIzo,1572 +onnx/backend/test/data/node/test_sce_mean_3d_log_prob_expanded/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_3d_log_prob_expanded/test_data_set_0/input_1.pb,sha256=kNoEKy3hlfUjU19ueKYQpkIk-4x7sch0y0l7QJwS_fE,59 +onnx/backend/test/data/node/test_sce_mean_3d_log_prob_expanded/test_data_set_0/output_0.pb,sha256=miNyQyEMweya7bpEMp1TzAzXJzFp6xF-tqsM_YwAgYs,11 +onnx/backend/test/data/node/test_sce_mean_3d_log_prob_expanded/test_data_set_0/output_1.pb,sha256=kvOCYAwJY6k1Dxx1Zl2374xcARw1xSaVCGAyNt7GDto,140 +onnx/backend/test/data/node/test_sce_mean_expanded/model.onnx,sha256=TfH-LhShXm0l-eXwnqag3gi4pGL_VJtIyTjtRDrji5Q,1252 +onnx/backend/test/data/node/test_sce_mean_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_expanded/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_mean_expanded/test_data_set_0/output_0.pb,sha256=ukRi1Wf8MHKfHhzBXTTjokAYLg59piNsWW1WCJoZKbQ,11 +onnx/backend/test/data/node/test_sce_mean_log_prob/model.onnx,sha256=rVrCzG3FrS3dBFUDfVMIr4uj_G-SWjxDAxT9mA1mMqc,196 +onnx/backend/test/data/node/test_sce_mean_log_prob/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 
+onnx/backend/test/data/node/test_sce_mean_log_prob/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_mean_log_prob/test_data_set_0/output_0.pb,sha256=ukRi1Wf8MHKfHhzBXTTjokAYLg59piNsWW1WCJoZKbQ,11 +onnx/backend/test/data/node/test_sce_mean_log_prob/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_mean_log_prob_expanded/model.onnx,sha256=xN4nEG5gFvD7tP7DFSD2UsbqWMpEmdld0fnUWWQjCmQ,1512 +onnx/backend/test/data/node/test_sce_mean_log_prob_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_log_prob_expanded/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_mean_log_prob_expanded/test_data_set_0/output_0.pb,sha256=ukRi1Wf8MHKfHhzBXTTjokAYLg59piNsWW1WCJoZKbQ,11 +onnx/backend/test/data/node/test_sce_mean_log_prob_expanded/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii/model.onnx,sha256=SBpEoe9urLW0rKeW0Ac3BzNiwtc9v4bMTW1nHNnHRuw,183 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii/test_data_set_0/input_1.pb,sha256=RyZME3YrdXmqynJj4q0osF7rcCSLXSIG2ufdzx0CWNo,33 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii/test_data_set_0/output_0.pb,sha256=-p2yYFxEuzJ3QVJAAEY02AlKOWgqfAh-pYOeft2_X-s,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d/model.onnx,sha256=hiUDHPE0pIkYNzMom_XlZLWqOZEGlpnDlhmVoXTHkoE,194 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d/test_data_set_0/input_1.pb,sha256=I4cSUTJG17QP5oA6xVZKsUhMOd-3Ykysb6WzcbuY1m4,59 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d/test_data_set_0/output_0.pb,sha256=PFOkOYcvliqNXtaW0RiMuvEKPotIC1lrIuQlASKuwqA,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_expanded/model.onnx,sha256=NPfg8j3S2-3d77D-bWCAA3P_paurP1cRDZHhkRyN8oE,1522 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_expanded/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_expanded/test_data_set_0/input_1.pb,sha256=I4cSUTJG17QP5oA6xVZKsUhMOd-3Ykysb6WzcbuY1m4,59 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_expanded/test_data_set_0/output_0.pb,sha256=PFOkOYcvliqNXtaW0RiMuvEKPotIC1lrIuQlASKuwqA,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_log_prob/model.onnx,sha256=HP6L2jRokXh1_vYgTh-CA4Pqpu4mw5sys0ZaBECvzic,245 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_log_prob/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_log_prob/test_data_set_0/input_1.pb,sha256=I4cSUTJG17QP5oA6xVZKsUhMOd-3Ykysb6WzcbuY1m4,59 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_log_prob/test_data_set_0/output_0.pb,sha256=PFOkOYcvliqNXtaW0RiMuvEKPotIC1lrIuQlASKuwqA,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_log_prob/test_data_set_0/output_1.pb,sha256=kvOCYAwJY6k1Dxx1Zl2374xcARw1xSaVCGAyNt7GDto,140 
+onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_log_prob_expanded/model.onnx,sha256=jEDXQgGxGjSzMNfNn48mwHMUbpfHPNAxWhW2-iA0P7w,1802 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_log_prob_expanded/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_log_prob_expanded/test_data_set_0/input_1.pb,sha256=I4cSUTJG17QP5oA6xVZKsUhMOd-3Ykysb6WzcbuY1m4,59 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_log_prob_expanded/test_data_set_0/output_0.pb,sha256=PFOkOYcvliqNXtaW0RiMuvEKPotIC1lrIuQlASKuwqA,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_3d_log_prob_expanded/test_data_set_0/output_1.pb,sha256=kvOCYAwJY6k1Dxx1Zl2374xcARw1xSaVCGAyNt7GDto,140 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d/model.onnx,sha256=A8FoRpgKtUOk4tOBHau3h4ZipX4Hu0RxF8-keeJwghw,202 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d/test_data_set_0/input_0.pb,sha256=TfLTx9UrsR2ZmBWzR_7JZlnzgcwoLG9VbMGWmwiRBYE,856 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d/test_data_set_0/input_1.pb,sha256=YqKk9mWOW1FVsAOz4P5b4Rkm3_Opfi4966ewfW81WBM,350 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d/test_data_set_0/output_0.pb,sha256=T1V0dSN1m0drt7O148bSLjanf8_Y6jYvzLcOyWMeUcc,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_expanded/model.onnx,sha256=X9nRxKLRogAJfBTFCU2G6-HzYKhMp3ep4M7EwDmbVNE,1530 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_expanded/test_data_set_0/input_0.pb,sha256=TfLTx9UrsR2ZmBWzR_7JZlnzgcwoLG9VbMGWmwiRBYE,856 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_expanded/test_data_set_0/input_1.pb,sha256=YqKk9mWOW1FVsAOz4P5b4Rkm3_Opfi4966ewfW81WBM,350 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_expanded/test_data_set_0/output_0.pb,sha256=T1V0dSN1m0drt7O148bSLjanf8_Y6jYvzLcOyWMeUcc,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_log_prob/model.onnx,sha256=sYLdAh9Wrfmia2nhAuMOFPnatKFH9bxxgrInChYqFvQ,257 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_log_prob/test_data_set_0/input_0.pb,sha256=TfLTx9UrsR2ZmBWzR_7JZlnzgcwoLG9VbMGWmwiRBYE,856 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_log_prob/test_data_set_0/input_1.pb,sha256=YqKk9mWOW1FVsAOz4P5b4Rkm3_Opfi4966ewfW81WBM,350 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_log_prob/test_data_set_0/output_0.pb,sha256=T1V0dSN1m0drt7O148bSLjanf8_Y6jYvzLcOyWMeUcc,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_log_prob/test_data_set_0/output_1.pb,sha256=w-EurMdSNKZnr-TaXWH4MRaTpJ4Oj-02F1ZbQw4Gch0,863 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_log_prob_expanded/model.onnx,sha256=RCwsAOHhRl-O5bFL_b4w-Ua5wUTSr8hufvHwXzTZTaA,1814 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_log_prob_expanded/test_data_set_0/input_0.pb,sha256=TfLTx9UrsR2ZmBWzR_7JZlnzgcwoLG9VbMGWmwiRBYE,856 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_log_prob_expanded/test_data_set_0/input_1.pb,sha256=YqKk9mWOW1FVsAOz4P5b4Rkm3_Opfi4966ewfW81WBM,350 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_log_prob_expanded/test_data_set_0/output_0.pb,sha256=T1V0dSN1m0drt7O148bSLjanf8_Y6jYvzLcOyWMeUcc,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_4d_log_prob_expanded/test_data_set_0/output_1.pb,sha256=w-EurMdSNKZnr-TaXWH4MRaTpJ4Oj-02F1ZbQw4Gch0,863 
+onnx/backend/test/data/node/test_sce_mean_no_weight_ii_expanded/model.onnx,sha256=q9QvMFTS8OztLXMlaXeKvt7LRrVmnlPDm5020UaXHkk,1469 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_expanded/test_data_set_0/input_1.pb,sha256=RyZME3YrdXmqynJj4q0osF7rcCSLXSIG2ufdzx0CWNo,33 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_expanded/test_data_set_0/output_0.pb,sha256=-p2yYFxEuzJ3QVJAAEY02AlKOWgqfAh-pYOeft2_X-s,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_log_prob/model.onnx,sha256=j7HGJhVhHWH5CFK5UID5XF2ctIy43BXBlsqx6KmPFwU,230 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_log_prob/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_log_prob/test_data_set_0/input_1.pb,sha256=RyZME3YrdXmqynJj4q0osF7rcCSLXSIG2ufdzx0CWNo,33 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_log_prob/test_data_set_0/output_0.pb,sha256=-p2yYFxEuzJ3QVJAAEY02AlKOWgqfAh-pYOeft2_X-s,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_log_prob/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_log_prob_expanded/model.onnx,sha256=VzorXg051TojMytCig-YX2oEh35Gmuz_mQ4M5S4WuAA,1742 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_log_prob_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_log_prob_expanded/test_data_set_0/input_1.pb,sha256=RyZME3YrdXmqynJj4q0osF7rcCSLXSIG2ufdzx0CWNo,33 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_log_prob_expanded/test_data_set_0/output_0.pb,sha256=-p2yYFxEuzJ3QVJAAEY02AlKOWgqfAh-pYOeft2_X-s,11 +onnx/backend/test/data/node/test_sce_mean_no_weight_ii_log_prob_expanded/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_mean_weight/model.onnx,sha256=FDtFwSxTJi2o6AxnBuYfJ50s5T4D7mCELpj0DsZLVbQ,176 +onnx/backend/test/data/node/test_sce_mean_weight/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_weight/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_mean_weight/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 +onnx/backend/test/data/node/test_sce_mean_weight/test_data_set_0/output_0.pb,sha256=lP_xZt4-fH-c-MMHxDWyE0nPc82po0Cj8pMGFj1zxNM,11 +onnx/backend/test/data/node/test_sce_mean_weight_expanded/model.onnx,sha256=8YacVoGPm3GpfQQDrJMUjOem0zDK5U7sgwUptKajO7w,1378 +onnx/backend/test/data/node/test_sce_mean_weight_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_weight_expanded/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_mean_weight_expanded/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 +onnx/backend/test/data/node/test_sce_mean_weight_expanded/test_data_set_0/output_0.pb,sha256=lP_xZt4-fH-c-MMHxDWyE0nPc82po0Cj8pMGFj1zxNM,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii/model.onnx,sha256=ubPeobCw3w8cF0I1Qnr2L_N6wGyzQ-0Wg3F7v_mcUcs,200 
+onnx/backend/test/data/node/test_sce_mean_weight_ii/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_weight_ii/test_data_set_0/input_1.pb,sha256=YJUO7ePUACgNVajGJigiRsGzGaqL0hHgIAz6jyu88zk,33 +onnx/backend/test/data/node/test_sce_mean_weight_ii/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii/test_data_set_0/output_0.pb,sha256=0JMPnsJnw0rYj2LJGjJHGXrFcuwORQsPfEJdn7048yI,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d/model.onnx,sha256=NsPI86Dp0m9do2WsjEkCA9R1JcYaMDi1UcDhrJfn5xc,211 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d/test_data_set_0/input_1.pb,sha256=trcZ3XMzOOuOFUO4W2aWJTmt7OFdWqdw8PCSQR0mprM,59 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d/test_data_set_0/input_2.pb,sha256=NfzalgiAf2wCTk7FTCYv25NIOkkHFSogWvLGFqvdfq8,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d/test_data_set_0/output_0.pb,sha256=qsY4ogfxxosqu-qK9YR3AW7Hq7r51jui_D3hT_1Vvxo,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_expanded/model.onnx,sha256=2FoQTNK043NuohFyUgPwpRNlhzDbhO_aLT6cBQ4lAcs,1497 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_expanded/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_expanded/test_data_set_0/input_1.pb,sha256=trcZ3XMzOOuOFUO4W2aWJTmt7OFdWqdw8PCSQR0mprM,59 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_expanded/test_data_set_0/input_2.pb,sha256=NfzalgiAf2wCTk7FTCYv25NIOkkHFSogWvLGFqvdfq8,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_expanded/test_data_set_0/output_0.pb,sha256=qsY4ogfxxosqu-qK9YR3AW7Hq7r51jui_D3hT_1Vvxo,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob/model.onnx,sha256=bn4-smvay63gs1M_duVEpUKF1r0LYcHjChuS97GXypA,262 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob/test_data_set_0/input_1.pb,sha256=trcZ3XMzOOuOFUO4W2aWJTmt7OFdWqdw8PCSQR0mprM,59 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob/test_data_set_0/input_2.pb,sha256=NfzalgiAf2wCTk7FTCYv25NIOkkHFSogWvLGFqvdfq8,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob/test_data_set_0/output_0.pb,sha256=qsY4ogfxxosqu-qK9YR3AW7Hq7r51jui_D3hT_1Vvxo,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob/test_data_set_0/output_1.pb,sha256=kvOCYAwJY6k1Dxx1Zl2374xcARw1xSaVCGAyNt7GDto,140 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob_expanded/model.onnx,sha256=r4Sxfan1qnUwWoy2WqbrGK0euPoX1xB5G4jyHLnY4Qs,1774 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob_expanded/test_data_set_0/input_0.pb,sha256=s6b0VpPGzh25cOop7adn57jvPue13bYBiWeyf_RrcfQ,133 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob_expanded/test_data_set_0/input_1.pb,sha256=trcZ3XMzOOuOFUO4W2aWJTmt7OFdWqdw8PCSQR0mprM,59 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob_expanded/test_data_set_0/input_2.pb,sha256=NfzalgiAf2wCTk7FTCYv25NIOkkHFSogWvLGFqvdfq8,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob_expanded/test_data_set_0/output_0.pb,sha256=qsY4ogfxxosqu-qK9YR3AW7Hq7r51jui_D3hT_1Vvxo,11 
+onnx/backend/test/data/node/test_sce_mean_weight_ii_3d_log_prob_expanded/test_data_set_0/output_1.pb,sha256=kvOCYAwJY6k1Dxx1Zl2374xcARw1xSaVCGAyNt7GDto,140 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d/model.onnx,sha256=1AYaMPMGNskTnRJg7Y0lDPeEYqOEtomdbz3rfcBZNmQ,219 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d/test_data_set_0/input_0.pb,sha256=TfLTx9UrsR2ZmBWzR_7JZlnzgcwoLG9VbMGWmwiRBYE,856 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d/test_data_set_0/input_1.pb,sha256=YqKk9mWOW1FVsAOz4P5b4Rkm3_Opfi4966ewfW81WBM,350 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d/test_data_set_0/input_2.pb,sha256=NfzalgiAf2wCTk7FTCYv25NIOkkHFSogWvLGFqvdfq8,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d/test_data_set_0/output_0.pb,sha256=84SOfKbFoEmqcmwtI0T00WQkBlWevdpnax2C7UfyKIs,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_expanded/model.onnx,sha256=C2QnHfrdT3fox922LPCYt56dqOh7wv0hzlesUZKm810,1505 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_expanded/test_data_set_0/input_0.pb,sha256=TfLTx9UrsR2ZmBWzR_7JZlnzgcwoLG9VbMGWmwiRBYE,856 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_expanded/test_data_set_0/input_1.pb,sha256=YqKk9mWOW1FVsAOz4P5b4Rkm3_Opfi4966ewfW81WBM,350 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_expanded/test_data_set_0/input_2.pb,sha256=NfzalgiAf2wCTk7FTCYv25NIOkkHFSogWvLGFqvdfq8,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_expanded/test_data_set_0/output_0.pb,sha256=84SOfKbFoEmqcmwtI0T00WQkBlWevdpnax2C7UfyKIs,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob/model.onnx,sha256=7jOM2AOOk_vbCzoIpmYtfEEra5Sk_CesOa_8wQDLxdY,274 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob/test_data_set_0/input_0.pb,sha256=TfLTx9UrsR2ZmBWzR_7JZlnzgcwoLG9VbMGWmwiRBYE,856 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob/test_data_set_0/input_1.pb,sha256=YqKk9mWOW1FVsAOz4P5b4Rkm3_Opfi4966ewfW81WBM,350 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob/test_data_set_0/input_2.pb,sha256=NfzalgiAf2wCTk7FTCYv25NIOkkHFSogWvLGFqvdfq8,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob/test_data_set_0/output_0.pb,sha256=84SOfKbFoEmqcmwtI0T00WQkBlWevdpnax2C7UfyKIs,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob/test_data_set_0/output_1.pb,sha256=w-EurMdSNKZnr-TaXWH4MRaTpJ4Oj-02F1ZbQw4Gch0,863 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob_expanded/model.onnx,sha256=5D5IXJS0E3jfGyP4bKSC3Xxxx-Py8x_L7bLfsgozeZQ,1786 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob_expanded/test_data_set_0/input_0.pb,sha256=TfLTx9UrsR2ZmBWzR_7JZlnzgcwoLG9VbMGWmwiRBYE,856 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob_expanded/test_data_set_0/input_1.pb,sha256=YqKk9mWOW1FVsAOz4P5b4Rkm3_Opfi4966ewfW81WBM,350 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob_expanded/test_data_set_0/input_2.pb,sha256=NfzalgiAf2wCTk7FTCYv25NIOkkHFSogWvLGFqvdfq8,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob_expanded/test_data_set_0/output_0.pb,sha256=84SOfKbFoEmqcmwtI0T00WQkBlWevdpnax2C7UfyKIs,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii_4d_log_prob_expanded/test_data_set_0/output_1.pb,sha256=w-EurMdSNKZnr-TaXWH4MRaTpJ4Oj-02F1ZbQw4Gch0,863 +onnx/backend/test/data/node/test_sce_mean_weight_ii_expanded/model.onnx,sha256=j46HBvdOCXuiQR_0FH7hr-kTwUuYykrPqZCq_hYcUps,1444 
+onnx/backend/test/data/node/test_sce_mean_weight_ii_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_weight_ii_expanded/test_data_set_0/input_1.pb,sha256=YJUO7ePUACgNVajGJigiRsGzGaqL0hHgIAz6jyu88zk,33 +onnx/backend/test/data/node/test_sce_mean_weight_ii_expanded/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii_expanded/test_data_set_0/output_0.pb,sha256=0JMPnsJnw0rYj2LJGjJHGXrFcuwORQsPfEJdn7048yI,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob/model.onnx,sha256=IdUuxD1i3D9ZVQBa2r-E7cBsRGa5pslBpvLxNAvIRuE,247 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob/test_data_set_0/input_1.pb,sha256=YJUO7ePUACgNVajGJigiRsGzGaqL0hHgIAz6jyu88zk,33 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob/test_data_set_0/output_0.pb,sha256=0JMPnsJnw0rYj2LJGjJHGXrFcuwORQsPfEJdn7048yI,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob_expanded/model.onnx,sha256=MZDb9GUNI8EDzj_6fsm21bEEe-hzeTJybNPUsYuaEQs,1714 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob_expanded/test_data_set_0/input_1.pb,sha256=YJUO7ePUACgNVajGJigiRsGzGaqL0hHgIAz6jyu88zk,33 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob_expanded/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob_expanded/test_data_set_0/output_0.pb,sha256=0JMPnsJnw0rYj2LJGjJHGXrFcuwORQsPfEJdn7048yI,11 +onnx/backend/test/data/node/test_sce_mean_weight_ii_log_prob_expanded/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_mean_weight_log_prob/model.onnx,sha256=bi4ZJXOlblQde7zQJ2IXr19-7Tuzh7ABGIF1y9UBkxw,223 +onnx/backend/test/data/node/test_sce_mean_weight_log_prob/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_mean_weight_log_prob/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_mean_weight_log_prob/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 +onnx/backend/test/data/node/test_sce_mean_weight_log_prob/test_data_set_0/output_0.pb,sha256=lP_xZt4-fH-c-MMHxDWyE0nPc82po0Cj8pMGFj1zxNM,11 +onnx/backend/test/data/node/test_sce_mean_weight_log_prob/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_mean_weight_log_prob_expanded/model.onnx,sha256=DZKdS7gtR9_bhuodjbc-XDv8AIXyxzKcjxcd4mojvVg,1644 +onnx/backend/test/data/node/test_sce_mean_weight_log_prob_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 
+onnx/backend/test/data/node/test_sce_mean_weight_log_prob_expanded/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_mean_weight_log_prob_expanded/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 +onnx/backend/test/data/node/test_sce_mean_weight_log_prob_expanded/test_data_set_0/output_0.pb,sha256=lP_xZt4-fH-c-MMHxDWyE0nPc82po0Cj8pMGFj1zxNM,11 +onnx/backend/test/data/node/test_sce_mean_weight_log_prob_expanded/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_none/model.onnx,sha256=H2lkLn0o5LXqCGKwv1US_QPA4ziHj_dWCD_VRr-x0So,153 +onnx/backend/test/data/node/test_sce_none/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_none/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_none/test_data_set_0/output_0.pb,sha256=x2eu67Bj72ih7ElYhvsY3EgwooqYYka6FOBY-4e-f5c,21 +onnx/backend/test/data/node/test_sce_none_expanded/model.onnx,sha256=jvMjhz-FPf3Y5Ljjl_wmeuy3w4SCJqd3eqboiqLZj3s,1256 +onnx/backend/test/data/node/test_sce_none_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_none_expanded/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_none_expanded/test_data_set_0/output_0.pb,sha256=x2eu67Bj72ih7ElYhvsY3EgwooqYYka6FOBY-4e-f5c,21 +onnx/backend/test/data/node/test_sce_none_log_prob/model.onnx,sha256=Y9YRxncQJ320tXRz50KQeX4soUQ_g7qUQqNvr2dmmac,200 +onnx/backend/test/data/node/test_sce_none_log_prob/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_none_log_prob/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_none_log_prob/test_data_set_0/output_0.pb,sha256=x2eu67Bj72ih7ElYhvsY3EgwooqYYka6FOBY-4e-f5c,21 +onnx/backend/test/data/node/test_sce_none_log_prob/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_none_log_prob_expanded/model.onnx,sha256=nDowvW1SSVlbo-44PvNAi27cBtPN-QkGjBweoaXEgSU,1516 +onnx/backend/test/data/node/test_sce_none_log_prob_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_none_log_prob_expanded/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_none_log_prob_expanded/test_data_set_0/output_0.pb,sha256=x2eu67Bj72ih7ElYhvsY3EgwooqYYka6FOBY-4e-f5c,21 +onnx/backend/test/data/node/test_sce_none_log_prob_expanded/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_none_weights/model.onnx,sha256=vUIbLrpezyXgtQIFBb85vvUQZc-I5OWnzy3fwqygqK4,181 +onnx/backend/test/data/node/test_sce_none_weights/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_none_weights/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_none_weights/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 
+onnx/backend/test/data/node/test_sce_none_weights/test_data_set_0/output_0.pb,sha256=y-XkkLpEvSyr9CE4PdlzMmWIvBjaeZB0A0NCBrJ19ig,21 +onnx/backend/test/data/node/test_sce_none_weights_expanded/model.onnx,sha256=Gj0emH3XxL0lpeXKFM_5A_TyLlojnAxleEZd-IFi_ZI,1397 +onnx/backend/test/data/node/test_sce_none_weights_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_none_weights_expanded/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_none_weights_expanded/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 +onnx/backend/test/data/node/test_sce_none_weights_expanded/test_data_set_0/output_0.pb,sha256=y-XkkLpEvSyr9CE4PdlzMmWIvBjaeZB0A0NCBrJ19ig,21 +onnx/backend/test/data/node/test_sce_none_weights_log_prob/model.onnx,sha256=s7n7tjyaNx2mI62GzKgQa73qF8mTVqTIfbSBEikGNgg,228 +onnx/backend/test/data/node/test_sce_none_weights_log_prob/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_none_weights_log_prob/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_none_weights_log_prob/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 +onnx/backend/test/data/node/test_sce_none_weights_log_prob/test_data_set_0/output_0.pb,sha256=y-XkkLpEvSyr9CE4PdlzMmWIvBjaeZB0A0NCBrJ19ig,21 +onnx/backend/test/data/node/test_sce_none_weights_log_prob/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_none_weights_log_prob_expanded/model.onnx,sha256=3VIOOJuLcgdPaAwOLtfYcd2q9TR6JxU-pvVYvIWqAhE,1664 +onnx/backend/test/data/node/test_sce_none_weights_log_prob_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_none_weights_log_prob_expanded/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_none_weights_log_prob_expanded/test_data_set_0/input_2.pb,sha256=3kILcVoqN6HplPyGS5NBip_Uxq8x53NnWk7p_6p55qQ,29 +onnx/backend/test/data/node/test_sce_none_weights_log_prob_expanded/test_data_set_0/output_0.pb,sha256=y-XkkLpEvSyr9CE4PdlzMmWIvBjaeZB0A0NCBrJ19ig,21 +onnx/backend/test/data/node/test_sce_none_weights_log_prob_expanded/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_sum/model.onnx,sha256=eKWzrBbwBTgYlHY-2HmzgUTx8HpbkbYXYDKzaPGjFsE,146 +onnx/backend/test/data/node/test_sce_sum/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_sum/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_sum/test_data_set_0/output_0.pb,sha256=G7TfEJjhMn00Lpl6iXNHr_RQDPDaYWFElmZsgbqn13c,11 +onnx/backend/test/data/node/test_sce_sum_expanded/model.onnx,sha256=deboOkn3M8rduO0dixjQGrjXaKtjr8ayU4P9l73GD58,1236 +onnx/backend/test/data/node/test_sce_sum_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_sum_expanded/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_sum_expanded/test_data_set_0/output_0.pb,sha256=G7TfEJjhMn00Lpl6iXNHr_RQDPDaYWFElmZsgbqn13c,11 
+onnx/backend/test/data/node/test_sce_sum_log_prob/model.onnx,sha256=2tYIhEZKQCXLk0k_GwDtKRP-nd259yfV-ZZTbPVtUtI,194 +onnx/backend/test/data/node/test_sce_sum_log_prob/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_sum_log_prob/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_sum_log_prob/test_data_set_0/output_0.pb,sha256=G7TfEJjhMn00Lpl6iXNHr_RQDPDaYWFElmZsgbqn13c,11 +onnx/backend/test/data/node/test_sce_sum_log_prob/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_sce_sum_log_prob_expanded/model.onnx,sha256=usBI99QeRs6byBL7GjCGujjTkngrZVtWdSG2ou-dg6k,1494 +onnx/backend/test/data/node/test_sce_sum_log_prob_expanded/test_data_set_0/input_0.pb,sha256=pj_kwJ8mI7R2Z9WWgaYUu6ZDJi2pczh33QryHGs7FBU,71 +onnx/backend/test/data/node/test_sce_sum_log_prob_expanded/test_data_set_0/input_1.pb,sha256=rsaomDCWHheHMGl-4PbZMWjzhyTJa7yWbeNY-pEMS7I,33 +onnx/backend/test/data/node/test_sce_sum_log_prob_expanded/test_data_set_0/output_0.pb,sha256=G7TfEJjhMn00Lpl6iXNHr_RQDPDaYWFElmZsgbqn13c,11 +onnx/backend/test/data/node/test_sce_sum_log_prob_expanded/test_data_set_0/output_1.pb,sha256=jLA4YVCTOH97BUXLmUfsiEHq3tr2wW34snMbOINmEBI,78 +onnx/backend/test/data/node/test_selu/model.onnx,sha256=bK-4hgbeiz0E86daBeK_tjAdYtOEIPz2jhViyP6Comk,133 +onnx/backend/test/data/node/test_selu/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_selu/test_data_set_0/output_0.pb,sha256=3t_C-XZiQzxkvHFf3gx504Kj9Q0MV9112pt5kMmd8oQ,254 +onnx/backend/test/data/node/test_selu_default/model.onnx,sha256=6Oire7P4k7mvp8_8Hn-OI3-262N26IQKGWQFWQMqHCg,107 +onnx/backend/test/data/node/test_selu_default/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_selu_default/test_data_set_0/output_0.pb,sha256=_aFrBuR3jDgYVvV-OBwrJAzuyFsY0oAPRG4CXasl6nY,254 +onnx/backend/test/data/node/test_selu_default_expanded_ver18/model.onnx,sha256=h3cIE5-IoDq6fDDAAZ1YHGkozoprP8-0EXX6oKVQ0VA,1680 +onnx/backend/test/data/node/test_selu_default_expanded_ver18/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_selu_default_expanded_ver18/test_data_set_0/output_0.pb,sha256=_aFrBuR3jDgYVvV-OBwrJAzuyFsY0oAPRG4CXasl6nY,254 +onnx/backend/test/data/node/test_selu_example/model.onnx,sha256=-G90oXdLJLM77jxlPngzQpgARw_-8xtHF1iLffaL2FI,125 +onnx/backend/test/data/node/test_selu_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_selu_example/test_data_set_0/output_0.pb,sha256=GV9rQBElEpmyFwZK5vUCoErkUB9RzczSO8s8wBrlC2U,21 +onnx/backend/test/data/node/test_selu_example_expanded_ver18/model.onnx,sha256=6APAkckMLmBXple_3UQwMVYpf02213InCALqYB3YvY8,1664 +onnx/backend/test/data/node/test_selu_example_expanded_ver18/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_selu_example_expanded_ver18/test_data_set_0/output_0.pb,sha256=GV9rQBElEpmyFwZK5vUCoErkUB9RzczSO8s8wBrlC2U,21 +onnx/backend/test/data/node/test_selu_expanded_ver18/model.onnx,sha256=qjTTbqvlMjwWwE44MeUPBOHQp88UpqbST66W_nhW_zI,1464 +onnx/backend/test/data/node/test_selu_expanded_ver18/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 
+onnx/backend/test/data/node/test_selu_expanded_ver18/test_data_set_0/output_0.pb,sha256=3t_C-XZiQzxkvHFf3gx504Kj9Q0MV9112pt5kMmd8oQ,254 +onnx/backend/test/data/node/test_sequence_insert_at_back/model.onnx,sha256=dclz71fwabxjiwz7ZZPpZ3ERqjKUCfxs9MmwS5o-S0I,181 +onnx/backend/test/data/node/test_sequence_insert_at_back/test_data_set_0/input_0.pb,sha256=WDgw3x97Oup88446uaFPYpisr9Lmw6gkCgC6dO8RTB8,108 +onnx/backend/test/data/node/test_sequence_insert_at_back/test_data_set_0/input_1.pb,sha256=HM2wXt_MR9LhixZe95BE_6V4E0I8aGCth0LMDB_HfFM,38 +onnx/backend/test/data/node/test_sequence_insert_at_back/test_data_set_0/output_0.pb,sha256=ztM0AhuLe0puEOS9gTDqXrLfm79yY-n3E2NT6ZVMu1s,147 +onnx/backend/test/data/node/test_sequence_insert_at_front/model.onnx,sha256=jHJltVXI4xcX2wLCEOTU8zTdCgFE-73rmpzFqAb5U4c,216 +onnx/backend/test/data/node/test_sequence_insert_at_front/test_data_set_0/input_0.pb,sha256=WDgw3x97Oup88446uaFPYpisr9Lmw6gkCgC6dO8RTB8,108 +onnx/backend/test/data/node/test_sequence_insert_at_front/test_data_set_0/input_1.pb,sha256=Hw_0b-P2L4hJ3YPDPqlTs0s4rH9VdGOA2GO_hkQ3xaM,38 +onnx/backend/test/data/node/test_sequence_insert_at_front/test_data_set_0/input_2.pb,sha256=hRxu_mZX41ntrJWWbxx8f7P6EnyayB8ka0iNrMaxFj0,24 +onnx/backend/test/data/node/test_sequence_insert_at_front/test_data_set_0/output_0.pb,sha256=T17FYTW-QhMbvm3f8yY1hDND0c4r6qEtLdIGlcQblEs,147 +onnx/backend/test/data/node/test_sequence_map_add_1_sequence_1_tensor/model.onnx,sha256=OfGcJofqQCkVFhPal0FCQgWSlvjbr1shOVMI2nxKq8M,272 +onnx/backend/test/data/node/test_sequence_map_add_1_sequence_1_tensor/test_data_set_0/input_0.pb,sha256=nCXX4WEnc0Zu3v-2E9cMzpL-HDU_mXtYYAVy4nBG_to,150 +onnx/backend/test/data/node/test_sequence_map_add_1_sequence_1_tensor/test_data_set_0/input_1.pb,sha256=4LWEZM0lo_jM1KcnB2dq0CYg0TvFm7G3_eRttrDsycU,50 +onnx/backend/test/data/node/test_sequence_map_add_1_sequence_1_tensor/test_data_set_0/output_0.pb,sha256=MYwdCW3clBFkSs7zcMQR211ZOPnLwIkOVyLHhSUwlFA,150 +onnx/backend/test/data/node/test_sequence_map_add_1_sequence_1_tensor_expanded/model.onnx,sha256=dyU0kfVT2Fx83iA2ADqcXZnzZ3M3g6AB5St0Dy5vMeM,2583 +onnx/backend/test/data/node/test_sequence_map_add_1_sequence_1_tensor_expanded/test_data_set_0/input_0.pb,sha256=nCXX4WEnc0Zu3v-2E9cMzpL-HDU_mXtYYAVy4nBG_to,150 +onnx/backend/test/data/node/test_sequence_map_add_1_sequence_1_tensor_expanded/test_data_set_0/input_1.pb,sha256=4LWEZM0lo_jM1KcnB2dq0CYg0TvFm7G3_eRttrDsycU,50 +onnx/backend/test/data/node/test_sequence_map_add_1_sequence_1_tensor_expanded/test_data_set_0/output_0.pb,sha256=MYwdCW3clBFkSs7zcMQR211ZOPnLwIkOVyLHhSUwlFA,150 +onnx/backend/test/data/node/test_sequence_map_add_2_sequences/model.onnx,sha256=Mc9kMXbr184Szpv9fAbC6BMTnCTJdG83fws0BRhWEGU,268 +onnx/backend/test/data/node/test_sequence_map_add_2_sequences/test_data_set_0/input_0.pb,sha256=cGwNd7wWWJ-FuNDIPlO8IvEIyjmzK0D-xYjIam7MXhU,74 +onnx/backend/test/data/node/test_sequence_map_add_2_sequences/test_data_set_0/input_1.pb,sha256=bf6wd6KFJk459ileC4koSFigL5H6HdmP33e-4lH815M,74 +onnx/backend/test/data/node/test_sequence_map_add_2_sequences/test_data_set_0/output_0.pb,sha256=PkOXYpt42DXPXr5LGcda5uLFKC3W4SXgWNrvjupnYxM,74 +onnx/backend/test/data/node/test_sequence_map_add_2_sequences_expanded/model.onnx,sha256=WQtKBAc9qxRR1Z7p20KsXH3JvS9396ukdYuqfo6woWU,2502 +onnx/backend/test/data/node/test_sequence_map_add_2_sequences_expanded/test_data_set_0/input_0.pb,sha256=cGwNd7wWWJ-FuNDIPlO8IvEIyjmzK0D-xYjIam7MXhU,74 
+onnx/backend/test/data/node/test_sequence_map_add_2_sequences_expanded/test_data_set_0/input_1.pb,sha256=bf6wd6KFJk459ileC4koSFigL5H6HdmP33e-4lH815M,74 +onnx/backend/test/data/node/test_sequence_map_add_2_sequences_expanded/test_data_set_0/output_0.pb,sha256=PkOXYpt42DXPXr5LGcda5uLFKC3W4SXgWNrvjupnYxM,74 +onnx/backend/test/data/node/test_sequence_map_extract_shapes/model.onnx,sha256=Jsl6R2LLrjHqFbg1N_uoGT7dW9HtxevfhzLm2lZVDl4,248 +onnx/backend/test/data/node/test_sequence_map_extract_shapes/test_data_set_0/input_0.pb,sha256=mPhMK4W7csMlfDABLzSUi_9hj5Yyoaybjwu7Megeiyw,17452 +onnx/backend/test/data/node/test_sequence_map_extract_shapes/test_data_set_0/output_0.pb,sha256=PXiF7who7_NSvRSbm7qpuMjQlD5XXC40cVlJBFVo4NI,106 +onnx/backend/test/data/node/test_sequence_map_extract_shapes_expanded/model.onnx,sha256=xCqJXbZ7WsC9KoFpy3BvQMIR1v5b_n9DYuRhRPQUdK4,2232 +onnx/backend/test/data/node/test_sequence_map_extract_shapes_expanded/test_data_set_0/input_0.pb,sha256=mPhMK4W7csMlfDABLzSUi_9hj5Yyoaybjwu7Megeiyw,17452 +onnx/backend/test/data/node/test_sequence_map_extract_shapes_expanded/test_data_set_0/output_0.pb,sha256=PXiF7who7_NSvRSbm7qpuMjQlD5XXC40cVlJBFVo4NI,106 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence/model.onnx,sha256=OEeIP33hLWof6fEFh_haP8yrxUYh7w1TePk86XFJrFs,220 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence/test_data_set_0/input_0.pb,sha256=6tnw5WE3C0cv0L08TL4bxvHXwwdVH6diP_77Wwk_6a8,149 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence/test_data_set_0/output_0.pb,sha256=1EIyt4XmclkaRfxbArKm4RLQAJ1XgXG3_0vGgEm0lOY,149 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_1_tensor/model.onnx,sha256=21q_NLTDki4fmu_UvzCQbYGplZXFVeEp8LktrHp4mN8,350 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_1_tensor/test_data_set_0/input_0.pb,sha256=p9PoQWTo5bC27UoTmyKqVW7FNgPzuy19YhT8PfT0k8M,106 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_1_tensor/test_data_set_0/input_1.pb,sha256=ZyvRjOUi5VBwdltGImSNNn_iVdnJ4lCr8fJNRgGenw0,18 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_1_tensor/test_data_set_0/output_0.pb,sha256=hMkE_GYM8mWkRJa_E5S3GXal67uKMxKuOiAHw9POsJk,106 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_1_tensor/test_data_set_0/output_1.pb,sha256=h2zwPyLH6s4cI9yRu_DJMHHm1wnDg5SXWFDAWEaI3zg,54 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_1_tensor_expanded/model.onnx,sha256=nP2doMsKhjazXp5WYeLS8IWMGgF3Qh9jFsf4QnjSgc4,3664 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_1_tensor_expanded/test_data_set_0/input_0.pb,sha256=p9PoQWTo5bC27UoTmyKqVW7FNgPzuy19YhT8PfT0k8M,106 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_1_tensor_expanded/test_data_set_0/input_1.pb,sha256=ZyvRjOUi5VBwdltGImSNNn_iVdnJ4lCr8fJNRgGenw0,18 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_1_tensor_expanded/test_data_set_0/output_0.pb,sha256=hMkE_GYM8mWkRJa_E5S3GXal67uKMxKuOiAHw9POsJk,106 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_1_tensor_expanded/test_data_set_0/output_1.pb,sha256=h2zwPyLH6s4cI9yRu_DJMHHm1wnDg5SXWFDAWEaI3zg,54 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_expanded/model.onnx,sha256=hiA16-f3aO5Nt6NmliOYBS7IOU3u8PUXz9Gcx8sjPg8,2307 +onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_expanded/test_data_set_0/input_0.pb,sha256=6tnw5WE3C0cv0L08TL4bxvHXwwdVH6diP_77Wwk_6a8,149 
+onnx/backend/test/data/node/test_sequence_map_identity_1_sequence_expanded/test_data_set_0/output_0.pb,sha256=1EIyt4XmclkaRfxbArKm4RLQAJ1XgXG3_0vGgEm0lOY,149 +onnx/backend/test/data/node/test_sequence_map_identity_2_sequences/model.onnx,sha256=gBknA8YcoQ3V89G83B5Jh7ypAYRVOPZ2yiI_NXj4NYw,346 +onnx/backend/test/data/node/test_sequence_map_identity_2_sequences/test_data_set_0/input_0.pb,sha256=p9PoQWTo5bC27UoTmyKqVW7FNgPzuy19YhT8PfT0k8M,106 +onnx/backend/test/data/node/test_sequence_map_identity_2_sequences/test_data_set_0/input_1.pb,sha256=edLeh_wZP5_dANUseUALIqMRTOZ7y1QyX7w9_rZFYdU,82 +onnx/backend/test/data/node/test_sequence_map_identity_2_sequences/test_data_set_0/output_0.pb,sha256=hMkE_GYM8mWkRJa_E5S3GXal67uKMxKuOiAHw9POsJk,106 +onnx/backend/test/data/node/test_sequence_map_identity_2_sequences/test_data_set_0/output_1.pb,sha256=QNVN7ibNoj0ZZ8qF6QDsbXXk5ReGVQrR58d7XUF7Lzs,82 +onnx/backend/test/data/node/test_sequence_map_identity_2_sequences_expanded/model.onnx,sha256=7Q_GyTpRAMwU6C1nN_emXFXtUQ7mPJpZvzmVsc44ob0,3524 +onnx/backend/test/data/node/test_sequence_map_identity_2_sequences_expanded/test_data_set_0/input_0.pb,sha256=p9PoQWTo5bC27UoTmyKqVW7FNgPzuy19YhT8PfT0k8M,106 +onnx/backend/test/data/node/test_sequence_map_identity_2_sequences_expanded/test_data_set_0/input_1.pb,sha256=edLeh_wZP5_dANUseUALIqMRTOZ7y1QyX7w9_rZFYdU,82 +onnx/backend/test/data/node/test_sequence_map_identity_2_sequences_expanded/test_data_set_0/output_0.pb,sha256=hMkE_GYM8mWkRJa_E5S3GXal67uKMxKuOiAHw9POsJk,106 +onnx/backend/test/data/node/test_sequence_map_identity_2_sequences_expanded/test_data_set_0/output_1.pb,sha256=QNVN7ibNoj0ZZ8qF6QDsbXXk5ReGVQrR58d7XUF7Lzs,82 +onnx/backend/test/data/node/test_shape/model.onnx,sha256=GWdmkXD4fjm5zOvsbbWplXM1-g_fqZ5plTwZQqmEmzU,93 +onnx/backend/test/data/node/test_shape/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_shape/test_data_set_0/output_0.pb,sha256=EECFSLSrOZp2gtSXfesFT_CNAvoEOqauyDoYlnKEOTE,33 +onnx/backend/test/data/node/test_shape_clip_end/model.onnx,sha256=IXU1o2oilsHYktVW217VcqP8HKw753knZzxp-mQu7DQ,114 +onnx/backend/test/data/node/test_shape_clip_end/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_shape_clip_end/test_data_set_0/output_0.pb,sha256=EECFSLSrOZp2gtSXfesFT_CNAvoEOqauyDoYlnKEOTE,33 +onnx/backend/test/data/node/test_shape_clip_start/model.onnx,sha256=t28iwZVW95tx23d2YSJxQeoiGyvpbzcYxWq8UNvH8JE,127 +onnx/backend/test/data/node/test_shape_clip_start/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_shape_clip_start/test_data_set_0/output_0.pb,sha256=EECFSLSrOZp2gtSXfesFT_CNAvoEOqauyDoYlnKEOTE,33 +onnx/backend/test/data/node/test_shape_end_1/model.onnx,sha256=IQ7pthV-0xyNYeRSoFIIv7J0dZ7EraWwtGpkyu11UT8,111 +onnx/backend/test/data/node/test_shape_end_1/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_shape_end_1/test_data_set_0/output_0.pb,sha256=ujKSnDUgGwN2lii1DHBi3GFScTBofqmZ4RzFbFOIc9E,17 +onnx/backend/test/data/node/test_shape_end_negative_1/model.onnx,sha256=FFb51SZcmY40laBQ-oFL0j-iyBiwlU5HMeoKddb3txU,129 +onnx/backend/test/data/node/test_shape_end_negative_1/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_shape_end_negative_1/test_data_set_0/output_0.pb,sha256=o_sMz2Vgclt0Ror2cqSXaN2p7cm4La1LMKCB8O_0aT0,25 
+onnx/backend/test/data/node/test_shape_example/model.onnx,sha256=1WWAZ-wJYrsMN9K3omglo4OnRLrAPTzmADf8_F7ffJU,97 +onnx/backend/test/data/node/test_shape_example/test_data_set_0/input_0.pb,sha256=4pQ6jBxU8_tBNnaRVRgYgck68GhQp-YW_tL388GcCvE,35 +onnx/backend/test/data/node/test_shape_example/test_data_set_0/output_0.pb,sha256=6ikEm3TGVfmw6AYUxs9Hj6Sz_9fYkqmojZP1Jzf94l4,25 +onnx/backend/test/data/node/test_shape_start_1/model.onnx,sha256=PT79EoZESTneig7Br8PsWyRbaXgsr-xZpVnU9mP3pIs,115 +onnx/backend/test/data/node/test_shape_start_1/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_shape_start_1/test_data_set_0/output_0.pb,sha256=6CbIjNHfclOJfoITJrtphF0oSHCpNlMWJf5rgpAe7Hc,25 +onnx/backend/test/data/node/test_shape_start_1_end_2/model.onnx,sha256=DG4CNcJd4SJpDbzbgnUgQ6jHCVYuJFyV0Q2LZKb9pH0,133 +onnx/backend/test/data/node/test_shape_start_1_end_2/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_shape_start_1_end_2/test_data_set_0/output_0.pb,sha256=EIeRS9lwJGLIo_UX9RbLDKw-bg_pno5AbyLkr0eDwbk,17 +onnx/backend/test/data/node/test_shape_start_1_end_negative_1/model.onnx,sha256=FuwoRSvwBsnvJiS7Icg5Z3jtTIytP06bepEZ6hd-sS8,151 +onnx/backend/test/data/node/test_shape_start_1_end_negative_1/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_shape_start_1_end_negative_1/test_data_set_0/output_0.pb,sha256=EIeRS9lwJGLIo_UX9RbLDKw-bg_pno5AbyLkr0eDwbk,17 +onnx/backend/test/data/node/test_shape_start_negative_1/model.onnx,sha256=AOdDE7hKh8HvaLo5dQ-0Kq0652qj35RyoR7BV738V68,133 +onnx/backend/test/data/node/test_shape_start_negative_1/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_shape_start_negative_1/test_data_set_0/output_0.pb,sha256=E5EQLnbSPOMjgkdKPi9jK7WKlPyQIdVIdbNaQTDYhrs,17 +onnx/backend/test/data/node/test_shrink_hard/model.onnx,sha256=hRk01cid7ePJWw54gaQvV4SuhFswtdeY0sMW4cCV7Mk,109 +onnx/backend/test/data/node/test_shrink_hard/test_data_set_0/input_0.pb,sha256=b6eG0JoSaNfFZ9uN6DHuVlebG4hyn5rm9o2xzU2w0-Y,29 +onnx/backend/test/data/node/test_shrink_hard/test_data_set_0/output_0.pb,sha256=KreC4NTAYK5M6dHrWIiG52RveZDpxh9mGTsf43xT4sk,29 +onnx/backend/test/data/node/test_shrink_hard_expanded_ver18/model.onnx,sha256=g9Xalz8K-ARbwHMGzGKnXHdUHfdtfl1Xg_av5lSu7aY,1761 +onnx/backend/test/data/node/test_shrink_hard_expanded_ver18/test_data_set_0/input_0.pb,sha256=b6eG0JoSaNfFZ9uN6DHuVlebG4hyn5rm9o2xzU2w0-Y,29 +onnx/backend/test/data/node/test_shrink_hard_expanded_ver18/test_data_set_0/output_0.pb,sha256=KreC4NTAYK5M6dHrWIiG52RveZDpxh9mGTsf43xT4sk,29 +onnx/backend/test/data/node/test_shrink_soft/model.onnx,sha256=fd95F4NzSxY8j8N0YEuX3RFbmVvQk78Qzm2JpNX7VhE,125 +onnx/backend/test/data/node/test_shrink_soft/test_data_set_0/input_0.pb,sha256=b6eG0JoSaNfFZ9uN6DHuVlebG4hyn5rm9o2xzU2w0-Y,29 +onnx/backend/test/data/node/test_shrink_soft/test_data_set_0/output_0.pb,sha256=AbQplXJ5FVu2LaNzGC0t3G9EZhZUCsd-_fnyXRD9T2I,29 +onnx/backend/test/data/node/test_shrink_soft_expanded_ver18/model.onnx,sha256=b-762wBGgDraNAQenMZF5yYpIi9UG639NYP1A8Cueaw,1761 +onnx/backend/test/data/node/test_shrink_soft_expanded_ver18/test_data_set_0/input_0.pb,sha256=b6eG0JoSaNfFZ9uN6DHuVlebG4hyn5rm9o2xzU2w0-Y,29 +onnx/backend/test/data/node/test_shrink_soft_expanded_ver18/test_data_set_0/output_0.pb,sha256=AbQplXJ5FVu2LaNzGC0t3G9EZhZUCsd-_fnyXRD9T2I,29 
+onnx/backend/test/data/node/test_sigmoid/model.onnx,sha256=yepFvj3QD9Q4Zac5RYx0wffH_TJfr4KUAJ9DrpwGKN0,105 +onnx/backend/test/data/node/test_sigmoid/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_sigmoid/test_data_set_0/output_0.pb,sha256=0M1TGc-0294NBpXmRH6IueVH60XOGwbeqLHuWtwlYaQ,254 +onnx/backend/test/data/node/test_sigmoid_example/model.onnx,sha256=qOwSJGf4ve6g7bQYHFcESH7TCyR-8MnWManHLNU6yCQ,97 +onnx/backend/test/data/node/test_sigmoid_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_sigmoid_example/test_data_set_0/output_0.pb,sha256=n9DyuVyiD6_UcKq3n3I64IT13QLjk3QfNoHdQTVflTE,21 +onnx/backend/test/data/node/test_sign/model.onnx,sha256=5l4xY8OBCnG5LepNxNKpVslAvTsQ7luIHrYwBJBeERw,83 +onnx/backend/test/data/node/test_sign/test_data_set_0/input_0.pb,sha256=rjz39Xc3D8_98MARLx2sEOivP3ylvsTlSYV1gS-Bkjo,53 +onnx/backend/test/data/node/test_sign/test_data_set_0/output_0.pb,sha256=GYc7eDjQSxVY5ZNV9JTqvwtl76bQQ-5Lj7KU0VWq2xU,53 +onnx/backend/test/data/node/test_simple_rnn_batchwise/model.onnx,sha256=EsuXrzQ-Ob09ghioZ3NZdLzngyvFW1fWBerh7zfaJ4g,242 +onnx/backend/test/data/node/test_simple_rnn_batchwise/test_data_set_0/input_0.pb,sha256=X4Jx731skujCbqYlfE79v4ovLqbqAar9VfhQTegviw8,37 +onnx/backend/test/data/node/test_simple_rnn_batchwise/test_data_set_0/input_1.pb,sha256=-qFlupAAYG72bVDrKQjQwSrFWi7iN5QBw4xnAFPcW84,45 +onnx/backend/test/data/node/test_simple_rnn_batchwise/test_data_set_0/input_2.pb,sha256=dGa_abJyaziCNP5gMI36AA1qPkXY_LbN8ysUOVmvHyk,77 +onnx/backend/test/data/node/test_simple_rnn_batchwise/test_data_set_0/output_0.pb,sha256=Ez-xZ1K8qEGZMxzQU_vbW69DKAZQtG3EiqIHGbBQt4M,63 +onnx/backend/test/data/node/test_simple_rnn_batchwise/test_data_set_0/output_1.pb,sha256=nXDD6IaK4uJCe6-oxjWmBH0vVBgbRwZ-0yFCq_-3OB0,63 +onnx/backend/test/data/node/test_simple_rnn_defaults/model.onnx,sha256=9FBv3ZwgUVQyC_aykS7yGwXVv7BGTAFFyT8GrliGouc,196 +onnx/backend/test/data/node/test_simple_rnn_defaults/test_data_set_0/input_0.pb,sha256=CfdyEi1TF1qNXdr7S9ZjPY0_c7pWtxM7ou3bNl3fNIo,37 +onnx/backend/test/data/node/test_simple_rnn_defaults/test_data_set_0/input_1.pb,sha256=L2zxMm6uL4QSLsCnRiqUzv-9fp0-VkDW34RiKxcyg3g,45 +onnx/backend/test/data/node/test_simple_rnn_defaults/test_data_set_0/input_2.pb,sha256=kU8zrxPzF60vfcmkNonlRPNh3BWlyVnLbMwQ5CkKp8A,77 +onnx/backend/test/data/node/test_simple_rnn_defaults/test_data_set_0/output_0.pb,sha256=OU_yJMBnE3tT08kLB6QX9tCug2oOoUznNbKkxphDC2k,63 +onnx/backend/test/data/node/test_simple_rnn_with_initial_bias/model.onnx,sha256=fe8V2dsmBZF8BGsqYmew2dxlaDBhi_d6mw18S0wbhys,229 +onnx/backend/test/data/node/test_simple_rnn_with_initial_bias/test_data_set_0/input_0.pb,sha256=eujm2YGHVRU93EAGK-aPA-aFEublku2wP288wJjVc4M,49 +onnx/backend/test/data/node/test_simple_rnn_with_initial_bias/test_data_set_0/input_1.pb,sha256=XvvuxoSRCrX0paTv67nqpKoXd_aJPHnXNXHAk58EJLo,73 +onnx/backend/test/data/node/test_simple_rnn_with_initial_bias/test_data_set_0/input_2.pb,sha256=eHvrnjVBev9Xxrb4pqD28kF3B0UDphj4glVsRcqMlGc,113 +onnx/backend/test/data/node/test_simple_rnn_with_initial_bias/test_data_set_0/input_3.pb,sha256=xxxsj5zURFnPlWj7Qthy8BnSXEWPmD0jqteuG9OYLzw,51 +onnx/backend/test/data/node/test_simple_rnn_with_initial_bias/test_data_set_0/output_0.pb,sha256=XxDQiHg3N9hdXmZ1DvqGHcyQxln3qD178X2V3_tAm1w,75 +onnx/backend/test/data/node/test_sin/model.onnx,sha256=MPX3UXM-DfTcxk4GwxYlIsSHnHJBT2fpQq3aiPUYFy8,97 
+onnx/backend/test/data/node/test_sin/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_sin/test_data_set_0/output_0.pb,sha256=9o8tToPY2FZLrQ9kpgkTSRtcGUruDpzj4Mv-1YM3x7Q,254 +onnx/backend/test/data/node/test_sin_example/model.onnx,sha256=wduzRKh4BsPV587Mg7J_L0aiWfhK7y_LTFnion9Tf6g,89 +onnx/backend/test/data/node/test_sin_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_sin_example/test_data_set_0/output_0.pb,sha256=rESUsCJt7xlSm0XiIouAgAB5X2EDtEkx1XgwSgNXt60,21 +onnx/backend/test/data/node/test_sinh/model.onnx,sha256=0NBUelqvm6ruB5Gn3dVwtjP2iRuknT1aBQA2iyg6JT4,99 +onnx/backend/test/data/node/test_sinh/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_sinh/test_data_set_0/output_0.pb,sha256=OGE2jz_pzkVkSMGwOa3rjsfBb_IffS4370ap03Jxhd0,254 +onnx/backend/test/data/node/test_sinh_example/model.onnx,sha256=L66imtG3NvcSnDCDSARKLJ_RukbUfBBXfghiQpAWZjY,91 +onnx/backend/test/data/node/test_sinh_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_sinh_example/test_data_set_0/output_0.pb,sha256=K9t2_Mw4JTwHagenQVbXUofxj_teQvFtDY4StMA6np4,21 +onnx/backend/test/data/node/test_size/model.onnx,sha256=aKOjRwa1ZAaor-O0xce68f7ypKuyU8Fcn-NT9dEfma0,87 +onnx/backend/test/data/node/test_size/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_size/test_data_set_0/output_0.pb,sha256=b1cKolCwb8qATNXxOwy-uZzIoYoFpdZ3uAldE3zbmBc,15 +onnx/backend/test/data/node/test_size_example/model.onnx,sha256=vL5PQF6-Ciuu5M1NVugZEoJnxGOKktTSEtBXfSdry_8,91 +onnx/backend/test/data/node/test_size_example/test_data_set_0/input_0.pb,sha256=4pQ6jBxU8_tBNnaRVRgYgck68GhQp-YW_tL388GcCvE,35 +onnx/backend/test/data/node/test_size_example/test_data_set_0/output_0.pb,sha256=7DuNFW7Z7uQ1cybMsorcUI-Yli8oj4V5HZFJX0W2oVk,15 +onnx/backend/test/data/node/test_slice/model.onnx,sha256=-Y-njaE0gcBSHb4aXWALKqZfPQCALE7BvM-MvRmTExs,212 +onnx/backend/test/data/node/test_slice/test_data_set_0/input_0.pb,sha256=EiB2JiuXu9Np31LSxkkbIbZra3nXCjnmcIsaVkoiD0k,4014 +onnx/backend/test/data/node/test_slice/test_data_set_0/input_1.pb,sha256=wc0xhhIxzp415JcdJvpBr3MTluCa3ilwjbTmaicBxXg,30 +onnx/backend/test/data/node/test_slice/test_data_set_0/input_2.pb,sha256=cF5G7dCz_SnsX419hWpbVK4j_5H2YKZE-5xFMquukGw,28 +onnx/backend/test/data/node/test_slice/test_data_set_0/input_3.pb,sha256=3uvlJ_2xYPsfOzK-E0Fz2WR-a1Yvmf3r1G6DZPUQ8v0,28 +onnx/backend/test/data/node/test_slice/test_data_set_0/input_4.pb,sha256=FYpZ2BP2ch3ngdORN4Fd-XXCd1OqSsJv2WWY_whU-Jg,29 +onnx/backend/test/data/node/test_slice/test_data_set_0/output_0.pb,sha256=D_IVQ8WutqrVCZXsKWyJBT7ouKphKsCc0qSUJxTHWqM,614 +onnx/backend/test/data/node/test_slice_default_axes/model.onnx,sha256=4JJ6BOFJqe-cAZhCh_bTrU_jfCuwTmIyu1shAFTomA8,171 +onnx/backend/test/data/node/test_slice_default_axes/test_data_set_0/input_0.pb,sha256=EiB2JiuXu9Np31LSxkkbIbZra3nXCjnmcIsaVkoiD0k,4014 +onnx/backend/test/data/node/test_slice_default_axes/test_data_set_0/input_1.pb,sha256=0MyK8o_JTn-8OHMKQd2Fbs5NEoDrp0ZhGk5VXN4dmzU,38 +onnx/backend/test/data/node/test_slice_default_axes/test_data_set_0/input_2.pb,sha256=gMy-BWymFe5-HqAkM0EEfJd0pohqpQfkWvnkLE591wo,36 +onnx/backend/test/data/node/test_slice_default_axes/test_data_set_0/output_0.pb,sha256=x__ywPxcTlMOMVGXHmZiTOgKJLZByNMQhBM7bldOtX0,814 
+onnx/backend/test/data/node/test_slice_default_steps/model.onnx,sha256=jwnfWp6zT12XSVPpY-HYg5haSqhx3yadBTmABYLgD3Y,198 +onnx/backend/test/data/node/test_slice_default_steps/test_data_set_0/input_0.pb,sha256=EiB2JiuXu9Np31LSxkkbIbZra3nXCjnmcIsaVkoiD0k,4014 +onnx/backend/test/data/node/test_slice_default_steps/test_data_set_0/input_1.pb,sha256=0MyK8o_JTn-8OHMKQd2Fbs5NEoDrp0ZhGk5VXN4dmzU,38 +onnx/backend/test/data/node/test_slice_default_steps/test_data_set_0/input_2.pb,sha256=gMy-BWymFe5-HqAkM0EEfJd0pohqpQfkWvnkLE591wo,36 +onnx/backend/test/data/node/test_slice_default_steps/test_data_set_0/input_3.pb,sha256=9iIJIH63usRCzPxS79uoJJal5z6hRmFovI6qk_j9cFg,36 +onnx/backend/test/data/node/test_slice_default_steps/test_data_set_0/output_0.pb,sha256=x__ywPxcTlMOMVGXHmZiTOgKJLZByNMQhBM7bldOtX0,814 +onnx/backend/test/data/node/test_slice_end_out_of_bounds/model.onnx,sha256=k_XTlsDXa9msN9VdHe9K_J3ki5KaJeGOwfGPqtXWb0w,230 +onnx/backend/test/data/node/test_slice_end_out_of_bounds/test_data_set_0/input_0.pb,sha256=EiB2JiuXu9Np31LSxkkbIbZra3nXCjnmcIsaVkoiD0k,4014 +onnx/backend/test/data/node/test_slice_end_out_of_bounds/test_data_set_0/input_1.pb,sha256=XWv_EpyftbZOy7BvmcGybZQOKyOWxra7roBZ3Z22qRw,22 +onnx/backend/test/data/node/test_slice_end_out_of_bounds/test_data_set_0/input_2.pb,sha256=P6LxgBoMoyanncUlS1nQkUDkj88OFJk_Ve-Xzhl2R1s,20 +onnx/backend/test/data/node/test_slice_end_out_of_bounds/test_data_set_0/input_3.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_slice_end_out_of_bounds/test_data_set_0/input_4.pb,sha256=H6nsc5oVsaH4Y0X2D1ZXmG5CRvWCq2I_iVx14iTm1rw,21 +onnx/backend/test/data/node/test_slice_end_out_of_bounds/test_data_set_0/output_0.pb,sha256=widw8tjNc3GiW69yXjw3DNXyrRew0s0tW58CO06AOro,3614 +onnx/backend/test/data/node/test_slice_neg/model.onnx,sha256=JxZ24gL6ooXtIkwVYa4_T1xB4s1QRMztgMyuflxup8k,216 +onnx/backend/test/data/node/test_slice_neg/test_data_set_0/input_0.pb,sha256=EiB2JiuXu9Np31LSxkkbIbZra3nXCjnmcIsaVkoiD0k,4014 +onnx/backend/test/data/node/test_slice_neg/test_data_set_0/input_1.pb,sha256=y7Lo1gIZ0Noqo6CgxoS0eeiFhLvLERZtwLtJfDD_kQw,22 +onnx/backend/test/data/node/test_slice_neg/test_data_set_0/input_2.pb,sha256=HVPAD1MvsIO_DXFtTbbGyV4N8iyv-amZiqvFCmDwXo4,20 +onnx/backend/test/data/node/test_slice_neg/test_data_set_0/input_3.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_slice_neg/test_data_set_0/input_4.pb,sha256=H6nsc5oVsaH4Y0X2D1ZXmG5CRvWCq2I_iVx14iTm1rw,21 +onnx/backend/test/data/node/test_slice_neg/test_data_set_0/output_0.pb,sha256=WWi0BR0FvzzDXkYKRxk7YBwjvUAnpYN_1glTU9rocFA,3614 +onnx/backend/test/data/node/test_slice_neg_steps/model.onnx,sha256=jmxK1-OxeZ9jaaFhAH3-Kvi-Fm34eWPtlzRS4fvSM3I,222 +onnx/backend/test/data/node/test_slice_neg_steps/test_data_set_0/input_0.pb,sha256=EiB2JiuXu9Np31LSxkkbIbZra3nXCjnmcIsaVkoiD0k,4014 +onnx/backend/test/data/node/test_slice_neg_steps/test_data_set_0/input_1.pb,sha256=-Zy7pr4EQ4B7KtRMFQTOCSlyXXftEvJJ7ZN-ZDeNuZQ,38 +onnx/backend/test/data/node/test_slice_neg_steps/test_data_set_0/input_2.pb,sha256=TtilbvYaT70fTxtYWUr_xaWr5e2v0OCMPMdtm6aZhfk,36 +onnx/backend/test/data/node/test_slice_neg_steps/test_data_set_0/input_3.pb,sha256=9iIJIH63usRCzPxS79uoJJal5z6hRmFovI6qk_j9cFg,36 +onnx/backend/test/data/node/test_slice_neg_steps/test_data_set_0/input_4.pb,sha256=7YuunH2YMZ_XZ8Axu63BxakwocdYdHjdAuBYtGoruJg,37 +onnx/backend/test/data/node/test_slice_neg_steps/test_data_set_0/output_0.pb,sha256=yignEmTSPo1HLtKNxvQrL_hjHzz0YCWadWmUKIjD-Kk,470 
+onnx/backend/test/data/node/test_slice_negative_axes/model.onnx,sha256=I_yla_KIgD4D6RFpTWtQrCU3f_1yFvyZjeisKwOknwY,198 +onnx/backend/test/data/node/test_slice_negative_axes/test_data_set_0/input_0.pb,sha256=EiB2JiuXu9Np31LSxkkbIbZra3nXCjnmcIsaVkoiD0k,4014 +onnx/backend/test/data/node/test_slice_negative_axes/test_data_set_0/input_1.pb,sha256=0MyK8o_JTn-8OHMKQd2Fbs5NEoDrp0ZhGk5VXN4dmzU,38 +onnx/backend/test/data/node/test_slice_negative_axes/test_data_set_0/input_2.pb,sha256=gMy-BWymFe5-HqAkM0EEfJd0pohqpQfkWvnkLE591wo,36 +onnx/backend/test/data/node/test_slice_negative_axes/test_data_set_0/input_3.pb,sha256=GzIxNVTw4ACJiY_83J53w9FTq-KK-8hroLrrDcD8Ql8,36 +onnx/backend/test/data/node/test_slice_negative_axes/test_data_set_0/output_0.pb,sha256=x__ywPxcTlMOMVGXHmZiTOgKJLZByNMQhBM7bldOtX0,814 +onnx/backend/test/data/node/test_slice_start_out_of_bounds/model.onnx,sha256=oOedHfZOtIflvQ5uiRdbfgb6Tx5LisRu4_PdGV_ZIZA,232 +onnx/backend/test/data/node/test_slice_start_out_of_bounds/test_data_set_0/input_0.pb,sha256=EiB2JiuXu9Np31LSxkkbIbZra3nXCjnmcIsaVkoiD0k,4014 +onnx/backend/test/data/node/test_slice_start_out_of_bounds/test_data_set_0/input_1.pb,sha256=54hVGhxKE_jcB9_6i2I-XxQHazFEivW1yktR1utgllg,22 +onnx/backend/test/data/node/test_slice_start_out_of_bounds/test_data_set_0/input_2.pb,sha256=P6LxgBoMoyanncUlS1nQkUDkj88OFJk_Ve-Xzhl2R1s,20 +onnx/backend/test/data/node/test_slice_start_out_of_bounds/test_data_set_0/input_3.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_slice_start_out_of_bounds/test_data_set_0/input_4.pb,sha256=H6nsc5oVsaH4Y0X2D1ZXmG5CRvWCq2I_iVx14iTm1rw,21 +onnx/backend/test/data/node/test_slice_start_out_of_bounds/test_data_set_0/output_0.pb,sha256=rrPue6VSpt1lUoRyxwKugP6KM5RSPt607YQ4WQ1KttU,13 +onnx/backend/test/data/node/test_softmax_axis_0/model.onnx,sha256=rPvEUpIpvKEMpLsORpNlc2C3eF4zPG3alrx0am9jiaw,125 +onnx/backend/test/data/node/test_softmax_axis_0/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_axis_0/test_data_set_0/output_0.pb,sha256=gReHANPYz0AKw3vwpUSfcfbkJyjj9NahVmvP3q9Eziw,254 +onnx/backend/test/data/node/test_softmax_axis_0_expanded/model.onnx,sha256=Uvf67IryWaNIbghB11ybsMl1oAd0RS4otNp0DYLc7fE,859 +onnx/backend/test/data/node/test_softmax_axis_0_expanded/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_axis_0_expanded/test_data_set_0/output_0.pb,sha256=gReHANPYz0AKw3vwpUSfcfbkJyjj9NahVmvP3q9Eziw,254 +onnx/backend/test/data/node/test_softmax_axis_0_expanded_ver18/model.onnx,sha256=dXadX9DosmijwVIn1f6Jj5jfgGffos-9wm7XFUkkSl0,905 +onnx/backend/test/data/node/test_softmax_axis_0_expanded_ver18/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_axis_0_expanded_ver18/test_data_set_0/output_0.pb,sha256=gReHANPYz0AKw3vwpUSfcfbkJyjj9NahVmvP3q9Eziw,254 +onnx/backend/test/data/node/test_softmax_axis_1/model.onnx,sha256=-fdXWdlN_2pRTAR1NWUmS16_BOKalktHpA8N_fvP39I,125 +onnx/backend/test/data/node/test_softmax_axis_1/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_axis_1/test_data_set_0/output_0.pb,sha256=2NtZxKwupZCfKtMkHWpzmQmC7c88DQrshZJByIrSFHs,254 +onnx/backend/test/data/node/test_softmax_axis_1_expanded/model.onnx,sha256=bWJ1bsrya2jyX53AHNO_N8wj0ED72nM3lRP318wrxFI,859 
+onnx/backend/test/data/node/test_softmax_axis_1_expanded/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_axis_1_expanded/test_data_set_0/output_0.pb,sha256=2NtZxKwupZCfKtMkHWpzmQmC7c88DQrshZJByIrSFHs,254 +onnx/backend/test/data/node/test_softmax_axis_1_expanded_ver18/model.onnx,sha256=O9rrLRIkGh1qvn9uXTP_GuY_NBWnZ1_QmqRxr-bMK_4,905 +onnx/backend/test/data/node/test_softmax_axis_1_expanded_ver18/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_axis_1_expanded_ver18/test_data_set_0/output_0.pb,sha256=2NtZxKwupZCfKtMkHWpzmQmC7c88DQrshZJByIrSFHs,254 +onnx/backend/test/data/node/test_softmax_axis_2/model.onnx,sha256=xI2MXHglefKmAVAq3tQcFcjgsm8eSu3TN-nbwtyF1Gs,125 +onnx/backend/test/data/node/test_softmax_axis_2/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_axis_2/test_data_set_0/output_0.pb,sha256=HXYaCnrmFAQgJqqc4KNL-b30SEla9MMJlhvZwiCK0Qg,254 +onnx/backend/test/data/node/test_softmax_axis_2_expanded/model.onnx,sha256=vndP3LBl1H73mq-JQ7igZVawXO4X-wjubBFQpxmX3Tc,859 +onnx/backend/test/data/node/test_softmax_axis_2_expanded/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_axis_2_expanded/test_data_set_0/output_0.pb,sha256=HXYaCnrmFAQgJqqc4KNL-b30SEla9MMJlhvZwiCK0Qg,254 +onnx/backend/test/data/node/test_softmax_axis_2_expanded_ver18/model.onnx,sha256=vg2LyI1uLoTTsvne1I9d7c1OAEyqJAbT63sfLFhXrQ4,905 +onnx/backend/test/data/node/test_softmax_axis_2_expanded_ver18/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_axis_2_expanded_ver18/test_data_set_0/output_0.pb,sha256=HXYaCnrmFAQgJqqc4KNL-b30SEla9MMJlhvZwiCK0Qg,254 +onnx/backend/test/data/node/test_softmax_default_axis/model.onnx,sha256=cdN4CJp3aOtL9kxltySsMgE7nfDR0qU14TA4ZMPUYpo,118 +onnx/backend/test/data/node/test_softmax_default_axis/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_default_axis/test_data_set_0/output_0.pb,sha256=HXYaCnrmFAQgJqqc4KNL-b30SEla9MMJlhvZwiCK0Qg,254 +onnx/backend/test/data/node/test_softmax_default_axis_expanded/model.onnx,sha256=9TkAiCwfQq4IjN0VcW-QHCxpbdUTMb1dywNefX7NUg4,951 +onnx/backend/test/data/node/test_softmax_default_axis_expanded/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_default_axis_expanded/test_data_set_0/output_0.pb,sha256=HXYaCnrmFAQgJqqc4KNL-b30SEla9MMJlhvZwiCK0Qg,254 +onnx/backend/test/data/node/test_softmax_default_axis_expanded_ver18/model.onnx,sha256=5jS9BaH8YIvWGy83ht5IRW_tfxoiSKxmnfkWqH9gD_I,994 +onnx/backend/test/data/node/test_softmax_default_axis_expanded_ver18/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_default_axis_expanded_ver18/test_data_set_0/output_0.pb,sha256=HXYaCnrmFAQgJqqc4KNL-b30SEla9MMJlhvZwiCK0Qg,254 +onnx/backend/test/data/node/test_softmax_example/model.onnx,sha256=feyA07cOiiKh-hENcmC-hWKB5kyneLCin5Ub7GZu6TE,105 +onnx/backend/test/data/node/test_softmax_example/test_data_set_0/input_0.pb,sha256=NHt6xFA_Q0se0ekO4FsGJVcWaEWivQQoZluHhSWn6JY,23 +onnx/backend/test/data/node/test_softmax_example/test_data_set_0/output_0.pb,sha256=AxlcbRehwEN4TljnHvvA3UwR5A7TPVjLT-4ruMBkjVM,23 
+onnx/backend/test/data/node/test_softmax_example_expanded/model.onnx,sha256=YbOfYjHr1VLVKasKhncA8X-GxiVvBHTvlRRgalIfHOM,881 +onnx/backend/test/data/node/test_softmax_example_expanded/test_data_set_0/input_0.pb,sha256=NHt6xFA_Q0se0ekO4FsGJVcWaEWivQQoZluHhSWn6JY,23 +onnx/backend/test/data/node/test_softmax_example_expanded/test_data_set_0/output_0.pb,sha256=AxlcbRehwEN4TljnHvvA3UwR5A7TPVjLT-4ruMBkjVM,23 +onnx/backend/test/data/node/test_softmax_example_expanded_ver18/model.onnx,sha256=RpKwwJvijOgEJTtwWqBJgjRkgwkoqhxwFTnuD8NTQLY,919 +onnx/backend/test/data/node/test_softmax_example_expanded_ver18/test_data_set_0/input_0.pb,sha256=NHt6xFA_Q0se0ekO4FsGJVcWaEWivQQoZluHhSWn6JY,23 +onnx/backend/test/data/node/test_softmax_example_expanded_ver18/test_data_set_0/output_0.pb,sha256=AxlcbRehwEN4TljnHvvA3UwR5A7TPVjLT-4ruMBkjVM,23 +onnx/backend/test/data/node/test_softmax_large_number/model.onnx,sha256=SSF-_e-pfB3flN7K6UKFtiWmtvsDV0hOA4giV2S_cSY,110 +onnx/backend/test/data/node/test_softmax_large_number/test_data_set_0/input_0.pb,sha256=8aqq9T0ktdil6R8zGhL-Sdrp2dpDCnSGzKl7HZiLcaI,43 +onnx/backend/test/data/node/test_softmax_large_number/test_data_set_0/output_0.pb,sha256=VVIksa1N2Whk8zHrBBuhgt3c6Grf8KBuxwwrI5lJM0A,43 +onnx/backend/test/data/node/test_softmax_large_number_expanded/model.onnx,sha256=_dWdXF9bLoxc2Rxon_ICSor9xHyr1rAEaatQaDs6TEA,943 +onnx/backend/test/data/node/test_softmax_large_number_expanded/test_data_set_0/input_0.pb,sha256=8aqq9T0ktdil6R8zGhL-Sdrp2dpDCnSGzKl7HZiLcaI,43 +onnx/backend/test/data/node/test_softmax_large_number_expanded/test_data_set_0/output_0.pb,sha256=VVIksa1N2Whk8zHrBBuhgt3c6Grf8KBuxwwrI5lJM0A,43 +onnx/backend/test/data/node/test_softmax_large_number_expanded_ver18/model.onnx,sha256=IUVcW3Q_5J_U6sBO1BK3jPPdG67xElsLc5t77ASZ1EQ,986 +onnx/backend/test/data/node/test_softmax_large_number_expanded_ver18/test_data_set_0/input_0.pb,sha256=8aqq9T0ktdil6R8zGhL-Sdrp2dpDCnSGzKl7HZiLcaI,43 +onnx/backend/test/data/node/test_softmax_large_number_expanded_ver18/test_data_set_0/output_0.pb,sha256=VVIksa1N2Whk8zHrBBuhgt3c6Grf8KBuxwwrI5lJM0A,43 +onnx/backend/test/data/node/test_softmax_negative_axis/model.onnx,sha256=LqbaqeEi_FLpNHU9XxKztZRbswQAo2oWyJwT6yAn6pY,141 +onnx/backend/test/data/node/test_softmax_negative_axis/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_negative_axis/test_data_set_0/output_0.pb,sha256=HXYaCnrmFAQgJqqc4KNL-b30SEla9MMJlhvZwiCK0Qg,254 +onnx/backend/test/data/node/test_softmax_negative_axis_expanded/model.onnx,sha256=YwZwOKhiEh3xNC8oa4tNTAyg0sQfKpKeYLmR_JnwILw,963 +onnx/backend/test/data/node/test_softmax_negative_axis_expanded/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_negative_axis_expanded/test_data_set_0/output_0.pb,sha256=HXYaCnrmFAQgJqqc4KNL-b30SEla9MMJlhvZwiCK0Qg,254 +onnx/backend/test/data/node/test_softmax_negative_axis_expanded_ver18/model.onnx,sha256=uTMJBQCkfux5JdqgY4yKzsQJS2HQUFU8cEt050_cO2s,1007 +onnx/backend/test/data/node/test_softmax_negative_axis_expanded_ver18/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_softmax_negative_axis_expanded_ver18/test_data_set_0/output_0.pb,sha256=HXYaCnrmFAQgJqqc4KNL-b30SEla9MMJlhvZwiCK0Qg,254 +onnx/backend/test/data/node/test_softplus/model.onnx,sha256=QwSuVk_2IYaooRgcjHxq5AL7aAK-PK0-WEkNLxI8dNk,107 
+onnx/backend/test/data/node/test_softplus/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_softplus/test_data_set_0/output_0.pb,sha256=rHJ50Q-HF9yqjDagvyD5Y-sN3gPdoFkLA5AoQ-arwCY,254 +onnx/backend/test/data/node/test_softplus_example/model.onnx,sha256=GUQ8w8gigz93BTvsCpYKeAWBJYvyvP7VPr7OAnMwM48,99 +onnx/backend/test/data/node/test_softplus_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_softplus_example/test_data_set_0/output_0.pb,sha256=hBr-1WJ2kvuYa9H8KU8JwzCmjoiJc3Hx2EEUrm1R7Gs,21 +onnx/backend/test/data/node/test_softplus_example_expanded_ver18/model.onnx,sha256=o8N5kGU49K5hQSpJktA_yOwPKSo3ryWMITSc61obz0s,653 +onnx/backend/test/data/node/test_softplus_example_expanded_ver18/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_softplus_example_expanded_ver18/test_data_set_0/output_0.pb,sha256=hBr-1WJ2kvuYa9H8KU8JwzCmjoiJc3Hx2EEUrm1R7Gs,21 +onnx/backend/test/data/node/test_softplus_expanded_ver18/model.onnx,sha256=oGQzrTvruMiAZAh8rh0ieWvmiBWmF62IBASvIx0Necs,596 +onnx/backend/test/data/node/test_softplus_expanded_ver18/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_softplus_expanded_ver18/test_data_set_0/output_0.pb,sha256=rHJ50Q-HF9yqjDagvyD5Y-sN3gPdoFkLA5AoQ-arwCY,254 +onnx/backend/test/data/node/test_softsign/model.onnx,sha256=7DK5AqrODujVO_bU07Pu-auaDdc_6zsGXFxNUFQHDVc,107 +onnx/backend/test/data/node/test_softsign/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_softsign/test_data_set_0/output_0.pb,sha256=C1uB1I9A1YtJLV8Uk5qi0Kt4Qp4JgvRtwFC7jgpvPv0,254 +onnx/backend/test/data/node/test_softsign_example/model.onnx,sha256=HDQ1Q1XdZYZF_7B2aRHkgOORJMZLtR6JxIKS6jct09Y,99 +onnx/backend/test/data/node/test_softsign_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_softsign_example/test_data_set_0/output_0.pb,sha256=0PjRkEThQLPQg-L9OX5IMgy3AKDBFVV19sCL_5lIw8s,21 +onnx/backend/test/data/node/test_softsign_example_expanded_ver18/model.onnx,sha256=ctkO0bzc-LtS7pGIvJ58OYyvGngyUVnUm6a7aLWgSek,661 +onnx/backend/test/data/node/test_softsign_example_expanded_ver18/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_softsign_example_expanded_ver18/test_data_set_0/output_0.pb,sha256=0PjRkEThQLPQg-L9OX5IMgy3AKDBFVV19sCL_5lIw8s,21 +onnx/backend/test/data/node/test_softsign_expanded_ver18/model.onnx,sha256=kzujxj_7QVmYB_slW_nCKpbiowBThOZZHxqqBqCE1CA,605 +onnx/backend/test/data/node/test_softsign_expanded_ver18/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_softsign_expanded_ver18/test_data_set_0/output_0.pb,sha256=C1uB1I9A1YtJLV8Uk5qi0Kt4Qp4JgvRtwFC7jgpvPv0,254 +onnx/backend/test/data/node/test_spacetodepth/model.onnx,sha256=5bhFh8fjQS6TNDdICtgmDCob_KnzRygkh-56CaGgYeg,141 +onnx/backend/test/data/node/test_spacetodepth/test_data_set_0/input_0.pb,sha256=RQwrUqEeNR-xpz-3gUYuK_U3iKOcH02SYQbl9v7xBq4,592 +onnx/backend/test/data/node/test_spacetodepth/test_data_set_0/output_0.pb,sha256=bCbPbnCmL7gHpAW36xdWiJsJAR-Edj6WLGvzeQv-Dvc,592 +onnx/backend/test/data/node/test_spacetodepth_example/model.onnx,sha256=COrE-uxJ_5EJlYTiRbmDlzVrB-abPgnEKwV2BRJrWxY,149 
+onnx/backend/test/data/node/test_spacetodepth_example/test_data_set_0/input_0.pb,sha256=vzHIoweipp3I3wmKCKgBriLjySIPGzTodMmImqlK_js,111 +onnx/backend/test/data/node/test_spacetodepth_example/test_data_set_0/output_0.pb,sha256=tit_KIMPTKvj4s3AmV4lFHs2di-xrcHDuk7J0GRVVDY,111 +onnx/backend/test/data/node/test_split_1d_uneven_split_opset18/model.onnx,sha256=PdguF3n01jJ2BWS57bgVhz0nvJ6lAKsshODlU8qXvfs,254 +onnx/backend/test/data/node/test_split_1d_uneven_split_opset18/test_data_set_0/input_0.pb,sha256=0MMLUhV-IwhwMRPXDWkkAOYhGXFio1ApRW7tIcwmzYg,41 +onnx/backend/test/data/node/test_split_1d_uneven_split_opset18/test_data_set_0/output_0.pb,sha256=PZBD8ZeddYVsOIQKh3QdgF1GWMmR7fqW0Vk4_-zjaME,24 +onnx/backend/test/data/node/test_split_1d_uneven_split_opset18/test_data_set_0/output_1.pb,sha256=ppWgsDUe535k0J7J7eHrM4nXbzwrP1d2HRDBbGokHYs,24 +onnx/backend/test/data/node/test_split_1d_uneven_split_opset18/test_data_set_0/output_2.pb,sha256=wHVtUBsyF8K-U_XpjGbmnest6VdUNcj5uOgMRhN-6y0,24 +onnx/backend/test/data/node/test_split_1d_uneven_split_opset18/test_data_set_0/output_3.pb,sha256=klI1oBl0uNJfSanTYax_iQ_y6jhdGIbAiyDMBPVj_6I,20 +onnx/backend/test/data/node/test_split_2d_uneven_split_opset18/model.onnx,sha256=DC3MsMOmKoYOIeox6oW-ea9I9xE8UTpk7NQVYsl8uAM,249 +onnx/backend/test/data/node/test_split_2d_uneven_split_opset18/test_data_set_0/input_0.pb,sha256=ne0IY_mmYi1Myo1AL7u3nGpHodjQpJbXmhu5LICcJMc,79 +onnx/backend/test/data/node/test_split_2d_uneven_split_opset18/test_data_set_0/output_0.pb,sha256=lZJx7RYf8c-urSeibZ8DnaW0kP5i2lQfQly2Fb2f-Fc,42 +onnx/backend/test/data/node/test_split_2d_uneven_split_opset18/test_data_set_0/output_1.pb,sha256=ZBvUdSiV_UXZTEDGHukB4UtnoIR11YOlHOXxleju8dY,42 +onnx/backend/test/data/node/test_split_2d_uneven_split_opset18/test_data_set_0/output_2.pb,sha256=4EcXbQUeqluX6Taji-9zMhQCwDm1QZdb3FXNmAuSAx0,34 +onnx/backend/test/data/node/test_split_equal_parts_1d_opset13/model.onnx,sha256=kq4EG6GOwMXdzGmK0_Cw1AJeLP3Bx6ZactogJKmm2vo,212 +onnx/backend/test/data/node/test_split_equal_parts_1d_opset13/test_data_set_0/input_0.pb,sha256=6-4ny9aa56B_H1JS86gEwXcZdWOABmTlWFY0-GRimGU,37 +onnx/backend/test/data/node/test_split_equal_parts_1d_opset13/test_data_set_0/output_0.pb,sha256=PZBD8ZeddYVsOIQKh3QdgF1GWMmR7fqW0Vk4_-zjaME,24 +onnx/backend/test/data/node/test_split_equal_parts_1d_opset13/test_data_set_0/output_1.pb,sha256=ppWgsDUe535k0J7J7eHrM4nXbzwrP1d2HRDBbGokHYs,24 +onnx/backend/test/data/node/test_split_equal_parts_1d_opset13/test_data_set_0/output_2.pb,sha256=wHVtUBsyF8K-U_XpjGbmnest6VdUNcj5uOgMRhN-6y0,24 +onnx/backend/test/data/node/test_split_equal_parts_1d_opset18/model.onnx,sha256=gHiamqUc_ThuSGkUE2fiCMc8DCfclBbFO12AYU7UKKM,232 +onnx/backend/test/data/node/test_split_equal_parts_1d_opset18/test_data_set_0/input_0.pb,sha256=6-4ny9aa56B_H1JS86gEwXcZdWOABmTlWFY0-GRimGU,37 +onnx/backend/test/data/node/test_split_equal_parts_1d_opset18/test_data_set_0/output_0.pb,sha256=PZBD8ZeddYVsOIQKh3QdgF1GWMmR7fqW0Vk4_-zjaME,24 +onnx/backend/test/data/node/test_split_equal_parts_1d_opset18/test_data_set_0/output_1.pb,sha256=ppWgsDUe535k0J7J7eHrM4nXbzwrP1d2HRDBbGokHYs,24 +onnx/backend/test/data/node/test_split_equal_parts_1d_opset18/test_data_set_0/output_2.pb,sha256=wHVtUBsyF8K-U_XpjGbmnest6VdUNcj5uOgMRhN-6y0,24 +onnx/backend/test/data/node/test_split_equal_parts_2d/model.onnx,sha256=cTCvN8fr_EQTkk1XgE2uYKCh1dxQLqFdGxc6IFkGeMo,202 +onnx/backend/test/data/node/test_split_equal_parts_2d/test_data_set_0/input_0.pb,sha256=wcsj07dMPwy_TgfeqO_LZn4LBOusGc6f2Toix53A0ks,63 
+onnx/backend/test/data/node/test_split_equal_parts_2d/test_data_set_0/output_0.pb,sha256=SRqisx8PBlmhEqo0lYWXWQ2JyCutfOy1SP3mfHqpUKA,42 +onnx/backend/test/data/node/test_split_equal_parts_2d/test_data_set_0/output_1.pb,sha256=4oCTYs32ZK3rT5vrT3FIds25XjGHsz8MwlbQkr_sEZY,42 +onnx/backend/test/data/node/test_split_equal_parts_2d_opset13/model.onnx,sha256=3Yo0dSUSNjV5uBK52USIf0OPPIeWkkVsryj52ISwMPk,190 +onnx/backend/test/data/node/test_split_equal_parts_2d_opset13/test_data_set_0/input_0.pb,sha256=wcsj07dMPwy_TgfeqO_LZn4LBOusGc6f2Toix53A0ks,63 +onnx/backend/test/data/node/test_split_equal_parts_2d_opset13/test_data_set_0/output_0.pb,sha256=SRqisx8PBlmhEqo0lYWXWQ2JyCutfOy1SP3mfHqpUKA,42 +onnx/backend/test/data/node/test_split_equal_parts_2d_opset13/test_data_set_0/output_1.pb,sha256=4oCTYs32ZK3rT5vrT3FIds25XjGHsz8MwlbQkr_sEZY,42 +onnx/backend/test/data/node/test_split_equal_parts_default_axis_opset13/model.onnx,sha256=jzvzdKnSNBTAOb6TiEs_N-f-g_tn-MALqiyObWJa-yI,209 +onnx/backend/test/data/node/test_split_equal_parts_default_axis_opset13/test_data_set_0/input_0.pb,sha256=6-4ny9aa56B_H1JS86gEwXcZdWOABmTlWFY0-GRimGU,37 +onnx/backend/test/data/node/test_split_equal_parts_default_axis_opset13/test_data_set_0/output_0.pb,sha256=PZBD8ZeddYVsOIQKh3QdgF1GWMmR7fqW0Vk4_-zjaME,24 +onnx/backend/test/data/node/test_split_equal_parts_default_axis_opset13/test_data_set_0/output_1.pb,sha256=ppWgsDUe535k0J7J7eHrM4nXbzwrP1d2HRDBbGokHYs,24 +onnx/backend/test/data/node/test_split_equal_parts_default_axis_opset13/test_data_set_0/output_2.pb,sha256=wHVtUBsyF8K-U_XpjGbmnest6VdUNcj5uOgMRhN-6y0,24 +onnx/backend/test/data/node/test_split_equal_parts_default_axis_opset18/model.onnx,sha256=TyUulY-Fid3BZkoAjctVoKT86f0zWauE3Yu-cMhNY5o,229 +onnx/backend/test/data/node/test_split_equal_parts_default_axis_opset18/test_data_set_0/input_0.pb,sha256=6-4ny9aa56B_H1JS86gEwXcZdWOABmTlWFY0-GRimGU,37 +onnx/backend/test/data/node/test_split_equal_parts_default_axis_opset18/test_data_set_0/output_0.pb,sha256=PZBD8ZeddYVsOIQKh3QdgF1GWMmR7fqW0Vk4_-zjaME,24 +onnx/backend/test/data/node/test_split_equal_parts_default_axis_opset18/test_data_set_0/output_1.pb,sha256=ppWgsDUe535k0J7J7eHrM4nXbzwrP1d2HRDBbGokHYs,24 +onnx/backend/test/data/node/test_split_equal_parts_default_axis_opset18/test_data_set_0/output_2.pb,sha256=wHVtUBsyF8K-U_XpjGbmnest6VdUNcj5uOgMRhN-6y0,24 +onnx/backend/test/data/node/test_split_to_sequence_1/model.onnx,sha256=GzaBCiipHJBNDLa7A-NAWzP-HIBl79qkQvswgMA5J0Q,159 +onnx/backend/test/data/node/test_split_to_sequence_1/test_data_set_0/input_0.pb,sha256=hohal__2jZZ7sW8FBCBj6k5KwzGvWUrrFo7_dmF8Zrk,86 +onnx/backend/test/data/node/test_split_to_sequence_1/test_data_set_0/input_1.pb,sha256=rRPJw_MP4GzZF9UqyUHod5oZNTZDCYD4LJ4iubKW3KY,19 +onnx/backend/test/data/node/test_split_to_sequence_1/test_data_set_0/output_0.pb,sha256=fV41UEmrLEzyFUfJvb60DaR7S_hdmzhe23IXBLiBtC0,109 +onnx/backend/test/data/node/test_split_to_sequence_2/model.onnx,sha256=DaCK6v5UOuPg7Y47KtsL8b9wFNObu-DITh8SeaWnbeU,163 +onnx/backend/test/data/node/test_split_to_sequence_2/test_data_set_0/input_0.pb,sha256=hohal__2jZZ7sW8FBCBj6k5KwzGvWUrrFo7_dmF8Zrk,86 +onnx/backend/test/data/node/test_split_to_sequence_2/test_data_set_0/input_1.pb,sha256=Z3UXDkJvBF4Hf1WnsQpa7ifhVWCdbGzMyb-BGdfxSfQ,29 +onnx/backend/test/data/node/test_split_to_sequence_2/test_data_set_0/output_0.pb,sha256=-QRNSvDp1Qn7lrM4ldwcAd5Crm7M6kfAs4DAU-muPJw,99 +onnx/backend/test/data/node/test_split_to_sequence_nokeepdims/model.onnx,sha256=otmU9iu6BnhwdACervDDqfKR7i208eAAua4s8xaUjUQ,161 
+onnx/backend/test/data/node/test_split_to_sequence_nokeepdims/test_data_set_0/input_0.pb,sha256=hohal__2jZZ7sW8FBCBj6k5KwzGvWUrrFo7_dmF8Zrk,86 +onnx/backend/test/data/node/test_split_to_sequence_nokeepdims/test_data_set_0/output_0.pb,sha256=08IDo97JLVHNcylBdWoEiBpsK_sehJED72UcixmIZic,127 +onnx/backend/test/data/node/test_split_variable_parts_1d_opset13/model.onnx,sha256=NoEZGVziDX2zoDtLOYRVuCTkdghM66-MlXVZWg6MGPg,209 +onnx/backend/test/data/node/test_split_variable_parts_1d_opset13/test_data_set_0/input_0.pb,sha256=6-4ny9aa56B_H1JS86gEwXcZdWOABmTlWFY0-GRimGU,37 +onnx/backend/test/data/node/test_split_variable_parts_1d_opset13/test_data_set_0/input_1.pb,sha256=UI8Xnn4y2HCHt4mE8KiqJl6XfIYNb-qx9UeaIBJ8cic,29 +onnx/backend/test/data/node/test_split_variable_parts_1d_opset13/test_data_set_0/output_0.pb,sha256=PZBD8ZeddYVsOIQKh3QdgF1GWMmR7fqW0Vk4_-zjaME,24 +onnx/backend/test/data/node/test_split_variable_parts_1d_opset13/test_data_set_0/output_1.pb,sha256=2Q8n5Jfg3-M-8gGJ4RH4-w6ZMopbqb-TkncpX-x_ZYg,32 +onnx/backend/test/data/node/test_split_variable_parts_1d_opset18/model.onnx,sha256=uAI7cQQ5yrWC4lV0L5RRIC9JgXCvkV80WyJ0EdzMTVk,209 +onnx/backend/test/data/node/test_split_variable_parts_1d_opset18/test_data_set_0/input_0.pb,sha256=6-4ny9aa56B_H1JS86gEwXcZdWOABmTlWFY0-GRimGU,37 +onnx/backend/test/data/node/test_split_variable_parts_1d_opset18/test_data_set_0/input_1.pb,sha256=UI8Xnn4y2HCHt4mE8KiqJl6XfIYNb-qx9UeaIBJ8cic,29 +onnx/backend/test/data/node/test_split_variable_parts_1d_opset18/test_data_set_0/output_0.pb,sha256=PZBD8ZeddYVsOIQKh3QdgF1GWMmR7fqW0Vk4_-zjaME,24 +onnx/backend/test/data/node/test_split_variable_parts_1d_opset18/test_data_set_0/output_1.pb,sha256=2Q8n5Jfg3-M-8gGJ4RH4-w6ZMopbqb-TkncpX-x_ZYg,32 +onnx/backend/test/data/node/test_split_variable_parts_2d_opset13/model.onnx,sha256=D-cYrAkJDR1UeR3zk5_ylK5WFx_WNOsJSS7_p44iOnk,221 +onnx/backend/test/data/node/test_split_variable_parts_2d_opset13/test_data_set_0/input_0.pb,sha256=wcsj07dMPwy_TgfeqO_LZn4LBOusGc6f2Toix53A0ks,63 +onnx/backend/test/data/node/test_split_variable_parts_2d_opset13/test_data_set_0/input_1.pb,sha256=UI8Xnn4y2HCHt4mE8KiqJl6XfIYNb-qx9UeaIBJ8cic,29 +onnx/backend/test/data/node/test_split_variable_parts_2d_opset13/test_data_set_0/output_0.pb,sha256=9pl6VjjiFVClVzpC4AhEkRQIBd1alqZxXzThCmDLhKk,34 +onnx/backend/test/data/node/test_split_variable_parts_2d_opset13/test_data_set_0/output_1.pb,sha256=_3ZLeX0bUaRISuadzex91jHCMjQPZefG5c9_ZZj4zvs,50 +onnx/backend/test/data/node/test_split_variable_parts_2d_opset18/model.onnx,sha256=tJv_THJn53mN0BM-XB4V5dxm3iQNPBLV2lUTneO6xjI,221 +onnx/backend/test/data/node/test_split_variable_parts_2d_opset18/test_data_set_0/input_0.pb,sha256=wcsj07dMPwy_TgfeqO_LZn4LBOusGc6f2Toix53A0ks,63 +onnx/backend/test/data/node/test_split_variable_parts_2d_opset18/test_data_set_0/input_1.pb,sha256=UI8Xnn4y2HCHt4mE8KiqJl6XfIYNb-qx9UeaIBJ8cic,29 +onnx/backend/test/data/node/test_split_variable_parts_2d_opset18/test_data_set_0/output_0.pb,sha256=9pl6VjjiFVClVzpC4AhEkRQIBd1alqZxXzThCmDLhKk,34 +onnx/backend/test/data/node/test_split_variable_parts_2d_opset18/test_data_set_0/output_1.pb,sha256=_3ZLeX0bUaRISuadzex91jHCMjQPZefG5c9_ZZj4zvs,50 +onnx/backend/test/data/node/test_split_variable_parts_default_axis_opset13/model.onnx,sha256=lW6SmYm30zfpsf_Rb_-6pX8weVSZdb5ZSl4JPbcfRvM,206 +onnx/backend/test/data/node/test_split_variable_parts_default_axis_opset13/test_data_set_0/input_0.pb,sha256=6-4ny9aa56B_H1JS86gEwXcZdWOABmTlWFY0-GRimGU,37 
+onnx/backend/test/data/node/test_split_variable_parts_default_axis_opset13/test_data_set_0/input_1.pb,sha256=UI8Xnn4y2HCHt4mE8KiqJl6XfIYNb-qx9UeaIBJ8cic,29 +onnx/backend/test/data/node/test_split_variable_parts_default_axis_opset13/test_data_set_0/output_0.pb,sha256=PZBD8ZeddYVsOIQKh3QdgF1GWMmR7fqW0Vk4_-zjaME,24 +onnx/backend/test/data/node/test_split_variable_parts_default_axis_opset13/test_data_set_0/output_1.pb,sha256=2Q8n5Jfg3-M-8gGJ4RH4-w6ZMopbqb-TkncpX-x_ZYg,32 +onnx/backend/test/data/node/test_split_variable_parts_default_axis_opset18/model.onnx,sha256=sTCSMCCXeFnUb6_1YMMPlvpkoMJJlliDBbSk1EfcSO0,206 +onnx/backend/test/data/node/test_split_variable_parts_default_axis_opset18/test_data_set_0/input_0.pb,sha256=6-4ny9aa56B_H1JS86gEwXcZdWOABmTlWFY0-GRimGU,37 +onnx/backend/test/data/node/test_split_variable_parts_default_axis_opset18/test_data_set_0/input_1.pb,sha256=UI8Xnn4y2HCHt4mE8KiqJl6XfIYNb-qx9UeaIBJ8cic,29 +onnx/backend/test/data/node/test_split_variable_parts_default_axis_opset18/test_data_set_0/output_0.pb,sha256=PZBD8ZeddYVsOIQKh3QdgF1GWMmR7fqW0Vk4_-zjaME,24 +onnx/backend/test/data/node/test_split_variable_parts_default_axis_opset18/test_data_set_0/output_1.pb,sha256=2Q8n5Jfg3-M-8gGJ4RH4-w6ZMopbqb-TkncpX-x_ZYg,32 +onnx/backend/test/data/node/test_split_zero_size_splits_opset13/model.onnx,sha256=iQn5gh1rElU3EDnVlW3xQKsWeTKUIPlexQG_eBrT1kw,229 +onnx/backend/test/data/node/test_split_zero_size_splits_opset13/test_data_set_0/input_0.pb,sha256=PNWgucFe5Tf1mGk98Ir_bEaOELmES3FAPZpKbskJO6M,13 +onnx/backend/test/data/node/test_split_zero_size_splits_opset13/test_data_set_0/input_1.pb,sha256=o-O8kmlClSSIjeKhfy88IVCUriMV6Wsi10eqicjIVHA,37 +onnx/backend/test/data/node/test_split_zero_size_splits_opset13/test_data_set_0/output_0.pb,sha256=4cN5_4IAh3yvrFKoOn7wqzTUZaeUlMF0Y2JzSlAev8U,16 +onnx/backend/test/data/node/test_split_zero_size_splits_opset13/test_data_set_0/output_1.pb,sha256=SgOFp0XrEtOKXGCO2yCwjM-mmrnGiG43OJeVKPVQSCI,16 +onnx/backend/test/data/node/test_split_zero_size_splits_opset13/test_data_set_0/output_2.pb,sha256=Gve-t8o-LviHnt0QBVUYwp7TJLd1j_sJZbadWj0t-go,16 +onnx/backend/test/data/node/test_split_zero_size_splits_opset18/model.onnx,sha256=o1DgL2iQ7ybNT8OokLTzkpyEUCcdiC0XnhnGiLqcOPk,229 +onnx/backend/test/data/node/test_split_zero_size_splits_opset18/test_data_set_0/input_0.pb,sha256=PNWgucFe5Tf1mGk98Ir_bEaOELmES3FAPZpKbskJO6M,13 +onnx/backend/test/data/node/test_split_zero_size_splits_opset18/test_data_set_0/input_1.pb,sha256=o-O8kmlClSSIjeKhfy88IVCUriMV6Wsi10eqicjIVHA,37 +onnx/backend/test/data/node/test_split_zero_size_splits_opset18/test_data_set_0/output_0.pb,sha256=4cN5_4IAh3yvrFKoOn7wqzTUZaeUlMF0Y2JzSlAev8U,16 +onnx/backend/test/data/node/test_split_zero_size_splits_opset18/test_data_set_0/output_1.pb,sha256=SgOFp0XrEtOKXGCO2yCwjM-mmrnGiG43OJeVKPVQSCI,16 +onnx/backend/test/data/node/test_split_zero_size_splits_opset18/test_data_set_0/output_2.pb,sha256=Gve-t8o-LviHnt0QBVUYwp7TJLd1j_sJZbadWj0t-go,16 +onnx/backend/test/data/node/test_sqrt/model.onnx,sha256=h4jfkNJWaUfcoSQGcqHwdY15o5OLRjVoCc-1b7dVwS0,99 +onnx/backend/test/data/node/test_sqrt/test_data_set_0/input_0.pb,sha256=bFw_k8kydnJjSxU8NNjUFVzKoPKQ6fGU_od8777JidY,254 +onnx/backend/test/data/node/test_sqrt/test_data_set_0/output_0.pb,sha256=As-Nz2MWaj1kh9fTPsNyeNwMUow9_A13pahFbHZVk-0,254 +onnx/backend/test/data/node/test_sqrt_example/model.onnx,sha256=7uQM7y_IGof70AbsYdJdLDmTWw0_7WDKZtPj4N_q8eI,91 
+onnx/backend/test/data/node/test_sqrt_example/test_data_set_0/input_0.pb,sha256=Nrf34J1rWfH_mvaflbCOOw7nNwxeOO-haem99-KYESk,21 +onnx/backend/test/data/node/test_sqrt_example/test_data_set_0/output_0.pb,sha256=KW9J7uTWmyfHYqYwYwZyv64IeAl6_iDrPha7DI-bhcU,21 +onnx/backend/test/data/node/test_squeeze/model.onnx,sha256=KopcnkezA307ReXnZafJ8O52QW2a36ERR15RZwNeesw,135 +onnx/backend/test/data/node/test_squeeze/test_data_set_0/input_0.pb,sha256=hFcc65NzF2jKbEvO_e8J60kUbf0yiM7kqrgc4fH_zcw,256 +onnx/backend/test/data/node/test_squeeze/test_data_set_0/input_1.pb,sha256=3ZIfNI5JADuUCnYUjr-VBN77qOrY7__k-MJ2cszFazI,20 +onnx/backend/test/data/node/test_squeeze/test_data_set_0/output_0.pb,sha256=4E7JZEL-6RPD2ZoGlbe4UZXnssgawDGOHg15V0RhX58,254 +onnx/backend/test/data/node/test_squeeze_negative_axes/model.onnx,sha256=t_WOrIaomOfSPwVs9pMuejtgTefEiOUl0FCXzoRPDHM,149 +onnx/backend/test/data/node/test_squeeze_negative_axes/test_data_set_0/input_0.pb,sha256=aB6aWPKjpjxoogC4OWA9AtqtA_neDA-penHUJfcxTCQ,75 +onnx/backend/test/data/node/test_squeeze_negative_axes/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_squeeze_negative_axes/test_data_set_0/output_0.pb,sha256=Nw5Lbu_KXM7uUzsNQJyxkH74rempglPVlAcohSxDoLE,73 +onnx/backend/test/data/node/test_stft/model.onnx,sha256=tsMRYdduSuEfzNmFvwSvdo4n6XHrwMgNfY1Hs93Kj8w,199 +onnx/backend/test/data/node/test_stft/test_data_set_0/input_0.pb,sha256=8AO0tqkf6LPSAI3rAdEMf4BpkfPwW4XMIcz9Mc_CFO4,532 +onnx/backend/test/data/node/test_stft/test_data_set_0/input_1.pb,sha256=QWZmtDzOdzs5-9Q_NiVq1dbgf4ogy53xglu2F4-18dg,24 +onnx/backend/test/data/node/test_stft/test_data_set_0/input_2.pb,sha256=U21-PeJEIhgtSvlY30evxCn0p-LdDUIhR1irDFEoFmQ,26 +onnx/backend/test/data/node/test_stft/test_data_set_0/output_0.pb,sha256=IACcg3anVjOxSt-5wInGQEIOFNYVEJfPZP9qEIJfEZ8,1101 +onnx/backend/test/data/node/test_stft_with_window/model.onnx,sha256=7Ix1wx_t7d6ZphvQEq7CqQjtXzzPcLO0xR8nGwNugWo,201 +onnx/backend/test/data/node/test_stft_with_window/test_data_set_0/input_0.pb,sha256=8AO0tqkf6LPSAI3rAdEMf4BpkfPwW4XMIcz9Mc_CFO4,532 +onnx/backend/test/data/node/test_stft_with_window/test_data_set_0/input_1.pb,sha256=QWZmtDzOdzs5-9Q_NiVq1dbgf4ogy53xglu2F4-18dg,24 +onnx/backend/test/data/node/test_stft_with_window/test_data_set_0/input_2.pb,sha256=rJQB-GYGcXBdFE7RHla7WdBEX6zCYovE_4wpQ52svCQ,78 +onnx/backend/test/data/node/test_stft_with_window/test_data_set_0/output_0.pb,sha256=Y_OFwqn8opxGjdeUZP9lwSBt6vQqbzLy8b2rFzgoVPQ,1101 +onnx/backend/test/data/node/test_string_concat/model.onnx,sha256=V4HVX0Apd0nG72Xf_ocY_e_cqH7n--O1h0l1C-lImfU,130 +onnx/backend/test/data/node/test_string_concat/test_data_set_0/input_0.pb,sha256=WAb6sWZFYKW1LIru7MSRLhhwzazGUBtNJGtEkhZZ2FU,17 +onnx/backend/test/data/node/test_string_concat/test_data_set_0/input_1.pb,sha256=C4UHcDF6qLIvX8lPPvnN1Cq1IJhj_X1S1Y_Oyh1Ollw,19 +onnx/backend/test/data/node/test_string_concat/test_data_set_0/output_0.pb,sha256=vtiaZwPMnnzcSkkFGBhpOoIFeLPJvPKSY-iKQcq0Cr8,30 +onnx/backend/test/data/node/test_string_concat_broadcasting/model.onnx,sha256=pIw0kVroIMJJPDux74zZptIqxRZZVBpgo-nHanU_TPU,143 +onnx/backend/test/data/node/test_string_concat_broadcasting/test_data_set_0/input_0.pb,sha256=g8umE9mK4nbafqeQqJPDNus3Mo9GKl5HzHLEV5CCKtc,24 +onnx/backend/test/data/node/test_string_concat_broadcasting/test_data_set_0/input_1.pb,sha256=61MXBNeu3kgkjUDe06Bj72XsTRXS15buo8aeEzpqVA0,10 
+onnx/backend/test/data/node/test_string_concat_broadcasting/test_data_set_0/output_0.pb,sha256=Q2Z8aHvRjq90Zxdee7q5C4y9FvozMzCmOjJGWFXqb5E,32 +onnx/backend/test/data/node/test_string_concat_empty_string/model.onnx,sha256=axrDOcNhjUfIlP3MCEpQJxGUx-5x4bPcku_DL_6QKro,143 +onnx/backend/test/data/node/test_string_concat_empty_string/test_data_set_0/input_0.pb,sha256=4bX3C0zVcBmpqqlnCwjiLvsLiAeGTQRh3b1vnEsLj1Y,14 +onnx/backend/test/data/node/test_string_concat_empty_string/test_data_set_0/input_1.pb,sha256=NNqhNABxE4ldnXiKr5xqCu0DlAF3PKrb_8wvFRw9ffM,14 +onnx/backend/test/data/node/test_string_concat_empty_string/test_data_set_0/output_0.pb,sha256=HzMKwLLMEbUHqVquy5UKq89wbBGtOGZuw_pvjU7xBZ0,22 +onnx/backend/test/data/node/test_string_concat_utf8/model.onnx,sha256=q91FvEO8NduK8DhbBQ2ucWQRkLlsnIA6T40JMJ2hvDs,135 +onnx/backend/test/data/node/test_string_concat_utf8/test_data_set_0/input_0.pb,sha256=32q4itbhVcHrg4jOFXv7YUevdgA4X2m1jCjxBAUKETI,17 +onnx/backend/test/data/node/test_string_concat_utf8/test_data_set_0/input_1.pb,sha256=4xLYBwA5WD6V1tuwDJGoIRikUycy9TPoV5CYMTOAxPw,17 +onnx/backend/test/data/node/test_string_concat_utf8/test_data_set_0/output_0.pb,sha256=OjvOO2BFyHWQn9fDXfLOQc4tnl5fC09D59QjxZ5zkcQ,28 +onnx/backend/test/data/node/test_string_concat_zero_dimensional/model.onnx,sha256=RWMbf8hdRMOGC7IjuBZ3ZUVjgtwV-6IfFQtmllUHWfg,135 +onnx/backend/test/data/node/test_string_concat_zero_dimensional/test_data_set_0/input_0.pb,sha256=lC7SLuieEtx5ZKUfw9AC88_BTYMdctctD2JpZE1DLwY,10 +onnx/backend/test/data/node/test_string_concat_zero_dimensional/test_data_set_0/input_1.pb,sha256=rCuW4N8-s8CeOag00vK3OQB2rSz9QguGP_b7a9be5v0,8 +onnx/backend/test/data/node/test_string_concat_zero_dimensional/test_data_set_0/output_0.pb,sha256=2YAt0AaY7vuMYckXy79wIx1jOemGjEF84gQNxfCMYRw,16 +onnx/backend/test/data/node/test_string_split_basic/model.onnx,sha256=trlfotzvOj2dbTRLeIX3CBB_dQZ1x6dqbWOWnmf9t90,176 +onnx/backend/test/data/node/test_string_split_basic/test_data_set_0/input_0.pb,sha256=piuT_LVlAUIMTWxojXsT_Iegr0UhW-nQZpBMCvcuzQM,25 +onnx/backend/test/data/node/test_string_split_basic/test_data_set_0/output_0.pb,sha256=eSxG7MXxNHAhhYp6qbv7zjuoDMqhqy2Aqn3PYViNaXw,38 +onnx/backend/test/data/node/test_string_split_basic/test_data_set_0/output_1.pb,sha256=tF5640dkxKgR-Mu-YhKv23-Dl-2shEt0mc3lwc_S068,30 +onnx/backend/test/data/node/test_string_split_consecutive_delimiters/model.onnx,sha256=XBLjqbffWXpSYMvyB5qJzo_WhbCaWMXEGK51r9DEdXw,193 +onnx/backend/test/data/node/test_string_split_consecutive_delimiters/test_data_set_0/input_0.pb,sha256=tQoYwhGu_kDGW2phfxV5xYWlsY9cjNw8GE-GDhVNjHU,29 +onnx/backend/test/data/node/test_string_split_consecutive_delimiters/test_data_set_0/output_0.pb,sha256=Gg8LH7Jg30rGSIH6Ajz793OMcHCAfpd5J8tc7y0Grkc,50 +onnx/backend/test/data/node/test_string_split_consecutive_delimiters/test_data_set_0/output_1.pb,sha256=HZrraho2OjK-YoIRraGoy3KzUH-5rB5Vxdg07GYC5Z0,30 +onnx/backend/test/data/node/test_string_split_empty_string_delimiter/model.onnx,sha256=Cz40MTTA_DE27jFTQO-MiaVZ3IRW5siOwnvwHO9z5Y0,192 +onnx/backend/test/data/node/test_string_split_empty_string_delimiter/test_data_set_0/input_0.pb,sha256=Lkk7Xliu4YQBMqmCNfiGw3e9NFq8dZPi5XuSK2HpB8M,60 +onnx/backend/test/data/node/test_string_split_empty_string_delimiter/test_data_set_0/output_0.pb,sha256=TlFjPdmg9QYAw87BGSTFgCxZ56k9y0jzst3jxqwq_Hs,69 +onnx/backend/test/data/node/test_string_split_empty_string_delimiter/test_data_set_0/output_1.pb,sha256=RKyl62yqTMrGNj1giVZZQoUAxGvRJ3UV35inlIw0Wwg,38 
+onnx/backend/test/data/node/test_string_split_empty_tensor/model.onnx,sha256=ZkpBEwpbKXTJaMH8jpaQ4-PCicjHOwYpPFaZS4cDoho,162 +onnx/backend/test/data/node/test_string_split_empty_tensor/test_data_set_0/input_0.pb,sha256=0vt_8lV5eLAUk7cNBRWVeZfQGWCKVZaoTRAsYoNVB8c,7 +onnx/backend/test/data/node/test_string_split_empty_tensor/test_data_set_0/output_0.pb,sha256=rinTKTofPPAwoN5DGMisAVXS3kv88t6-SA-YwOmuTgk,18 +onnx/backend/test/data/node/test_string_split_empty_tensor/test_data_set_0/output_1.pb,sha256=NJba867FCwoLhvj61B8qq1aF_0mhOxmm-HukkWIILY4,14 +onnx/backend/test/data/node/test_string_split_maxsplit/model.onnx,sha256=6tq06jJuJe-3W5da3INCxxR9RPw-VH9AAKoI1lt9RrU,189 +onnx/backend/test/data/node/test_string_split_maxsplit/test_data_set_0/input_0.pb,sha256=QuOpp74o_63oQzR3w5zvsGdSrZTZHaOSM_kNmCE5SaI,61 +onnx/backend/test/data/node/test_string_split_maxsplit/test_data_set_0/output_0.pb,sha256=-7Su4t0TNbvtTiix3ZU2WkBNgB0c-CMKVktl02CWEkk,83 +onnx/backend/test/data/node/test_string_split_maxsplit/test_data_set_0/output_1.pb,sha256=wFu0Vt3H3291L7GxgukDzFzNXDdTj1kqzQ-cmjfV-P0,48 +onnx/backend/test/data/node/test_string_split_no_delimiter/model.onnx,sha256=mF5gr4DOc9B2MnfXRKeyJgT6sVhaNmxbuxEWh-jxfBQ,164 +onnx/backend/test/data/node/test_string_split_no_delimiter/test_data_set_0/input_0.pb,sha256=Lkk7Xliu4YQBMqmCNfiGw3e9NFq8dZPi5XuSK2HpB8M,60 +onnx/backend/test/data/node/test_string_split_no_delimiter/test_data_set_0/output_0.pb,sha256=TlFjPdmg9QYAw87BGSTFgCxZ56k9y0jzst3jxqwq_Hs,69 +onnx/backend/test/data/node/test_string_split_no_delimiter/test_data_set_0/output_1.pb,sha256=RKyl62yqTMrGNj1giVZZQoUAxGvRJ3UV35inlIw0Wwg,38 +onnx/backend/test/data/node/test_strnormalizer_export_monday_casesensintive_lower/model.onnx,sha256=8WTCxTE83fSRwhNrP-Qh__Qmc-PmIIExxvbdUY4uyoQ,222 +onnx/backend/test/data/node/test_strnormalizer_export_monday_casesensintive_lower/test_data_set_0/input_0.pb,sha256=esXLitVtK2jXZ1_Lr-WQo0JzyzURptvL5Nk19vQXSr8,45 +onnx/backend/test/data/node/test_strnormalizer_export_monday_casesensintive_lower/test_data_set_0/output_0.pb,sha256=_-mGRZ7G9I9iAx2P9lMk3aHUawP2XKjO9hn2fpR6tpY,37 +onnx/backend/test/data/node/test_strnormalizer_export_monday_casesensintive_nochangecase/model.onnx,sha256=93DFSw1HvmrQmyGjSG4nm8Dck9Ucc3cf63YZN_NpC1M,197 +onnx/backend/test/data/node/test_strnormalizer_export_monday_casesensintive_nochangecase/test_data_set_0/input_0.pb,sha256=esXLitVtK2jXZ1_Lr-WQo0JzyzURptvL5Nk19vQXSr8,45 +onnx/backend/test/data/node/test_strnormalizer_export_monday_casesensintive_nochangecase/test_data_set_0/output_0.pb,sha256=_-mGRZ7G9I9iAx2P9lMk3aHUawP2XKjO9hn2fpR6tpY,37 +onnx/backend/test/data/node/test_strnormalizer_export_monday_casesensintive_upper/model.onnx,sha256=jaCqkB5G5s9lkylgpZSBESwA8IeKqqtEqPSOsC47w-o,222 +onnx/backend/test/data/node/test_strnormalizer_export_monday_casesensintive_upper/test_data_set_0/input_0.pb,sha256=esXLitVtK2jXZ1_Lr-WQo0JzyzURptvL5Nk19vQXSr8,45 +onnx/backend/test/data/node/test_strnormalizer_export_monday_casesensintive_upper/test_data_set_0/output_0.pb,sha256=7Zk-IeCaqHCfs5Z1zqHp5ERKMABu8eMRVVYNTMLdFc4,37 +onnx/backend/test/data/node/test_strnormalizer_export_monday_empty_output/model.onnx,sha256=UX3g4dMXYOc0tuakv3AsxS4QxTTRPeWmb9Khcidb7rQ,214 +onnx/backend/test/data/node/test_strnormalizer_export_monday_empty_output/test_data_set_0/input_0.pb,sha256=XGok6pzumeWYmW61YcE2gO49FZP4HhO5L7VQiaofooQ,23 +onnx/backend/test/data/node/test_strnormalizer_export_monday_empty_output/test_data_set_0/output_0.pb,sha256=fve41mqTI11I7kUof7Bey6Rtk7Sv23j8xV9xaJLPzQ0,9 
+onnx/backend/test/data/node/test_strnormalizer_export_monday_insensintive_upper_twodim/model.onnx,sha256=GHPpZvp0i0FgVto26sff1Ics_yUdMEKiSC-Adv9Q3UA,209 +onnx/backend/test/data/node/test_strnormalizer_export_monday_insensintive_upper_twodim/test_data_set_0/input_0.pb,sha256=5_crR6dG0l6PE3Zf0m0C1WePv2dsKuUUSeSBHD4-fx8,65 +onnx/backend/test/data/node/test_strnormalizer_export_monday_insensintive_upper_twodim/test_data_set_0/output_0.pb,sha256=etP-R5bCcemRt-RrzQK7MPjXOQwqbEBeZCcYma1Jb7M,49 +onnx/backend/test/data/node/test_strnormalizer_nostopwords_nochangecase/model.onnx,sha256=CxFs65zLw1qHXcsN6vxSznvl3RFdX2mzbTO-WQBm0UY,156 +onnx/backend/test/data/node/test_strnormalizer_nostopwords_nochangecase/test_data_set_0/input_0.pb,sha256=uBHq4M-aleAVs_ZnyQty-Yz85ya7wK6Dp7RjXk0bUZw,24 +onnx/backend/test/data/node/test_strnormalizer_nostopwords_nochangecase/test_data_set_0/output_0.pb,sha256=NhjGWjPVxmkR0f35ghfx6YQm3Is2Eue9LWCC-NaQqQY,24 +onnx/backend/test/data/node/test_sub/model.onnx,sha256=mP10ldWkDDKH8APo3PpHCAEwuvxrP5mwXhG9tfEPvVM,125 +onnx/backend/test/data/node/test_sub/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_sub/test_data_set_0/input_1.pb,sha256=qlmuuQNl3eLtxabKfNE6cbZHjYcj-DzNBr1M4rz7GSo,254 +onnx/backend/test/data/node/test_sub/test_data_set_0/output_0.pb,sha256=8dGJKtTismvzTYKNniWX7nI7kGv6E-yTz7tnd9fXnK4,254 +onnx/backend/test/data/node/test_sub_bcast/model.onnx,sha256=4wE4E0xFCLz9T8Z6v19gh5g9Q34MR-hWv5qLgNAbORs,123 +onnx/backend/test/data/node/test_sub_bcast/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_sub_bcast/test_data_set_0/input_1.pb,sha256=jYn9FLUzE9ETJcxFW48XqSehRlyePPjUoI-gtNyozBs,29 +onnx/backend/test/data/node/test_sub_bcast/test_data_set_0/output_0.pb,sha256=LyA5dyUqm2I4X1kd6ky4dvMue4RkuYrzpg9F7suqdhs,254 +onnx/backend/test/data/node/test_sub_example/model.onnx,sha256=0qCw4c6DH-9qPZRZFBEhJXzz0cuRYN_AA_kgjg_cB1Q,109 +onnx/backend/test/data/node/test_sub_example/test_data_set_0/input_0.pb,sha256=07NXD9M8ncP5OIBozL5eyrRmifhNqQKh-yXwmBAQ5_Y,21 +onnx/backend/test/data/node/test_sub_example/test_data_set_0/input_1.pb,sha256=ldaXLvrKYcVHRK_B5teZ_Wc4K33RQimHHQ1UI9lc1r4,21 +onnx/backend/test/data/node/test_sub_example/test_data_set_0/output_0.pb,sha256=PE0UGZkx639vsMtw-TNum1iHVl0LeudCk4ehTOso4a0,21 +onnx/backend/test/data/node/test_sub_uint8/model.onnx,sha256=dOOLhNUqQJJzBYvl2d_gr0tELW8JumtINUnPeTHZ5hM,131 +onnx/backend/test/data/node/test_sub_uint8/test_data_set_0/input_0.pb,sha256=rbapTNwdifJamIWMOKri6wDhWXBqZFmQI0E7KSh-ukM,73 +onnx/backend/test/data/node/test_sub_uint8/test_data_set_0/input_1.pb,sha256=Bhd20g540nbcCIDE7mLNKXv0V7lCD01cmdVOeXApWTc,73 +onnx/backend/test/data/node/test_sub_uint8/test_data_set_0/output_0.pb,sha256=SvCJ5U5_hcZH5kKG_KITYVPXfAFRfOEGcLDhyEo9uRA,73 +onnx/backend/test/data/node/test_sum_example/model.onnx,sha256=XZP8TSc2z90MlswCEnr5H06sUKLF0UvocBc-aNWaoqk,170 +onnx/backend/test/data/node/test_sum_example/test_data_set_0/input_0.pb,sha256=N_bAzbz_zIhMsxt_JXexH-iLaQmHJs7Ci6YiUFcbYqo,26 +onnx/backend/test/data/node/test_sum_example/test_data_set_0/input_1.pb,sha256=CT8EqMoWnZgfvIbZCTZCcy5yi-scc6HUEY-s5dz8uC4,26 +onnx/backend/test/data/node/test_sum_example/test_data_set_0/input_2.pb,sha256=_PLxlJ7xl9XIGa6nbKkonHq25eO5mYsALw5JQS7zyx8,26 +onnx/backend/test/data/node/test_sum_example/test_data_set_0/output_0.pb,sha256=uwkdb0lH_dEBq1A7Tmw4xNdVFOnbXYnaLhqJPa70el8,26 
+onnx/backend/test/data/node/test_sum_one_input/model.onnx,sha256=6UQdw-WAFYgRrvWemfXV-9CzuTjQfgyYujsTaGUVV4g,111 +onnx/backend/test/data/node/test_sum_one_input/test_data_set_0/input_0.pb,sha256=N_bAzbz_zIhMsxt_JXexH-iLaQmHJs7Ci6YiUFcbYqo,26 +onnx/backend/test/data/node/test_sum_one_input/test_data_set_0/output_0.pb,sha256=ZKhVafwrQna2mYQVSKmI2eivwdOVRllNuFJ0a3S4awM,26 +onnx/backend/test/data/node/test_sum_two_inputs/model.onnx,sha256=N8dWqhy-TnLXUw9HSex51GW0uGtetk3z_AiUh3gMZ2w,142 +onnx/backend/test/data/node/test_sum_two_inputs/test_data_set_0/input_0.pb,sha256=N_bAzbz_zIhMsxt_JXexH-iLaQmHJs7Ci6YiUFcbYqo,26 +onnx/backend/test/data/node/test_sum_two_inputs/test_data_set_0/input_1.pb,sha256=CT8EqMoWnZgfvIbZCTZCcy5yi-scc6HUEY-s5dz8uC4,26 +onnx/backend/test/data/node/test_sum_two_inputs/test_data_set_0/output_0.pb,sha256=TY8-bJ08UEwSu_EVLVl6Bwb7549mVnCWsDUHC5XHv88,26 +onnx/backend/test/data/node/test_tan/model.onnx,sha256=8y5lt8ZB9lM4FB4Iu6hdul54IA6-RTw12uBqGPN9ZKc,97 +onnx/backend/test/data/node/test_tan/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_tan/test_data_set_0/output_0.pb,sha256=oEAQ3ju_CX1kS_3kNbw_shRrFeHXrwbzXlxZ84IeXi8,254 +onnx/backend/test/data/node/test_tan_example/model.onnx,sha256=LzUncM0mcZ79YwBaqrgityWvvT3oDjk7wmtFqdQ2nhY,89 +onnx/backend/test/data/node/test_tan_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_tan_example/test_data_set_0/output_0.pb,sha256=fqcp5u1o4BVyNsOREf4iXsbMXZziCecBb7Q7w1dF_70,21 +onnx/backend/test/data/node/test_tanh/model.onnx,sha256=U3A5tfcs_Hp17L6AxqD4qHr26dvWD_WZ4Lixxk8hGlg,99 +onnx/backend/test/data/node/test_tanh/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_tanh/test_data_set_0/output_0.pb,sha256=9epV2ujx5MyrnOrUIjmF5O9si_CqNGqtoyDQ6aWIGFg,254 +onnx/backend/test/data/node/test_tanh_example/model.onnx,sha256=AoBuuazpOgUw3IGATd5Au8wqhcnxvtw5xMRcno4TH34,91 +onnx/backend/test/data/node/test_tanh_example/test_data_set_0/input_0.pb,sha256=RY7cC3PG2NU1nqWO6C7rbFiTMHWg1hK-N8IVnjp9UIU,21 +onnx/backend/test/data/node/test_tanh_example/test_data_set_0/output_0.pb,sha256=nFD1raZkukttNDNzXe2Qh0UKEk9cX0Ul4lLASfmYj9U,21 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_batch_onlybigrams_skip0/model.onnx,sha256=-EniRrPntw0dzQmhYCut6sPuBkfQU1xkIqQQLLoiTiw,323 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_batch_onlybigrams_skip0/test_data_set_0/input_0.pb,sha256=RuSeM5AAJoMD0_LUG3RLk0WKe_Yu3njy5AIvlpXBEGw,59 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_batch_onlybigrams_skip0/test_data_set_0/output_0.pb,sha256=5IdNsCA9je2XbErSYUmWdkfPsfxkwrUf7op6GaqXqf8,67 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_batch_onlybigrams_skip5/model.onnx,sha256=pS_36SRouNVGZo5He-ITKOBEKYJWADwNET22dbyuVI8,323 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_batch_onlybigrams_skip5/test_data_set_0/input_0.pb,sha256=RuSeM5AAJoMD0_LUG3RLk0WKe_Yu3njy5AIvlpXBEGw,59 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_batch_onlybigrams_skip5/test_data_set_0/output_0.pb,sha256=UD8F8mfp4Z-P7z4_poshzkhI1ONkSs8RWiioBZ6wXSs,67 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_batch_uniandbigrams_skip5/model.onnx,sha256=hwFqXbWQ3m9HhGM1RdqQIWwT-0ShxF1yZbX8begEDIc,325 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_batch_uniandbigrams_skip5/test_data_set_0/input_0.pb,sha256=RuSeM5AAJoMD0_LUG3RLk0WKe_Yu3njy5AIvlpXBEGw,59 
+onnx/backend/test/data/node/test_tfidfvectorizer_tf_batch_uniandbigrams_skip5/test_data_set_0/output_0.pb,sha256=lZon0vYsJAmO9YgPUKEXr6zIAZvJOXYSLvkymMrzx_Y,67 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_only_bigrams_skip0/model.onnx,sha256=GS8cEAj07bUmc0iwrHrR3BXlXmzJkKZG6iNwv9yORMo,310 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_only_bigrams_skip0/test_data_set_0/input_0.pb,sha256=YCc_Bptl6ovGeLNS8nNKpq9kZn61r24klmgDmte_FF8,57 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_only_bigrams_skip0/test_data_set_0/output_0.pb,sha256=OHQudUdGevZkYWHLjqghPTQjpE_rzikFq-mnC3u2Hkg,37 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_onlybigrams_levelempty/model.onnx,sha256=A2zNW-p1Kd0gFy1nlNxddLRwPfi25aBcGBN1s3tZgho,298 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_onlybigrams_levelempty/test_data_set_0/input_0.pb,sha256=YCc_Bptl6ovGeLNS8nNKpq9kZn61r24klmgDmte_FF8,57 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_onlybigrams_levelempty/test_data_set_0/output_0.pb,sha256=HqYz4t-IV0NRYVNdxt6tHgAr6cXkXZb9bhxw5eRZSG0,21 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_onlybigrams_skip5/model.onnx,sha256=9BzVp8R0xgTHrWa6iEpnJE71YDoMSeA2j-bwkNZB5tU,309 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_onlybigrams_skip5/test_data_set_0/input_0.pb,sha256=YCc_Bptl6ovGeLNS8nNKpq9kZn61r24klmgDmte_FF8,57 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_onlybigrams_skip5/test_data_set_0/output_0.pb,sha256=bWYlhOyhr89Gu8Egsb-EZCem3F2ITmQ2T-pAAMoBkSM,37 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_uniandbigrams_skip5/model.onnx,sha256=7pVbMq_QNlnT5GEO49GAtvrWW6sUFq-dbT3m0cYWSdo,311 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_uniandbigrams_skip5/test_data_set_0/input_0.pb,sha256=YCc_Bptl6ovGeLNS8nNKpq9kZn61r24klmgDmte_FF8,57 +onnx/backend/test/data/node/test_tfidfvectorizer_tf_uniandbigrams_skip5/test_data_set_0/output_0.pb,sha256=fWBlu9B5oyr0J1oSpwScOYEOm4wpBfIXOGcZDLMSEtc,37 +onnx/backend/test/data/node/test_thresholdedrelu/model.onnx,sha256=oTxyJkdK3p9Uf-aVTJl_ndNK0xwLrbJbZvSAeoOYiC0,138 +onnx/backend/test/data/node/test_thresholdedrelu/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_thresholdedrelu/test_data_set_0/output_0.pb,sha256=EYPDY0T-gVBpBycVsSv96EYC8I5OQKmOXlUd0PlqqTo,254 +onnx/backend/test/data/node/test_thresholdedrelu_default/model.onnx,sha256=TJw1TED0YtDseBnwHp5uw-5SK6LRCVznrmJSmE-UE9A,129 +onnx/backend/test/data/node/test_thresholdedrelu_default/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_thresholdedrelu_default/test_data_set_0/output_0.pb,sha256=R8JHO3yOt_f-HSe9W_6opD1vEVd0J_9IyfYvUqvYHb8,254 +onnx/backend/test/data/node/test_thresholdedrelu_default_expanded_ver18/model.onnx,sha256=Xjy0FUJJHKbF_mLVMfXrmoLzkCIs5SiRNXSJ0DQqdpw,993 +onnx/backend/test/data/node/test_thresholdedrelu_default_expanded_ver18/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_thresholdedrelu_default_expanded_ver18/test_data_set_0/output_0.pb,sha256=R8JHO3yOt_f-HSe9W_6opD1vEVd0J_9IyfYvUqvYHb8,254 +onnx/backend/test/data/node/test_thresholdedrelu_example/model.onnx,sha256=kJKpeX0TjKfz1KkxpNI4jpxjQ9yifzgn08Zttg8n7rI,130 +onnx/backend/test/data/node/test_thresholdedrelu_example/test_data_set_0/input_0.pb,sha256=GyqQxuM6edhbQcLuvJ7Ff2l8cqNvuhOCWT6tq6GrCxQ,29 
+onnx/backend/test/data/node/test_thresholdedrelu_example/test_data_set_0/output_0.pb,sha256=56tfAjuqWM64ZQOZoGZe2HownMu6FhctDhrCWEX-U3c,29 +onnx/backend/test/data/node/test_thresholdedrelu_example_expanded_ver18/model.onnx,sha256=dr6KzXPOEebHoFgYsZ2zIYjXMA2jz3p_M6Lk-0Ed8Uo,977 +onnx/backend/test/data/node/test_thresholdedrelu_example_expanded_ver18/test_data_set_0/input_0.pb,sha256=GyqQxuM6edhbQcLuvJ7Ff2l8cqNvuhOCWT6tq6GrCxQ,29 +onnx/backend/test/data/node/test_thresholdedrelu_example_expanded_ver18/test_data_set_0/output_0.pb,sha256=56tfAjuqWM64ZQOZoGZe2HownMu6FhctDhrCWEX-U3c,29 +onnx/backend/test/data/node/test_thresholdedrelu_expanded_ver18/model.onnx,sha256=2RT0LcAGiGKRmi90hz2kxYpH3hUoMaXxSG-aM6goZOk,905 +onnx/backend/test/data/node/test_thresholdedrelu_expanded_ver18/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_thresholdedrelu_expanded_ver18/test_data_set_0/output_0.pb,sha256=EYPDY0T-gVBpBycVsSv96EYC8I5OQKmOXlUd0PlqqTo,254 +onnx/backend/test/data/node/test_tile/model.onnx,sha256=QfHZNXzmqFwu78ZWpKbrIoairQG5_T80e91VExVm5nQ,127 +onnx/backend/test/data/node/test_tile/test_data_set_0/input_0.pb,sha256=gw969Zq2ZPC3oIK5VZvEvK3ygLM6eqRhSyV89xoSWa0,496 +onnx/backend/test/data/node/test_tile/test_data_set_0/input_1.pb,sha256=3hLt-sVia_TlsVYqobdpF_RmIXzm-x9MlyzSKruqYmM,41 +onnx/backend/test/data/node/test_tile/test_data_set_0/output_0.pb,sha256=Rs-OKCKQNoROZ4obyQdCkRgsW6jV-IFq2p2uGO-rcCQ,161297 +onnx/backend/test/data/node/test_tile_precomputed/model.onnx,sha256=QXGzsgDOILyxC9mjPlMFdY7_WGynokKvLHY_-gKC5U8,123 +onnx/backend/test/data/node/test_tile_precomputed/test_data_set_0/input_0.pb,sha256=0phUVbSY99aYNDvbb4I7ZwTF4_oqB7I9Teh_3LZTlfk,27 +onnx/backend/test/data/node/test_tile_precomputed/test_data_set_0/input_1.pb,sha256=8YdPn9kXj5gTtZWZFpSg86zsyytFp2VxVrzLf29WrDM,25 +onnx/backend/test/data/node/test_tile_precomputed/test_data_set_0/output_0.pb,sha256=UhTwBP5SsiJy8mBvMpnEyfL8TWSEzsn2xB-b29BSfRc,75 +onnx/backend/test/data/node/test_top_k/model.onnx,sha256=nBQnNpnRF182ToZriwV_zz4kksH_QSMuYLbQhTL1rL4,172 +onnx/backend/test/data/node/test_top_k/test_data_set_0/input_0.pb,sha256=XI00qhavYkKl_aHKei8a3IH_dsLyhI_Y57J38wFhkU0,59 +onnx/backend/test/data/node/test_top_k/test_data_set_0/input_1.pb,sha256=2s0pITsSkAcfbYtBMMznqlVPskqVWNigM8OehwDxlhU,17 +onnx/backend/test/data/node/test_top_k/test_data_set_0/output_0.pb,sha256=vzxrEwUR_AoNSo5nsdkq3fMRD-TWJIdQTFnoTF5HOE8,52 +onnx/backend/test/data/node/test_top_k/test_data_set_0/output_1.pb,sha256=Lq4Jo5XkOjWTnkaaHX6oxn_k4xXtVuYESe87xYE4-JM,89 +onnx/backend/test/data/node/test_top_k_negative_axis/model.onnx,sha256=KS5REJglVn3-UblpD99uxQCnR1zpyrSl2UEfs_9S3RI,195 +onnx/backend/test/data/node/test_top_k_negative_axis/test_data_set_0/input_0.pb,sha256=XI00qhavYkKl_aHKei8a3IH_dsLyhI_Y57J38wFhkU0,59 +onnx/backend/test/data/node/test_top_k_negative_axis/test_data_set_0/input_1.pb,sha256=2s0pITsSkAcfbYtBMMznqlVPskqVWNigM8OehwDxlhU,17 +onnx/backend/test/data/node/test_top_k_negative_axis/test_data_set_0/output_0.pb,sha256=vzxrEwUR_AoNSo5nsdkq3fMRD-TWJIdQTFnoTF5HOE8,52 +onnx/backend/test/data/node/test_top_k_negative_axis/test_data_set_0/output_1.pb,sha256=Lq4Jo5XkOjWTnkaaHX6oxn_k4xXtVuYESe87xYE4-JM,89 +onnx/backend/test/data/node/test_top_k_smallest/model.onnx,sha256=zsfOUFk4VRFbwL0MPCYpnGe0KCdqnMT_jf7Vc3vf2m4,212 +onnx/backend/test/data/node/test_top_k_smallest/test_data_set_0/input_0.pb,sha256=ryRK_hmxESRdBORPiEQGCBWqPscZuVkuLVdq4UiTkIk,59 
+onnx/backend/test/data/node/test_top_k_smallest/test_data_set_0/input_1.pb,sha256=2s0pITsSkAcfbYtBMMznqlVPskqVWNigM8OehwDxlhU,17 +onnx/backend/test/data/node/test_top_k_smallest/test_data_set_0/output_0.pb,sha256=6NbpYxWiqeXH1aQ5Qsdxf5e4Thh3nDRzyF8nMxMU-v4,52 +onnx/backend/test/data/node/test_top_k_smallest/test_data_set_0/output_1.pb,sha256=8o2XaU6qQNbXjIpJgqXwjpFgTIBjtdHfok7ylx2w6Nc,89 +onnx/backend/test/data/node/test_training_dropout/model.onnx,sha256=_U1yAo0mT0ZiMrGogrUSlW9iqFgzt3A_NHvtD3RsdvY,160 +onnx/backend/test/data/node/test_training_dropout/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_training_dropout/test_data_set_0/input_1.pb,sha256=Ihb6BRVhibKnewisCvIZ4IdGYIG8RISUnIQOFl1wHSo,11 +onnx/backend/test/data/node/test_training_dropout/test_data_set_0/input_2.pb,sha256=ykb35DzfMoMhKwpLutzmEnZcfmNm5ILYorWGgJ6Wk34,8 +onnx/backend/test/data/node/test_training_dropout/test_data_set_0/output_0.pb,sha256=CmBjvEq_A4VxiIqoeJA9geeo6q067Zraon47UAQZysA,254 +onnx/backend/test/data/node/test_training_dropout_default/model.onnx,sha256=E8V8aUIcRgk2Jzy1WECbVjULWmOL0MyNnMS-oeliSSs,168 +onnx/backend/test/data/node/test_training_dropout_default/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_training_dropout_default/test_data_set_0/input_1.pb,sha256=x_s2_p7rxoVX_GfqD2NvSwHUYcx5i61p2DlsgKolOJE,11 +onnx/backend/test/data/node/test_training_dropout_default/test_data_set_0/input_2.pb,sha256=ykb35DzfMoMhKwpLutzmEnZcfmNm5ILYorWGgJ6Wk34,8 +onnx/backend/test/data/node/test_training_dropout_default/test_data_set_0/output_0.pb,sha256=6RLLCaWhovCKRwj2lZPTEcsCXAXwpny9iHnJWVbZlqY,254 +onnx/backend/test/data/node/test_training_dropout_default_mask/model.onnx,sha256=C7Pp6QXLnuc9iCfBtAl3NoTgCa6B2sUoc5ybkyV1ovc,201 +onnx/backend/test/data/node/test_training_dropout_default_mask/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_training_dropout_default_mask/test_data_set_0/input_1.pb,sha256=x_s2_p7rxoVX_GfqD2NvSwHUYcx5i61p2DlsgKolOJE,11 +onnx/backend/test/data/node/test_training_dropout_default_mask/test_data_set_0/input_2.pb,sha256=ykb35DzfMoMhKwpLutzmEnZcfmNm5ILYorWGgJ6Wk34,8 +onnx/backend/test/data/node/test_training_dropout_default_mask/test_data_set_0/output_0.pb,sha256=6RLLCaWhovCKRwj2lZPTEcsCXAXwpny9iHnJWVbZlqY,254 +onnx/backend/test/data/node/test_training_dropout_default_mask/test_data_set_0/output_1.pb,sha256=U7w4KTIRqkX6TMfa8mTB9l88U4Byw-Gxr5-BTRWGzC0,73 +onnx/backend/test/data/node/test_training_dropout_mask/model.onnx,sha256=qrU_n6FT3SwpNeSO7-h52DxoUtxc4KhMKHWsPonexbc,193 +onnx/backend/test/data/node/test_training_dropout_mask/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_training_dropout_mask/test_data_set_0/input_1.pb,sha256=Ihb6BRVhibKnewisCvIZ4IdGYIG8RISUnIQOFl1wHSo,11 +onnx/backend/test/data/node/test_training_dropout_mask/test_data_set_0/input_2.pb,sha256=ykb35DzfMoMhKwpLutzmEnZcfmNm5ILYorWGgJ6Wk34,8 +onnx/backend/test/data/node/test_training_dropout_mask/test_data_set_0/output_0.pb,sha256=CmBjvEq_A4VxiIqoeJA9geeo6q067Zraon47UAQZysA,254 +onnx/backend/test/data/node/test_training_dropout_mask/test_data_set_0/output_1.pb,sha256=rPswqfqePkmopvWjUY6xwIBzHLn8yFTlIEaC7zi_1ws,73 +onnx/backend/test/data/node/test_training_dropout_zero_ratio/model.onnx,sha256=RL10VHLor0PqHsgjfcEnElK4hn5Oms5B8ryo27O6XNw,171 
+onnx/backend/test/data/node/test_training_dropout_zero_ratio/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_training_dropout_zero_ratio/test_data_set_0/input_1.pb,sha256=hZowRFBsf_dlyYrjzzS179SSCrfk16UaslAIJqe0JkA,11 +onnx/backend/test/data/node/test_training_dropout_zero_ratio/test_data_set_0/input_2.pb,sha256=ykb35DzfMoMhKwpLutzmEnZcfmNm5ILYorWGgJ6Wk34,8 +onnx/backend/test/data/node/test_training_dropout_zero_ratio/test_data_set_0/output_0.pb,sha256=4E7JZEL-6RPD2ZoGlbe4UZXnssgawDGOHg15V0RhX58,254 +onnx/backend/test/data/node/test_training_dropout_zero_ratio_mask/model.onnx,sha256=NZ9lNocdnX98wzwAEj_tUsTIqPcSaDYXQeogAy4QxSs,204 +onnx/backend/test/data/node/test_training_dropout_zero_ratio_mask/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_training_dropout_zero_ratio_mask/test_data_set_0/input_1.pb,sha256=hZowRFBsf_dlyYrjzzS179SSCrfk16UaslAIJqe0JkA,11 +onnx/backend/test/data/node/test_training_dropout_zero_ratio_mask/test_data_set_0/input_2.pb,sha256=ykb35DzfMoMhKwpLutzmEnZcfmNm5ILYorWGgJ6Wk34,8 +onnx/backend/test/data/node/test_training_dropout_zero_ratio_mask/test_data_set_0/output_0.pb,sha256=4E7JZEL-6RPD2ZoGlbe4UZXnssgawDGOHg15V0RhX58,254 +onnx/backend/test/data/node/test_training_dropout_zero_ratio_mask/test_data_set_0/output_1.pb,sha256=4sWzAHgaDiZSCGEr949BKLl9dNfAyqyQpzL1ccqJJ2M,73 +onnx/backend/test/data/node/test_transpose_all_permutations_0/model.onnx,sha256=PbOdd5XMUpUEmPDVeBqmRTmhPoow9RbyotyhGjZ2zOQ,170 +onnx/backend/test/data/node/test_transpose_all_permutations_0/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_transpose_all_permutations_0/test_data_set_0/output_0.pb,sha256=8m48HiFPwfnlF03aRhdOZsTuw4GtLKS-6Q7Kdu2CTHY,118 +onnx/backend/test/data/node/test_transpose_all_permutations_1/model.onnx,sha256=oHirpmpdI5DNR9mYQlXCoeQv9W7xM9z-k1yUpaaHO_s,170 +onnx/backend/test/data/node/test_transpose_all_permutations_1/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_transpose_all_permutations_1/test_data_set_0/output_0.pb,sha256=KdRdeEJ8zlT8RRzybOAlbdCF6M66iCYf1ZXc1lZw7QY,118 +onnx/backend/test/data/node/test_transpose_all_permutations_2/model.onnx,sha256=zvSFuJ0z3UkyGeP696wtk13GtOGe2VLMzbv47alXgUU,170 +onnx/backend/test/data/node/test_transpose_all_permutations_2/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_transpose_all_permutations_2/test_data_set_0/output_0.pb,sha256=EDwdkyw_W6NnH7UTmY_FwWB6vJ2zZNEfhKTkro6t2mc,118 +onnx/backend/test/data/node/test_transpose_all_permutations_3/model.onnx,sha256=L8Yg2WJpvPi12XS8IF3ODzDUOTdPPXfb02JAY3rXQkc,170 +onnx/backend/test/data/node/test_transpose_all_permutations_3/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_transpose_all_permutations_3/test_data_set_0/output_0.pb,sha256=hBbLAZf6HxZc9v80VT82V1DlS_5sknpc9ejUOibhPDs,118 +onnx/backend/test/data/node/test_transpose_all_permutations_4/model.onnx,sha256=XUtH21xUz91Qh7pqmU91RqmwgExVRzXpnCzNwFiRWvQ,170 +onnx/backend/test/data/node/test_transpose_all_permutations_4/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_transpose_all_permutations_4/test_data_set_0/output_0.pb,sha256=eFo6pA8Z_W1f_lj5SuJSWG5TGGlFZaNO3jKaR7gq47o,118 
+onnx/backend/test/data/node/test_transpose_all_permutations_5/model.onnx,sha256=iVDU5k05sTABx4Ns6sXNYPAjupP9wk_24k4d4yqFekA,170 +onnx/backend/test/data/node/test_transpose_all_permutations_5/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_transpose_all_permutations_5/test_data_set_0/output_0.pb,sha256=cHZA22uz70nv-VGa4Pz3DDZlMtCkO7A42XLP232Va5w,118 +onnx/backend/test/data/node/test_transpose_default/model.onnx,sha256=H2sK9k0mVGss1ux_LNEB-GiM8XZn4uuNX6OaKGcqsi8,141 +onnx/backend/test/data/node/test_transpose_default/test_data_set_0/input_0.pb,sha256=Ra5qJ0bmZk4Mtqoj9EhsgTyEoqrmqYk679L-Ki7OJPI,112 +onnx/backend/test/data/node/test_transpose_default/test_data_set_0/output_0.pb,sha256=cHZA22uz70nv-VGa4Pz3DDZlMtCkO7A42XLP232Va5w,118 +onnx/backend/test/data/node/test_tril/model.onnx,sha256=lhJDHmrNmmQmM3IE2vMEwWyxqjuE6E-lkEW1eqAnFRI,106 +onnx/backend/test/data/node/test_tril/test_data_set_0/input_0.pb,sha256=jDVc0JhPtInMnx5F5UakieVJD3xICkyoLxm81-hMvaA,172 +onnx/backend/test/data/node/test_tril/test_data_set_0/output_0.pb,sha256=0yOBOAhKCbt38Vyz7y8JwUrBoiwNlUWPKG9XTvmkyRE,172 +onnx/backend/test/data/node/test_tril_neg/model.onnx,sha256=lGWNM5bmJR0DyK5sZonj8a3CqcI9iZV8cABSDM87kaE,126 +onnx/backend/test/data/node/test_tril_neg/test_data_set_0/input_0.pb,sha256=jDVc0JhPtInMnx5F5UakieVJD3xICkyoLxm81-hMvaA,172 +onnx/backend/test/data/node/test_tril_neg/test_data_set_0/input_1.pb,sha256=i0ZHP5YqdC9NiwoL_Bm7HypdONtt3SVFXYuFeG3mUy4,15 +onnx/backend/test/data/node/test_tril_neg/test_data_set_0/output_0.pb,sha256=kYHROrSW_24f7h99TB3GfxE41l-6Pa-_xhnWeLyG9YE,172 +onnx/backend/test/data/node/test_tril_one_row_neg/model.onnx,sha256=yt6xj_XnWkJPKqkFDcbF8Dy6aRmVtXShqsMdd8gFF0Y,126 +onnx/backend/test/data/node/test_tril_one_row_neg/test_data_set_0/input_0.pb,sha256=ScwWPKBfy94Tzyt92uIxTxzOec9LWF6m_ox8cdpH3R0,133 +onnx/backend/test/data/node/test_tril_one_row_neg/test_data_set_0/output_0.pb,sha256=uQ0LME1uBl6FjFBG7-4pUsoQdykXVZdtHnbP4AclmWg,133 +onnx/backend/test/data/node/test_tril_out_neg/model.onnx,sha256=Pw6kQJ6Wxc_IG-6C_HZOswSAEUCLz4cgmPit3sMyTPc,130 +onnx/backend/test/data/node/test_tril_out_neg/test_data_set_0/input_0.pb,sha256=jDVc0JhPtInMnx5F5UakieVJD3xICkyoLxm81-hMvaA,172 +onnx/backend/test/data/node/test_tril_out_neg/test_data_set_0/input_1.pb,sha256=Tt-kDFMTEYFiH-Yfmgak7F401t7I7nrv4UpeGtdKD5M,15 +onnx/backend/test/data/node/test_tril_out_neg/test_data_set_0/output_0.pb,sha256=QfgazYHaPKrLW8AspgXsm4Kbyrfo3VZkoC7P73D11R8,172 +onnx/backend/test/data/node/test_tril_out_pos/model.onnx,sha256=z5gAacj6JzJo5-QaSZACl3WTeDFo68Cd9-yNVv1aTz8,130 +onnx/backend/test/data/node/test_tril_out_pos/test_data_set_0/input_0.pb,sha256=jDVc0JhPtInMnx5F5UakieVJD3xICkyoLxm81-hMvaA,172 +onnx/backend/test/data/node/test_tril_out_pos/test_data_set_0/input_1.pb,sha256=9bqFzalAARJbjjUBHRhuduhGUfbMRCaZbI2Jr8ayV7g,15 +onnx/backend/test/data/node/test_tril_out_pos/test_data_set_0/output_0.pb,sha256=3PmW_Dd84mJIfxdwLn1HgG7o8YwHhSGboTBaruGHyAQ,172 +onnx/backend/test/data/node/test_tril_pos/model.onnx,sha256=qtHANJJBn7vNMrkVc8B8nzEJObPVNtWtS0T4nFRFyIE,126 +onnx/backend/test/data/node/test_tril_pos/test_data_set_0/input_0.pb,sha256=jDVc0JhPtInMnx5F5UakieVJD3xICkyoLxm81-hMvaA,172 +onnx/backend/test/data/node/test_tril_pos/test_data_set_0/input_1.pb,sha256=gW2ZrFQv3ahq6TbMYWhXMbs0jzUdce0TXUK4945SMm4,15 +onnx/backend/test/data/node/test_tril_pos/test_data_set_0/output_0.pb,sha256=sstFogvuO6IXlRUwDjX2czo9TEKLz4Yv-y98cszv0so,172 
+onnx/backend/test/data/node/test_tril_square/model.onnx,sha256=TXbYCWCohDH4w3nd77x_vjeOgCIFKiuxBaV2bqvCA3g,121 +onnx/backend/test/data/node/test_tril_square/test_data_set_0/input_0.pb,sha256=1lg6-XA3blxfCn0jtzXBITxdSjycH7mbLTN9GItwNHE,158 +onnx/backend/test/data/node/test_tril_square/test_data_set_0/output_0.pb,sha256=vdj5m3p8LBerTUcy8c8wUXgkKaCdstCfJyVpgzNKUog,158 +onnx/backend/test/data/node/test_tril_square_neg/model.onnx,sha256=qu1dUTCnaTWKK-fri2UpZv3Zz7j8oLpaZozxXiBpbr0,141 +onnx/backend/test/data/node/test_tril_square_neg/test_data_set_0/input_0.pb,sha256=1lg6-XA3blxfCn0jtzXBITxdSjycH7mbLTN9GItwNHE,158 +onnx/backend/test/data/node/test_tril_square_neg/test_data_set_0/input_1.pb,sha256=i0ZHP5YqdC9NiwoL_Bm7HypdONtt3SVFXYuFeG3mUy4,15 +onnx/backend/test/data/node/test_tril_square_neg/test_data_set_0/output_0.pb,sha256=RZf7iUnfN42u5ktYpTiiDwOXwdrhCQtT13GxJQZQH38,158 +onnx/backend/test/data/node/test_tril_zero/model.onnx,sha256=hnZmDv5ZOJztbmhw2IDOo1gFvL60AeMqWhl8UVjKTQw,135 +onnx/backend/test/data/node/test_tril_zero/test_data_set_0/input_0.pb,sha256=Kapx_xKdYnTZDHE0gZW2f4mFu_JoDYcW68ayR-ev2dA,13 +onnx/backend/test/data/node/test_tril_zero/test_data_set_0/input_1.pb,sha256=9bqFzalAARJbjjUBHRhuduhGUfbMRCaZbI2Jr8ayV7g,15 +onnx/backend/test/data/node/test_tril_zero/test_data_set_0/output_0.pb,sha256=y0-YZEpnT8AdgNRD6Us4QuB-FZaplytFQlXV8jEkrKc,13 +onnx/backend/test/data/node/test_triu/model.onnx,sha256=gpkyt4sDW1kzNziEz7gV68zyVAgo_57JweFnE0kJnDI,92 +onnx/backend/test/data/node/test_triu/test_data_set_0/input_0.pb,sha256=jDVc0JhPtInMnx5F5UakieVJD3xICkyoLxm81-hMvaA,172 +onnx/backend/test/data/node/test_triu/test_data_set_0/output_0.pb,sha256=-RYXmZuISX8RTZVbK_02g9AoUJ6v0GuQ4IE_4_KEBFE,172 +onnx/backend/test/data/node/test_triu_neg/model.onnx,sha256=eD2I1le73tiWp5Fg--vV_u_NXKv1sNEwOcXfl_um_rA,112 +onnx/backend/test/data/node/test_triu_neg/test_data_set_0/input_0.pb,sha256=jDVc0JhPtInMnx5F5UakieVJD3xICkyoLxm81-hMvaA,172 +onnx/backend/test/data/node/test_triu_neg/test_data_set_0/input_1.pb,sha256=i0ZHP5YqdC9NiwoL_Bm7HypdONtt3SVFXYuFeG3mUy4,15 +onnx/backend/test/data/node/test_triu_neg/test_data_set_0/output_0.pb,sha256=7V1XOQLF1dNocafYdMS-s2KYghNF3sRoYBRbeWR_vxs,172 +onnx/backend/test/data/node/test_triu_one_row/model.onnx,sha256=6_MwfrUOU_ZKMO0MhSE90ZuX3M--xqjNrXv1CSgHeIU,124 +onnx/backend/test/data/node/test_triu_one_row/test_data_set_0/input_0.pb,sha256=ScwWPKBfy94Tzyt92uIxTxzOec9LWF6m_ox8cdpH3R0,133 +onnx/backend/test/data/node/test_triu_one_row/test_data_set_0/input_1.pb,sha256=5dmgjxbBKzMl-btFlxmcAdeSrI3mggZSyGhb128Fw0c,15 +onnx/backend/test/data/node/test_triu_one_row/test_data_set_0/output_0.pb,sha256=9bTgow1Cm656P2OpenExYLW1EomHftqLJulgaXYdf6o,133 +onnx/backend/test/data/node/test_triu_out_neg_out/model.onnx,sha256=XXpDfKfYM1Qp1XZDKqXainml-cAj2U768AmLeMnWfWU,120 +onnx/backend/test/data/node/test_triu_out_neg_out/test_data_set_0/input_0.pb,sha256=jDVc0JhPtInMnx5F5UakieVJD3xICkyoLxm81-hMvaA,172 +onnx/backend/test/data/node/test_triu_out_neg_out/test_data_set_0/input_1.pb,sha256=Tt-kDFMTEYFiH-Yfmgak7F401t7I7nrv4UpeGtdKD5M,15 +onnx/backend/test/data/node/test_triu_out_neg_out/test_data_set_0/output_0.pb,sha256=3PmW_Dd84mJIfxdwLn1HgG7o8YwHhSGboTBaruGHyAQ,172 +onnx/backend/test/data/node/test_triu_out_pos/model.onnx,sha256=uazDxppu435fjfwPfPxeEa8weRtNved_HuLgy1SpxwE,116 +onnx/backend/test/data/node/test_triu_out_pos/test_data_set_0/input_0.pb,sha256=jDVc0JhPtInMnx5F5UakieVJD3xICkyoLxm81-hMvaA,172 
+onnx/backend/test/data/node/test_triu_out_pos/test_data_set_0/input_1.pb,sha256=9bqFzalAARJbjjUBHRhuduhGUfbMRCaZbI2Jr8ayV7g,15 +onnx/backend/test/data/node/test_triu_out_pos/test_data_set_0/output_0.pb,sha256=QfgazYHaPKrLW8AspgXsm4Kbyrfo3VZkoC7P73D11R8,172 +onnx/backend/test/data/node/test_triu_pos/model.onnx,sha256=tFHTxvcfOmNdZ7cbt8HE-a2UBj2mqMyMGE0Tb2udaEY,112 +onnx/backend/test/data/node/test_triu_pos/test_data_set_0/input_0.pb,sha256=jDVc0JhPtInMnx5F5UakieVJD3xICkyoLxm81-hMvaA,172 +onnx/backend/test/data/node/test_triu_pos/test_data_set_0/input_1.pb,sha256=gW2ZrFQv3ahq6TbMYWhXMbs0jzUdce0TXUK4945SMm4,15 +onnx/backend/test/data/node/test_triu_pos/test_data_set_0/output_0.pb,sha256=SLp3CwNp4vx0w13204jSX4McKeiz802CKiITnHiyldU,172 +onnx/backend/test/data/node/test_triu_square/model.onnx,sha256=RFsaV-Vew2pD0XJ9FHjbS3oYY9LIUlCVDi8Cu5ImGsA,107 +onnx/backend/test/data/node/test_triu_square/test_data_set_0/input_0.pb,sha256=1lg6-XA3blxfCn0jtzXBITxdSjycH7mbLTN9GItwNHE,158 +onnx/backend/test/data/node/test_triu_square/test_data_set_0/output_0.pb,sha256=36AMhLCF7SadJX9HRMgzRid74z5gEDPfF0pMKmZOu_A,158 +onnx/backend/test/data/node/test_triu_square_neg/model.onnx,sha256=zRGX45x3GZWKvvdWcydY3usiY8y3zqHKJ0mGaC88ifc,127 +onnx/backend/test/data/node/test_triu_square_neg/test_data_set_0/input_0.pb,sha256=1lg6-XA3blxfCn0jtzXBITxdSjycH7mbLTN9GItwNHE,158 +onnx/backend/test/data/node/test_triu_square_neg/test_data_set_0/input_1.pb,sha256=i0ZHP5YqdC9NiwoL_Bm7HypdONtt3SVFXYuFeG3mUy4,15 +onnx/backend/test/data/node/test_triu_square_neg/test_data_set_0/output_0.pb,sha256=bbzJaIibrNsk9884uaPj87_IBzuFWPmqc44QkKcbBQ4,158 +onnx/backend/test/data/node/test_triu_zero/model.onnx,sha256=iGyvccjcRHA99U1iqg8YmoSstOgVlB8MDgGfmArNLUk,113 +onnx/backend/test/data/node/test_triu_zero/test_data_set_0/input_0.pb,sha256=Q4GTabNiDzUJ2uTk3sAfLoeeFVE3N4nsJcyCSfj7r0A,11 +onnx/backend/test/data/node/test_triu_zero/test_data_set_0/input_1.pb,sha256=9bqFzalAARJbjjUBHRhuduhGUfbMRCaZbI2Jr8ayV7g,15 +onnx/backend/test/data/node/test_triu_zero/test_data_set_0/output_0.pb,sha256=Ofi3T4wcGmuiSbXYfYn0XSNmCZg-m6mMiCUYdRLjTyA,11 +onnx/backend/test/data/node/test_unique_not_sorted_without_axis/model.onnx,sha256=mI9ze_3FIMLoYnysIxv7sCLQs4gENmzvxUiQlnpEkNs,237 +onnx/backend/test/data/node/test_unique_not_sorted_without_axis/test_data_set_0/input_0.pb,sha256=Ns8OxpGU_k8PsN9mEqOHyqV6UppeCIXanwl42u0Ab1g,33 +onnx/backend/test/data/node/test_unique_not_sorted_without_axis/test_data_set_0/output_0.pb,sha256=LAq6nE8XXq5Mlam5kdTIVIADwEBfKtEo2fHe8-L5GHM,25 +onnx/backend/test/data/node/test_unique_not_sorted_without_axis/test_data_set_0/output_1.pb,sha256=Sxq63uhbT7l8GKBrtiC01yTSsQZgfHSvaaJGhCjbdws,47 +onnx/backend/test/data/node/test_unique_not_sorted_without_axis/test_data_set_0/output_2.pb,sha256=mgccksCXBKUz17RP65cS7NBD7n-XMa24IFiz6uIG5L8,71 +onnx/backend/test/data/node/test_unique_not_sorted_without_axis/test_data_set_0/output_3.pb,sha256=hPqVSpfdfcRPh5WvGmSgoojnYL_Baos0WzTXzYvnalA,46 +onnx/backend/test/data/node/test_unique_sorted_with_axis/model.onnx,sha256=eyviJg4BbaPZDQ-eC1eT_BO6nLcCG-9DJ1cqVNGCfaw,251 +onnx/backend/test/data/node/test_unique_sorted_with_axis/test_data_set_0/input_0.pb,sha256=-84LKk25M0JkwiVnp3fuOqet5lFGsWn9dkwOZerjeCI,47 +onnx/backend/test/data/node/test_unique_sorted_with_axis/test_data_set_0/output_0.pb,sha256=vMMWxLgUqNcKAkJLK7e4z-tIriWOQds5LQAJtgfkSmw,35 +onnx/backend/test/data/node/test_unique_sorted_with_axis/test_data_set_0/output_1.pb,sha256=xtKkdir_J2mcMJQPbbbmwXHUX-ZEs5nRyjgfBYz7-cA,31 
+onnx/backend/test/data/node/test_unique_sorted_with_axis/test_data_set_0/output_2.pb,sha256=MtOqG-EXKfAEBzZbrrGh8qjnN1QcvDtmZIQxyYqi4gU,47 +onnx/backend/test/data/node/test_unique_sorted_with_axis/test_data_set_0/output_3.pb,sha256=v2ScObavRe4FpFQK5d3imkd0Sw8otdQv1znGst4Y5Oc,30 +onnx/backend/test/data/node/test_unique_sorted_with_axis_3d/model.onnx,sha256=FlWgY6d_vk8x-490N2n9Qv_NCdybuq-6t0jinEr0eqQ,262 +onnx/backend/test/data/node/test_unique_sorted_with_axis_3d/test_data_set_0/input_0.pb,sha256=6a9P3TPDB6QH4-3m3Gools1cGlBo_OIbWJeglabhcu0,77 +onnx/backend/test/data/node/test_unique_sorted_with_axis_3d/test_data_set_0/output_0.pb,sha256=YzN-pRyi12eIXyALCN5eHojPdmVGuL-DMINTwLaK5WY,61 +onnx/backend/test/data/node/test_unique_sorted_with_axis_3d/test_data_set_0/output_1.pb,sha256=BVBCngYFGHlKon1y_ZXgRTi7fN4UnWIl3BIx3Uz_0zA,39 +onnx/backend/test/data/node/test_unique_sorted_with_axis_3d/test_data_set_0/output_2.pb,sha256=Q55BSlLZREX4SafY1xiRetTNeOjxaBk-rIXBKS_UOoM,55 +onnx/backend/test/data/node/test_unique_sorted_with_axis_3d/test_data_set_0/output_3.pb,sha256=w9rka_9R3rrKcnipyjjE1aYp1bHAUXRAHaF6k1VFKZ8,38 +onnx/backend/test/data/node/test_unique_sorted_with_negative_axis/model.onnx,sha256=BTdTVahQ20B9sj7qO7V0ORq4ctappfgvN7-dkPRuXVE,269 +onnx/backend/test/data/node/test_unique_sorted_with_negative_axis/test_data_set_0/input_0.pb,sha256=pUCGtsou5GEKcbl6nLdN9pU29-_oadU6jb0lSXogOXo,47 +onnx/backend/test/data/node/test_unique_sorted_with_negative_axis/test_data_set_0/output_0.pb,sha256=elaSiSMuIeTR5E3jLR2MmJYlSGW8Lf7kHm7XK9I21ms,35 +onnx/backend/test/data/node/test_unique_sorted_with_negative_axis/test_data_set_0/output_1.pb,sha256=nEcWVbGWyBXKcIc0WWAAKlGfOu-ezo-7gMwo0n5jz9k,31 +onnx/backend/test/data/node/test_unique_sorted_with_negative_axis/test_data_set_0/output_2.pb,sha256=dbkzp5DFsqzeGFufr0S2WHfnQj8ygai09efxFpfSBS8,47 +onnx/backend/test/data/node/test_unique_sorted_with_negative_axis/test_data_set_0/output_3.pb,sha256=v2ScObavRe4FpFQK5d3imkd0Sw8otdQv1znGst4Y5Oc,30 +onnx/backend/test/data/node/test_unique_sorted_without_axis/model.onnx,sha256=u0wxMZgmvT8CQ79k7HphAOOqdPqIpSd1wc5iHeMX888,218 +onnx/backend/test/data/node/test_unique_sorted_without_axis/test_data_set_0/input_0.pb,sha256=Ns8OxpGU_k8PsN9mEqOHyqV6UppeCIXanwl42u0Ab1g,33 +onnx/backend/test/data/node/test_unique_sorted_without_axis/test_data_set_0/output_0.pb,sha256=1ZcKwXNjKdAcaeAtk_IWYB0fwW7DigTs9dL-8OcYehM,25 +onnx/backend/test/data/node/test_unique_sorted_without_axis/test_data_set_0/output_1.pb,sha256=Vzsgo_9q0aWXU7KrUGj_Ulkd-zmn6zGq8qHe8h5toX0,47 +onnx/backend/test/data/node/test_unique_sorted_without_axis/test_data_set_0/output_2.pb,sha256=DJYa6RfMUcag73E9JldAu99PExISkx_iQAYEGVl7arw,71 +onnx/backend/test/data/node/test_unique_sorted_without_axis/test_data_set_0/output_3.pb,sha256=FUDlaCFeY2YPucjoGxW3edfoBJyoqAVz_r5Jt9q5TWA,46 +onnx/backend/test/data/node/test_unsqueeze_axis_0/model.onnx,sha256=1srq190UG5jXtuFcRVUuPhdBD7hrrhxq8XYZ-7T4bNw,146 +onnx/backend/test/data/node/test_unsqueeze_axis_0/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_unsqueeze_axis_0/test_data_set_0/input_1.pb,sha256=3ZIfNI5JADuUCnYUjr-VBN77qOrY7__k-MJ2cszFazI,20 +onnx/backend/test/data/node/test_unsqueeze_axis_0/test_data_set_0/output_0.pb,sha256=7pBNI_l4wxYb22FSOMO0t85L0wi5LsIih7xtBBY5EtM,256 +onnx/backend/test/data/node/test_unsqueeze_axis_1/model.onnx,sha256=tq_zKMkNcjbISve0QL_uKlorMC6AnwEpfIXT_8m2MYM,146 
+onnx/backend/test/data/node/test_unsqueeze_axis_1/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_unsqueeze_axis_1/test_data_set_0/input_1.pb,sha256=o4le8V6iPIlHLtoGuvyqO5LaEop3xA-fhQyODESQhjM,20 +onnx/backend/test/data/node/test_unsqueeze_axis_1/test_data_set_0/output_0.pb,sha256=erjuk57H84W6c04_54gQIjRGdm6p4nVaR50I7tFpFyc,256 +onnx/backend/test/data/node/test_unsqueeze_axis_2/model.onnx,sha256=rTIgpx4UomiiR1-t-qaZTetbs7Ib4x3ZrAzO999I5fY,146 +onnx/backend/test/data/node/test_unsqueeze_axis_2/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_unsqueeze_axis_2/test_data_set_0/input_1.pb,sha256=U_wQ65FCC8JB-y4RR_g61R_1rF1iPdWLzt_RQUQuh5I,20 +onnx/backend/test/data/node/test_unsqueeze_axis_2/test_data_set_0/output_0.pb,sha256=FE6UwTuZHYy7nJYxcNYOjwbR6cpMSo5Mc13I4BxMW-o,256 +onnx/backend/test/data/node/test_unsqueeze_negative_axes/model.onnx,sha256=48-TscnpGmbD0SPCTa1wk3T9vjVjCJitBtGxPzmcciw,162 +onnx/backend/test/data/node/test_unsqueeze_negative_axes/test_data_set_0/input_0.pb,sha256=aB6aWPKjpjxoogC4OWA9AtqtA_neDA-penHUJfcxTCQ,75 +onnx/backend/test/data/node/test_unsqueeze_negative_axes/test_data_set_0/input_1.pb,sha256=5PAkeDfyfr_LK59jYKRKHgLSB1uNbqL05Q6iDP3bYKQ,20 +onnx/backend/test/data/node/test_unsqueeze_negative_axes/test_data_set_0/output_0.pb,sha256=M_rXEhjcZ9MDQd1SSpUxWFzwhQ_3dx4LQ5X6I5isi8E,77 +onnx/backend/test/data/node/test_unsqueeze_three_axes/model.onnx,sha256=W2Ti5WqVk0sCFiSdfLTl6p0SxmdFR3fXXnVq2D58Gqw,159 +onnx/backend/test/data/node/test_unsqueeze_three_axes/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_unsqueeze_three_axes/test_data_set_0/input_1.pb,sha256=XTLt93U00nr062yDPpKmKXnDxZAp-uRTWSE_mpGSYoI,36 +onnx/backend/test/data/node/test_unsqueeze_three_axes/test_data_set_0/output_0.pb,sha256=YspzhMTScvKa4iChfYrxn74bnYhE6oZNMMffc6VkSHM,260 +onnx/backend/test/data/node/test_unsqueeze_two_axes/model.onnx,sha256=c7nVz-QS5UWoDfxfPYhT4rZpEXRlv5TWHn5CX0W-uTA,153 +onnx/backend/test/data/node/test_unsqueeze_two_axes/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_unsqueeze_two_axes/test_data_set_0/input_1.pb,sha256=XBER72Hv68bXC29lHTLkcZZvWEOlhd5ifYnNGSA-QlA,28 +onnx/backend/test/data/node/test_unsqueeze_two_axes/test_data_set_0/output_0.pb,sha256=GfzpZxLfr85XoTxoLpWubUTBYq7QM3L5ovPrc5_sQ1U,258 +onnx/backend/test/data/node/test_unsqueeze_unsorted_axes/model.onnx,sha256=Qbvn0rziDqPrcdPhINUrISR_ZOPF_riNiVESXqmrolM,162 +onnx/backend/test/data/node/test_unsqueeze_unsorted_axes/test_data_set_0/input_0.pb,sha256=AYD1ZlMl40MHdzbkkD-kNROucUzrEx3u5FuEI3DfKJo,254 +onnx/backend/test/data/node/test_unsqueeze_unsorted_axes/test_data_set_0/input_1.pb,sha256=YePFsHywIWhMNzc3PugUCl7h4RavlIdU5IJGj5Jf09c,36 +onnx/backend/test/data/node/test_unsqueeze_unsorted_axes/test_data_set_0/output_0.pb,sha256=YspzhMTScvKa4iChfYrxn74bnYhE6oZNMMffc6VkSHM,260 +onnx/backend/test/data/node/test_upsample_nearest/model.onnx,sha256=lC2-Cymd0v6CU906ju6rolvrrM6lek_-uoPdNsHQQXY,174 +onnx/backend/test/data/node/test_upsample_nearest/test_data_set_0/input_0.pb,sha256=mLXavIMpEU-t1w77qkVcBljM9nIegS0-j17fhrivmno,31 +onnx/backend/test/data/node/test_upsample_nearest/test_data_set_0/input_1.pb,sha256=3a1OyYYNdnWsgKm8R9EJ4Ja-reMSXI0q04mtA4aFGfo,30 
+onnx/backend/test/data/node/test_upsample_nearest/test_data_set_0/output_0.pb,sha256=sqObjzJsZ_nVStepjZKmx3dOTVd9cGqBu3Uw2w6tYFg,111 +onnx/backend/test/data/node/test_where_example/model.onnx,sha256=4YX9T8syogoVpTQ7KiWunBIGYGSGKAOklJh9uEdjEP8,166 +onnx/backend/test/data/node/test_where_example/test_data_set_0/input_0.pb,sha256=ghkps-yp5CRRR8fRbC7fiqWG19bL00TcORlce2Dr8og,23 +onnx/backend/test/data/node/test_where_example/test_data_set_0/input_1.pb,sha256=lPIc8OteAp_-7KlBv6zvkdDRdCAfOoxPPaN_av_HSJQ,27 +onnx/backend/test/data/node/test_where_example/test_data_set_0/input_2.pb,sha256=7893d1VgDXW5UWBTA27uzhL4FaPrjXYO5MNy9JB3a04,27 +onnx/backend/test/data/node/test_where_example/test_data_set_0/output_0.pb,sha256=3asllVK1SP7UhYXVLz5tibbbeultaVH_nFb-UOh8-pU,27 +onnx/backend/test/data/node/test_where_long_example/model.onnx,sha256=IiJZuUKLbgaVXnSuiOXQRbggyn5ymP42iJeHb75n96c,171 +onnx/backend/test/data/node/test_where_long_example/test_data_set_0/input_0.pb,sha256=ghkps-yp5CRRR8fRbC7fiqWG19bL00TcORlce2Dr8og,23 +onnx/backend/test/data/node/test_where_long_example/test_data_set_0/input_1.pb,sha256=ihk2_EF7Vli4ISkpvz__BQzOCl_2COzVep_4BiFH43g,43 +onnx/backend/test/data/node/test_where_long_example/test_data_set_0/input_2.pb,sha256=f_zUgnAQ6lnkBPWONXb6jcZgcQ5HwulMKehbqLKYwqg,43 +onnx/backend/test/data/node/test_where_long_example/test_data_set_0/output_0.pb,sha256=XgBYedUw_ZiqoFlcvsS7HsIoJNZPFm-DE9Bx6qZeF70,43 +onnx/backend/test/data/node/test_wrap_pad/model.onnx,sha256=Vk-qEZ51ONTRWai58xS-ZkUgSJOLRnCxCpI4mhpbIR0,154 +onnx/backend/test/data/node/test_wrap_pad/test_data_set_0/input_0.pb,sha256=umu-JvCA2frJXIw-HvgygGTng85wErTWgIoWSJRivzE,256 +onnx/backend/test/data/node/test_wrap_pad/test_data_set_0/input_1.pb,sha256=8s4uRhNf19t1kZ-BWDxuitcnY9MeHgkG6w6i1Sghnz0,76 +onnx/backend/test/data/node/test_wrap_pad/test_data_set_0/output_0.pb,sha256=g9TcriXsdlgExNte-G7DnSP10rAenwXVxWkiA-pXqM8,520 +onnx/backend/test/data/node/test_xor2d/model.onnx,sha256=3GJJUC1tVd_FIuAx1ZCV29I7vpp_jwEO_A_C8qFQl3c,119 +onnx/backend/test/data/node/test_xor2d/test_data_set_0/input_0.pb,sha256=LjNp_gf5HfToURj-fOV-TL_10b7cTqoOj8RfPTNPYLQ,23 +onnx/backend/test/data/node/test_xor2d/test_data_set_0/input_1.pb,sha256=goZmxegiEZPjRTXScUiQU9fpxPndF4EqAZy8oeToa8k,23 +onnx/backend/test/data/node/test_xor2d/test_data_set_0/output_0.pb,sha256=wYcj4-6dNRFaXwU1hntV05Gy5aHpr9Vv7uCuV61ad50,25 +onnx/backend/test/data/node/test_xor3d/model.onnx,sha256=cQQUDctDV6dJcdwK8vYh668uk1M4oHKIM9Oueba62ro,131 +onnx/backend/test/data/node/test_xor3d/test_data_set_0/input_0.pb,sha256=Dr4k5dppNqEaWmFsMUXw_Pbp7QoEpqkXv4Bodqt7zMQ,73 +onnx/backend/test/data/node/test_xor3d/test_data_set_0/input_1.pb,sha256=v5SpTzEGQijgxpt3GQdqFdpD-LCqlDgrPace75EL5pQ,73 +onnx/backend/test/data/node/test_xor3d/test_data_set_0/output_0.pb,sha256=Vh-V32zTffsrBgNhRvfhcFOZ1svl-bmYxXnxZHCuAQM,75 +onnx/backend/test/data/node/test_xor4d/model.onnx,sha256=zorAb8QHzWBFMEznyeOGSrgu-gMQ1B5BCQYS6sWXNkI,143 +onnx/backend/test/data/node/test_xor4d/test_data_set_0/input_0.pb,sha256=sMKxU5gNQx6mv89ZOkmV0QCdEu9f8EhE-MZuoWNbcek,376 +onnx/backend/test/data/node/test_xor4d/test_data_set_0/input_1.pb,sha256=9JaUvXVRf_NYTRwYWtPYsbSoKJDwOXPZuRBgFY7pYPc,376 +onnx/backend/test/data/node/test_xor4d/test_data_set_0/output_0.pb,sha256=7t2mIHhiY74BIpOpK5SfckyO__tT-EiLhluUCYb-6TA,378 +onnx/backend/test/data/node/test_xor_bcast3v1d/model.onnx,sha256=ZareaBAuTyUxBY2ReZrYhH1ejl0N5Qpgu45vJTsqlOQ,131 
+onnx/backend/test/data/node/test_xor_bcast3v1d/test_data_set_0/input_0.pb,sha256=wF52hvLEXCfryTN0FvgWPKXSx3PPlpiaC_aAZ5IXCMc,73 +onnx/backend/test/data/node/test_xor_bcast3v1d/test_data_set_0/input_1.pb,sha256=0cP0kHJ95VbmGsyJW3rlozPO38YQocSctc5buLas0ZI,14 +onnx/backend/test/data/node/test_xor_bcast3v1d/test_data_set_0/output_0.pb,sha256=8tkEvTgVHtTWyJZ3JJi-ap4ICdUqb0TyJoYleM7Xwjo,75 +onnx/backend/test/data/node/test_xor_bcast3v2d/model.onnx,sha256=PNKaP79liHG8NtmiYaeafrRs5qi2XUSu5fN37c0RqFw,135 +onnx/backend/test/data/node/test_xor_bcast3v2d/test_data_set_0/input_0.pb,sha256=2Hi5gZzCyk3SWtB8dhtS5tIMipISnPJGI1SIjx08Fe0,73 +onnx/backend/test/data/node/test_xor_bcast3v2d/test_data_set_0/input_1.pb,sha256=J1neLsOqZHTvGdagBF8_3O6sQk1byuUIMABR0wJxASA,31 +onnx/backend/test/data/node/test_xor_bcast3v2d/test_data_set_0/output_0.pb,sha256=jsMfiHOic7b2QO8cGqTo5WGnBTeBPx83WXxh3j_bLkg,75 +onnx/backend/test/data/node/test_xor_bcast4v2d/model.onnx,sha256=jG3UhG63Rb3lWqlONWth4lsfURxHt-y2M_y10XGgRTU,143 +onnx/backend/test/data/node/test_xor_bcast4v2d/test_data_set_0/input_0.pb,sha256=TG9PpNJBW-rhSdwYDpmwCMotWc8SOe2e2cLAoXaNQ8o,376 +onnx/backend/test/data/node/test_xor_bcast4v2d/test_data_set_0/input_1.pb,sha256=D4JIlfVUzijC8rk152pkPsDWa33YJRlmutjMvkT-1FI,41 +onnx/backend/test/data/node/test_xor_bcast4v2d/test_data_set_0/output_0.pb,sha256=Qgh2Pvp5xceZzOVQ9LbIq5hSoPcfBb2a3_UVolJSwH8,378 +onnx/backend/test/data/node/test_xor_bcast4v3d/model.onnx,sha256=ka5mFaag2sB3Wo-gr591NnmvwsVpkR6hQyR6KBmZWY0,147 +onnx/backend/test/data/node/test_xor_bcast4v3d/test_data_set_0/input_0.pb,sha256=1gaunFCw_HO0ZAf_sAp9bfcwVRJW_aTJIwkv-2WYWMw,376 +onnx/backend/test/data/node/test_xor_bcast4v3d/test_data_set_0/input_1.pb,sha256=26gLYNI9eQESqKzWzx1zrE8YDDCNIvfAE_0m_FoeT3E,133 +onnx/backend/test/data/node/test_xor_bcast4v3d/test_data_set_0/output_0.pb,sha256=PSXb0kYNrXzlksKUyQCNo2US4L4q5PW5JY-q8RDCTDc,378 +onnx/backend/test/data/node/test_xor_bcast4v4d/model.onnx,sha256=itvh8evQEuNJ5pxrge9J62QTcH0OOr_3FRCL-v4fOIw,151 +onnx/backend/test/data/node/test_xor_bcast4v4d/test_data_set_0/input_0.pb,sha256=f63fGhPch5eTkAvMXCrJTC0FOIdhKnaSfJ8Ox9hnol0,39 +onnx/backend/test/data/node/test_xor_bcast4v4d/test_data_set_0/input_1.pb,sha256=iCGDAU6H1-WHvqKKRCD44UWeluKsnFCWMpevbsJ5ASU,105 +onnx/backend/test/data/node/test_xor_bcast4v4d/test_data_set_0/output_0.pb,sha256=K5QEaUMGmY5feFywFKuoQfnj0csdxH87pELYOw4ib6Q,378 +onnx/backend/test/data/pytorch-converted/test_AvgPool1d/model.onnx,sha256=8mAVDhS8q2983UD42Tn2UtYcj6o_GOx31Bf6rOQnmic,234 +onnx/backend/test/data/pytorch-converted/test_AvgPool1d/test_data_set_0/input_0.pb,sha256=zV1Vt8e4rt7BBMwCWTvpZ4ewNN6urxrFnE0-oTAebYo,155 +onnx/backend/test/data/pytorch-converted/test_AvgPool1d/test_data_set_0/output_0.pb,sha256=qn9ze93KKekBUHX1zcPFPQszhnb0xCGUQ2cUi4v-PBc,82 +onnx/backend/test/data/pytorch-converted/test_AvgPool1d_stride/model.onnx,sha256=8mAVDhS8q2983UD42Tn2UtYcj6o_GOx31Bf6rOQnmic,234 +onnx/backend/test/data/pytorch-converted/test_AvgPool1d_stride/test_data_set_0/input_0.pb,sha256=93fhfQkyWxsVEbiwnmhx5RtN3NuGixryFHcYGrBqiAk,155 +onnx/backend/test/data/pytorch-converted/test_AvgPool1d_stride/test_data_set_0/output_0.pb,sha256=V98iRFVVabmSxa6RQGlOA7DLuTsCfMwULBopoq5OgEQ,82 +onnx/backend/test/data/pytorch-converted/test_AvgPool2d/model.onnx,sha256=TB8sJRl2IhMEO9FUoayJ2MM85Fw3RhU9W1e5BipjB5I,180 +onnx/backend/test/data/pytorch-converted/test_AvgPool2d/test_data_set_0/input_0.pb,sha256=bNLdgLPEgneV2dYpx44Z_cNcu-P-1TtLe3BxHBzvSSc,877 
+onnx/backend/test/data/pytorch-converted/test_AvgPool2d/test_data_set_0/output_0.pb,sha256=F40fDjWldQJxabM_XYkLzpEMez7jL6OzeJlgbouXRrc,229 +onnx/backend/test/data/pytorch-converted/test_AvgPool2d_stride/model.onnx,sha256=TB8sJRl2IhMEO9FUoayJ2MM85Fw3RhU9W1e5BipjB5I,180 +onnx/backend/test/data/pytorch-converted/test_AvgPool2d_stride/test_data_set_0/input_0.pb,sha256=TpDPisxlYGn58g52CgJ4HAsFm-BsfjIwEszTQcTV8y0,877 +onnx/backend/test/data/pytorch-converted/test_AvgPool2d_stride/test_data_set_0/output_0.pb,sha256=Y_cshxOA9B_1xVCMY4SBn0Cmt_NuFf3gL5tmh-vNvPY,229 +onnx/backend/test/data/pytorch-converted/test_AvgPool3d/model.onnx,sha256=pJRrVDIgyVnsXheUw0l8DEsCI3H-A9E1Fytt721SEvQ,196 +onnx/backend/test/data/pytorch-converted/test_AvgPool3d/test_data_set_0/input_0.pb,sha256=n-2GBmhLD0xffKPoJ5FnFBEW1kB0aMyZdET_GWifw6Q,1551 +onnx/backend/test/data/pytorch-converted/test_AvgPool3d/test_data_set_0/output_0.pb,sha256=FShc69O3dlB2g0mQ8-jI2kGpMyTyWdMA8TAFosrPyQo,207 +onnx/backend/test/data/pytorch-converted/test_AvgPool3d_stride/model.onnx,sha256=i4Ana78vTj4DG_7vWo9TQqp8rR1SsMJQzNPtEZtncdY,196 +onnx/backend/test/data/pytorch-converted/test_AvgPool3d_stride/test_data_set_0/input_0.pb,sha256=y_Tl_lr5uoFfr_mDOVp10o1Rqwto3dUX7uyqbZphtlY,3015 +onnx/backend/test/data/pytorch-converted/test_AvgPool3d_stride/test_data_set_0/output_0.pb,sha256=50IuhwLSEvlySwQziXRKplBocjNERvL_Ug_QVvAtZ7E,207 +onnx/backend/test/data/pytorch-converted/test_AvgPool3d_stride1_pad0_gpu_input/model.onnx,sha256=7bss8slZ11_xPh6ekLnbPqwSx-2W7nzBlfSNrWguH_s,196 +onnx/backend/test/data/pytorch-converted/test_AvgPool3d_stride1_pad0_gpu_input/test_data_set_0/input_0.pb,sha256=hJelxSiUlg_o8SL7GnKSbwzK1xTJmFwOGbYnymULc60,1551 +onnx/backend/test/data/pytorch-converted/test_AvgPool3d_stride1_pad0_gpu_input/test_data_set_0/output_0.pb,sha256=PRmuI0KY_2RPQttWtH8mIBA71Ab1VLR65BqK2vmqPXQ,207 +onnx/backend/test/data/pytorch-converted/test_BatchNorm1d_3d_input_eval/model.onnx,sha256=KavPmaT-7YmXr1yv85ueUtemurjesyB5noD0v3Mc7Rc,378 +onnx/backend/test/data/pytorch-converted/test_BatchNorm1d_3d_input_eval/test_data_set_0/input_0.pb,sha256=WUmYIvacvUxOv8Z44Vt5oWHP3Rq0lRjXV0uTRrVB4O8,251 +onnx/backend/test/data/pytorch-converted/test_BatchNorm1d_3d_input_eval/test_data_set_0/output_0.pb,sha256=F8hXf4ujaw9wkKXJOH22sy9d-CfBDLp13y16hP8_Qg4,251 +onnx/backend/test/data/pytorch-converted/test_BatchNorm2d_eval/model.onnx,sha256=8Yd0nlXaftKBaeljimCYJXMW99MU6ro6DDMkNFQzZ1I,354 +onnx/backend/test/data/pytorch-converted/test_BatchNorm2d_eval/test_data_set_0/input_0.pb,sha256=WZIsl8gdTSpkybVdM2eC-UDdnwUclJpv6Ciz1dJa2Vo,877 +onnx/backend/test/data/pytorch-converted/test_BatchNorm2d_eval/test_data_set_0/output_0.pb,sha256=prMmsJs86e9qAfIsik9a4EIZbCI_YgdcebO7mH571KI,877 +onnx/backend/test/data/pytorch-converted/test_BatchNorm2d_momentum_eval/model.onnx,sha256=pOJ6Q6RfjazEKRDHG3tfkl3iPgf-7XzDh1NdW6z1zIA,354 +onnx/backend/test/data/pytorch-converted/test_BatchNorm2d_momentum_eval/test_data_set_0/input_0.pb,sha256=Xl_bu5yQ7sA6heXBH0AC1ycwOKKz7x8yyDnXqGKsNKM,877 +onnx/backend/test/data/pytorch-converted/test_BatchNorm2d_momentum_eval/test_data_set_0/output_0.pb,sha256=0UTSeOyp4AgM1BSqfX8F1zt4Axxg5eWpxprQRlb-v0Q,877 +onnx/backend/test/data/pytorch-converted/test_BatchNorm3d_eval/model.onnx,sha256=oQCFAVSP_Lwz8YD_w3Qhg99sqtnJ-UPESFVhyfm_Z80,362 +onnx/backend/test/data/pytorch-converted/test_BatchNorm3d_eval/test_data_set_0/input_0.pb,sha256=xWcOlLS21xbvStXK5VyuQOqhPjlYHJVyXl7sb6DhOwo,1551 
+onnx/backend/test/data/pytorch-converted/test_BatchNorm3d_eval/test_data_set_0/output_0.pb,sha256=QLvTKHFqJsIIaKNyJPxi3QB5_rQtQ7VJ_cvQ1sdPOKo,1551 +onnx/backend/test/data/pytorch-converted/test_BatchNorm3d_momentum_eval/model.onnx,sha256=yjD59gbBFMjuznApZPsJOxDoyKk1ce8meoT4y1aPun4,362 +onnx/backend/test/data/pytorch-converted/test_BatchNorm3d_momentum_eval/test_data_set_0/input_0.pb,sha256=KIyabETJj2PkS0Ck7y5IPAhEXrPQ3QLsQlstwZvCCnY,1551 +onnx/backend/test/data/pytorch-converted/test_BatchNorm3d_momentum_eval/test_data_set_0/output_0.pb,sha256=et4SSsQj6ySOcT9jh8TnzwTGicmyngVZafqJnWQ0q-4,1551 +onnx/backend/test/data/pytorch-converted/test_ConstantPad2d/model.onnx,sha256=0SsEC_Z0DrELQVHraXG6BzRPfrdrI78eD93nd40gXXM,177 +onnx/backend/test/data/pytorch-converted/test_ConstantPad2d/test_data_set_0/input_0.pb,sha256=QoOUMz7CIm79Vi7-FqFrNH_Fpm2pwjiJTJ5KHGsB-uw,397 +onnx/backend/test/data/pytorch-converted/test_ConstantPad2d/test_data_set_0/output_0.pb,sha256=crHpXo4F_TPPkniDuVYpgF3TfubVLZFeSEKvqhTkvpo,1861 +onnx/backend/test/data/pytorch-converted/test_Conv1d/model.onnx,sha256=Z4QCn29y1djJ3JZnuFj4OIgsAqqSZj47jJ-FpQqc8Q4,525 +onnx/backend/test/data/pytorch-converted/test_Conv1d/test_data_set_0/input_0.pb,sha256=M1p3HfX3tyCNxMOUXytdx5cq4AUFr38MGd8C5i6PTHE,331 +onnx/backend/test/data/pytorch-converted/test_Conv1d/test_data_set_0/output_0.pb,sha256=XG8Vzj2qFCJBwS1zk5kl16mtDZXs3GjkTqX7kYHINqM,331 +onnx/backend/test/data/pytorch-converted/test_Conv1d_dilated/model.onnx,sha256=CWlj2QY04xL0V39Rf_Q_Eyy90kVUXLK6lWICIpET2KE,525 +onnx/backend/test/data/pytorch-converted/test_Conv1d_dilated/test_data_set_0/input_0.pb,sha256=3E2ZYHLqPEaVB7HRQ5NvJlXwC2luExxp_d1W2XYUhxI,331 +onnx/backend/test/data/pytorch-converted/test_Conv1d_dilated/test_data_set_0/output_0.pb,sha256=Rs6ytMEQmbUEfOLDX76YAQ6hDS-JxU3qR4v_lisgyd4,251 +onnx/backend/test/data/pytorch-converted/test_Conv1d_groups/model.onnx,sha256=RC8liAaCSm35TMxBRnNrJKIMam82QkW1Jkc0O_qRi5w,433 +onnx/backend/test/data/pytorch-converted/test_Conv1d_groups/test_data_set_0/input_0.pb,sha256=i2XCJCDESsEGqcZJiBpBtu-jRPVzh7K1h9PecjAtXK0,203 +onnx/backend/test/data/pytorch-converted/test_Conv1d_groups/test_data_set_0/output_0.pb,sha256=Do0wjZqxfKbFCsZirn1INKtXTYSN0669VnKHyV-7wvw,203 +onnx/backend/test/data/pytorch-converted/test_Conv1d_pad1/model.onnx,sha256=TARXa4saDVX_6iYTSr_N3VAWGjAh65alUwmJi27dOBg,525 +onnx/backend/test/data/pytorch-converted/test_Conv1d_pad1/test_data_set_0/input_0.pb,sha256=vCs38klXgXvMU4xaQIA24rh0jrrlgyZ-JknlD6uk_0w,331 +onnx/backend/test/data/pytorch-converted/test_Conv1d_pad1/test_data_set_0/output_0.pb,sha256=5l7hug7Ooa6l_1pY7-BPJAQPe01YEanl37UKEalctvU,411 +onnx/backend/test/data/pytorch-converted/test_Conv1d_pad1size1/model.onnx,sha256=RPJomwlFC-2ZMW2wwl6o6YfYAtuprYuVgvnNE-NMsmo,473 +onnx/backend/test/data/pytorch-converted/test_Conv1d_pad1size1/test_data_set_0/input_0.pb,sha256=BV9L90-Mjw_hKFJo1KDsckTs9KNfDEYYNzOtEDEAbEY,26 +onnx/backend/test/data/pytorch-converted/test_Conv1d_pad1size1/test_data_set_0/output_0.pb,sha256=QrhPUkU1IiaaQG9co6aeB7PPn6I8zgqjRG4Dyt1xc0w,26 +onnx/backend/test/data/pytorch-converted/test_Conv1d_pad2/model.onnx,sha256=6BtfkkYnCOH_OddQBr2fi6Nbbwjo9YWCPKonFxCBW7A,685 +onnx/backend/test/data/pytorch-converted/test_Conv1d_pad2/test_data_set_0/input_0.pb,sha256=KwIVOkQUfZQsyVi4WOkmxQQ6jrtqcbVC9Hnu63ZoblQ,331 +onnx/backend/test/data/pytorch-converted/test_Conv1d_pad2/test_data_set_0/output_0.pb,sha256=P8I3CDmlvUVih56z4CBHvo_hwq_FP4AtTab4nYieP4Y,411 
+onnx/backend/test/data/pytorch-converted/test_Conv1d_pad2size1/model.onnx,sha256=ZWRfSCErOjTP1vHZYQlCHMv3poCqHNdXlIZQ2QhVakY,601 +onnx/backend/test/data/pytorch-converted/test_Conv1d_pad2size1/test_data_set_0/input_0.pb,sha256=uS_fv_8ZJYypoe5d-EBLOLhaPQ4ATE_KPHOIQ3Gqeto,26 +onnx/backend/test/data/pytorch-converted/test_Conv1d_pad2size1/test_data_set_0/output_0.pb,sha256=oK8jgBMFbuJE87uSWda7mMKDDjFBmE0Vts17x29g3Dc,26 +onnx/backend/test/data/pytorch-converted/test_Conv1d_stride/model.onnx,sha256=ptjiJVis2eF_cYg7wJuQy01kSdZ3LZjuoILl2hSdnR4,525 +onnx/backend/test/data/pytorch-converted/test_Conv1d_stride/test_data_set_0/input_0.pb,sha256=9CHLFEq_ik_V5LaqT7mcz6SNeHOIhuIuv_jvyTNSRrk,331 +onnx/backend/test/data/pytorch-converted/test_Conv1d_stride/test_data_set_0/output_0.pb,sha256=0pNh7Y0jeuA_RGC13raCipZ1qiqZu-Yz7cL1jAhOlEg,171 +onnx/backend/test/data/pytorch-converted/test_Conv2d/model.onnx,sha256=y432KyJAGqZE5G4TtVt6xfPDgU4AL_k5pLvhEnIPwGY,593 +onnx/backend/test/data/pytorch-converted/test_Conv2d/test_data_set_0/input_0.pb,sha256=1OBmuNrH0n2t8NGHYARcjbatJIjaK5tNQ3r5RJ9td5U,853 +onnx/backend/test/data/pytorch-converted/test_Conv2d/test_data_set_0/output_0.pb,sha256=Jlk7Om0xPxTfTD3ERkm1VAo0x_lWP1M01vtDQ-MOGHA,653 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise/model.onnx,sha256=ifRT0arIIJm963Lu92VvSZGQoS2OQnQh35GwDZVF_BU,449 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise/test_data_set_0/input_0.pb,sha256=oIPHf9UX_VMt_rNrr4y-WwbCf3Pa1ri9dzYzyPtDCAk,1165 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise/test_data_set_0/output_0.pb,sha256=_RPmQnqkoj4aVc9bQIhHiaSTggTE3t3AtUEfL1DhddM,525 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise_padded/model.onnx,sha256=fpGjNOhOxeUycys8ikMj065ilM73_LW2yQbz2h9ZqoQ,449 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise_padded/test_data_set_0/input_0.pb,sha256=8RivKZOj2PWj__57mFbs9rnGXRyydS9uMXCK7_D1-hs,1165 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise_padded/test_data_set_0/output_0.pb,sha256=DHO7upgdKnVIz_7I0aN-oH1prFbEDoWKHhUUuD5unNA,1165 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise_strided/model.onnx,sha256=9BZDcN_FQ4h5-GuyKnsrFiqFluDBhVzxy7ZjQYAYv9Y,449 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise_strided/test_data_set_0/input_0.pb,sha256=z4xSxpRG1WwCJdeH1dvUR3B-8pzhtCg7ItADQTwkoNs,1165 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise_strided/test_data_set_0/output_0.pb,sha256=hKMKPzFqMzJQW_UZrWEU4utjyHzPavSUrjM_QLccCB8,141 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise_with_multiplier/model.onnx,sha256=p0zmWsZaMksp0qUv5dTsNa4bt-LLPIWkaUOeB38Q_YE,609 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise_with_multiplier/test_data_set_0/input_0.pb,sha256=4QOtaGsL1JztJ46gZNxToNKt-hzvC_b-gavbU-JTrvc,1165 +onnx/backend/test/data/pytorch-converted/test_Conv2d_depthwise_with_multiplier/test_data_set_0/output_0.pb,sha256=d_Gg8Pwi-UEIlyZPIIZ1aIpFFv1whi6Ufjl6-j64yxw,1037 +onnx/backend/test/data/pytorch-converted/test_Conv2d_dilated/model.onnx,sha256=M4Tog0lHqJerdu9jcCuKNjCuEVOW0h9Eq4u36T7hjwQ,513 +onnx/backend/test/data/pytorch-converted/test_Conv2d_dilated/test_data_set_0/input_0.pb,sha256=R6GKeyEpmjB3-rRHJ7mnjGrTRfKCFJfguLjBEnZiGH8,1549 +onnx/backend/test/data/pytorch-converted/test_Conv2d_dilated/test_data_set_0/output_0.pb,sha256=w02ja9QxjYGDw606aWk7_oSr5lUCoStmytZlVme1evw,157 
+onnx/backend/test/data/pytorch-converted/test_Conv2d_groups/model.onnx,sha256=uh-iRrzSbF6r96Jq4NTGnKV726HBDnv-5phjp3iiEJI,601 +onnx/backend/test/data/pytorch-converted/test_Conv2d_groups/test_data_set_0/input_0.pb,sha256=ATzHdOKHF7WLjEWSJmSuW_6v2nEyhI2ixRB2r8lbfvs,973 +onnx/backend/test/data/pytorch-converted/test_Conv2d_groups/test_data_set_0/output_0.pb,sha256=jECSkjosOUIiACcNQrbmZ9wSua10w1XivubTJ9ExL5E,781 +onnx/backend/test/data/pytorch-converted/test_Conv2d_groups_thnn/model.onnx,sha256=4x_JVppcm1xB2Es0ZKdSu54IT3X1UfN6pF2LTdsRVZA,601 +onnx/backend/test/data/pytorch-converted/test_Conv2d_groups_thnn/test_data_set_0/input_0.pb,sha256=W2rRPvSGGsPzKpUVpFBw2eR1bfsdMu4vo66myacAb0M,973 +onnx/backend/test/data/pytorch-converted/test_Conv2d_groups_thnn/test_data_set_0/output_0.pb,sha256=99GiPC_yeuHqMA4l-fWHO3Nrfptqn1h6yiyH5Vhyj40,781 +onnx/backend/test/data/pytorch-converted/test_Conv2d_no_bias/model.onnx,sha256=ZsLuWnDa5_i6ms1o58ROf6NeBf82JHq8Jyg6ZK0jNxQ,546 +onnx/backend/test/data/pytorch-converted/test_Conv2d_no_bias/test_data_set_0/input_0.pb,sha256=5AjXIP60cvhAdvKlUv1Mf0dnZ7YOESiyGhYH4A3zzMc,733 +onnx/backend/test/data/pytorch-converted/test_Conv2d_no_bias/test_data_set_0/output_0.pb,sha256=XH-ZGTgDbQGOnSfcNq-orV6S2C8x11xzPDVUknUCLvg,525 +onnx/backend/test/data/pytorch-converted/test_Conv2d_padding/model.onnx,sha256=7R3bRZT7rxJC6ll_papH9LqxC6xLMXLfjjMbOSyu8NU,737 +onnx/backend/test/data/pytorch-converted/test_Conv2d_padding/test_data_set_0/input_0.pb,sha256=Cy-XsPZPkHKiafZJ9Y7iR7zj7sVVL3i4RAIjb_HN00o,877 +onnx/backend/test/data/pytorch-converted/test_Conv2d_padding/test_data_set_0/output_0.pb,sha256=YZB_7e9uqA8ER15q1QenHfrIUXt2fQnPgOTbLX8CF-U,301 +onnx/backend/test/data/pytorch-converted/test_Conv2d_strided/model.onnx,sha256=egZ--Av5rYKCJMGEGGns-pbOVvH5tqyISxx6BQqZeOI,737 +onnx/backend/test/data/pytorch-converted/test_Conv2d_strided/test_data_set_0/input_0.pb,sha256=XIlefsfSL9PVbGn4KZSR6YnajJ71vd8dW1NI3rlXRQ0,877 +onnx/backend/test/data/pytorch-converted/test_Conv2d_strided/test_data_set_0/output_0.pb,sha256=_OOVJ1t07lBsD3OmyMjhCFDd9iQeMw19glVEC4lOIaU,141 +onnx/backend/test/data/pytorch-converted/test_Conv3d/model.onnx,sha256=16JkMwetdzIJqDe9IN5rsD6C2fCNZUWat4q7kOW9vYA,1481 +onnx/backend/test/data/pytorch-converted/test_Conv3d/test_data_set_0/input_0.pb,sha256=4If0txoCf02Sxyb-TKsVSmnfyPeCyJUl0vlDcjhb_3Y,1455 +onnx/backend/test/data/pytorch-converted/test_Conv3d/test_data_set_0/output_0.pb,sha256=i7pEvDV-ZSeyS2rU96YaFK_KLKGPMtulFLQJx7ERNwM,271 +onnx/backend/test/data/pytorch-converted/test_Conv3d_dilated/model.onnx,sha256=gJoR548aEmQX7kEw-DegL18l6WG9PZiRG4myAGZJLLA,713 +onnx/backend/test/data/pytorch-converted/test_Conv3d_dilated/test_data_set_0/input_0.pb,sha256=sgU-wF7Lo5l0dwFX_FgXV2B6pO6PBk8C6Q8VuYl37Jo,3015 +onnx/backend/test/data/pytorch-converted/test_Conv3d_dilated/test_data_set_0/output_0.pb,sha256=ZVXm7ParEHvjD0FRcLyDw0sCStjwNg9LzyfB2YFLaZI,879 +onnx/backend/test/data/pytorch-converted/test_Conv3d_dilated_strided/model.onnx,sha256=MF7-hVNyKxpSOYCPOtMqj4PPLumZvEyNLpKKdH0VZ8U,713 +onnx/backend/test/data/pytorch-converted/test_Conv3d_dilated_strided/test_data_set_0/input_0.pb,sha256=EkFpNPI0uV-PEtZqr5nJ4DLxhk94Kip8KNpzHEL0Uq4,3015 +onnx/backend/test/data/pytorch-converted/test_Conv3d_dilated_strided/test_data_set_0/output_0.pb,sha256=UoxcAqI9HHOERFKkYWf27G9B28-MFakBAtIAoYhQiB8,271 +onnx/backend/test/data/pytorch-converted/test_Conv3d_groups/model.onnx,sha256=v9ICh0w4hqY5snMTyD92gTHhW1g7VQ6WVnYQoLSa02o,1633 
+onnx/backend/test/data/pytorch-converted/test_Conv3d_groups/test_data_set_0/input_0.pb,sha256=M_PRS9s6oz5E5R4hGjSJG7OtJdfGLWGW45Fn_UZWYY8,2575 +onnx/backend/test/data/pytorch-converted/test_Conv3d_groups/test_data_set_0/output_0.pb,sha256=npPaSWCc4w_5xBE4uyXB2YRxGWa_hjmxTdOCrk3kb-E,591 +onnx/backend/test/data/pytorch-converted/test_Conv3d_no_bias/model.onnx,sha256=M-IZkgZ_-Br1YoiwQvut2UFFXftUFF5UuqcNniqn_Tk,1434 +onnx/backend/test/data/pytorch-converted/test_Conv3d_no_bias/test_data_set_0/input_0.pb,sha256=_MHSqR1rF_XUblE_4HbjInNfEM417inei05uy1cPIeA,1455 +onnx/backend/test/data/pytorch-converted/test_Conv3d_no_bias/test_data_set_0/output_0.pb,sha256=ELnUZu6Tub3Nt-SbqAnuZ_bF167tpWqqxD8TE2rsa3k,271 +onnx/backend/test/data/pytorch-converted/test_Conv3d_stride/model.onnx,sha256=EZb1n6bYJciHlcZPAzV08Wi7q1HcxvJXCy5aPOfynQ4,713 +onnx/backend/test/data/pytorch-converted/test_Conv3d_stride/test_data_set_0/input_0.pb,sha256=HSk4UoX2KpcGlXg1SnnnWI1iUKh4PkPXxdGLlx6fXQc,3015 +onnx/backend/test/data/pytorch-converted/test_Conv3d_stride/test_data_set_0/output_0.pb,sha256=QBprsW8RzmVrAoLVM0VQ9D1DOBZqh9YmluoNuDAlPKU,271 +onnx/backend/test/data/pytorch-converted/test_Conv3d_stride_padding/model.onnx,sha256=LDPW0JXDJeZnhYI12RKv_IwIdrS1U3GHr8uYEY_Ivi8,713 +onnx/backend/test/data/pytorch-converted/test_Conv3d_stride_padding/test_data_set_0/input_0.pb,sha256=SswSGowYkI_cfPsk1g9hEVsurerN_J2gFJJ_aPiGKwg,3015 +onnx/backend/test/data/pytorch-converted/test_Conv3d_stride_padding/test_data_set_0/output_0.pb,sha256=bmi0M7qU67vZyg50hdRO46QU35LTp2SmykyEnr3jK1Y,879 +onnx/backend/test/data/pytorch-converted/test_ConvTranspose2d/model.onnx,sha256=uiawKtrZn1qlhaKVjfD97MeD5wpWP4-BgfsgWqAHAqE,772 +onnx/backend/test/data/pytorch-converted/test_ConvTranspose2d/test_data_set_0/input_0.pb,sha256=04V2wjKmR5vpKXskDgyX-9EPbsPah_j6pnPgdn5m5ww,517 +onnx/backend/test/data/pytorch-converted/test_ConvTranspose2d/test_data_set_0/output_0.pb,sha256=2sxWuV8VYus2X51ShW3kRGXXB_YgSaP4pFbPkVxWvto,3853 +onnx/backend/test/data/pytorch-converted/test_ConvTranspose2d_no_bias/model.onnx,sha256=4pzDYfTqDObpNKLQDnNoO4Sf4VCzALkhnj6g_KMbZXQ,725 +onnx/backend/test/data/pytorch-converted/test_ConvTranspose2d_no_bias/test_data_set_0/input_0.pb,sha256=cDragHQ7hS-xtN1fMIGWyMXJ2NerZwn_JSD8CNZSGAI,517 +onnx/backend/test/data/pytorch-converted/test_ConvTranspose2d_no_bias/test_data_set_0/output_0.pb,sha256=IA0L2r215O76xM761W_inWJBIV2dK4SvKb6guj76kt0,3853 +onnx/backend/test/data/pytorch-converted/test_ELU/model.onnx,sha256=2SeXax0MIzkha8Ha39cc2bQLXMTLIUwAgomrGh9xbaI,120 +onnx/backend/test/data/pytorch-converted/test_ELU/test_data_set_0/input_0.pb,sha256=luvTZVOagQKAbBjB9-H7TBlYEFI2Uqu3FTQRMTybmpk,130 +onnx/backend/test/data/pytorch-converted/test_ELU/test_data_set_0/output_0.pb,sha256=3p3X7-mRaLL9ssscsdr9mHcY3-Hn-b0bbAJ64qKhWyo,130 +onnx/backend/test/data/pytorch-converted/test_Embedding/model.onnx,sha256=_0o-LP_DjPwbBW0Dxap5Bprj7jdlHjX4UfbmKn4dZ9Q,188 +onnx/backend/test/data/pytorch-converted/test_Embedding/test_data_set_0/input_0.pb,sha256=_0rc6-MctiMvWy8M6NSWVsm4bVxLADLFjOhsTv8xIMw,40 +onnx/backend/test/data/pytorch-converted/test_Embedding/test_data_set_0/output_0.pb,sha256=j1g-AeOS1kcJXRL72P8UHZsuaFt2UiCjUXUoNDKnZRI,58 +onnx/backend/test/data/pytorch-converted/test_Embedding_sparse/model.onnx,sha256=F5vU9vTQYqruQnldJ38QSMMLOuLQJEDfmwy2XlDO3hY,188 +onnx/backend/test/data/pytorch-converted/test_Embedding_sparse/test_data_set_0/input_0.pb,sha256=_0rc6-MctiMvWy8M6NSWVsm4bVxLADLFjOhsTv8xIMw,40 
+onnx/backend/test/data/pytorch-converted/test_Embedding_sparse/test_data_set_0/output_0.pb,sha256=poXwHivKwggktTHae36VM5QmG8dryXX2SOtNAVUssuM,58 +onnx/backend/test/data/pytorch-converted/test_GLU/model.onnx,sha256=HQmhBaiXteu956BHP9oxeAwiqMlBqDwSX6tEsrBHau8,156 +onnx/backend/test/data/pytorch-converted/test_GLU/test_data_set_0/input_0.pb,sha256=S3wGQIwYbwGN0JopQ-SYCMqnIkBQ7EbqwE5OQRUBJ50,128 +onnx/backend/test/data/pytorch-converted/test_GLU/test_data_set_0/output_0.pb,sha256=KFzb3xYeFeA9xHmKlNhI6AXtfYCz99ON8DVzG1oihmE,68 +onnx/backend/test/data/pytorch-converted/test_GLU_dim/model.onnx,sha256=Xry3Ey1Ohmwx0Ou4JlncDobNbJVSokkJ2CyG5qj6DsI,155 +onnx/backend/test/data/pytorch-converted/test_GLU_dim/test_data_set_0/input_0.pb,sha256=qgaBlwPclOrBoRpDWaRQKo47MQzUrXcHEoJo9op9vnM,851 +onnx/backend/test/data/pytorch-converted/test_GLU_dim/test_data_set_0/output_0.pb,sha256=EEGmzp1nC91QOMpO3BXPusSKwYSBpa664LSOkOzCJH8,431 +onnx/backend/test/data/pytorch-converted/test_LeakyReLU/model.onnx,sha256=iQptij6gd52DigK_wVZk1PNZSkgFwgzOx0w6iePLeeE,126 +onnx/backend/test/data/pytorch-converted/test_LeakyReLU/test_data_set_0/input_0.pb,sha256=SK7wRI8g-o1mvSNjDS9utXtEG8ATWXM2dr1VGOW3B7E,130 +onnx/backend/test/data/pytorch-converted/test_LeakyReLU/test_data_set_0/output_0.pb,sha256=q-0YWwHtpdz_9DWan9IbokhV1p9ti73MMonkFJpJ710,130 +onnx/backend/test/data/pytorch-converted/test_LeakyReLU_with_negval/model.onnx,sha256=NM4JUm95vZvdMzAT2F5D2_q1XyfmtcCsocRF6IcpC-M,126 +onnx/backend/test/data/pytorch-converted/test_LeakyReLU_with_negval/test_data_set_0/input_0.pb,sha256=djYXx0SejTe9i5wHFcKEo2abs4zfpMbcRLtUxikCSJE,130 +onnx/backend/test/data/pytorch-converted/test_LeakyReLU_with_negval/test_data_set_0/output_0.pb,sha256=BCvDv2KDQ6j-LtkWz5ZcSs_6ZgNyVYBsTEpY5s_SxYQ,130 +onnx/backend/test/data/pytorch-converted/test_Linear/model.onnx,sha256=YKNUaSXYMmhseI0-oicy_q71vFJ-BGkcMuyfZ3yB9hI,585 +onnx/backend/test/data/pytorch-converted/test_Linear/test_data_set_0/input_0.pb,sha256=EVBDYsD6CZzsPoADTCeqHJmWarSTvnqSUkucBqN8YeY,169 +onnx/backend/test/data/pytorch-converted/test_Linear/test_data_set_0/output_0.pb,sha256=Y2MWMPgxI58ITCWNv9R1pkfR-I41JAKqIgdzxvTjoxQ,137 +onnx/backend/test/data/pytorch-converted/test_Linear_no_bias/model.onnx,sha256=Bxzkm2mGNbCQSHaX7ObRgizQJdCQL0o5-XEbub1DPo4,492 +onnx/backend/test/data/pytorch-converted/test_Linear_no_bias/test_data_set_0/input_0.pb,sha256=pdTU_UjclLXaiQv81fct2hf0azMiw-kEUrvR4Y7eeAY,169 +onnx/backend/test/data/pytorch-converted/test_Linear_no_bias/test_data_set_0/output_0.pb,sha256=GLoTdF0mNzWWJSk7Eh1dUDd0jJtGYRZv915Fu-4wrYs,137 +onnx/backend/test/data/pytorch-converted/test_LogSoftmax/model.onnx,sha256=7OG_h8rN3YDf9U3fCDZxSE1snbwrkZE4qDytlOnLm0c,115 +onnx/backend/test/data/pytorch-converted/test_LogSoftmax/test_data_set_0/input_0.pb,sha256=aIvEbzha6zVWoOMyDQk0X0-F7ycVmOTQQPDPzCK_fdM,809 +onnx/backend/test/data/pytorch-converted/test_LogSoftmax/test_data_set_0/output_0.pb,sha256=kcLfEFJdOk46YnJH2AKfD9a9UeoM35TZKOLZ3H1WTdU,809 +onnx/backend/test/data/pytorch-converted/test_MaxPool1d/model.onnx,sha256=IEtciU1nSV0VFZbrIf55J6lfpZ6_3VXOvgyPbdStnr4,160 +onnx/backend/test/data/pytorch-converted/test_MaxPool1d/test_data_set_0/input_0.pb,sha256=NGuPnKs6gIclbkt4IRL1z7WvbPm8A0Gu2fX-SaNVePE,331 +onnx/backend/test/data/pytorch-converted/test_MaxPool1d/test_data_set_0/output_0.pb,sha256=79siDNKe866D0fCh9xWgCat_Iltxvm7n-C-8h6HK2qY,90 +onnx/backend/test/data/pytorch-converted/test_MaxPool1d_stride/model.onnx,sha256=IEtciU1nSV0VFZbrIf55J6lfpZ6_3VXOvgyPbdStnr4,160 
+onnx/backend/test/data/pytorch-converted/test_MaxPool1d_stride/test_data_set_0/input_0.pb,sha256=U6fj0KCQ4sggGDgUtqsPr81b3thuxSDfIYAuIK3q0ac,331 +onnx/backend/test/data/pytorch-converted/test_MaxPool1d_stride/test_data_set_0/output_0.pb,sha256=9l6v8PiMMo2UIsA9mYoZcuS7ewxdsrqfYQPwp3XDkAo,90 +onnx/backend/test/data/pytorch-converted/test_MaxPool1d_stride_padding_dilation/model.onnx,sha256=flUyj5LMCtVUzJvaeJGW1xkB577pgEV4YIcczdGPNVY,165 +onnx/backend/test/data/pytorch-converted/test_MaxPool1d_stride_padding_dilation/test_data_set_0/input_0.pb,sha256=4qJvUOWUo3f91o8JddOTERM6MTcohmKrISyEnefIeiM,880014 +onnx/backend/test/data/pytorch-converted/test_MaxPool1d_stride_padding_dilation/test_data_set_0/output_0.pb,sha256=pOgx9JOSU_e-2nS5DZpaauJF9NpUBcDjN9OTYyVlgAI,87298 +onnx/backend/test/data/pytorch-converted/test_MaxPool2d/model.onnx,sha256=6qw0Jl0dJZcgUXdgknbsKXPEtbxIPBxBSmuWU7TWBmA,176 +onnx/backend/test/data/pytorch-converted/test_MaxPool2d/test_data_set_0/input_0.pb,sha256=Gz4idq9I1lPqAHpTOusIQ_hfTeZvcEw0pXwdnFibumg,601 +onnx/backend/test/data/pytorch-converted/test_MaxPool2d/test_data_set_0/output_0.pb,sha256=fkrqthNBgDvYIaVm_bv6g2do8vvs-0AMbsKCLxf7DJg,205 +onnx/backend/test/data/pytorch-converted/test_MaxPool2d_stride_padding_dilation/model.onnx,sha256=B9dO0myVpZkPOTNRE_KSWNGZ0a2zALqPoRIpIEjBWg8,180 +onnx/backend/test/data/pytorch-converted/test_MaxPool2d_stride_padding_dilation/test_data_set_0/input_0.pb,sha256=KERNzX0dOh2dZWTf0dVh3ftuar0N1aTYH9kXJSuIjfU,4000017 +onnx/backend/test/data/pytorch-converted/test_MaxPool2d_stride_padding_dilation/test_data_set_0/output_0.pb,sha256=qmgdxgxSdjm-gXp16_WG_bGQn6U29XRZmLzNN8sljis,4313 +onnx/backend/test/data/pytorch-converted/test_MaxPool3d/model.onnx,sha256=EkVIxU2Gvd-03OLvhzQwdP9gM48Ila14pNmsySHlTJ8,192 +onnx/backend/test/data/pytorch-converted/test_MaxPool3d/test_data_set_0/input_0.pb,sha256=dJ5NUUUFLIBlKgA8CIxs2U1CdTelFQM6GaOVcjdfXH8,3015 +onnx/backend/test/data/pytorch-converted/test_MaxPool3d/test_data_set_0/output_0.pb,sha256=MxSYbK5mw58Ewucz6ommkVScxLBbaSHxhnIoaYbIovE,207 +onnx/backend/test/data/pytorch-converted/test_MaxPool3d_stride/model.onnx,sha256=EkVIxU2Gvd-03OLvhzQwdP9gM48Ila14pNmsySHlTJ8,192 +onnx/backend/test/data/pytorch-converted/test_MaxPool3d_stride/test_data_set_0/input_0.pb,sha256=4xZ_MzlwtkraNLfBDMDjlNHR87cReCK4aN6jKCSvfic,3015 +onnx/backend/test/data/pytorch-converted/test_MaxPool3d_stride/test_data_set_0/output_0.pb,sha256=jkevhpPha2nlXOhpYzU6bPhiiNhixWbDdJz5Yh6WT44,207 +onnx/backend/test/data/pytorch-converted/test_MaxPool3d_stride_padding/model.onnx,sha256=Y-1oo-lij06opYfKEwu65De_E_wn1_M10syRyZD-OrU,192 +onnx/backend/test/data/pytorch-converted/test_MaxPool3d_stride_padding/test_data_set_0/input_0.pb,sha256=vFZi989muj43A22p864N0y2TeuLIirF1QEWpeo0g0cQ,3015 +onnx/backend/test/data/pytorch-converted/test_MaxPool3d_stride_padding/test_data_set_0/output_0.pb,sha256=QWM4IdynZ3TeBEWJlrWcmiLuRzF8-GUDsIjU8RRaeRM,663 +onnx/backend/test/data/pytorch-converted/test_PReLU_1d/model.onnx,sha256=4mjvE2iJgkg7zRTS0Y0Mwmi3rbxDzPNAXF6lw2kW0BA,140 +onnx/backend/test/data/pytorch-converted/test_PReLU_1d/test_data_set_0/input_0.pb,sha256=dwCj5dOo0Xb0LWoFEa02MXjNCBKek1Mb7p9loqn-qBY,106 +onnx/backend/test/data/pytorch-converted/test_PReLU_1d/test_data_set_0/output_0.pb,sha256=WVVfw7lN4bUO91ljJuOCrF0aAsAVeO7VxMWLJCPPf24,106 +onnx/backend/test/data/pytorch-converted/test_PReLU_1d_multiparam/model.onnx,sha256=IvY8tpSEk0hJlbRtt5J_IzrQsIDrbYvGdRmHj_kAgyg,148 
+onnx/backend/test/data/pytorch-converted/test_PReLU_1d_multiparam/test_data_set_0/input_0.pb,sha256=qUglE2t9TlGG2PODCNryl96IZCMJRnn2s2kFKPtidug,106 +onnx/backend/test/data/pytorch-converted/test_PReLU_1d_multiparam/test_data_set_0/output_0.pb,sha256=kTwdDuY0JawlXDtQyKba8alf87POvLW-H0svciJZIRs,106 +onnx/backend/test/data/pytorch-converted/test_PReLU_2d/model.onnx,sha256=wt-aOA3QVpCLDlTprLv-5QTXKC9xqFoAF90G6m_D1Ls,148 +onnx/backend/test/data/pytorch-converted/test_PReLU_2d/test_data_set_0/input_0.pb,sha256=D1kkTnMRur2IBSkmCXshOMXBM31Trp3gcsBnHLoxwPY,493 +onnx/backend/test/data/pytorch-converted/test_PReLU_2d/test_data_set_0/output_0.pb,sha256=G_Dp6PuiE225MDYlJKnajeiYlMCwWTwm_3EvFjOzfto,493 +onnx/backend/test/data/pytorch-converted/test_PReLU_2d_multiparam/model.onnx,sha256=kukgvtj4KMURYyG6ZxeZb7uPkBlyolfPCSDI2O99Jk8,157 +onnx/backend/test/data/pytorch-converted/test_PReLU_2d_multiparam/test_data_set_0/input_0.pb,sha256=NVK5z3BB-7fMzSpEAVe9KJ8_sjW6_oteQ-lFs07YMmA,493 +onnx/backend/test/data/pytorch-converted/test_PReLU_2d_multiparam/test_data_set_0/output_0.pb,sha256=5SPkRU_nkyPSqWawHP_s5I5nFnGXcea7i6rihB_vWHY,493 +onnx/backend/test/data/pytorch-converted/test_PReLU_3d/model.onnx,sha256=VvZt8tBvrh1vuaXYca3DxeVODOxpsz2IaSRWtPgh_jQ,157 +onnx/backend/test/data/pytorch-converted/test_PReLU_3d/test_data_set_0/input_0.pb,sha256=kjNIdTEFfb16oMR_21gnH0-x1yufQ20r0JJwtEt-4no,2895 +onnx/backend/test/data/pytorch-converted/test_PReLU_3d/test_data_set_0/output_0.pb,sha256=k6V8hNaUEXKnZCUTKlyCcADbPdyZwabu97D4qzyU63E,2895 +onnx/backend/test/data/pytorch-converted/test_PReLU_3d_multiparam/model.onnx,sha256=6I44FjDRNdOgmuwdeewtzNZQbbTYuPs50CxyS21RDIU,165 +onnx/backend/test/data/pytorch-converted/test_PReLU_3d_multiparam/test_data_set_0/input_0.pb,sha256=7CLbhHgR9jhf6-fSx9oVVDPkyB3qVJyRqQVVsjM8TnA,2895 +onnx/backend/test/data/pytorch-converted/test_PReLU_3d_multiparam/test_data_set_0/output_0.pb,sha256=b6pQPzS39brhgz_HkZsQaIU1rvRZP-ONgl36n2vGiKA,2895 +onnx/backend/test/data/pytorch-converted/test_PixelShuffle/model.onnx,sha256=4JEqlXzNhKwqniaLCOWSFSueWfuOn7VbaFKr42KIHKA,331 +onnx/backend/test/data/pytorch-converted/test_PixelShuffle/test_data_set_0/input_0.pb,sha256=A576kvtGuxWcgI872W5-c89GMBV1iQTtr6ZzsihfJf4,589 +onnx/backend/test/data/pytorch-converted/test_PixelShuffle/test_data_set_0/output_0.pb,sha256=XpsHUnOBLIoYD7eZK2V2Qwq6haDNVQI1_pg4jJZ5Zm8,589 +onnx/backend/test/data/pytorch-converted/test_PoissonNLLLLoss_no_reduce/model.onnx,sha256=tCWj-a4XelmO7y2SKKhmBKytRa9gzWH04L64BxoYBGM,569 +onnx/backend/test/data/pytorch-converted/test_PoissonNLLLLoss_no_reduce/test_data_set_0/input_0.pb,sha256=UKNDvDbMADHM63IF8twG9QID6pTJ3Cdsd3AazzCesvk,409 +onnx/backend/test/data/pytorch-converted/test_PoissonNLLLLoss_no_reduce/test_data_set_0/output_0.pb,sha256=W2gyxT0Yg9UxhnmysFE9lzrGT_6F4ZywIk9N5sxkiAs,409 +onnx/backend/test/data/pytorch-converted/test_ReLU/model.onnx,sha256=54bYFvc3RA1Ljsf68rFWpoazyI0bo_4V8nOzFnkRFKY,112 +onnx/backend/test/data/pytorch-converted/test_ReLU/test_data_set_0/input_0.pb,sha256=Ey2pRpSgVvZhAp_fXdJLIVAidkpih_7XTMas2aU_x3Y,493 +onnx/backend/test/data/pytorch-converted/test_ReLU/test_data_set_0/output_0.pb,sha256=p1LjECbj3CHtzc3dIr05bBrD_GeLzWOd0xNVstAg-pI,493 +onnx/backend/test/data/pytorch-converted/test_ReflectionPad2d/model.onnx,sha256=rRj1gGqwtSlWd14E5jpTt8i4gXR-gsHktOFH2VeclpY,159 +onnx/backend/test/data/pytorch-converted/test_ReflectionPad2d/test_data_set_0/input_0.pb,sha256=GdrCTnPr9lRVY_XPRDeMDg76bWDKxczmbLprbhFz81A,1549 
+onnx/backend/test/data/pytorch-converted/test_ReflectionPad2d/test_data_set_0/output_0.pb,sha256=MItKenrxsfen0QqbAl3PIx9MTh2X8UsVszVaYmbTGNY,3973 +onnx/backend/test/data/pytorch-converted/test_ReplicationPad2d/model.onnx,sha256=iWrndt5r6y9xH9tJWVR_srvWd3qHRI-YzfMLgHIe_x4,156 +onnx/backend/test/data/pytorch-converted/test_ReplicationPad2d/test_data_set_0/input_0.pb,sha256=rvprCAB87mJqy5c2K47dXKz6lXiAg6Vk0PCSNIIf8fg,397 +onnx/backend/test/data/pytorch-converted/test_ReplicationPad2d/test_data_set_0/output_0.pb,sha256=CpGyltobSp8j5PiTJ1ELDekd0BvP3YG9VLaB34ibKds,1861 +onnx/backend/test/data/pytorch-converted/test_SELU/model.onnx,sha256=kbYX2BrYvTBNgqZ_FNNx5meiOXyCu9-7tFlQvzv2UfQ,104 +onnx/backend/test/data/pytorch-converted/test_SELU/test_data_set_0/input_0.pb,sha256=57MtXdSWmk8rV37-gl-meavMAFrNk1I1U1jRtRnfQv4,130 +onnx/backend/test/data/pytorch-converted/test_SELU/test_data_set_0/output_0.pb,sha256=vUvpliH3lpuLdxxQcTLm47OOxuFnkSlElGzpcbbcTlk,130 +onnx/backend/test/data/pytorch-converted/test_Sigmoid/model.onnx,sha256=r-jXvTu2OB1uoHSEc3weUBfipW-j-bomHEToiLRndZg,115 +onnx/backend/test/data/pytorch-converted/test_Sigmoid/test_data_set_0/input_0.pb,sha256=xGqhiolQj1AXF_J9DI6RshclHdkTtvUfjA9UNeoH34g,493 +onnx/backend/test/data/pytorch-converted/test_Sigmoid/test_data_set_0/output_0.pb,sha256=_aud8_qgy8myKoty42ouDGvc8LqB9P0wziM1WT9Udv0,493 +onnx/backend/test/data/pytorch-converted/test_Softmax/model.onnx,sha256=M8YU-Plr8Vox-xvg2ytI9dvOtVRmfrFA1WFtqrz1RrQ,112 +onnx/backend/test/data/pytorch-converted/test_Softmax/test_data_set_0/input_0.pb,sha256=Mc2T2AKRlGbMn9JhKQCqvdNhMOd4yv6eoizRI4rQ3Es,809 +onnx/backend/test/data/pytorch-converted/test_Softmax/test_data_set_0/output_0.pb,sha256=cEBcNyA-UO7PvJP05BNFPg34O0b9xuI2oIH0HujbJCA,809 +onnx/backend/test/data/pytorch-converted/test_Softmin/model.onnx,sha256=LkzcLVf0mVmKlV1Q7LaRz2roqZVUWOShqLA0F7CS3qw,125 +onnx/backend/test/data/pytorch-converted/test_Softmin/test_data_set_0/input_0.pb,sha256=6EvkXgsM1qeqy4xXvrNmjzFgQIfUg1LADvKujrkhutE,809 +onnx/backend/test/data/pytorch-converted/test_Softmin/test_data_set_0/output_0.pb,sha256=dM0ZzuuwBljCYCl5gwtz6dyl7xbwPq06OSJpbZz3rl0,809 +onnx/backend/test/data/pytorch-converted/test_Softplus/model.onnx,sha256=LW7eq1fDhqzTUcpreFM8MW1P-SXMU6R_-6TrI0bSeeY,100 +onnx/backend/test/data/pytorch-converted/test_Softplus/test_data_set_0/input_0.pb,sha256=RHCWvbZJ2puJcskrkyp-UO7VJXgJbbtfngv50m-XBrQ,809 +onnx/backend/test/data/pytorch-converted/test_Softplus/test_data_set_0/output_0.pb,sha256=T83BVCZNzgIMJeO_KjnLFCl3fvO6pI2swMn7AcHv2z4,809 +onnx/backend/test/data/pytorch-converted/test_Softsign/model.onnx,sha256=77Av428IZawS315gnrFwnQoxmzSxc8yyvjGvbpcg8I4,191 +onnx/backend/test/data/pytorch-converted/test_Softsign/test_data_set_0/input_0.pb,sha256=OgR7ov0pN5ABVMr6BCcNwgO5LIM2kD3aZC8cakpK-JU,130 +onnx/backend/test/data/pytorch-converted/test_Softsign/test_data_set_0/output_0.pb,sha256=dgsXbFOSn0KYtbVe_G4dJ--YwtKGY50m5L2hcy88lgI,130 +onnx/backend/test/data/pytorch-converted/test_Tanh/model.onnx,sha256=g7REbYpEDQKk-HFEUTr_edrTqEJzP7p6U41qAyRe9pg,112 +onnx/backend/test/data/pytorch-converted/test_Tanh/test_data_set_0/input_0.pb,sha256=h-yYS6LvZ4ydAM-99zICo6VeQT7O187lkHDazxIon78,493 +onnx/backend/test/data/pytorch-converted/test_Tanh/test_data_set_0/output_0.pb,sha256=GFqBj4MG7PAiujxMR6uzcTqQGD8YumwF75SzqvusSdo,493 +onnx/backend/test/data/pytorch-converted/test_ZeroPad2d/model.onnx,sha256=mijCH0L13fd-XfpTow-vPLjuxzYrpqpewCmica_m4RI,177 
+onnx/backend/test/data/pytorch-converted/test_ZeroPad2d/test_data_set_0/input_0.pb,sha256=a2xkpLGBgCHkK36Ch5z9mMQMwaeo46gYSt_95Q7ViUs,397 +onnx/backend/test/data/pytorch-converted/test_ZeroPad2d/test_data_set_0/output_0.pb,sha256=6aGq1RDiXJoI805HAFtG0ryTkbOZ6KvzjeoNUX8ZVuE,1861 +onnx/backend/test/data/pytorch-converted/test_log_softmax_dim3/model.onnx,sha256=iVa0we93ifXSp1JapVRj8Hm68mLpWXF2QQOmjcBZ9r0,131 +onnx/backend/test/data/pytorch-converted/test_log_softmax_dim3/test_data_set_0/input_0.pb,sha256=z9p6Tc4CGDfcMevMb3WI6R9nKVLSrmdsPnwCzB_YRz0,493 +onnx/backend/test/data/pytorch-converted/test_log_softmax_dim3/test_data_set_0/output_0.pb,sha256=5NXTb99k_ebDZZ8QdvNt2ZbVhIREH9c454g36hZXHcY,493 +onnx/backend/test/data/pytorch-converted/test_log_softmax_lastdim/model.onnx,sha256=6b1s4M7kUTlLThqFJhrps_HEEDGGMRwmDcvg3LcMk7w,126 +onnx/backend/test/data/pytorch-converted/test_log_softmax_lastdim/test_data_set_0/input_0.pb,sha256=vM6J5cm2YZRB4tCZwuPWtA4ItF8o4Hglhi78iySDgsY,1034 +onnx/backend/test/data/pytorch-converted/test_log_softmax_lastdim/test_data_set_0/output_0.pb,sha256=LCqEWC3UH6W3AVeGaohUPNU8BJ0Qho_C259h6WlUN9U,1034 +onnx/backend/test/data/pytorch-converted/test_softmax_functional_dim3/model.onnx,sha256=cW81QWpRLL4gWbYmKzo4crNfSP3o5kv7R59Xn1NTOtY,128 +onnx/backend/test/data/pytorch-converted/test_softmax_functional_dim3/test_data_set_0/input_0.pb,sha256=1T7S2mfA3VyTw_Uu_xR1KZOkk6fR8gH1lRhqe77kCKc,493 +onnx/backend/test/data/pytorch-converted/test_softmax_functional_dim3/test_data_set_0/output_0.pb,sha256=i3rXEzAEknBVyPI7uZ9b_3quJTfUfQRlhrHJPDzrI6U,493 +onnx/backend/test/data/pytorch-converted/test_softmax_lastdim/model.onnx,sha256=AWWX8ISlHkfjfBJr-8cOPCVIVplDrZQ81mBh1Yg_3EQ,114 +onnx/backend/test/data/pytorch-converted/test_softmax_lastdim/test_data_set_0/input_0.pb,sha256=II_c6Nd8HW6Q2S8wb99jN6__oTjkOq0LVrftA92K0Jc,1034 +onnx/backend/test/data/pytorch-converted/test_softmax_lastdim/test_data_set_0/output_0.pb,sha256=L8ZOHtpkcNP-l-p17BcmWTK6ZUDAGA6OtgpSwjFwxfY,1034 +onnx/backend/test/data/pytorch-operator/test_operator_add_broadcast/model.onnx,sha256=4yoo6WnkuoE4hyDHi5yq3l3xs73PPdGJBDn8tsCtbmg,146 +onnx/backend/test/data/pytorch-operator/test_operator_add_broadcast/test_data_set_0/input_0.pb,sha256=WeKTF2a0gLSgFSE7RPu7suC8BakLI5JwxRIgcYBRMpE,56 +onnx/backend/test/data/pytorch-operator/test_operator_add_broadcast/test_data_set_0/input_1.pb,sha256=GlJ6YO-W-nl6W8Rn7NiA3VQBynnDcw0CHU0K60hAPxQ,30 +onnx/backend/test/data/pytorch-operator/test_operator_add_broadcast/test_data_set_0/output_0.pb,sha256=vAWPseKpiLhg5xbLv4brD66PGujVCowAWPy5xHf7MNs,56 +onnx/backend/test/data/pytorch-operator/test_operator_add_size1_broadcast/model.onnx,sha256=Q_3UC9xYgU-Z1FCDqowXgtQYE5TFM3dv_nvw5nAW3GM,151 +onnx/backend/test/data/pytorch-operator/test_operator_add_size1_broadcast/test_data_set_0/input_0.pb,sha256=dBZjS6f0YZq-PAmutf2KVyc5Ic8lkbaReqAjdLKIv9g,56 +onnx/backend/test/data/pytorch-operator/test_operator_add_size1_broadcast/test_data_set_0/input_1.pb,sha256=Jm3kGLGMLfCLdjtlidztrRUtVcz4HaHKYpZojt7FbHo,24 +onnx/backend/test/data/pytorch-operator/test_operator_add_size1_broadcast/test_data_set_0/output_0.pb,sha256=buFw95HzCCj2kzEsyFxO6VbO01-le7uTqki3YineAh8,56 +onnx/backend/test/data/pytorch-operator/test_operator_add_size1_right_broadcast/model.onnx,sha256=4yoo6WnkuoE4hyDHi5yq3l3xs73PPdGJBDn8tsCtbmg,146 +onnx/backend/test/data/pytorch-operator/test_operator_add_size1_right_broadcast/test_data_set_0/input_0.pb,sha256=G1yJUQGsG2uWKJX9PURVlIHVI1GLbKwq8oLWm3v9lEA,56 
+onnx/backend/test/data/pytorch-operator/test_operator_add_size1_right_broadcast/test_data_set_0/input_1.pb,sha256=mH_dZUm9K3R0jgvjqdsWve2qIOmh5Gu8qVQveA0iQyI,30 +onnx/backend/test/data/pytorch-operator/test_operator_add_size1_right_broadcast/test_data_set_0/output_0.pb,sha256=KaDIjtF8dJTqVmxbm7bl-eNiGZM-3z4NqdhX8MFX6mg,56 +onnx/backend/test/data/pytorch-operator/test_operator_add_size1_singleton_broadcast/model.onnx,sha256=W0u6KMQhEmUHe2aMKtEd1LP8RhdYPE42ZX0h4U1zU3Q,151 +onnx/backend/test/data/pytorch-operator/test_operator_add_size1_singleton_broadcast/test_data_set_0/input_0.pb,sha256=Hgx9BDvjzAKv9uel5a1TqPgT_UgnDmf0WC7f8d3QDbI,56 +onnx/backend/test/data/pytorch-operator/test_operator_add_size1_singleton_broadcast/test_data_set_0/input_1.pb,sha256=DvWV31gp254KnBk4m95GwdcvLI8e7GI0PIHV7Fb4akQ,32 +onnx/backend/test/data/pytorch-operator/test_operator_add_size1_singleton_broadcast/test_data_set_0/output_0.pb,sha256=6EYYoWZQq5vpFlC5uj2bjNQs084U-fyAJdb5zLiphZc,56 +onnx/backend/test/data/pytorch-operator/test_operator_addconstant/model.onnx,sha256=PGsgLxLVxE6B-YnT5dTigfgO-0VUBuDkNxaLHQIpzIM,158 +onnx/backend/test/data/pytorch-operator/test_operator_addconstant/test_data_set_0/input_0.pb,sha256=aeAhCMgJONISiYFLETMGwolmnEWUviB-0r05x4Dj_QE,56 +onnx/backend/test/data/pytorch-operator/test_operator_addconstant/test_data_set_0/output_0.pb,sha256=3qSzQRI4ETvMsUTXU1C91aUqqIWvz211iPcREF1jNL4,56 +onnx/backend/test/data/pytorch-operator/test_operator_addmm/model.onnx,sha256=pkXscfFqeA69tuw-s-5H15Yo8M5_a7-vwfoUF1E2kjw,245 +onnx/backend/test/data/pytorch-operator/test_operator_addmm/test_data_set_0/input_0.pb,sha256=F8HIxuMvLzdYqNBU1WOhSu9BNb9f8sE2Nh4LScjkCv4,32 +onnx/backend/test/data/pytorch-operator/test_operator_addmm/test_data_set_0/input_1.pb,sha256=wReWT7hu1wvpzVbdy4OQ3RLHmIGleWudjXtNzQxN3z8,56 +onnx/backend/test/data/pytorch-operator/test_operator_addmm/test_data_set_0/input_2.pb,sha256=iSPe0APqyrCBab3JfQTvuXWKxYW1jq3wMM0ndaVHepQ,22 +onnx/backend/test/data/pytorch-operator/test_operator_addmm/test_data_set_0/output_0.pb,sha256=2Q0N9b_FX6_Eli007JAr0zDu3FWCKLmiTtqJqJJJ6AY,40 +onnx/backend/test/data/pytorch-operator/test_operator_basic/model.onnx,sha256=j8SoAGDrf68-0Ty2zB9CuCbVpRP0WA3g1eYPyqwwthk,168 +onnx/backend/test/data/pytorch-operator/test_operator_basic/test_data_set_0/input_0.pb,sha256=Zssp45J1LZPvS6jAzlFOyLE5-R6r4ib6u0lGwdgrrls,10 +onnx/backend/test/data/pytorch-operator/test_operator_basic/test_data_set_0/input_1.pb,sha256=miH5Y5Wk3S1nVM23Js570P-CGR48fQrIQ2nUNDTy1U4,10 +onnx/backend/test/data/pytorch-operator/test_operator_basic/test_data_set_0/output_0.pb,sha256=ZFQSF2mOXhSlpLV3SiXplTBx4phBjRd63bhIcvS0scg,10 +onnx/backend/test/data/pytorch-operator/test_operator_chunk/model.onnx,sha256=Pv5XrJq5yDInM7YsNbYSFxdbZAxWT8s4a1yK5uX6lWw,138 +onnx/backend/test/data/pytorch-operator/test_operator_chunk/test_data_set_0/input_0.pb,sha256=4UQkC9my1MZR7yS-gxUxLhHoDPcZRxOKVvPLOmF6I2Y,18 +onnx/backend/test/data/pytorch-operator/test_operator_chunk/test_data_set_0/output_0.pb,sha256=vcT6Vt5kjiPQn_uVM30-vRqsKZvb0EHtSHQA-Zfe8dU,14 +onnx/backend/test/data/pytorch-operator/test_operator_chunk/test_data_set_0/output_1.pb,sha256=RSc1IkqRdFLMBmDEQ7JzmC62gTBmianfJ-_9l05ev3E,10 +onnx/backend/test/data/pytorch-operator/test_operator_clip/model.onnx,sha256=d4_FGVudXbf56YhRlKKo7749XttVrIqHTtHnihmtwGs,126 +onnx/backend/test/data/pytorch-operator/test_operator_clip/test_data_set_0/input_0.pb,sha256=lE4aKW3dubbAlB4rcDUcltWXIISr4HeAN1LWnjcGlw8,56 
+onnx/backend/test/data/pytorch-operator/test_operator_clip/test_data_set_0/output_0.pb,sha256=4akqZ8FX-HKNqVP2ano4QBijtZtg1XKrXBX2HLkDYYI,56 +onnx/backend/test/data/pytorch-operator/test_operator_concat2/model.onnx,sha256=_QTccgjLy9EPX9UF-JVnTvkGZ2QxzO-WCQCVesgj5Gw,135 +onnx/backend/test/data/pytorch-operator/test_operator_concat2/test_data_set_0/input_0.pb,sha256=F8HIxuMvLzdYqNBU1WOhSu9BNb9f8sE2Nh4LScjkCv4,32 +onnx/backend/test/data/pytorch-operator/test_operator_concat2/test_data_set_0/input_1.pb,sha256=x2uEGj8ly8Au6vONB5db3B2ZHsfzle6lcjWvcBsrm60,32 +onnx/backend/test/data/pytorch-operator/test_operator_concat2/test_data_set_0/output_0.pb,sha256=amAMMN54fzkJ_ZeJCBvf51xwrKnoHs4i0fM0iR16ypY,56 +onnx/backend/test/data/pytorch-operator/test_operator_conv/model.onnx,sha256=hoZnLZ7StTm1yaZw2TzgB8VpMU9tdOM7SxARij8z1lY,7746 +onnx/backend/test/data/pytorch-operator/test_operator_conv/test_data_set_0/input_0.pb,sha256=UvIhWzUBbIXBelrU1HHHHr7b9JFAqIGvD3-A8CPr63A,2560015 +onnx/backend/test/data/pytorch-operator/test_operator_conv/test_data_set_0/output_0.pb,sha256=-1WPY6f7jpGTyYED8nmGHDidFx7eq14NTYKlkmgk1eo,1896974 +onnx/backend/test/data/pytorch-operator/test_operator_convtranspose/model.onnx,sha256=V_1XB3M_HnRzMi0ZljeB0xj0LIzvIgQcMLk9BOSs6G4,617 +onnx/backend/test/data/pytorch-operator/test_operator_convtranspose/test_data_set_0/input_0.pb,sha256=RFf6T5yV2ccdEeDEpKOX1Qka6Zw0JT5E3A45ct0zJRc,493 +onnx/backend/test/data/pytorch-operator/test_operator_convtranspose/test_data_set_0/output_0.pb,sha256=3er0H-Tbr76YIcTT70mwgbc48b9QoMxTJKTJgge5uuM,4333 +onnx/backend/test/data/pytorch-operator/test_operator_exp/model.onnx,sha256=YLXCr5sIVod9gw7LO0QDJbuW-6wZWOLgripbCUCncKU,95 +onnx/backend/test/data/pytorch-operator/test_operator_exp/test_data_set_0/input_0.pb,sha256=lE4aKW3dubbAlB4rcDUcltWXIISr4HeAN1LWnjcGlw8,56 +onnx/backend/test/data/pytorch-operator/test_operator_exp/test_data_set_0/output_0.pb,sha256=EN1seuuM_1UBJFjXoz6xlX1FBepf8N0B5m7BTkWM8VU,56 +onnx/backend/test/data/pytorch-operator/test_operator_flatten/model.onnx,sha256=666oqdVmUbi6BFTyna0eHkclropUVJtqv6E8-TqjmtA,120 +onnx/backend/test/data/pytorch-operator/test_operator_flatten/test_data_set_0/input_0.pb,sha256=O8PmJpISXR22yQA__IfBMa9X11Npm8Gf8PA2ayZiBF8,108 +onnx/backend/test/data/pytorch-operator/test_operator_flatten/test_data_set_0/output_0.pb,sha256=wUt1WI2-KKlMOggC9wS59vZF8F_ROACdI3s7gmotw_U,104 +onnx/backend/test/data/pytorch-operator/test_operator_index/model.onnx,sha256=J1ty6t8ZzwyUhIz8R4zmc60rSqUa0_Jkt-0TWLluKm8,165 +onnx/backend/test/data/pytorch-operator/test_operator_index/test_data_set_0/input_0.pb,sha256=hy5YLPPZ35Pu3PoQQVqGsxFpEvOOcTe127tUCiOf-yQ,12 +onnx/backend/test/data/pytorch-operator/test_operator_index/test_data_set_0/output_0.pb,sha256=26QXjAPnTsE3QpqJjcP-PkSFVR-8imYTPkQwy9qdEpM,10 +onnx/backend/test/data/pytorch-operator/test_operator_max/model.onnx,sha256=RJnaYng4GaBOlE3io3sHHOCxOQHwRI_FJkBvXbZxVoc,119 +onnx/backend/test/data/pytorch-operator/test_operator_max/test_data_set_0/input_0.pb,sha256=lE4aKW3dubbAlB4rcDUcltWXIISr4HeAN1LWnjcGlw8,56 +onnx/backend/test/data/pytorch-operator/test_operator_max/test_data_set_0/input_1.pb,sha256=ofgAi6Cf8rn6vfsF7b8ZilqUiLtsn-griER2UzhkGpg,56 +onnx/backend/test/data/pytorch-operator/test_operator_max/test_data_set_0/output_0.pb,sha256=piy6G5PPC3qlXr-0V6zHG0zvgArltR30tPVGS9lVraM,56 +onnx/backend/test/data/pytorch-operator/test_operator_maxpool/model.onnx,sha256=p7r_3V7RvgQx3OZFYYHkk20ocZF3KItd1P90Hxh-Fig,160 
+onnx/backend/test/data/pytorch-operator/test_operator_maxpool/test_data_set_0/input_0.pb,sha256=aa_kAzlLZ3rm3CnRm9P7BhZKM5HFUJAKZWcpJqjTKhY,64012 +onnx/backend/test/data/pytorch-operator/test_operator_maxpool/test_data_set_0/output_0.pb,sha256=OguwhkdioYE3ge7me1NGCI81QPh1pYjmx5LdqJ01da4,30732 +onnx/backend/test/data/pytorch-operator/test_operator_min/model.onnx,sha256=Prwpo5VZNFPFoDnkSmRDXzMGetPh_WBwE4tA6YyTEy8,119 +onnx/backend/test/data/pytorch-operator/test_operator_min/test_data_set_0/input_0.pb,sha256=lE4aKW3dubbAlB4rcDUcltWXIISr4HeAN1LWnjcGlw8,56 +onnx/backend/test/data/pytorch-operator/test_operator_min/test_data_set_0/input_1.pb,sha256=ofgAi6Cf8rn6vfsF7b8ZilqUiLtsn-griER2UzhkGpg,56 +onnx/backend/test/data/pytorch-operator/test_operator_min/test_data_set_0/output_0.pb,sha256=knO2XL2hje6BCvnjC7KMAGC4NNmTsKHdt-92jS9I5cw,56 +onnx/backend/test/data/pytorch-operator/test_operator_mm/model.onnx,sha256=vVO-SkTN9zcSOzdiDP7A7UxH6NUELPcnvesQ2DcG_bg,214 +onnx/backend/test/data/pytorch-operator/test_operator_mm/test_data_set_0/input_0.pb,sha256=F8HIxuMvLzdYqNBU1WOhSu9BNb9f8sE2Nh4LScjkCv4,32 +onnx/backend/test/data/pytorch-operator/test_operator_mm/test_data_set_0/input_1.pb,sha256=wReWT7hu1wvpzVbdy4OQ3RLHmIGleWudjXtNzQxN3z8,56 +onnx/backend/test/data/pytorch-operator/test_operator_mm/test_data_set_0/output_0.pb,sha256=6plZuSG8XBXIZx869mo4CIt_vk1_YamfsKNeqCCxh7s,40 +onnx/backend/test/data/pytorch-operator/test_operator_non_float_params/model.onnx,sha256=SFxmoMG1q9-D0Do1Hwdh6GuapE6CF4fUvFFFzL_aVGU,181 +onnx/backend/test/data/pytorch-operator/test_operator_non_float_params/test_data_set_0/input_0.pb,sha256=K70twgRARvUdaJmFeW6B-5p-XPeOnSI7UHikMCqGo5w,40 +onnx/backend/test/data/pytorch-operator/test_operator_non_float_params/test_data_set_0/output_0.pb,sha256=g_q2_Vasu0pHakl1XkqeGqZ3mxXiykg-tDWqx8_VQz0,40 +onnx/backend/test/data/pytorch-operator/test_operator_pad/model.onnx,sha256=gXaZExe5rB9enuGhiokjEQIIa490pEoFFgKxoOEpSqA,159 +onnx/backend/test/data/pytorch-operator/test_operator_pad/test_data_set_0/input_0.pb,sha256=sPny-rLD9VzG1Qr1U03fcwqyvUHMgTJv1DCMw2uIOEo,44 +onnx/backend/test/data/pytorch-operator/test_operator_pad/test_data_set_0/output_0.pb,sha256=5J6xbmPIT527VCLp7VgEL6KPIMEKKkrSvtwXOH2WWmo,120 +onnx/backend/test/data/pytorch-operator/test_operator_params/model.onnx,sha256=Rhj1CwdUkGkdEadRYZBdl53hs-EvIPiSJVA5mTy04Os,209 +onnx/backend/test/data/pytorch-operator/test_operator_params/test_data_set_0/input_0.pb,sha256=noOfxW6AHam2Av3f5rnGXx__oo2mqzch042fpYDSuVI,24 +onnx/backend/test/data/pytorch-operator/test_operator_params/test_data_set_0/output_0.pb,sha256=tWqHSLbKRaikVQUxqOSUIYscqNyoXdiiiMdpyYowlHw,24 +onnx/backend/test/data/pytorch-operator/test_operator_permute2/model.onnx,sha256=0KrJH05LAsL-_L77wY67rTYcKl7O4C3YC7oTvPEHrTU,157 +onnx/backend/test/data/pytorch-operator/test_operator_permute2/test_data_set_0/input_0.pb,sha256=L_NDecbs6B53f9DH3kVRb5Jg733mbzz1qLJi6ayQUSM,20 +onnx/backend/test/data/pytorch-operator/test_operator_permute2/test_data_set_0/output_0.pb,sha256=L_NDecbs6B53f9DH3kVRb5Jg733mbzz1qLJi6ayQUSM,20 +onnx/backend/test/data/pytorch-operator/test_operator_pow/model.onnx,sha256=M7XNkUGAfZ5E2nkFjWW_7SXvK9UHb3JScOaCix8QuyE,143 +onnx/backend/test/data/pytorch-operator/test_operator_pow/test_data_set_0/input_0.pb,sha256=O8PmJpISXR22yQA__IfBMa9X11Npm8Gf8PA2ayZiBF8,108 +onnx/backend/test/data/pytorch-operator/test_operator_pow/test_data_set_0/input_1.pb,sha256=LQt-jviNzkk-G9vloxDlOGCy8KSCW7U63NTBwgns_D8,108 
+onnx/backend/test/data/pytorch-operator/test_operator_pow/test_data_set_0/output_0.pb,sha256=XEN7AbFAM3Z4_8ItjckmF1DOvmIaQzPHLRWunkBESEM,108 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_mean/model.onnx,sha256=O1IEJavHoDIP8kdZY1MemfLfvEabPS21oZ5woHqUWkA,144 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_mean/test_data_set_0/input_0.pb,sha256=O8PmJpISXR22yQA__IfBMa9X11Npm8Gf8PA2ayZiBF8,108 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_mean/test_data_set_0/output_0.pb,sha256=0GRusgT6llp9L4xRar3Wsx135lkRheHdk_yLjqPz6Z4,42 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_mean_keepdim/model.onnx,sha256=8acUiZ_yB2CvOmhE2V-LMPvKC35XfS-_B-jKPt7d3vo,148 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_mean_keepdim/test_data_set_0/input_0.pb,sha256=O8PmJpISXR22yQA__IfBMa9X11Npm8Gf8PA2ayZiBF8,108 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_mean_keepdim/test_data_set_0/output_0.pb,sha256=JT45FKL7fQp93nScOLNH5XVzpe5_LSd0JERoEk-ZbhQ,44 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_sum/model.onnx,sha256=bUlOp9MQkbBdQJeZkj1agt-NtW6cKQtTatKD8OU_p98,143 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_sum/test_data_set_0/input_0.pb,sha256=O8PmJpISXR22yQA__IfBMa9X11Npm8Gf8PA2ayZiBF8,108 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_sum/test_data_set_0/output_0.pb,sha256=Utlni28fX3BXWdVXDcuLt0cu_ulgHjp_rgur3_lQyzY,42 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_sum_keepdim/model.onnx,sha256=L62_cXaEa-Cykw8LVM7XvGaKRbIdrBK4iamTWxh8OBg,147 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_sum_keepdim/test_data_set_0/input_0.pb,sha256=O8PmJpISXR22yQA__IfBMa9X11Npm8Gf8PA2ayZiBF8,108 +onnx/backend/test/data/pytorch-operator/test_operator_reduced_sum_keepdim/test_data_set_0/output_0.pb,sha256=5MvToMGwW6pFTiCLuFJPxxkfYbFAg9-kwFeauYw4Mhc,44 +onnx/backend/test/data/pytorch-operator/test_operator_repeat/model.onnx,sha256=RQJOZuNAEdwW6PlCklxKlGhtZ22mF8kCpou8FAIlWyU,183 +onnx/backend/test/data/pytorch-operator/test_operator_repeat/test_data_set_0/input_0.pb,sha256=O8PmJpISXR22yQA__IfBMa9X11Npm8Gf8PA2ayZiBF8,108 +onnx/backend/test/data/pytorch-operator/test_operator_repeat/test_data_set_0/output_0.pb,sha256=-YlOKC1jEG5kWC86OwzjnWKFlazgBGUPbrz4wuOGNZc,2317 +onnx/backend/test/data/pytorch-operator/test_operator_repeat_dim_overflow/model.onnx,sha256=mIr4O3Jeff3HjNcLPtRgdNo8sqy3ij_EmRwtk0-_Lik,262 +onnx/backend/test/data/pytorch-operator/test_operator_repeat_dim_overflow/test_data_set_0/input_0.pb,sha256=PeeWjZl6s614QawpKlTzaa2M_qDKkuO8BBaz-qfk8ss,16 +onnx/backend/test/data/pytorch-operator/test_operator_repeat_dim_overflow/test_data_set_0/output_0.pb,sha256=fcp9lp_a0UFVqmJQjOurs65k3IERx-brNnyQREIxAN8,205 +onnx/backend/test/data/pytorch-operator/test_operator_selu/model.onnx,sha256=3mqKXDt5KwBID5o11HDtMyoILtWR14_9oPGtONyDuaw,112 +onnx/backend/test/data/pytorch-operator/test_operator_selu/test_data_set_0/input_0.pb,sha256=O8PmJpISXR22yQA__IfBMa9X11Npm8Gf8PA2ayZiBF8,108 +onnx/backend/test/data/pytorch-operator/test_operator_selu/test_data_set_0/output_0.pb,sha256=gd_7TvsfK6wzJF1ctWcoUAr7_EAii8eAr1Aw08qArU8,108 +onnx/backend/test/data/pytorch-operator/test_operator_sqrt/model.onnx,sha256=c-mj8KfwvSG6yoCZCL1RWVPC4YEsFq9m9H7UQ5_Yec8,96 +onnx/backend/test/data/pytorch-operator/test_operator_sqrt/test_data_set_0/input_0.pb,sha256=lE4aKW3dubbAlB4rcDUcltWXIISr4HeAN1LWnjcGlw8,56 
+onnx/backend/test/data/pytorch-operator/test_operator_sqrt/test_data_set_0/output_0.pb,sha256=p3YSFtZwHoUIHrJqk7eOMcA7za5NaV97uor3259xh6I,56 +onnx/backend/test/data/pytorch-operator/test_operator_symbolic_override/model.onnx,sha256=TcukI5SQ2YCJEtjb6C-9S1Tpg93BRWVhmUJcLvQGyBk,291 +onnx/backend/test/data/pytorch-operator/test_operator_symbolic_override/test_data_set_0/input_0.pb,sha256=e1b3DFL0AWrzfrgYCD2Z_tmqUGzqz1XEfvLGynXik8Q,81934 +onnx/backend/test/data/pytorch-operator/test_operator_symbolic_override/test_data_set_0/output_0.pb,sha256=bqx1z2aK2tKWTHyU3O3EFn--0qeCgqvkwfZkm19C1WQ,81934 +onnx/backend/test/data/pytorch-operator/test_operator_symbolic_override_nested/model.onnx,sha256=Y5HmWP09sqQ8D87KKCuNzacDAFTXvT9d2qrT1UQqCFs,188 +onnx/backend/test/data/pytorch-operator/test_operator_symbolic_override_nested/test_data_set_0/input_0.pb,sha256=g4F_BI1dL9Zw7KRNa82h7qrc2ihraPzolEMraSI3Pq4,10 +onnx/backend/test/data/pytorch-operator/test_operator_symbolic_override_nested/test_data_set_0/input_1.pb,sha256=RSc1IkqRdFLMBmDEQ7JzmC62gTBmianfJ-_9l05ev3E,10 +onnx/backend/test/data/pytorch-operator/test_operator_symbolic_override_nested/test_data_set_0/input_2.pb,sha256=FmQxo2Hd_d2lkwKCgTubHR74fIGqSDeryUre8Zz2XbA,10 +onnx/backend/test/data/pytorch-operator/test_operator_symbolic_override_nested/test_data_set_0/output_0.pb,sha256=3d2sF1KRdRHfCmRz_gE6aeW3U3FXdj6a5iJZ16GbqPE,10 +onnx/backend/test/data/pytorch-operator/test_operator_symbolic_override_nested/test_data_set_0/output_1.pb,sha256=27DWRsqnvZq9m_3dBy-aOw_WF16XB0wQRmQTg3YFsqc,10 +onnx/backend/test/data/pytorch-operator/test_operator_symbolic_override_nested/test_data_set_0/output_2.pb,sha256=YgmWdPXZb_anXNeasbA2AZ5vpd5tKBADxsOuwSBk6WM,10 +onnx/backend/test/data/pytorch-operator/test_operator_view/model.onnx,sha256=XT7TK6OPv6q7O3b3898MF0l2-r7_Y1qFG8C8TCGsM5s,108 +onnx/backend/test/data/pytorch-operator/test_operator_view/test_data_set_0/input_0.pb,sha256=26QXjAPnTsE3QpqJjcP-PkSFVR-8imYTPkQwy9qdEpM,10 +onnx/backend/test/data/pytorch-operator/test_operator_view/test_data_set_0/output_0.pb,sha256=hy5YLPPZ35Pu3PoQQVqGsxFpEvOOcTe127tUCiOf-yQ,12 +onnx/backend/test/data/real/test_bvlc_alexnet/data.json,sha256=gTz09RnRuoLoForPp-zduDCJNxfXByr-o62-4QpH7QQ,123 +onnx/backend/test/data/real/test_densenet121/data.json,sha256=K_awJig_u8V8iXzzvMGyKUB3tIfu-jv0NmknR4-10Es,121 +onnx/backend/test/data/real/test_inception_v1/data.json,sha256=ZWFpowuWI8N5Yy1aKLNhvNnXzNo5kmhpfandv4IMtiY,123 +onnx/backend/test/data/real/test_inception_v2/data.json,sha256=CyDIFBbTrRTiiuW9LVvLso_HtMLIbenVBR2qazMz6Ww,123 +onnx/backend/test/data/real/test_resnet50/data.json,sha256=4GWkjjl4-uCahU2h7NiwKZt33Expj2Pjk14N9tjuwSE,115 +onnx/backend/test/data/real/test_shufflenet/data.json,sha256=j-qHhpjQOfm6VTRw9joMZet2z1bt-7ziAhGUhtuoXag,119 +onnx/backend/test/data/real/test_squeezenet/data.json,sha256=Wc_Fxf5HPmehmoYP4UNckip3_Kk783zoO7U2xKecNH8,119 +onnx/backend/test/data/real/test_vgg19/data.json,sha256=zazHTUwGDFkcpUPTP95t_XZj5Xjozd40C19otzyuQo4,109 +onnx/backend/test/data/real/test_zfnet512/data.json,sha256=-uRgfrHvF6uF_KdVb01ehe6KexyoTLGZwKKkeJAY6hM,115 +onnx/backend/test/data/simple/test_expand_shape_model1/model.onnx,sha256=RjVogwevJIx-F_NCRrB78u3wz8J43liwi_4VXZ5zT3M,132 +onnx/backend/test/data/simple/test_expand_shape_model1/test_data_set_0/input_0.pb,sha256=FeacT0JWpRN8J4-0SXL6u5IFq8rqpzNiTg5j5-U39oU,25 +onnx/backend/test/data/simple/test_expand_shape_model1/test_data_set_0/input_1.pb,sha256=-kFoNUTrPOdcUABEXtz8Y0JvTYyaQKDWNoIlhldRde0,29 
+onnx/backend/test/data/simple/test_expand_shape_model1/test_data_set_0/output_0.pb,sha256=Ory0VuIbWoxQQlFNuvfHKxsStsXUx98DHtV1L-0l_8E,25 +onnx/backend/test/data/simple/test_expand_shape_model2/model.onnx,sha256=8vMBfHTTsakvEYYAHRix0Rw5UlLLX2YLH_LHIauQ_dI,132 +onnx/backend/test/data/simple/test_expand_shape_model2/test_data_set_0/input_0.pb,sha256=FeacT0JWpRN8J4-0SXL6u5IFq8rqpzNiTg5j5-U39oU,25 +onnx/backend/test/data/simple/test_expand_shape_model2/test_data_set_0/input_1.pb,sha256=jtEjGRaQMY2aXanx5tzKSV1aQZqCMTz_eAe-NASuXvI,29 +onnx/backend/test/data/simple/test_expand_shape_model2/test_data_set_0/output_0.pb,sha256=Nfr-Y4hHMCx7RZd6zuGthvWOnTqq6B02AeKOTcNUibA,49 +onnx/backend/test/data/simple/test_expand_shape_model3/model.onnx,sha256=i2W7OvivVljkRuYyr7qgTJhoeMJ-DZaYTOYcE5ErhoM,132 +onnx/backend/test/data/simple/test_expand_shape_model3/test_data_set_0/input_0.pb,sha256=FeacT0JWpRN8J4-0SXL6u5IFq8rqpzNiTg5j5-U39oU,25 +onnx/backend/test/data/simple/test_expand_shape_model3/test_data_set_0/input_1.pb,sha256=90Jk8uu8W-Zk-IKoYDY1M9ynDidapdOgGlDsn-Nhzs0,37 +onnx/backend/test/data/simple/test_expand_shape_model3/test_data_set_0/output_0.pb,sha256=f3h1_JMnmSfaS5ve3NZ68hxYs9q56syzQIrZwBG4Yyc,121 +onnx/backend/test/data/simple/test_expand_shape_model4/model.onnx,sha256=-LCt6hxsHX6zj7BhYIcFVieavb3xJEOVhgysNc7Q_IM,136 +onnx/backend/test/data/simple/test_expand_shape_model4/test_data_set_0/input_0.pb,sha256=FeacT0JWpRN8J4-0SXL6u5IFq8rqpzNiTg5j5-U39oU,25 +onnx/backend/test/data/simple/test_expand_shape_model4/test_data_set_0/input_1.pb,sha256=TzuTXPf2kofUisXxxsPIjs9kGduOx7kUKo0GK6M0c1k,45 +onnx/backend/test/data/simple/test_expand_shape_model4/test_data_set_0/output_0.pb,sha256=-54z-uncN2E4Xp6GCo0JFdJ0Nvjgzlhr812uvk0Yrng,340 +onnx/backend/test/data/simple/test_gradient_of_add/model.onnx,sha256=hqxKd-FKyhCxjveUJXYk7AXEXvZHy0aKG0gf6AzYz78,264 +onnx/backend/test/data/simple/test_gradient_of_add/test_data_set_0/input_0.pb,sha256=01kqGBPZb3rDpPA9T56CvZXs2yaYshAUG4N9Dnl25Mw,11 +onnx/backend/test/data/simple/test_gradient_of_add/test_data_set_0/input_1.pb,sha256=fA_vLSZfxRw0E8uijMUP0AjksThofW6HfWZmZV2a59Y,11 +onnx/backend/test/data/simple/test_gradient_of_add/test_data_set_0/output_0.pb,sha256=MoopEq7YvmlIiWp-ULnZQ7cgYLyeTTxqO1eqVNn9KQg,11 +onnx/backend/test/data/simple/test_gradient_of_add/test_data_set_0/output_1.pb,sha256=Yo4qcMojcGk2JZPSRpcKSY_BWkbKcZoRuC-nT5OlCZM,15 +onnx/backend/test/data/simple/test_gradient_of_add/test_data_set_0/output_2.pb,sha256=1xx1amq9P1-dPK30tNpnEdNn-SduOremceOw8uW8C3k,15 +onnx/backend/test/data/simple/test_gradient_of_add_and_mul/model.onnx,sha256=WNxpfuoymu3Q1SkrssVQABq55ea3GoCcGWxi7aSMHJc,297 +onnx/backend/test/data/simple/test_gradient_of_add_and_mul/test_data_set_0/input_0.pb,sha256=01kqGBPZb3rDpPA9T56CvZXs2yaYshAUG4N9Dnl25Mw,11 +onnx/backend/test/data/simple/test_gradient_of_add_and_mul/test_data_set_0/input_1.pb,sha256=fA_vLSZfxRw0E8uijMUP0AjksThofW6HfWZmZV2a59Y,11 +onnx/backend/test/data/simple/test_gradient_of_add_and_mul/test_data_set_0/output_0.pb,sha256=k_SlcYexZAOt4fP6X2ghd-7chiUMXOoyvCJveI3MTWc,11 +onnx/backend/test/data/simple/test_gradient_of_add_and_mul/test_data_set_0/output_1.pb,sha256=ZIUr0lNQxoOvyIvdFjkKNk_x10J0_El2BKhC1VO5Cqs,15 +onnx/backend/test/data/simple/test_gradient_of_add_and_mul/test_data_set_0/output_2.pb,sha256=nXyZvmlusCjPtriTAgchwXnlwHF4hAOFbPqA8wzz1Pk,15 +onnx/backend/test/data/simple/test_sequence_model1/model.onnx,sha256=5xFFoohE76wt8FmPC5aobMCnys9ZAbNtZXwJ1yB1zhk,371 
+onnx/backend/test/data/simple/test_sequence_model1/test_data_set_0/input_0.pb,sha256=HBHYia7bIMIEe4U5fWCsXOb5Tf7kriOjoNOib0tkUpo,109 +onnx/backend/test/data/simple/test_sequence_model1/test_data_set_0/input_1.pb,sha256=acWUJJJkjaegHyWZXk1TzmUv7Pj2A6Fj4Kau-LD68wQ,61 +onnx/backend/test/data/simple/test_sequence_model1/test_data_set_0/input_2.pb,sha256=8jc-ymVjyW4wPnnIHhZ5_BKaWy8WOsqrAa_mbJrRnaI,158 +onnx/backend/test/data/simple/test_sequence_model1/test_data_set_0/output_0.pb,sha256=7mBt-bM7EukBGQzUvqoyxL934b2LrCkDx_I3-PAaWZY,63 +onnx/backend/test/data/simple/test_sequence_model2/model.onnx,sha256=KZzZBGV-tDv_bkWo2KOjixr8YKbHXS_ornwssdPPpjY,322 +onnx/backend/test/data/simple/test_sequence_model2/test_data_set_0/input_0.pb,sha256=HBHYia7bIMIEe4U5fWCsXOb5Tf7kriOjoNOib0tkUpo,109 +onnx/backend/test/data/simple/test_sequence_model2/test_data_set_0/input_1.pb,sha256=QYuDQFaYJk_vZOlJvOrFihGNRwYhQIvxW59i_dDFavA,109 +onnx/backend/test/data/simple/test_sequence_model2/test_data_set_0/input_2.pb,sha256=6yIT4GU0ai5jfrrMWxDYlnRCzdydqQHvj9AmzXn0aqg,109 +onnx/backend/test/data/simple/test_sequence_model2/test_data_set_0/output_0.pb,sha256=0VfhqZDCxkMww3kF8CHMsdPQ2ae6_3HHrWGfUUUhcxs,111 +onnx/backend/test/data/simple/test_sequence_model3/model.onnx,sha256=J9qvj7WaYXTfJJWbwlaOZq7uCSY1emnWv8Ot4bVzVmg,437 +onnx/backend/test/data/simple/test_sequence_model3/test_data_set_0/input_0.pb,sha256=HBHYia7bIMIEe4U5fWCsXOb5Tf7kriOjoNOib0tkUpo,109 +onnx/backend/test/data/simple/test_sequence_model3/test_data_set_0/input_1.pb,sha256=QYuDQFaYJk_vZOlJvOrFihGNRwYhQIvxW59i_dDFavA,109 +onnx/backend/test/data/simple/test_sequence_model3/test_data_set_0/input_2.pb,sha256=6yIT4GU0ai5jfrrMWxDYlnRCzdydqQHvj9AmzXn0aqg,109 +onnx/backend/test/data/simple/test_sequence_model3/test_data_set_0/output_0.pb,sha256=0VfhqZDCxkMww3kF8CHMsdPQ2ae6_3HHrWGfUUUhcxs,111 +onnx/backend/test/data/simple/test_sequence_model4/model.onnx,sha256=1dZ8RAqL2C4M5qt2anAFL0XkW4q73_krGrOB-vR0IeA,219 +onnx/backend/test/data/simple/test_sequence_model4/test_data_set_0/input_0.pb,sha256=HBHYia7bIMIEe4U5fWCsXOb5Tf7kriOjoNOib0tkUpo,109 +onnx/backend/test/data/simple/test_sequence_model4/test_data_set_0/input_1.pb,sha256=QYuDQFaYJk_vZOlJvOrFihGNRwYhQIvxW59i_dDFavA,109 +onnx/backend/test/data/simple/test_sequence_model4/test_data_set_0/input_2.pb,sha256=6yIT4GU0ai5jfrrMWxDYlnRCzdydqQHvj9AmzXn0aqg,109 +onnx/backend/test/data/simple/test_sequence_model4/test_data_set_0/output_0.pb,sha256=xn5AGPzsNiwU7zvOGMww3GK0K6mjZFeyctuICJDD_jk,304 +onnx/backend/test/data/simple/test_sequence_model5/model.onnx,sha256=1YqWf-J8logBWGMQl9wYHslyJeozA19o3ULV3gFrIeU,251 +onnx/backend/test/data/simple/test_sequence_model5/test_data_set_0/input_0.pb,sha256=HBHYia7bIMIEe4U5fWCsXOb5Tf7kriOjoNOib0tkUpo,109 +onnx/backend/test/data/simple/test_sequence_model5/test_data_set_0/input_1.pb,sha256=QYuDQFaYJk_vZOlJvOrFihGNRwYhQIvxW59i_dDFavA,109 +onnx/backend/test/data/simple/test_sequence_model5/test_data_set_0/input_2.pb,sha256=6yIT4GU0ai5jfrrMWxDYlnRCzdydqQHvj9AmzXn0aqg,109 +onnx/backend/test/data/simple/test_sequence_model5/test_data_set_0/output_0.pb,sha256=M0bT0Bi-MmK76q7g_NGHZBGtBqCIRkOEHbZwQxOMS4M,306 +onnx/backend/test/data/simple/test_sequence_model6/model.onnx,sha256=JANoYkvy3KHwu0OQp7_JOebk2uyywXnF1kOhAh8v-A8,156 +onnx/backend/test/data/simple/test_sequence_model6/test_data_set_0/input_0.pb,sha256=HBHYia7bIMIEe4U5fWCsXOb5Tf7kriOjoNOib0tkUpo,109 +onnx/backend/test/data/simple/test_sequence_model6/test_data_set_0/output_0.pb,sha256=WQhbsdsmsmPRHbpqfZ2MiS0DAKfdf-5atRiJ3PQSa8I,17 
+onnx/backend/test/data/simple/test_sequence_model7/model.onnx,sha256=8W0ICmPH4I-RbyPg1sUAqS8g80VjI3b1w6KwX8LSthI,209 +onnx/backend/test/data/simple/test_sequence_model7/test_data_set_0/input_0.pb,sha256=iDb2BEF6r1s_Ivm2lami2Cd5mumrAH-iCufdqTWZtVg,206 +onnx/backend/test/data/simple/test_sequence_model7/test_data_set_0/output_0.pb,sha256=hRDk49zUKK-zx6kVhCyPbUW8GAbH2oHne4UBEZ-abPk,109 +onnx/backend/test/data/simple/test_sequence_model8/model.onnx,sha256=uRG-4PlDM24-SF0xEMigmVnRe_eTopTh5iPwAmRJngA,157 +onnx/backend/test/data/simple/test_sequence_model8/test_data_set_0/input_0.pb,sha256=MnclcMUBN86XdRdCIhDbiw8iXBNVY9m2zmyFUnlMLM8,9 +onnx/backend/test/data/simple/test_sequence_model8/test_data_set_0/input_1.pb,sha256=7EJgBev8JC7V0Rw_gwYv6p__Wn6ntHoisPgOXmLl680,38 +onnx/backend/test/data/simple/test_sequence_model8/test_data_set_0/output_0.pb,sha256=XQxLMlrcsgBaXyWLm5m5QW1Ph1e1xB8VoLsTnp-G9TI,17 +onnx/backend/test/data/simple/test_shrink/model.onnx,sha256=w20edVDLuFYRyg_mTFTeX28fDIvf5Eu2DkfmX0nVGzA,115 +onnx/backend/test/data/simple/test_shrink/test_data_set_0/input_0.pb,sha256=b6eG0JoSaNfFZ9uN6DHuVlebG4hyn5rm9o2xzU2w0-Y,29 +onnx/backend/test/data/simple/test_shrink/test_data_set_0/output_0.pb,sha256=AbQplXJ5FVu2LaNzGC0t3G9EZhZUCsd-_fnyXRD9T2I,29 +onnx/backend/test/data/simple/test_sign_model/model.onnx,sha256=x-DuU3N-mIY90B4uRxuhogKFVZq8nHXxJrFs5i-fDKQ,90 +onnx/backend/test/data/simple/test_sign_model/test_data_set_0/input_0.pb,sha256=OYoPx6Ht6FXdfpszUznfOHcHY34rscU83vgklJlGV_A,37 +onnx/backend/test/data/simple/test_sign_model/test_data_set_0/output_0.pb,sha256=CeKwUlQI9tHyyzENPyq43v-DgnaQhzqRFwnITipA7SY,37 +onnx/backend/test/data/simple/test_single_relu_model/model.onnx,sha256=81t2jgdqDN2px9zzoPPsu4STlrL3Ff1ELHcFx9H7Rzs,98 +onnx/backend/test/data/simple/test_single_relu_model/test_data_set_0/input_0.pb,sha256=z3PIwDv5elbsTylVjEIm6kzqZADwyn7wXFOKazkGPDo,19 +onnx/backend/test/data/simple/test_single_relu_model/test_data_set_0/output_0.pb,sha256=o_KNVxD7Z64gu7M_8ZB2SD7cbjBQg7ZcTid0oZk_lcA,19 +onnx/backend/test/data/simple/test_strnorm_model_monday_casesensintive_lower/model.onnx,sha256=YEJ4VUhWCA58OTuF7rc5CYazSwyDvm_hdNZhSJRSOsE,185 +onnx/backend/test/data/simple/test_strnorm_model_monday_casesensintive_lower/test_data_set_0/input_0.pb,sha256=esXLitVtK2jXZ1_Lr-WQo0JzyzURptvL5Nk19vQXSr8,45 +onnx/backend/test/data/simple/test_strnorm_model_monday_casesensintive_lower/test_data_set_0/output_0.pb,sha256=_-mGRZ7G9I9iAx2P9lMk3aHUawP2XKjO9hn2fpR6tpY,37 +onnx/backend/test/data/simple/test_strnorm_model_monday_casesensintive_nochangecase/model.onnx,sha256=chr1nCThp6Y4V52zC2WrTWUJ0GZJIJKCOJQTB6qruiA,153 +onnx/backend/test/data/simple/test_strnorm_model_monday_casesensintive_nochangecase/test_data_set_0/input_0.pb,sha256=esXLitVtK2jXZ1_Lr-WQo0JzyzURptvL5Nk19vQXSr8,45 +onnx/backend/test/data/simple/test_strnorm_model_monday_casesensintive_nochangecase/test_data_set_0/output_0.pb,sha256=_-mGRZ7G9I9iAx2P9lMk3aHUawP2XKjO9hn2fpR6tpY,37 +onnx/backend/test/data/simple/test_strnorm_model_monday_casesensintive_upper/model.onnx,sha256=mTkMdQL9kKyFg-wXrRGcGZjp_JkEwLZB-Wv4bTteV2Y,185 +onnx/backend/test/data/simple/test_strnorm_model_monday_casesensintive_upper/test_data_set_0/input_0.pb,sha256=esXLitVtK2jXZ1_Lr-WQo0JzyzURptvL5Nk19vQXSr8,45 +onnx/backend/test/data/simple/test_strnorm_model_monday_casesensintive_upper/test_data_set_0/output_0.pb,sha256=7Zk-IeCaqHCfs5Z1zqHp5ERKMABu8eMRVVYNTMLdFc4,37 
+onnx/backend/test/data/simple/test_strnorm_model_monday_empty_output/model.onnx,sha256=7rQjPSYfgK49i9NXSP1wZbZjsKKaHQcuyaZXlfxblJA,185 +onnx/backend/test/data/simple/test_strnorm_model_monday_empty_output/test_data_set_0/input_0.pb,sha256=XGok6pzumeWYmW61YcE2gO49FZP4HhO5L7VQiaofooQ,23 +onnx/backend/test/data/simple/test_strnorm_model_monday_empty_output/test_data_set_0/output_0.pb,sha256=fve41mqTI11I7kUof7Bey6Rtk7Sv23j8xV9xaJLPzQ0,9 +onnx/backend/test/data/simple/test_strnorm_model_monday_insensintive_upper_twodim/model.onnx,sha256=oA_EVTNecnOU1ep5n9OYERU5iqhaEXY7JhmSQE6ycDs,167 +onnx/backend/test/data/simple/test_strnorm_model_monday_insensintive_upper_twodim/test_data_set_0/input_0.pb,sha256=5_crR6dG0l6PE3Zf0m0C1WePv2dsKuUUSeSBHD4-fx8,65 +onnx/backend/test/data/simple/test_strnorm_model_monday_insensintive_upper_twodim/test_data_set_0/output_0.pb,sha256=etP-R5bCcemRt-RrzQK7MPjXOQwqbEBeZCcYma1Jb7M,49 +onnx/backend/test/data/simple/test_strnorm_model_nostopwords_nochangecase/model.onnx,sha256=bVmnHtYfTVDT2Dvx84KyrtEWEatTU3SfnlPGXDSGhGw,128 +onnx/backend/test/data/simple/test_strnorm_model_nostopwords_nochangecase/test_data_set_0/input_0.pb,sha256=uBHq4M-aleAVs_ZnyQty-Yz85ya7wK6Dp7RjXk0bUZw,24 +onnx/backend/test/data/simple/test_strnorm_model_nostopwords_nochangecase/test_data_set_0/output_0.pb,sha256=NhjGWjPVxmkR0f35ghfx6YQm3Is2Eue9LWCC-NaQqQY,24 +onnx/backend/test/loader/__init__.py,sha256=mngOe26IgFmbbCjFIhAyLMnPGhCZoaB7ke-r7U4bm78,1867 +onnx/backend/test/loader/__pycache__/__init__.cpython-39.pyc,, +onnx/backend/test/report/__init__.py,sha256=Ua3s3YmvUusUnffIyct0GnwIqFcMP_AsV5aIe1A4rLs,1261 +onnx/backend/test/report/__pycache__/__init__.cpython-39.pyc,, +onnx/backend/test/report/__pycache__/base.cpython-39.pyc,, +onnx/backend/test/report/__pycache__/coverage.cpython-39.pyc,, +onnx/backend/test/report/base.py,sha256=npe6vog194R-rbyVje3cIUerSkLoFzDaq5xexAGqbDk,119 +onnx/backend/test/report/coverage.py,sha256=uxx9g_E6TpKakqWY_pfmU0ITIFjRDpkI8lPtu4YL9Zk,11258 +onnx/backend/test/runner/__init__.py,sha256=H-fHfM4x0gD-lSzqtgJNKEFUxQHNLNtXyQSBxWRGNRo,21889 +onnx/backend/test/runner/__pycache__/__init__.cpython-39.pyc,, +onnx/backend/test/runner/__pycache__/item.cpython-39.pyc,, +onnx/backend/test/runner/item.py,sha256=PEJjl6K3Z-bga8IMHN3alKcweygjAMzZhSa1ChKF6C8,429 +onnx/backend/test/stat_coverage.py,sha256=ciYJpZf4AkykmQ_WoaTSMowJIGE96_i6Hpym69PD7Aw,11797 +onnx/bin/__init__.py,sha256=HiuD-U6YHJwWikkemKJqBq-35U8KJ9LDdAwdisi7OlQ,85 +onnx/bin/__pycache__/__init__.cpython-39.pyc,, +onnx/bin/__pycache__/checker.cpython-39.pyc,, +onnx/bin/checker.py,sha256=h4tAK3SaViHUdt8nBpookLAqVNSgIHhn2jL_GeHM7m0,684 +onnx/checker.cc,sha256=Z-6MY50pdDY62j_EdDtP8wRj2JsI7YnbEwv-OboNX6Q,39950 +onnx/checker.h,sha256=PB_NiQJPMXmTFC7Kw7sSLw4AB3NNjiftyEmwvtXT6BQ,6338 +onnx/checker.py,sha256=uMT_3QPDKICHxw68OTLIJ2hfWNnr006QZZ6OEwudUPU,6004 +onnx/common/array_ref.h,sha256=PrnfjZhj7K-65E5QIk3iNxjzUHbabWJW9L0BPAfewD4,5851 +onnx/common/assertions.cc,sha256=NWrG0dLEF3Y18ItVSs_BrWhd6eKOFaHdJDtviQyVE4o,1050 +onnx/common/assertions.h,sha256=CbNWCOu-dHXeHXSyAPIKyZUSUfw73RPy8SYmBqvsq58,3114 +onnx/common/common.h,sha256=U0oB3sUzV_U_9EpmH6_z-bh1TErLbOarbCTFMtwwbrk,1745 +onnx/common/constants.h,sha256=Tc32n1bISbVmGwL2AO7-RFdFrRWOa04mODlOUNcY7Ws,1533 +onnx/common/file_utils.h,sha256=0hLCu6BaTHdz1DixAtJCK3fXhyfxW7g2X1dgezbE8lU,993 +onnx/common/graph_node_list.h,sha256=UcjP2oIutYfyXGmX_mFXb_hnCb8BDyNfyvbMINsonec,5570 +onnx/common/interned_strings.cc,sha256=eUZLhlfgxCc2J60D1tscA2_LcKfktTxeJo0H9YPkufw,2112 
+onnx/common/interned_strings.h,sha256=4xJ5Ko_kSulW-W-ml-yac_FVIbXu2Xvdsh-peDWWllc,8558 +onnx/common/ir.h,sha256=u2glYzt--nO0YbrJjEriYAeUif2d0aAP27GsJSxIgKE,44547 +onnx/common/ir_pb_converter.cc,sha256=ZXuGhvIqqBxnxnUuK4q3zWywt2bR3-jwQTL4tepn6sE,25139 +onnx/common/ir_pb_converter.h,sha256=sjQJQvUvjZHlkJrSSpFPJVhSYUxlViME4CMz6baNOAg,1356 +onnx/common/model_helpers.cc,sha256=VNtlhrTYTEchCPTk-AYhNLLALr8q9Rsvv-AVzq3XD9o,989 +onnx/common/model_helpers.h,sha256=GlUIQkNA3LFTahX6e4ZGF-BVySdCqD6i24w7bwqpKVY,671 +onnx/common/path.cc,sha256=uPJBMR4KFfC0CZWhkdCXGaqEEZ2Mo5x0VqJ4IDvaoZA,1796 +onnx/common/path.h,sha256=f773P8V2kMMy1ARdWxCnd08_NCqJ050bbiY43FNyx9g,2232 +onnx/common/platform_helpers.h,sha256=lEXtRL8pgSM5MUyIXhyECHj_nrfaQkq_LH5aaGQDzOs,402 +onnx/common/proto_util.h,sha256=U-KYW5fBs1MqmYWmtMONpw8a75SRMD7D32a8dZ4LlN0,1473 +onnx/common/status.cc,sha256=nMqcYhcpXzyX-Ln7kZPC2EwOMbV5ssNuF6z05N6ojrw,1981 +onnx/common/status.h,sha256=7x8_ZLcNIZVL7HF1ArFp03KU2PUPT6eDZRhEedelnFc,2008 +onnx/common/tensor.h,sha256=tCw06bToUEkXWStd7h5n07ikpmNFadfefi0uDIW5jrw,7699 +onnx/common/version.h,sha256=2_9g5wIb82uYGYJA9D8DFmFCEZ-b1NVqPjFvsMtbIYs,308 +onnx/common/visitor.h,sha256=Fn-B-0xAy0r3TEo0jh4yYrFJkBV7xZxrP94VX5AjzFI,3215 +onnx/compose.py,sha256=vDqyWgh3N6ILK2Z_zkRlO7zbFMw9n1KM_FpWcGtyg6I,26885 +onnx/cpp2py_export.cc,sha256=ffp_Z2wm5ABVrMi3OLIg086E1y1RRuN5rnOSqghIR-E,31228 +onnx/defs/__init__.py,sha256=PZ01rEheu0-GhBVbHrehi9fMoS3e9Vx4XBPGXD_DqHo,4284 +onnx/defs/__pycache__/__init__.cpython-39.pyc,, +onnx/defs/__pycache__/gen_doc.cpython-39.pyc,, +onnx/defs/__pycache__/gen_shape_inference_information.cpython-39.pyc,, +onnx/defs/attr_proto_util.cc,sha256=qAGbnnDY7MvKN7nKIYl6K5f7irNeNnNJ61J67OP_TJk,3658 +onnx/defs/attr_proto_util.h,sha256=pXz7gVOJThtEsICpeA-qF7QhexpTp6ULuU63sCxTGNM,1943 +onnx/defs/controlflow/defs.cc,sha256=0Udh4GV6ywO3KKYd2q-bYiST8K4_DpIFg3PE603JyEM,20718 +onnx/defs/controlflow/old.cc,sha256=2XtmWSUTmh-D0CO0uMFWrfZreHMXNpITOdrw04fzf3g,117162 +onnx/defs/controlflow/utils.cc,sha256=hFmMX_mBMBbYZeUunll9OShW0AwhOVcjtfH0O9jZCqY,13832 +onnx/defs/controlflow/utils.h,sha256=214qXEfbSGzBJ1M0D3MYmRlFIvPErZg8cKu0LlWnphg,470 +onnx/defs/data_propagators.h,sha256=96WmV0aibHGGar4fx9g36ucad5nIl-R_I4OO00XhcUo,2437 +onnx/defs/data_type_utils.cc,sha256=SDs0aUU0uNxPKwiYBpXd2oc4ylqO_rnUAeq3C2QS5bY,14546 +onnx/defs/data_type_utils.h,sha256=JfNF_L8zcE0Gc7upEgyXtCMYupnrhzNagvp5xzGRTSc,2529 +onnx/defs/function.cc,sha256=Lhm-RmYgNhzIdEexwxd_rLV86Pl-sGHnipk8lCbE3pg,5716 +onnx/defs/function.h,sha256=nvwzX9OejaUY_wOwPCxQxWf__gKmmJvCGWI5LfRvlRU,6291 +onnx/defs/gen_doc.py,sha256=wtT7o4AStdzgueAlp7gSVcKAd27YXSs_rmYJOSSP2ic,16824 +onnx/defs/gen_shape_inference_information.py,sha256=iRXGJKziAu_Rrwd7emFMyWaU0PrPz9f3SJc7JhDzF9o,870 +onnx/defs/generator/defs.cc,sha256=1J5kGm4ahA7gdKYDEzSKoO_giU_dQrvbba15vAU-j9U,27980 +onnx/defs/generator/old.cc,sha256=vCze7s2ZSqaX5XVKgAKv7OblpjaA63h0iBqUQD8dxiA,15286 +onnx/defs/generator/utils.cc,sha256=VNAMST91p-BvMPn-vRp0triV48iB6IPxxY7fogICJ7E,3738 +onnx/defs/generator/utils.h,sha256=RFZHCTcv5CgNsYcj-7yrCrApEJSZ1CYoqGSjWACLQII,213 +onnx/defs/image/defs.cc,sha256=TwqyFK_GbtLqanIo4ekYw9g4jp-vIlVBAe-1KWscbWA,2834 +onnx/defs/logical/defs.cc,sha256=Tbj__KPHpdhBAk2cPAhPHdJtPrWvlGkCifJtgnGGmFc,12166 +onnx/defs/logical/old.cc,sha256=3mcunnOuj4d2RAG-bjRwqJU8Eeg1qGE9OFLFOeicHUY,10334 +onnx/defs/math/defs.cc,sha256=0oWGGDB4hVOQd6XNZnKZ5_RaUH_jWvbYUfsAuWTXYk0,145250 +onnx/defs/math/old.cc,sha256=pnVHt23OuOyYjiRwCVmLBc4eiDHvHxgs6c0rLbE8ucM,133720 
+onnx/defs/math/utils.cc,sha256=J86Vx2b1T3zyrwxrEV2sB4vNzN9yg9i-w6cAOGgFJbk,4981 +onnx/defs/math/utils.h,sha256=rL7InDv3ZROqK2tsINn70J92gsmgYdRLiYMsKEviDTg,1347 +onnx/defs/nn/defs.cc,sha256=6MgRpvf0vjTpVdX6x2h0PzP0c17-QVFuKHAaN8zE0fw,128345 +onnx/defs/nn/old.cc,sha256=4H9-WMIzp__h5PLtDskJ5BrcUGeitCgsaFxMTOk_zRY,109614 +onnx/defs/object_detection/defs.cc,sha256=kke0G3Bo0WXsL1jJ9G3eauu_il6NTGZSTYBCr7bANSM,9533 +onnx/defs/object_detection/old.cc,sha256=Kfck4FSzoyv90IdefqN028RxGXj5syrnMHtX05vFOWI,8343 +onnx/defs/operator_sets.h,sha256=fFYRtIV_r6ExBHUKlngOPpPGEl1O-Y42MpE1GqBZa0A,79190 +onnx/defs/operator_sets_ml.h,sha256=U9K3yyowYc-gWOQino-KCxXvMNDiBynKxn7TLxRSIOE,5140 +onnx/defs/operator_sets_preview.h,sha256=Gc9l-kttvpW1B7X3YburU91E52vgUFJiAINZCigspNY,1273 +onnx/defs/operator_sets_training.h,sha256=l_dkwNQpwzQ7VyXOrmgE6O5oSWRSoEhwn0YT2ToAiXI,518 +onnx/defs/optional/defs.cc,sha256=9wPzDUjtnMQVWmR_2gaXw0pNUUxI1UyDr0rlWW1fU20,6670 +onnx/defs/optional/old.cc,sha256=gEVbnZiknNPPoHk5pJYfUcGQaLgxgjXaXMR2LQ6rbp4,3499 +onnx/defs/parser.cc,sha256=FMVjS5zdCgLs2PyzWSJmEhA5BzokBaJch6AL8vP8kbA,28059 +onnx/defs/parser.h,sha256=HraFTtToShD2doaUOc-_dlWBVaVdHVcge7Apl28rPXE,13351 +onnx/defs/printer.cc,sha256=MxZ75Cp5kXwjdGFd0iF2LKAZMBAY__vOQ070bEd1tJw,14000 +onnx/defs/printer.h,sha256=HZDX6-1Oei0w-IG191JiTUxkS0w9KHd5Qb68BPgCjWA,1435 +onnx/defs/quantization/defs.cc,sha256=az2b0vYdWyROAYrTfe8N8XAa61vx5WX242fLKa1CAkQ,13777 +onnx/defs/quantization/old.cc,sha256=n3ZFYTTIc6c6j9NTITKyXsFQpus6_obhJEq4jlxS1Nc,15703 +onnx/defs/reduction/defs.cc,sha256=rHvP9SDvpIKSVD6MEm1rifZPm5Gtmm8la5_rWlanuiY,6316 +onnx/defs/reduction/old.cc,sha256=cHIoo2USwB9GY7R0ANTR4m47kGZko6laiSNsURHoGVQ,19434 +onnx/defs/reduction/utils.cc,sha256=Ml0jJiMwSq2pnKgHEWyfnOfiX2cqsCyL6pev2D_VeHk,6375 +onnx/defs/reduction/utils.h,sha256=vKGZKxos-q9eFevawslXEwuDvBYm5eM8MxKEsJDRZBQ,1561 +onnx/defs/rnn/defs.cc,sha256=oHWuTnER8fXzvcmd1zf61oXDIZaNo7je6wy_sqfnOFI,20092 +onnx/defs/rnn/old.cc,sha256=HnFsXxbQIzmRR965HRB2X5vQVjpOWHNEPkpmcKkJiY0,40210 +onnx/defs/schema.cc,sha256=W8JryU8rS8MzDiWPN0mrMVr8YLuF3_2UyuppqOurI4s,45639 +onnx/defs/schema.h,sha256=6va0tiMAlrprYZYAfGQG7QngABtrhbsWR9FAPewzd6M,66575 +onnx/defs/sequence/defs.cc,sha256=0CGv3Oxsg9jKoXteXcCzRQ3O7_U_BkbpK9win7gSf1c,34822 +onnx/defs/shape_inference.cc,sha256=HRbVV_Nh0R5iR8Tg28BbGlAv_I327XB-gFZ7jEzL3dU,22755 +onnx/defs/shape_inference.h,sha256=l8kqpl_oHlQcV1UqtSUidpu3pOm-FU-Dek_3ZRIefVQ,34748 +onnx/defs/tensor/defs.cc,sha256=T1SMqOFZIrhTI_EziiBD0zcQck_1gdcuw5X4wy9BqcU,166662 +onnx/defs/tensor/old.cc,sha256=oIpbDbd3ObNXMrWibwWVAz149G6h27Rep86Oo0WxT4U,271738 +onnx/defs/tensor/utils.cc,sha256=EYWBGwdBFu2mPG6nWnounD7fkj6qrRPX_xiW9eQuLYg,19317 +onnx/defs/tensor/utils.h,sha256=NW6Bf_osOOPr7ibHPXYP7aHlU5zLn5OVzvmySHd3m4I,1855 +onnx/defs/tensor_proto_util.cc,sha256=JWCRx4TkqMpUTgtuvQwIvjsJbf73ZkBYXS9H7lmaLF0,11953 +onnx/defs/tensor_proto_util.h,sha256=YsVVC_tCxfWdRZkCZ_dce_uutFOk6zkPcicz2W7ULAY,421 +onnx/defs/tensor_util.cc,sha256=fC_jU6948R-LcuWZSiB7mGi3nyvVjiNv3yFfmQXW9GM,3484 +onnx/defs/tensor_util.h,sha256=AwOYIXs0EyFWRsm0m0UPnaaJYnHILcNKvJXWJPrjGOc,260 +onnx/defs/text/defs.cc,sha256=xle6eQMSIGgADF0LyWnbYAa6GXDWA4uQtopu7iuLq9o,10575 +onnx/defs/traditionalml/defs.cc,sha256=4DdP9SEsnIGILllYtSqwvq1kdNDzrpj8hXgulOiH4IU,63664 +onnx/defs/traditionalml/old.cc,sha256=WkI-LJQHK9_M4K64WW4kW_Tl-tKOkGtsB8g30ZGyesg,34571 +onnx/defs/traditionalml/utils.h,sha256=cVIWYbMo-mWdp9rf76NJvjrboP1ra_CIULcA-E7l9E4,857 +onnx/defs/training/defs.cc,sha256=EsgLfuK_ifWbOjPAkxop1Hb6c-FmVpxRtn79MuI2y_8,28509 
+onnx/external_data_helper.py,sha256=teW0FLi_knNys0oA7pNL2znlUuD_cftOPVvwxqQ89qg,11910 +onnx/frontend/__init__.py,sha256=HiuD-U6YHJwWikkemKJqBq-35U8KJ9LDdAwdisi7OlQ,85 +onnx/frontend/__pycache__/__init__.cpython-39.pyc,, +onnx/gen_proto.py,sha256=s21pPEC_HKO2_A0B0QPvcPHt7D-L3vr9n9LNexMuQYs,8887 +onnx/helper.py,sha256=ZlBHegNB76eElEjzbHZeLWOtj7IE52MQql90JzaOF5E,55671 +onnx/hub.py,sha256=Kv7Wly22sYxAxXTllGzOsvs9_6uKAQ5I4Uo9eBdquzs,18326 +onnx/inliner.py,sha256=Ukl3lPP9lC-lUNr2jQzwNZt6g0MPPcNUlxjEHKZbYPk,1772 +onnx/inliner/inliner.cc,sha256=SXTL69Ovr4AexoGM5nDrODbl7MKkV8Xy26bPDGN82bs,23741 +onnx/inliner/inliner.h,sha256=qixZYjM_oDYt4WBYd5EgoAEJCKSNbVg2YPHFuVNC_LI,2126 +onnx/mapping.py,sha256=EEOhJsiUtONiin34fkMInjdOjtqTm3R_wsoNeDeC-W4,8457 +onnx/model_container.py,sha256=aXpLqFjZF2RbjbUni04xYtwVLxG1pr6YYXWx_zz_Pko,13104 +onnx/numpy_helper.py,sha256=iWqc9XmEt-SBbrFn5Ft4BZI93nhui1N0eEmLBL9fPSw,23886 +onnx/onnx-data.in.proto,sha256=STpWDwjabT1X2CjbIIMYVzmKWLwNInoYMI6MsDnJPsc,5251 +onnx/onnx-data.proto,sha256=mZGa9_OSSCfR8u8d7KD1vpX6MmCA4lV9a-oqcL2njUQ,5337 +onnx/onnx-data_pb.h,sha256=NueEKiWvYvrBCihvlr7FOwAFxPyUQUOsTALHctAmqNk,167 +onnx/onnx-ml.proto,sha256=AAWGWKqTqXJSML-tS4-RCZfOUJMwSidLvl9h6lzFXL0,38595 +onnx/onnx-operators-ml.proto,sha256=B6G5p_TJFmnAc-82clXeo3zHZH4RtAaXKkaK4htKWUA,5439 +onnx/onnx-operators.in.proto,sha256=gbHdotozdGsY3fI40d8FNpXV68Nz4pCO4h1WeWX2gEc,5353 +onnx/onnx-operators.proto,sha256=JQbdYTnvJhfQrFk82zzzEKpokcOUujZ4QBKqvSJNBLU,5436 +onnx/onnx-operators_pb.h,sha256=vlw2jzH5sZfKNzs6x4f34-riKuXPy9PjXbo8ZvOx068,202 +onnx/onnx.in.proto,sha256=XTWpwFI3cPFVEn4owXCId3oK6s6wdIIUB4TQCzyzgJE,38526 +onnx/onnx.proto,sha256=mej19HPF15gUvT_6r6hbxI3x7Mxc9pR-Crm7rR9cvnA,38116 +onnx/onnx_cpp2py_export.cp39-win_amd64.pyd,sha256=ovragE5_mpNyHvwVWx4gipqPH-OIc9OGW5CZejCFmI4,2759168 +onnx/onnx_cpp2py_export/__init__.pyi,sha256=uUztmsKvTem8E389D67nfn5uw6igzLC7E2wzZspiQqo,167 +onnx/onnx_cpp2py_export/checker.pyi,sha256=kh5icQ7-nk8bBpv5dziYV5JFZ4SB8l13E5gR6WNQsZ0,1339 +onnx/onnx_cpp2py_export/defs.pyi,sha256=J2aQ82-Divn9DJvVKBNUhhkbnpPgMmn49a1t0MLj1a4,6891 +onnx/onnx_cpp2py_export/inliner.pyi,sha256=8LGJsi37Tv5N-WGQpxRvl9fV3cTNH0slskp0s0Uvw8w,780 +onnx/onnx_cpp2py_export/parser.pyi,sha256=_impFfsu9cqlpkSgmSGFGeNxhf9aHxDu-fc4uRc-tNs,1230 +onnx/onnx_cpp2py_export/printer.pyi,sha256=nvnzhcayV-OTaexbPmz3iQdAmFiQuMuovjJJ9vn6h6w,192 +onnx/onnx_cpp2py_export/shape_inference.pyi,sha256=UgFpCHJaTQ9jmlf0vl_-RkE2tMBoIL4kVzoSEh5n16g,436 +onnx/onnx_cpp2py_export/version_converter.pyi,sha256=onTg8wbPPo322cr9BAUuN4wzY1dxLAFvvAsugapQLJY,167 +onnx/onnx_data_pb.py,sha256=Czjgcf0pv2Vk_mBsch2sG4bYkzRpNG1efzhhdEilN8I,94 +onnx/onnx_data_pb2.py,sha256=YuJmAGmOGE_ClfmD9Gbbc1qn9_FzOMDH61e7ay9t5xw,2894 +onnx/onnx_data_pb2.pyi,sha256=VTSip9Evt2RqdKpntIo6G9LVASJVfc6zJ-yy-T4kW_o,4682 +onnx/onnx_ml_pb2.py,sha256=7sgNYHeL5NSVJEdblfsF5dH5F9Z9gVeFoHXc7rKcH1M,12622 +onnx/onnx_ml_pb2.pyi,sha256=NNXkOJXw_bCns0jLcSZGimhNFKlEZ-WmfF6LNZqmafc,23842 +onnx/onnx_operators_ml_pb2.py,sha256=zUUf3B_H9GJfeZT1HwEH3UXHzWslztq2KZlk4kPylLQ,1797 +onnx/onnx_operators_ml_pb2.pyi,sha256=c5e2qnUu2bw3QH1wH3d1eMTjbhGEk5U1kWzFJvtJ7TA,2188 +onnx/onnx_operators_pb.py,sha256=N5fEPo3jXHFuJNbzTBpJHa_T83-11btCP0H9ArOT5VM,102 +onnx/onnx_pb.h,sha256=aZFGVarVX9rxiPUDO0hMzUiElfrh156hXW2IAKws6eQ,1771 +onnx/onnx_pb.py,sha256=4gbKZBELN10Wshbbg5OWsC2RSgZJn4IJiAz5XX4Hqcs,92 +onnx/parser.py,sha256=8cbbtnTgYkrOH-4YTQpE5x1MqoMtL8Zy1jUcgvVQbfE,1929 +onnx/printer.py,sha256=wOX0eB5wT5ZeQIh4j74SHmoRIewyiz81rKqO7VvqQM8,646 
+onnx/proto_utils.h,sha256=Wie70tLqVckc_2wvyJPQIfpFDl9n_4IIopzJHklMxxw,2279 +onnx/py.typed,sha256=9UMKvtXY_BzLXM9JXn_eKOwmwSH_rAJMvoGJfxOW7g0,42 +onnx/py_utils.h,sha256=TBlijr9RjN1lU0hRAvoCME6NJFmD17qINlMMBeWKwLI,568 +onnx/reference/__init__.py,sha256=KgpnTMTiU0CWuKQO_2aezUtIYDe-_x6iJMrddrYTa74,190 +onnx/reference/__pycache__/__init__.cpython-39.pyc,, +onnx/reference/__pycache__/custom_element_types.cpython-39.pyc,, +onnx/reference/__pycache__/op_run.cpython-39.pyc,, +onnx/reference/__pycache__/reference_evaluator.cpython-39.pyc,, +onnx/reference/custom_element_types.py,sha256=5aVSpuIiW4J3x8Wmcnwll9pPcsFlfdls2d-4LXF-7gU,541 +onnx/reference/op_run.py,sha256=JQ8au_lefX707pUwcbPJTrwAMnzzy4LnX00r0xXt4BM,29425 +onnx/reference/ops/__init__.py,sha256=N0lZ39DhOhaf2XWnit3DV3YM3ed_E90qXx8EhKMD_eo,181 +onnx/reference/ops/__pycache__/__init__.cpython-39.pyc,, +onnx/reference/ops/__pycache__/_helpers.cpython-39.pyc,, +onnx/reference/ops/__pycache__/_op.cpython-39.pyc,, +onnx/reference/ops/__pycache__/_op_common_indices.cpython-39.pyc,, +onnx/reference/ops/__pycache__/_op_common_pool.cpython-39.pyc,, +onnx/reference/ops/__pycache__/_op_common_random.cpython-39.pyc,, +onnx/reference/ops/__pycache__/_op_common_window.cpython-39.pyc,, +onnx/reference/ops/__pycache__/_op_list.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_abs.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_acos.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_acosh.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_add.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_affine_grid.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_and.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_argmax.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_argmin.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_asin.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_asinh.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_atan.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_atanh.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_attribute_has_value.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_average_pool.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_batch_normalization.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_bernoulli.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_bitshift.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_bitwise_and.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_bitwise_not.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_bitwise_or.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_bitwise_xor.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_blackman_window.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_cast.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_cast_like.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_ceil.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_celu.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_center_crop_pad.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_clip.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_col2im.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_compress.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_concat.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_concat_from_sequence.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_constant.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_constant_of_shape.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_conv.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_conv_integer.cpython-39.pyc,, 
+onnx/reference/ops/__pycache__/op_conv_transpose.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_cos.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_cosh.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_cum_sum.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_deform_conv.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_depth_to_space.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_dequantize_linear.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_det.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_dft.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_div.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_dropout.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_dynamic_quantize_linear.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_einsum.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_elu.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_equal.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_erf.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_exp.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_expand.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_eyelike.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_flatten.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_floor.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_gather.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_gather_elements.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_gathernd.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_gemm.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_global_average_pool.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_global_max_pool.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_greater.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_greater_or_equal.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_grid_sample.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_gru.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_hamming_window.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_hann_window.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_hard_sigmoid.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_hardmax.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_identity.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_if.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_image_decoder.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_instance_normalization.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_isinf.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_isnan.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_layer_normalization.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_leaky_relu.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_less.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_less_or_equal.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_log.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_log_softmax.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_loop.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_lp_normalization.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_lp_pool.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_lrn.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_lstm.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_matmul.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_matmul_integer.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_max.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_max_pool.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_max_unpool.cpython-39.pyc,, 
+onnx/reference/ops/__pycache__/op_mean.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_mel_weight_matrix.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_min.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_mod.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_mul.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_neg.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_negative_log_likelihood_loss.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_non_max_suppression.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_non_zero.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_not.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_one_hot.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_optional.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_optional_get_element.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_optional_has_element.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_or.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_pad.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_pool_common.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_pow.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_prelu.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_qlinear_conv.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_qlinear_matmul.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_quantize_linear.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_random_normal.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_random_normal_like.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_random_uniform.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_random_uniform_like.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_range.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reciprocal.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reduce_l1.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reduce_l2.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reduce_log_sum.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reduce_log_sum_exp.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reduce_max.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reduce_mean.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reduce_min.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reduce_prod.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reduce_sum.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reduce_sum_square.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_regex_full_match.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_relu.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reshape.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_resize.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_reverse_sequence.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_rnn.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_roi_align.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_round.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_scan.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_scatter_elements.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_scatternd.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_selu.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sequence_at.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sequence_construct.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sequence_empty.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sequence_erase.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sequence_insert.cpython-39.pyc,, 
+onnx/reference/ops/__pycache__/op_sequence_length.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sequence_map.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_shape.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_shrink.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sigmoid.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sign.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sin.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sinh.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_size.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_slice.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_softmax.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_softmax_cross_entropy_loss.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_softplus.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_softsign.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_space_to_depth.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_split.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_split_to_sequence.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sqrt.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_squeeze.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_stft.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_string_concat.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_string_normalizer.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_string_split.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sub.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_sum.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_tan.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_tanh.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_tfidf_vectorizer.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_thresholded_relu.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_tile.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_topk.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_transpose.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_trilu.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_unique.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_unsqueeze.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_upsample.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_where.cpython-39.pyc,, +onnx/reference/ops/__pycache__/op_xor.cpython-39.pyc,, +onnx/reference/ops/_helpers.py,sha256=cm_0eO78YHqGNQ9eSAZFaQ5okX8tBwH5z6T7dDh16ho,1932 +onnx/reference/ops/_op.py,sha256=8DfsFCKY5O-jGLFouimeQWaBz9VwJJPApdIRX0k1YBI,6228 +onnx/reference/ops/_op_common_indices.py,sha256=Hj-ks-Z_nk91nfSAZxcLdR6ZgPgYtsOOdrSDq8tX-rQ,771 +onnx/reference/ops/_op_common_pool.py,sha256=XFQz3mGDS2OvhgpdUjgsYY4pCxSu4DuNi_EyQqotKXM,10732 +onnx/reference/ops/_op_common_random.py,sha256=cg52FYDa0XXgfBTx0U6Qoy7mLbzBv2oAHReRzg62toY,1920 +onnx/reference/ops/_op_common_window.py,sha256=npPa8mWYsYHfWAUy4nfIvw7s4InSDiYnA1mHV35lj8A,715 +onnx/reference/ops/_op_list.py,sha256=vNGniQ5whDw33pqKr9wD2pfjwOb_F-P3pzVf_Znyr-Q,17511 +onnx/reference/ops/aionnx_preview_training/__init__.py,sha256=Vv1EvRHyahq2Ek321j_33sykP0zOPQC60ppauA78t68,246 +onnx/reference/ops/aionnx_preview_training/__pycache__/__init__.cpython-39.pyc,, +onnx/reference/ops/aionnx_preview_training/__pycache__/_op_list.cpython-39.pyc,, +onnx/reference/ops/aionnx_preview_training/__pycache__/_op_run_training.cpython-39.pyc,, +onnx/reference/ops/aionnx_preview_training/__pycache__/op_adagrad.cpython-39.pyc,, +onnx/reference/ops/aionnx_preview_training/__pycache__/op_adam.cpython-39.pyc,, 
+onnx/reference/ops/aionnx_preview_training/__pycache__/op_momentum.cpython-39.pyc,, +onnx/reference/ops/aionnx_preview_training/_op_list.py,sha256=pdr-w2InKydTvIP5bOPvcFwOTs_YKF1NYdYDc_cj5FI,3239 +onnx/reference/ops/aionnx_preview_training/_op_run_training.py,sha256=H7U7wo178TQVkyDA-HyXdB9xi4UV4a59LcPvSv-bp20,206 +onnx/reference/ops/aionnx_preview_training/op_adagrad.py,sha256=nEdTTqUut14NZOAMtKRizhUBO5CxtpT-Ue5RzziC9aU,1906 +onnx/reference/ops/aionnx_preview_training/op_adam.py,sha256=a2FU4cSqD701PDHASpDTcv6CfFqN9bakJgauiho_Qgo,3033 +onnx/reference/ops/aionnx_preview_training/op_momentum.py,sha256=EcMgM_judWKjD5nBb8pA3RK7fNp5fyzpt9PHlk20nTk,2514 +onnx/reference/ops/aionnxml/__init__.py,sha256=Krif8jwGG9xxjLsi61ITcTYghzNiRuOcOKbvh5CZnic,99 +onnx/reference/ops/aionnxml/__pycache__/__init__.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/_common_classifier.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/_op_list.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/_op_run_aionnxml.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_array_feature_extractor.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_binarizer.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_dict_vectorizer.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_feature_vectorizer.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_imputer.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_label_encoder.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_linear_classifier.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_linear_regressor.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_normalizer.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_one_hot_encoder.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_scaler.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_svm_classifier.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_svm_helper.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_svm_regressor.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_tree_ensemble.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_tree_ensemble_classifier.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_tree_ensemble_helper.cpython-39.pyc,, +onnx/reference/ops/aionnxml/__pycache__/op_tree_ensemble_regressor.cpython-39.pyc,, +onnx/reference/ops/aionnxml/_common_classifier.py,sha256=iLL9ncrqaCyCQ8N6zKKwxCRPt9hkxiapD0T_cUfOPoI,2051 +onnx/reference/ops/aionnxml/_op_list.py,sha256=ZrbDlUiZTyP75hHig5BDvTPHFYIPNn1Zd6zGiE9eeMw,4303 +onnx/reference/ops/aionnxml/_op_run_aionnxml.py,sha256=XbQiFmgb0pInbtApU91mIYWEhjtiwdLvujBSsv6ijy0,147 +onnx/reference/ops/aionnxml/op_array_feature_extractor.py,sha256=u_-8Pqs6WvSxOomd50iBRv-xKFlfwgaYq6DuDd4e-0c,1670 +onnx/reference/ops/aionnxml/op_binarizer.py,sha256=UzMXYzADRZgACwCf3sHbHaz9RLgis6RhIXawUE9x4TM,395 +onnx/reference/ops/aionnxml/op_dict_vectorizer.py,sha256=Hg8buaG9L7GNwCMiifd6G759_z7SZiC0o-RdgF5xj9s,1930 +onnx/reference/ops/aionnxml/op_feature_vectorizer.py,sha256=59sM5vV_JbVld_IoKkim_-LOtvuA-3X0UvAHckV3Eis,954 +onnx/reference/ops/aionnxml/op_imputer.py,sha256=acLZLUk8-wl9Hufqftvc0wBUzIsd7cUY-KHR9LxyonI,1603 +onnx/reference/ops/aionnxml/op_label_encoder.py,sha256=UgLnGk9tADbvtyLbdjAJZYLPkN6pvcm-oVLMhrf3qOI,1576 +onnx/reference/ops/aionnxml/op_linear_classifier.py,sha256=rrBv-IQ8Cy4CLW1jnC3AZvw8pWw-xjueCOGJicuVROI,3669 
+onnx/reference/ops/aionnxml/op_linear_regressor.py,sha256=3NaORS2K6zm2jsgllm9VVQ_tK4qMsUS31gstz_-t138,871 +onnx/reference/ops/aionnxml/op_normalizer.py,sha256=joW8_-C0HeRmvjI4eS9n85NFkjAd4nSrLZzuXYFm2Eg,1229 +onnx/reference/ops/aionnxml/op_one_hot_encoder.py,sha256=25-tArnTsqsvfssoDQ13YgmyRdDpKFl2yiXJbwy95Vw,1948 +onnx/reference/ops/aionnxml/op_scaler.py,sha256=ZM8ybZz9f25snpAuEg7Uf7lIBM_SVAQAO1v8Am3Ez1o,332 +onnx/reference/ops/aionnxml/op_svm_classifier.py,sha256=7cWYgFzAnmsQgTH1O0HcrYFu-byruflYF2fU7ZDtHn4,11979 +onnx/reference/ops/aionnxml/op_svm_helper.py,sha256=IJwEAHe-hZM46TCOfzvmnPfLCTw-FHxuGdrWvmRUJqc,3438 +onnx/reference/ops/aionnxml/op_svm_regressor.py,sha256=XBLfMKE0RO7F15NHXr2bekQ_TGmDOxkiygX60S-7kXs,1281 +onnx/reference/ops/aionnxml/op_tree_ensemble.py,sha256=qYawPRbYru2xi81Il2uzGiEOeSu19f7TkIMTYH4tvlo,9318 +onnx/reference/ops/aionnxml/op_tree_ensemble_classifier.py,sha256=qf95Jnu9422ucnMx6RsE6yytUAh-NmVF7m8NbR9QcdI,5024 +onnx/reference/ops/aionnxml/op_tree_ensemble_helper.py,sha256=RVo8vAUBImv7WXaUMHwgVunetZO4p68dNiKOxlUx_O8,3830 +onnx/reference/ops/aionnxml/op_tree_ensemble_regressor.py,sha256=fs1196Ht2Ptp9hqFVlHrH03phiS6ITUWiY6RVFNC4Nk,4177 +onnx/reference/ops/experimental/__init__.py,sha256=Opi9ZO3rW-jDNMT0gvU50IAF9HhVPkzQfiokOwTT2Dg,149 +onnx/reference/ops/experimental/__pycache__/__init__.cpython-39.pyc,, +onnx/reference/ops/experimental/__pycache__/_op_list.cpython-39.pyc,, +onnx/reference/ops/experimental/__pycache__/_op_run_experimental.cpython-39.pyc,, +onnx/reference/ops/experimental/__pycache__/op_im2col.cpython-39.pyc,, +onnx/reference/ops/experimental/_op_list.py,sha256=cebUyg6Fh4ojfKTGaWdf8DOzCavg2mag18Kh8INRsNQ,3097 +onnx/reference/ops/experimental/_op_run_experimental.py,sha256=jNOhRVHdrZ49KfiBjdfP_-Xit5bgEpbUpAaA-pGHBM4,199 +onnx/reference/ops/experimental/op_im2col.py,sha256=YLYL6JVcCBUdvVzq4eGrg-Q9Pvm2eZxWKWOYhrtObsY,1455 +onnx/reference/ops/op_abs.py,sha256=HMCno7u6YfH6g7QsSPITCGC8LHXdDbbm0jmojQi8XNo,265 +onnx/reference/ops/op_acos.py,sha256=tStZt7PZI4GkKZcfr8dcoXOkxrizJe-YbHjZIGDXP0s,264 +onnx/reference/ops/op_acosh.py,sha256=1vizEXskAjRhlC1TG8VX9U6bgW5zLizGQU7LC6AmXnE,266 +onnx/reference/ops/op_add.py,sha256=qvZRuBZOop8wveJ5UdeFzTJG5REG7kSbQHSLve8WAIA,333 +onnx/reference/ops/op_affine_grid.py,sha256=y-M5ZUPUoQw5CNsu5SXXWv-L391SIW9XKtJCdnJXLO0,3649 +onnx/reference/ops/op_and.py,sha256=UgUc0uHMCsfIt4blHu2jQF5gmdx5Gg5c-WDwSsxKNf4,270 +onnx/reference/ops/op_argmax.py,sha256=HAkivPyKXO37XnNRwwdBaPnirlI-KykMYhFmGqMSY4s,1270 +onnx/reference/ops/op_argmin.py,sha256=RsLKJGOdIMguD1bF7ZXH4qSZpvCD2VqEjd8wUM5gbLk,1270 +onnx/reference/ops/op_asin.py,sha256=VJjj4oSIYg592TU2inl5THUhSvaIPUoi6TSVjzXXDYs,264 +onnx/reference/ops/op_asinh.py,sha256=IPd8zTykydfHp94S2NLVLxu6uKIGcgW-SFE4qBk700A,266 +onnx/reference/ops/op_atan.py,sha256=v_J89oZVAI0OK3GYDZ4q_yt7U4ciU4t7BnRLnwyYs4Q,264 +onnx/reference/ops/op_atanh.py,sha256=yyAXkkRKbogcRoJksqZVzMqFGrea7ucOi-WAJDIJuqc,266 +onnx/reference/ops/op_attribute_has_value.py,sha256=VWw8iLvYWuHD-LbkXmYoGxnzS8kg3gHdEFXwg43EXGU,875 +onnx/reference/ops/op_average_pool.py,sha256=hJBJGN16t17D5-GUubU7JLXopW7ZeQUAdF9-MD8K8kQ,2486 +onnx/reference/ops/op_batch_normalization.py,sha256=0oiKJR0S-K3aehJYt-cvWTSDU0p9C9LwA_uN13J5uys,3067 +onnx/reference/ops/op_bernoulli.py,sha256=yKgyK8kwNR0msjh5lDnXq19LtTwY2-R0xpenqAmjWj0,575 +onnx/reference/ops/op_bitshift.py,sha256=0m_nmpC0W3xp3fFxZBnNNvrgcLSPQmzdJONDPJX1Hf0,616 +onnx/reference/ops/op_bitwise_and.py,sha256=oEmcSLuCz7KLzecj5S_atVB4zQwHsm4t08hJDm3eOdw,277 
+onnx/reference/ops/op_bitwise_not.py,sha256=2sTqpW3Rd6u9oMMZLv9pnPKoG2LpSFF6KKyvfCkInSM,253 +onnx/reference/ops/op_bitwise_or.py,sha256=c11fyzSev1EfrEfBoCjOfmYFQoBjU87z8ygRm-8JQMY,275 +onnx/reference/ops/op_bitwise_xor.py,sha256=Q7KlAt9gp1qlWHSzut6Ocy3Az70qQIo14F50urgbIGY,277 +onnx/reference/ops/op_blackman_window.py,sha256=7wVdFQ4k3q2zkW9dqcxnRzmFb0Cl_UR1wt1tSEdfuJQ,957 +onnx/reference/ops/op_cast.py,sha256=VAJ-1XZokT-bB1KXTzr8ZcJJm5n4vXYtZbFJgGyrXCo,4576 +onnx/reference/ops/op_cast_like.py,sha256=x3DJ3SLbEsOhwzTq4Jr0C9_WMlr1cWlPi6BFDrK2fuc,1599 +onnx/reference/ops/op_ceil.py,sha256=33qrjTTsy5iNr8CGRWVq6eDaXStkN-u1Gt6C0Wnh4jM,262 +onnx/reference/ops/op_celu.py,sha256=7-IJhYCruRNDsBeCKBEPsk0OPiuvwTPK64mc-ctUdU8,515 +onnx/reference/ops/op_center_crop_pad.py,sha256=sQEjpsvMrR0VB-PEvk88UqFjBcDO_HzqiHQoi54w99k,1658 +onnx/reference/ops/op_clip.py,sha256=d-vi7OGQqVM0D9T58ZGl5hI-NE-NflQWVHGayNdRCrs,838 +onnx/reference/ops/op_col2im.py,sha256=GextHstcAFwvaB5faHu0TreKjtADEm9oKlwtT_3EODE,8208 +onnx/reference/ops/op_compress.py,sha256=sSbNT_q4PVeIv1lbIjfp0MBjzs6yjB4SH5pC_VDgeu8,313 +onnx/reference/ops/op_concat.py,sha256=TLVQIqTRFOwY60vGSdjUPxEXvH29i5J3fGB6z2z_lYA,734 +onnx/reference/ops/op_concat_from_sequence.py,sha256=u3zdOFKEpa5E2z9wHY0Pa0goUdRrNVlnDL4BYpnB_T4,765 +onnx/reference/ops/op_constant.py,sha256=QY-9eRZ9jbg7pW5FjEI3dNvafclANqCLyCJljjW21Z8,5477 +onnx/reference/ops/op_constant_of_shape.py,sha256=2FOwfhYKBxEgXRBPA-gsNoGqsl-kgbL7GG1WMWbtdtY,1086 +onnx/reference/ops/op_conv.py,sha256=HnidPrXN8nCilDhFPWBRqur3CsnpZxfYz2g1tRqv9kc,13487 +onnx/reference/ops/op_conv_integer.py,sha256=5j8O9QMlQvkaKCZD7YSVJtJYTbM2hMlWSEym_IJbdOw,1403 +onnx/reference/ops/op_conv_transpose.py,sha256=stt9UQxfjqoMlWBR6WsX9-mR1iKDlftwP9mMKhoZdlQ,4160 +onnx/reference/ops/op_cos.py,sha256=7oaXJbZxX4j6ttKk8NCvbmhtReGbVrVUA02pAmpyWqo,260 +onnx/reference/ops/op_cosh.py,sha256=qlp-RftBRDW-MSsB0_gndSjry_unlTEsiMOO11jbPO0,262 +onnx/reference/ops/op_cum_sum.py,sha256=NZpt_49UVL_BMn108BTGpSc3Hz6eKRgt5544jvDE7-k,1692 +onnx/reference/ops/op_deform_conv.py,sha256=XDi0ZsQErrN967jPY5FQKWTQQ50Yk2V6fhAgKDqPVjY,6383 +onnx/reference/ops/op_depth_to_space.py,sha256=CFtN343DEdM4Mk4JtpfAW9ubo1uHjJ-IX4bTZKqosSg,1326 +onnx/reference/ops/op_dequantize_linear.py,sha256=LU_FpvrzMBUAD9mjqg3KuGqh5IERhMnScXm_ylaoKQE,3992 +onnx/reference/ops/op_det.py,sha256=_XFKNZTnZW7nft7HkhDEUWIgF8p1Q3rxF2OhXR-UCN4,260 +onnx/reference/ops/op_dft.py,sha256=U00Om0vEykjdg1K2Wyz4y6leeZZj-g4n8l7UG69wLXI,3864 +onnx/reference/ops/op_div.py,sha256=GHjv_ugcZOSNnC3-h1iOBgxWudJHck74-0WtOKKLd8c,533 +onnx/reference/ops/op_dropout.py,sha256=Z6yRY7GaroaQA-Oc30sD8suMBCYe0aJbK8RjH-jhFn0,1943 +onnx/reference/ops/op_dynamic_quantize_linear.py,sha256=qET9MhYJgSDNvYXAyTtLFXRVgCdPT_PWpB-rrLKvUyk,979 +onnx/reference/ops/op_einsum.py,sha256=Ij-eoHvg0xMdHMuZ4eER7k6HYusKastOmsjmOSpjVE8,633 +onnx/reference/ops/op_elu.py,sha256=oIDRqLDO0DLnSw0ZzGpXn7Wdl9x3pYZBt-BbHlnunc8,375 +onnx/reference/ops/op_equal.py,sha256=woowOuqdL1zyRA4o2zNhQWM93ymj2emQ1cGJk81NF9g,286 +onnx/reference/ops/op_erf.py,sha256=1fJp_5XLDQRjmd9LftjFddFv0uasTkuRvtcSvOay6_Q,469 +onnx/reference/ops/op_exp.py,sha256=leNg-UoWvZOkFNeGRzDiTiyrcmhDl_sUEXdTku8gRX8,276 +onnx/reference/ops/op_expand.py,sha256=7ijuAzM-h-G4HtyyAm2l8EtLSlZpj1KJnRNjE7nQ2S4,469 +onnx/reference/ops/op_eyelike.py,sha256=_suysgtj32LNTDDsGDCtzD-QFiSyk_kuUuAddUbASNQ,988 +onnx/reference/ops/op_flatten.py,sha256=O_LQJk_aTJWWfK_Da_ZRcNaGlPKCWZc80NIZfirFL0U,449 +onnx/reference/ops/op_floor.py,sha256=QtwaVC9gPNs7Lq8Ik4KJiDBGkcrKpf-RlrPZTKsrz0w,264 
+onnx/reference/ops/op_gather.py,sha256=cI9jHZEg46hF31x9X7gyUmbniwgmtRuvyEFjrkszzys,710 +onnx/reference/ops/op_gather_elements.py,sha256=XheTK62lKw6puRp1wf3vSunEyHp62zv2o5JeNbT65m4,1606 +onnx/reference/ops/op_gathernd.py,sha256=42NffMYXbuHR1JuxwoIEXJ810RF3lCFi0rfsZe0_9wM,2077 +onnx/reference/ops/op_gemm.py,sha256=KEGMOCHoOIq97X6OJVyvu_KOCFPPYFbmQgpHrJdg94U,1995 +onnx/reference/ops/op_global_average_pool.py,sha256=ZsRZSOCFWAnBj2-IhSIBDGQt-BhNYMqCXUwznXEXLyE,506 +onnx/reference/ops/op_global_max_pool.py,sha256=OcmHZCaGTIb1Ru4Ba1-YTJ2Nhp8-fnDkdBrXFlm7i_w,543 +onnx/reference/ops/op_greater.py,sha256=0MDQf2uddC3zwS06S7Uxo0Ml5snhGFropXLom4XaoC0,290 +onnx/reference/ops/op_greater_or_equal.py,sha256=BVcTWfMpwCdJMBU-y5R5lMh_o7fcImYgSjL1yPZ_IJI,303 +onnx/reference/ops/op_grid_sample.py,sha256=WhzO3XoN631Dr7p3u4D7SOVTLFo9pMEir6A_tSqltXI,13764 +onnx/reference/ops/op_gru.py,sha256=F9PJ0i6rsje9-oMS--I1szN0pQhQIJx0q6Ko52pnwIM,4215 +onnx/reference/ops/op_hamming_window.py,sha256=dOE7Pqu02Bn6UaSsVmh9q7-n3s0UiCQnmmDtIAFUj1c,796 +onnx/reference/ops/op_hann_window.py,sha256=8jCZNVTTqdxq-kwQnAg5MInwRjB9mxFbSZPJTqJ4EvA,672 +onnx/reference/ops/op_hard_sigmoid.py,sha256=lMsjHLgOUI0jzOsgZ0bNRKknqLK9ysEjM_Hu5ISicmg,446 +onnx/reference/ops/op_hardmax.py,sha256=P30A0pNoruQiC2B0u3VSU9WEwFvPuELVPfkSJP3fPi0,528 +onnx/reference/ops/op_identity.py,sha256=iEMlHe0USa22LrN2VpWpyAbRCay9dy4X6IT3oNPyaxA,293 +onnx/reference/ops/op_if.py,sha256=ApY8YnX_D-Ejr4c_hqXqTSpCbxWNIXOQLiopOpoMiBA,2624 +onnx/reference/ops/op_image_decoder.py,sha256=m5fyNAFzfuEyB9nYuDF12WbO-pyJOGsJ2PUsgA1ZLOc,1109 +onnx/reference/ops/op_instance_normalization.py,sha256=rBen-okaii157MAM2FcIHBS9q-A2KrTPXUk4a8t_no0,649 +onnx/reference/ops/op_isinf.py,sha256=zPjJRdmniwSnnuSELets7_C7n7DfxDndPNL1OSY9gJ0,570 +onnx/reference/ops/op_isnan.py,sha256=se5s04jat3DQmLzNI_9KalSgj48-GMPN5Z8EFJ2XYzY,264 +onnx/reference/ops/op_layer_normalization.py,sha256=8ZCxXteZIElPQSm7eJBp3kJ18H5zscDbDQA183LJvOU,2697 +onnx/reference/ops/op_leaky_relu.py,sha256=PnXOshwsgTI3M5G4vQr0P7U-tZmLpmYdGA6sKnDKl1Q,547 +onnx/reference/ops/op_less.py,sha256=l6Rp9rN2LGLn10dKQooQGibWdvJPCsR2sV3-jr8saSY,284 +onnx/reference/ops/op_less_or_equal.py,sha256=eGrQjzd_7TNnu1wkmJMf0wcxse_of6teN7IyRBd1AjM,297 +onnx/reference/ops/op_log.py,sha256=KRiPbb8hyHi3mI441XlVhS9ZOrjO4HzHixFjmN80_Eo,276 +onnx/reference/ops/op_log_softmax.py,sha256=3i-8MsqcRMR_38EQwksVgA8UdYRVrewR0Lluqmqs8Hg,318 +onnx/reference/ops/op_loop.py,sha256=F655vD0ZftceA26kwGjANXh_bV-cINBuhYGWLKpgL7w,3592 +onnx/reference/ops/op_lp_normalization.py,sha256=He-AGprkoZmnh4jogGmUI1MHGHMP9mvJ6nmll0uj9tQ,539 +onnx/reference/ops/op_lp_pool.py,sha256=4AMl4FODUawsgvglDNeFZjjaBNoTIu_CrUyVQpxnB9k,1221 +onnx/reference/ops/op_lrn.py,sha256=yQK-CzOOSGKM8rDy_fv5T0Gdu863y-sggKJ-Ea8_aNg,900 +onnx/reference/ops/op_lstm.py,sha256=om1ygpQ7cIL41wgRB3NRRmG_GviP_v3VRv8Z1lylBDo,4918 +onnx/reference/ops/op_matmul.py,sha256=LeEzXymT1_TJ3la_DuhfmyD38HInD4RgTue_xDrNx8o,667 +onnx/reference/ops/op_matmul_integer.py,sha256=pJLw7DlA2qZ1knOO3edFn3sziJhle5AgZt02gVKLmh8,504 +onnx/reference/ops/op_max.py,sha256=Kxi-3gfRyVu_Xtywz_oiywAshEUU96d7XsDL1iwbmKc,752 +onnx/reference/ops/op_max_pool.py,sha256=cj0Mu_157zf7qTL_XOCYfBq-8vxMq6a5YfHTYQ_Yc8M,14964 +onnx/reference/ops/op_max_unpool.py,sha256=Mrj3vu7ztDmkgRZVQ0nDU_7il_1y2qYxvhUu6rTgbsg,1869 +onnx/reference/ops/op_mean.py,sha256=z1o-FfI1A1Zrw9nx8dGozFFdPxqtiYFnHX_gAPQwvlE,336 +onnx/reference/ops/op_mel_weight_matrix.py,sha256=lKAnprj7Ex4NlBpxNok2Io9gYK0SCseBLPPwdkpeI_E,2396 
+onnx/reference/ops/op_min.py,sha256=X_9iJi-5wng4iA-oQuEdtVIy5I1zWzdSr0Y7ezL-kzk,752 +onnx/reference/ops/op_mod.py,sha256=NbUcuxxi2eePTDDwp8gwU-d_voFtRKNQ1P8uDthsKDk,514 +onnx/reference/ops/op_mul.py,sha256=RkTGKEAUJXP4QtVGgzyuuVYedPXa9ueMcFs8hjlDPuQ,338 +onnx/reference/ops/op_neg.py,sha256=JYX0EceJRL-ZMoWIzcPdg1hcokgVCMFoIX3ShVce22U,265 +onnx/reference/ops/op_negative_log_likelihood_loss.py,sha256=NCjSGqTWV-SuM_I2eo--gCPd5WWUT6HzQqRg6Im_gL4,2976 +onnx/reference/ops/op_non_max_suppression.py,sha256=bPBjwGZ8JZGgtepm-zClz196mBVQjzbnCyG2OG6Glrc,9677 +onnx/reference/ops/op_non_zero.py,sha256=6h2xw8Xv_zX9wU0Tc_DXl74VWCDTFenh0Oz-j1cpDIs,351 +onnx/reference/ops/op_not.py,sha256=sMet2Pqbp-AJETxVpBN0HZeqdR1srrO0_2_rxltB0ZM,262 +onnx/reference/ops/op_one_hot.py,sha256=k1s1KL2I7qYmTmqyw5fXTIviW9l8j8hAgE1uJG2RuWY,969 +onnx/reference/ops/op_optional.py,sha256=jBJqD6aXowAzkcF2AhFkDexI-KC5tR0TiWh7WxCa-vE,575 +onnx/reference/ops/op_optional_get_element.py,sha256=IsmDqkTsSV0xCqHLcmqpH3erpO90HMKfrNP2ufgj1jA,327 +onnx/reference/ops/op_optional_has_element.py,sha256=7dOdS29oqYqJl1M1onrrMdDQq0fv9CQTSkGMjiDcTmI,277 +onnx/reference/ops/op_or.py,sha256=erAzUjlxNgoeFcqaqqnz07kHnxLhiU44OqceDOz7Q5w,268 +onnx/reference/ops/op_pad.py,sha256=v3wpclWvyQ1YLSU30ScDcVrpYVrI93wtcDH6kCNU78k,2115 +onnx/reference/ops/op_pool_common.py,sha256=_PPBZV0mwZ7JOsa4ALmJIxY6ezNX4C5Lbztm6dKVBEE,11996 +onnx/reference/ops/op_pow.py,sha256=UPwJ2q56z91fjkq8JNlUmM905LluhMJFcd2E0T8N_yI,392 +onnx/reference/ops/op_prelu.py,sha256=7WJEljK3FsBlMsThuyNSIiSAne-kwjqU1xgxPNMIlsI,1193 +onnx/reference/ops/op_qlinear_conv.py,sha256=MuGSwpzS3Qz9ZtL_SL8KY6YaopLYZovfoNBMGYA3JaM,2055 +onnx/reference/ops/op_qlinear_matmul.py,sha256=IAgU50-pEEsgeiB_fh_0IXO-h1lakSrDbgkb-hicdHw,800 +onnx/reference/ops/op_quantize_linear.py,sha256=OEV7_wOfN_mFpqhNY1OQ9_HiigVZidBv5p8LcH-Iua0,8570 +onnx/reference/ops/op_random_normal.py,sha256=wzgMIFhjadkEFeqR2RNRp6TxYkaLsc8Q1n08RgrN0-A,558 +onnx/reference/ops/op_random_normal_like.py,sha256=MMrmw736G9xMUsw_A6ZSLTBJkQshLWe1bFBstoa6wis,661 +onnx/reference/ops/op_random_uniform.py,sha256=NSEIhfFZDpSrc9x5iedK3YApN9-yARGy2H1S1vpu_Mw,547 +onnx/reference/ops/op_random_uniform_like.py,sha256=Kh61N-OKRyNJ-Pj0LSDkRV4zxO2OmM2N2V5r0xi29rg,663 +onnx/reference/ops/op_range.py,sha256=_hp1fHrvMy3ZiE3u0wGQGHqDo-CMVJIZedfdCavxVDw,305 +onnx/reference/ops/op_reciprocal.py,sha256=URpOSGQ0ygCOEsEF6JygBiG-ofZDLeDXDxyp_tSob6I,338 +onnx/reference/ops/op_reduce_l1.py,sha256=hPIxG38tIazLAxQk2SrZKXkY3zCyYDcrTQWv7qF6BpA,1273 +onnx/reference/ops/op_reduce_l2.py,sha256=Iuue0-J9gfXWFCxybfjAoA4ktUwJ3dH24E8wSHp7R-8,1297 +onnx/reference/ops/op_reduce_log_sum.py,sha256=Ip4RdFLxyEAs7iwhGq7DbG1x5EMJCa2Q3Gjv4pjc1V8,1226 +onnx/reference/ops/op_reduce_log_sum_exp.py,sha256=iKNzWoYJTVS1Ra3BaWkKlpsrh4o9q-_B2xf4trCiqRQ,1441 +onnx/reference/ops/op_reduce_max.py,sha256=SD5MbGnT_f-QLgCj59JEUfOgG-dXiFjStTno0jwgziQ,1755 +onnx/reference/ops/op_reduce_mean.py,sha256=8Hym_r1TrO3_KhFEFZIJi8ISjCDDDBUBK6yCi7g6pP4,1453 +onnx/reference/ops/op_reduce_min.py,sha256=rDT8JfWYpT9guQWrbdcwuCWxMF2CbYbBPImZb0sdsJU,1801 +onnx/reference/ops/op_reduce_prod.py,sha256=9kF3NF8RKgddhopCnrEpylKkLfJIzgB-fFWASYh86TE,1200 +onnx/reference/ops/op_reduce_sum.py,sha256=pb9iJEjib4C4lsxEtFtnwnduPciVgfNZ1-zy9oaVN-I,1359 +onnx/reference/ops/op_reduce_sum_square.py,sha256=Hq2brH4IcS_CrNkn75ovcMx2SxZzaAycz_ZYhUA3tGw,1211 +onnx/reference/ops/op_regex_full_match.py,sha256=Iu4NQxjbmD-UTQQ-PX0uHuOkmnrV6SQf8TeaviOIkfQ,1059 +onnx/reference/ops/op_relu.py,sha256=rc6kyj9AGuCvv4N2oAQmQV4sAzsW6GRlLv4-cFg84Tw,284 
+onnx/reference/ops/op_reshape.py,sha256=xDcKxawH8mjH9yEuy3qX6M_kRmTPtWVNqB8aASFVKhY,1179 +onnx/reference/ops/op_resize.py,sha256=RfkefQAUjkFDKOqusLS7BH_RqdZcnIUaNYvn901YJeY,16111 +onnx/reference/ops/op_reverse_sequence.py,sha256=0eJLJ8g7q5hDEzuvrh8kBH2Mk_mEcKKCDlDu2hEENPA,755 +onnx/reference/ops/op_rnn.py,sha256=nHcd5II9LwrAiOIMy3J8m2wzCv49iG6LyhxUW_NEQM0,5260 +onnx/reference/ops/op_roi_align.py,sha256=nv9gKrWJqjxUiKib6LOSWyBuzteyXSszQeUNfZ0scMU,11267 +onnx/reference/ops/op_round.py,sha256=fgLE10bIXteK6zBrVw9ViVZKyHEMIeOUykbWJRWS-3Y,280 +onnx/reference/ops/op_scan.py,sha256=FQxtwmFkHhXn4OBThKEZy2dZNcmdgKS5PfZp4RzKcDI,5497 +onnx/reference/ops/op_scatter_elements.py,sha256=0dN0DnXNaHxSX-pBKSuPx6eyYnrCNmOubIqyg0hhPkQ,4774 +onnx/reference/ops/op_scatternd.py,sha256=jyiw_ZgJXkO3x23MlzY14EmTtQeHfoVFcBAuij4CnaE,1004 +onnx/reference/ops/op_selu.py,sha256=F5q56k04f1PAOotO1NgFre-o2j79O1fShs-5NC6bItE,354 +onnx/reference/ops/op_sequence_at.py,sha256=vYAKjGWf4cHKKIfPszwkwNF1tz6YJgvGOSip8mJDiJU,238 +onnx/reference/ops/op_sequence_construct.py,sha256=3x1ExWXD-dTYaSsdyhQMVdl95Yxfdpwfa0T4Lu76A8k,240 +onnx/reference/ops/op_sequence_empty.py,sha256=SPsIkslNj_Sf0iDy88Xv48m_KdfhNlOjG1UbT8wbNkg,233 +onnx/reference/ops/op_sequence_erase.py,sha256=0X8vNfyseVsZkej2nIVzluaRF8J9NzDubX32tsqvVhA,368 +onnx/reference/ops/op_sequence_insert.py,sha256=m2Bvmj-6XUE7d5j4Mgtqbf4Z1mO_boWyaKggUd60wkU,1862 +onnx/reference/ops/op_sequence_length.py,sha256=7kynchf4hwKrNa129UgxQlopGT5BaRmH6El6_KolLTI,476 +onnx/reference/ops/op_sequence_map.py,sha256=ai69kTcqYsLvZlgc1twstKItsvc5OHx_JpK4iOQnqQs,1236 +onnx/reference/ops/op_shape.py,sha256=OKASGloKJW4333_R0NdNpmrNSBhdPpCoePCT-6vAzeI,1167 +onnx/reference/ops/op_shrink.py,sha256=MaXg1z3ML75dqTkgcYjqqELlaqduFpIRBZxQ8l0Nn9E,431 +onnx/reference/ops/op_sigmoid.py,sha256=Jhu9I-7xNT_qBfIX2TW0OzxfkOkI2lEGMr7gSBpu1U0,709 +onnx/reference/ops/op_sign.py,sha256=C8kMlonvmOXzAOO7p_wFxBSa6zlBhTWhdNKBKapuR48,262 +onnx/reference/ops/op_sin.py,sha256=z86WUyCea75C8wDxX6REdVe0ailgP07bpAmXIidQTTo,260 +onnx/reference/ops/op_sinh.py,sha256=XitVTPna_76kAha_5l2UAcS9kw3UFyabhwLDv9G_QsA,262 +onnx/reference/ops/op_size.py,sha256=gMrfClStw_I6dq66T6mXoGZWqMWLLkq6u-6PYtxA6Wc,273 +onnx/reference/ops/op_slice.py,sha256=IPfVdUoXB1npjiuAFbRbOUFNBIvFcC6UN4p8v0qBc2Y,2469 +onnx/reference/ops/op_softmax.py,sha256=QOSHqK4OeOTO5jYWF8BWbqn9SGWg4RCpNtVLXOv3Gf8,481 +onnx/reference/ops/op_softmax_cross_entropy_loss.py,sha256=6vdIhvmeIkH3_vfpFa4ITxhVSaAYhP0i9nnh6fjkUBM,3058 +onnx/reference/ops/op_softplus.py,sha256=kGlUCvkEVx92OwVcx-fWRMDz2uGmoF1JytIBBTdpuYo,348 +onnx/reference/ops/op_softsign.py,sha256=iMlRykp8qc2PEfNrVVSSB5NaXdY9uTuGE1eHwhpCimo,338 +onnx/reference/ops/op_space_to_depth.py,sha256=xMiVykWbHd4GWEHASkGXRHs9XfPRTGpIc32m0pOGfwk,898 +onnx/reference/ops/op_split.py,sha256=E8i9Bs0YR0s_figeMZtWEzqvNvN4QHhoYvL31yVDBt8,1680 +onnx/reference/ops/op_split_to_sequence.py,sha256=3XVj6BVeo_FfR4U8PQ_nPcHs6CbGZK90aQYmKsZZQl0,1551 +onnx/reference/ops/op_sqrt.py,sha256=q30KBpCtXiwhT37KswmrKXmXgw08fnqsM8fWFh2Nm_s,403 +onnx/reference/ops/op_squeeze.py,sha256=4-94S4XgeXEFciDG0lbG-a1BCvWn666ap8fiQx3W76g,1213 +onnx/reference/ops/op_stft.py,sha256=CwPTTV2RFjkpnFSppS78cFahZcSP6tEOFv6a3oweqgk,5702 +onnx/reference/ops/op_string_concat.py,sha256=TTreyFQebClfNJwHYkrmd7oBZpeP8dYkgctZ1qmIfKg,697 +onnx/reference/ops/op_string_normalizer.py,sha256=j6Ud6xRyaq2pKW9_e4iY7TDuYA6h-11RCsDyykVp6SQ,5141 +onnx/reference/ops/op_string_split.py,sha256=Br2Qq0usOAwh_MwiLpSLZeXKcKw04F74wR2YdguIFxA,1640 
+onnx/reference/ops/op_sub.py,sha256=tIuxpbyzX5fUMAdlpRQQ16skE4ZY_OEb3oJgPSvUMk0,338 +onnx/reference/ops/op_sum.py,sha256=7kGTFHcxxv9-zZMz33wJsbGQ3tQVrDGcR-lGECcojog,247 +onnx/reference/ops/op_tan.py,sha256=Cr5I4jiABhjOt4QLu2brXKy2e-4BiHHQ0bSiWT5N5xE,260 +onnx/reference/ops/op_tanh.py,sha256=VE7D43FqcVPH2lcn9YqFZsPP8iXR1P3NdX5kRk74nkI,262 +onnx/reference/ops/op_tfidf_vectorizer.py,sha256=ABqT1y2zEU0ylvyGPZgt3v2k-AttH1tDF1aWWA6GMmM,13090 +onnx/reference/ops/op_thresholded_relu.py,sha256=EojcCTLEhTmI-KtiofmfWF6O58m-Yfcmj-yLwyHiaaE,385 +onnx/reference/ops/op_tile.py,sha256=FTs3RxQasWSC7MeykwTnXR7qg4UkfSQxgH7TLEy0hrU,263 +onnx/reference/ops/op_topk.py,sha256=i9k7YhonIXvOXX7xDnbekIHPgLnESLt77a9CaodqiVo,4978 +onnx/reference/ops/op_transpose.py,sha256=9TVudCFNYP6G_CWbmpVr5ZDzkofXr2cJFYKCHqW4ZUY,594 +onnx/reference/ops/op_trilu.py,sha256=lNhw6VjbF82k0Hkw8dqV1GM-dVISaFuncJm3qO5sQXA,397 +onnx/reference/ops/op_unique.py,sha256=adXgCoXtMBGS38lYp_dDC_Tt_cro15_MAaevOj9YC1U,1666 +onnx/reference/ops/op_unsqueeze.py,sha256=KsRGtmrZaoY5yhHFoLfRpb4S1I3MCAD_4gCSe0CZN90,1677 +onnx/reference/ops/op_upsample.py,sha256=AVCXaDDbWQOR3E_thOQKZ4SSWrFj7tuV8ERYQZFBlk8,610 +onnx/reference/ops/op_where.py,sha256=NdZd7y_QzcSAUdRGd4FbLBkeIXD74y0-say63VJ6pos,608 +onnx/reference/ops/op_xor.py,sha256=JUdj8aD74uezoOnbD40pAdYsialv1TW4vTpbbsmpLig,270 +onnx/reference/ops_optimized/__init__.py,sha256=T1nSBzjXBKSnMJzKdQxw39bYBcLj7lbO0uAx63DQkPc,274 +onnx/reference/ops_optimized/__pycache__/__init__.cpython-39.pyc,, +onnx/reference/ops_optimized/__pycache__/op_conv_optimized.cpython-39.pyc,, +onnx/reference/ops_optimized/op_conv_optimized.py,sha256=y0L2AfrN9yti3gRtK8Z2rAIRN7hmivQHYDZUr2ZvO4w,6608 +onnx/reference/reference_evaluator.py,sha256=5Grr4WB9y9ChWMQ4r8Bcz7jie3q4HGVHQXZroDahmn0,24603 +onnx/serialization.py,sha256=kDFNFRgel53rW0V9ZS-tG5G0PV9Rms60TsOxbNOs1IM,8069 +onnx/shape_inference.py,sha256=eiCtyo0v5MR2JQIIe5lTk1E6pwxKgH-hhhpkbLJASrw,5951 +onnx/shape_inference/attribute_binder.h,sha256=ICoFJLBC8GU1n8vEbUD8BW4p0-cuaBbRhNo7dKQ7xgY,2446 +onnx/shape_inference/implementation.cc,sha256=o2yo5deO3LkxzCYzynxKOFZaHMLC5SWmwismDE3LFdQ,43668 +onnx/shape_inference/implementation.h,sha256=zDEvkxX53zf1HgsymUsCfJMlxciW1yw0XzRF7_Svk40,19854 +onnx/string_utils.h,sha256=jAFGm8XyB3PwvijH-0Cni_83gu1rfXQ7v16OGyPeuMU,1337 +onnx/subbyte.py,sha256=wYA14-QUhRWD829f-8cxPviSjffUb19rCJSuXIDdIfg,2361 +onnx/test/__init__.pyi,sha256=VW4Y59avFWUotfJT9rPS5AachoNP4rSbu2Z43Ln5Wzw,254 +onnx/test/__pycache__/basic_test.cpython-39.pyc,, +onnx/test/__pycache__/checker_test.cpython-39.pyc,, +onnx/test/__pycache__/compose_test.cpython-39.pyc,, +onnx/test/__pycache__/data_propagation_test.cpython-39.pyc,, +onnx/test/__pycache__/elu_test.cpython-39.pyc,, +onnx/test/__pycache__/function_inference_test.cpython-39.pyc,, +onnx/test/__pycache__/function_test.cpython-39.pyc,, +onnx/test/__pycache__/helper_test.cpython-39.pyc,, +onnx/test/__pycache__/hub_test.cpython-39.pyc,, +onnx/test/__pycache__/inference_function_test.cpython-39.pyc,, +onnx/test/__pycache__/inliner_test.cpython-39.pyc,, +onnx/test/__pycache__/model_container_refeval_test.cpython-39.pyc,, +onnx/test/__pycache__/model_container_test.cpython-39.pyc,, +onnx/test/__pycache__/model_inference_test.cpython-39.pyc,, +onnx/test/__pycache__/numpy_helper_test.cpython-39.pyc,, +onnx/test/__pycache__/parser_test.cpython-39.pyc,, +onnx/test/__pycache__/printer_test.cpython-39.pyc,, +onnx/test/__pycache__/reference_evaluator_ml_test.cpython-39.pyc,, +onnx/test/__pycache__/reference_evaluator_model_test.cpython-39.pyc,, 
+onnx/test/__pycache__/reference_evaluator_test.cpython-39.pyc,, +onnx/test/__pycache__/relu_test.cpython-39.pyc,, +onnx/test/__pycache__/schema_test.cpython-39.pyc,, +onnx/test/__pycache__/serialization_test.cpython-39.pyc,, +onnx/test/__pycache__/shape_inference_test.cpython-39.pyc,, +onnx/test/__pycache__/symbolic_shape_test.cpython-39.pyc,, +onnx/test/__pycache__/test_backend_onnxruntime.cpython-39.pyc,, +onnx/test/__pycache__/test_backend_reference.cpython-39.pyc,, +onnx/test/__pycache__/test_backend_test.cpython-39.pyc,, +onnx/test/__pycache__/test_external_data.cpython-39.pyc,, +onnx/test/__pycache__/test_with_ort.cpython-39.pyc,, +onnx/test/__pycache__/tools_test.cpython-39.pyc,, +onnx/test/__pycache__/training_tool_test.cpython-39.pyc,, +onnx/test/__pycache__/utils_test.cpython-39.pyc,, +onnx/test/__pycache__/version_converter_test.cpython-39.pyc,, +onnx/test/__pycache__/version_utils.cpython-39.pyc,, +onnx/test/basic_test.py,sha256=ilA7YFfWJjR-8gRpzhfiMWXzcuytf2dMk3Ipg-hJ0uI,9831 +onnx/test/checker_test.py,sha256=_XttwU3eF9bkb6gRbqcsuLPHx4XSRsznNuP4OfU--YQ,47303 +onnx/test/compose_test.py,sha256=Eg6QTwUsFjyeQcBhePf2aJAPFGKM9zSiguMY6Npjl6Y,35881 +onnx/test/cpp/common_path_test.cc,sha256=Ny3otyHav5hC6saZsi_TMn5oy3mGvx4tZ-9wEIurWHM,2368 +onnx/test/cpp/data_propagation_test.cc,sha256=9CsBR4P0FbGLd4snIOJr8L1OmQeFMF66dZCx9kuFBEU,13161 +onnx/test/cpp/function_context_test.cc,sha256=pDavMKJNYaXuwn6Q0bqwbg29XHGCL3rHJcI4MfUott4,10883 +onnx/test/cpp/function_get_test.cc,sha256=ceuzpIMT9kZEejwrlZi08uUAhFgkw2i9v-I8MNSHJ-U,1489 +onnx/test/cpp/function_verify_test.cc,sha256=v5uISIbr5ldO64J0g1pW9bNILbUvMYLJ22pGneGyd7g,21152 +onnx/test/cpp/inliner_test.cc,sha256=wJbT-g6tRzfnJR6wC39_HA85rhUqSY1XLMSJiz1duA8,10050 +onnx/test/cpp/ir_test.cc,sha256=knT2xPmnDQsTG8KXRNCWQaYqjzGKd6oRTp9nNxY290w,1426 +onnx/test/cpp/op_reg_test.cc,sha256=z0P5usDoFIK5N_mSa-j9awuLiZYDcy8QEM_mmkhmKvU,941 +onnx/test/cpp/parser_test.cc,sha256=pIGSVeNZFWWLS4gVfhd6YnmxOafRePfy3ux6mTPD80w,18111 +onnx/test/cpp/schema_registration_test.cc,sha256=fhqmKCifUeKPazOgaOaizwVY7Z6xia2eBv__3kcO4TI,9889 +onnx/test/cpp/shape_inference_test.cc,sha256=AE9EDqADK4id_D5eYjQTc5de2UheZ4Dr6lwvbjo6cO4,19210 +onnx/test/cpp/test_main.cc,sha256=FcLNQYZGnrP3VFWxJ57zG-KVTgdT4xgJ4BLcLoJ7dIM,332 +onnx/test/data_propagation_test.py,sha256=c9YSyMqkGYxVU8BSh7wjTeOAfuP6hY-1pD3uZqqhSjA,4788 +onnx/test/elu_test.py,sha256=0cLsPbsmbqPCz3k5ha3uOw0QbjBqyowu0NWB19ItLRE,418 +onnx/test/function_inference_test.py,sha256=9dKG2cNKinNY6zhPMMFziT4YkBNr0EXBG_jAXFtxqmU,4445 +onnx/test/function_test.py,sha256=KsgN5U4Q_84CAzuOCAea9aQvaAL2CeGsjEpAOfOB4dc,8798 +onnx/test/helper_test.py,sha256=8aMAKDXU7keeN8JCLnO4Sl6zCabCmJobDNCoVXp2r24,39576 +onnx/test/hub_test.py,sha256=eIucpRqLzoJlGrEryRRz8lyCjBATyiiXCfONqhq7U7Y,3973 +onnx/test/inference_function_test.py,sha256=oqJ6QGGoFgfrd8EvGh-N8W0AYXwrtLRiBqJWUSPLKE0,9890 +onnx/test/inliner_test.py,sha256=Au19Z6pNe_iRvu6PGjgofdUn1WVY836P7uuocQBW7q8,4084 +onnx/test/model_container_refeval_test.py,sha256=mKVUaGXIQpqIkaUerJp_Kua_-1-R6BIHpd1Wl_HkkdY,5082 +onnx/test/model_container_test.py,sha256=xcNJkRcgFpgm-oiwfwKT89ki9JrWmw84yCPmeGFG7qg,5284 +onnx/test/model_inference_test.py,sha256=jrEMBvteFa4fZ9DOGGtIQXo1y_chAe9_OK1yVPJzww8,9634 +onnx/test/numpy_helper_test.py,sha256=Nwv_DpcZw4ZF5Cru2BO39cD_iAK7IOGY-vmVQgJdYxg,22739 +onnx/test/parser_test.py,sha256=PmDpDFICWLeZ7ccMExj8CYgfBeSeMU_jyoMQ4NlM8Co,9799 +onnx/test/printer_test.py,sha256=EFxdnil1D6c23XTPGa_mw2CleCo9FDbXi1XVRn7jqZA,1263 
+onnx/test/reference_evaluator_ml_test.py,sha256=iu2j228F1IqBQSSOdcqWNGFAXCuUFzkRiCTOOYnmUDs,87154 +onnx/test/reference_evaluator_model_test.py,sha256=7bY_gVkYIJGkLQbfDGGUEDtw5_FqgNxWyjoOydnJVF8,4055 +onnx/test/reference_evaluator_test.py,sha256=wKqFsolCn7qcTe9jK5k22sJd_-27ZN-8dlcfbzP1Mb4,223489 +onnx/test/relu_test.py,sha256=6IPe0CgOxiCSDgMPKB5UlnHHWLQDey6Qq1dXSzNX5lg,352 +onnx/test/schema_test.py,sha256=GT9Va6yKcXeGH1GjRBXb-2IesBsHAKZp_c0c8pMMQMs,16872 +onnx/test/serialization_test.py,sha256=oMGkikyE_3k-nrMgF2mEadp2E4_iNsAq54ol8GAysF8,3288 +onnx/test/shape_inference_test.py,sha256=HsITAaVR2NLDxqLpwFMs7oRiDdddOSTki3LCIDx1b5A,369821 +onnx/test/symbolic_shape_test.py,sha256=zw4SnnT2RUR1cYueZ-HvT2GqYjdR6XkK4cTsFIAVTcw,8712 +onnx/test/test_backend_onnxruntime.py,sha256=0IWV3IallU8nt8FxeVJXGe-bXOL9CS91Y_C6uSbupDk,14709 +onnx/test/test_backend_reference.py,sha256=OoGR8P9mYXm_VNp8kzXVSxQqV-UA2BBnT3cR1ON_pHk,8012 +onnx/test/test_backend_test.py,sha256=h8ywju_M1QuaRtkranDbTuZurqKv2COFk64TyE9P61E,4319 +onnx/test/test_external_data.py,sha256=iGm5aEeGoVFQ5tfVi_iwIPJg8JqWpLsa22gUXf1LGYk,32344 +onnx/test/test_with_ort.py,sha256=70rjQkSkhrwzN90gP2IykFp9WifBJjTNvRgwNxLq2ig,1640 +onnx/test/tools_test.py,sha256=bgrVr1dDw1PAI6aZ-eJa-1GUZ9fKAb3Y-_P_7aTX9y4,13889 +onnx/test/training_tool_test.py,sha256=ZpTeVUYQEiKa28fBSnwwNUfF7LlHfmTOE3tSp1jbVfk,3776 +onnx/test/utils_test.py,sha256=zeC8ZXnMrgcTJKLV_PKyYz8XkCLnGCq7IvHT2mTAMWY,2281 +onnx/test/version_converter/__pycache__/automatic_conversion_test_base.cpython-39.pyc,, +onnx/test/version_converter/__pycache__/automatic_downgrade_test.cpython-39.pyc,, +onnx/test/version_converter/__pycache__/automatic_upgrade_test.cpython-39.pyc,, +onnx/test/version_converter/automatic_conversion_test_base.py,sha256=Kc2sTGYZY1E9tqGKZTtEIlNYtfIGhhOG60VZ0IUPMqk,7060 +onnx/test/version_converter/automatic_downgrade_test.py,sha256=0QSOEYy416rXaohwwroTbhKANXTyNuSjs4gd2mxN1Zs,3372 +onnx/test/version_converter/automatic_upgrade_test.py,sha256=9-0xuQdYydfFDRFCK3SJZlh4HUdQad0aKa79bIJWbpk,56018 +onnx/test/version_converter_test.py,sha256=jpMNO5k4buF2J1-Xks3JXzzou-NXoHzmU4mM0zC9Clk,81907 +onnx/test/version_utils.py,sha256=rE_z_LYBjFiz6A_EpNJczEXX_jQ9iP1Bhj1FUkiwDos,374 +onnx/tools/__init__.py,sha256=HiuD-U6YHJwWikkemKJqBq-35U8KJ9LDdAwdisi7OlQ,85 +onnx/tools/__pycache__/__init__.cpython-39.pyc,, +onnx/tools/__pycache__/net_drawer.cpython-39.pyc,, +onnx/tools/__pycache__/replace_constants.cpython-39.pyc,, +onnx/tools/__pycache__/update_model_dims.cpython-39.pyc,, +onnx/tools/net_drawer.py,sha256=D1CjUu2SqPr9M5YuFWRknQGczHcXY6wJF5B1TP_72G8,5055 +onnx/tools/replace_constants.py,sha256=-CKmbtN4oqaOiPBKQoiilFMBlAPyhRGyCK0ZrcdH2FQ,15272 +onnx/tools/update_model_dims.py,sha256=_18ze7JC_En-IC6SX3zHNhIFABaJa7vc5wsAEc9ygdg,3528 +onnx/utils.py,sha256=OYvWSG_ulwV6e_h6udGrTPnHCbBnMns_y6G_mrDY0t4,8600 +onnx/version.py,sha256=8Vf9pXsUELnWLjzu3NFQ1s5ZzUZbHld_hyjeWgPal08,134 +onnx/version_converter.py,sha256=Qi0hGuU45xf8rD8E7W8f9MVgZiGIrPU5lzl2bOX6gG4,1274 +onnx/version_converter/BaseConverter.h,sha256=VeziFFvwvPKyaCwuTtb6gtN3kAzFXOVxHiFsoBUBC5c,3853 +onnx/version_converter/adapters/adapter.h,sha256=yRKkg0Uay4qaFwsIxvsWTJrqDFv_vxEoITXODkruo3U,1876 +onnx/version_converter/adapters/axes_attribute_to_input.h,sha256=pWLOODCRZcWLeWBVJUp0HlLV_SwLgvoN7G4EBVgtLVY,1349 +onnx/version_converter/adapters/axes_input_to_attribute.h,sha256=vn5BFi3s-UkzEUWrM7PAT4xkci35P1tgerxuCU7mzDg,2559 +onnx/version_converter/adapters/axis_attribute_to_input.h,sha256=-2joYwLE2ToHpMvN4q5SKP8j14GNTECgL80rZIr7jVI,2079 
+onnx/version_converter/adapters/axis_input_to_attribute.h,sha256=zhiD6L1Xq7cRJUjtLldAYdyEYRNmqh3xLV_57L9MqxI,3168 +onnx/version_converter/adapters/batch_normalization_13_14.h,sha256=qsYoHjKC7uk58KjUdX_txMJJMTm2BJaXaUaRLesKqB8,898 +onnx/version_converter/adapters/broadcast_backward_compatibility.h,sha256=Oe0envdCyS2MIweNcgWXLbvgdDi8jlApSJ_DqYuAaxQ,2213 +onnx/version_converter/adapters/broadcast_forward_compatibility.h,sha256=L2j0-bZcJ9cct4zo6uPhQ3q6aQMvS-vlEC2gHymUrRU,3357 +onnx/version_converter/adapters/cast_9_8.h,sha256=T74swDaSFLrsigbKiothgmduDMcas3iXlROk5yzd8yM,909 +onnx/version_converter/adapters/clip_10_11.h,sha256=AQZJAo097oOVEg2DhIprRIHKSD1mSnXAP8xu0wAYLnk,1556 +onnx/version_converter/adapters/compatible.h,sha256=lxxpI1pHPjEVjYz9U2-gwdq5pNZVcEM1ZiaDVcNhNmk,728 +onnx/version_converter/adapters/dropout_11_12.h,sha256=PEU8LGiGodFBbJCsbdJvxuvhH1scyrms08wMipnQypU,1176 +onnx/version_converter/adapters/extend_supported_types.h,sha256=Un9tnAI-Ilw7LbW2iJneYbctlEydbY4gjGcFUgpUvtk,3949 +onnx/version_converter/adapters/gemm_6_7.h,sha256=vJht2gd7UQpqm6EZAjNuftg61yvJi0PtH6AT7G0eAjw,1701 +onnx/version_converter/adapters/gemm_7_6.h,sha256=EE1YkPGxLAISOkWr6C5K_jPd57KGp3My7z_Xjm3I8S4,1904 +onnx/version_converter/adapters/gridsample_19_20.h,sha256=IkWGarYmz72Y08yJuFoGL1rom9LD5xFQlQ27Yeb79qw,927 +onnx/version_converter/adapters/group_normalization_20_21.h,sha256=MV3rYQp1NcH2hTW7l5hBgigDDiRehuW6l_OhBmaW26Y,4557 +onnx/version_converter/adapters/maxpool_8_7.h,sha256=dzjqelAnTap_8CbM7IsYUI8NZfDAr29tPFj6LpmNayI,976 +onnx/version_converter/adapters/no_previous_version.h,sha256=klE5tGThmTeUmogeVe7K1PNbAew-WTsgsbU0KRBKWic,836 +onnx/version_converter/adapters/pad_10_11.h,sha256=4Q1LltBKD0SuWNiq0oNQWJosvliati3WTEQ8zo19fLE,1698 +onnx/version_converter/adapters/q_dq_21_20.h,sha256=mTrX6lwi-MxnbjSRNagcGAJkcss_woYSk99iJi89a88,2526 +onnx/version_converter/adapters/remove_consumed_inputs.h,sha256=UziYNEVvNepFjFep9Im03wTFiFE8jMrH_JVsY0yrScc,796 +onnx/version_converter/adapters/reshape_4_5.h,sha256=l2Gi2ln5yWY6fR6LOkmYVfsTKURJKUuqzBf22DpcS_M,1401 +onnx/version_converter/adapters/reshape_5_4.h,sha256=edjZ8rrYvcNefmceOKNYfubDr8a7pjbC7id2EX823NE,2619 +onnx/version_converter/adapters/resize_10_11.h,sha256=-kEQGaJihl3P0QtZjVrK1ZkVeGfDCBc_3MTwgiTRj9Q,1302 +onnx/version_converter/adapters/scan_8_9.h,sha256=BWaO0EOnXNoUt3vN8P3LWQOJUa2MG9K7SQQa_wZzPSs,1804 +onnx/version_converter/adapters/scan_9_8.h,sha256=rJz_ULE2pZNh52RB1IpfGBa9FzR39_fDbZEKL-lPw-c,2987 +onnx/version_converter/adapters/scatter_10_11.h,sha256=ZwkmuEN-mQtnA8LAMekplLQuBXMblq3300KMVXrE0_0,1219 +onnx/version_converter/adapters/slice_9_10.h,sha256=SWVrpymCn2oYTf8GQRA0MH5SI3KFSQWbd-F2xr4vC24,1492 +onnx/version_converter/adapters/softmax_12_13.h,sha256=UdMHzPf4f32lzm_P4CL2tTQUGGbQmSJ-l1yPWFZDie4,2864 +onnx/version_converter/adapters/split_12_13.h,sha256=Wk7ko4EQpohINFM4okhEe1D5LxRCjvu_X5nXdbintAs,1242 +onnx/version_converter/adapters/split_13_12.h,sha256=YwgpXKp8NBOGeri7rltyna4-kj3-8b8SJqKYyklBICE,2480 +onnx/version_converter/adapters/split_17_18.h,sha256=95fElNRIc03h-zwciH_VlWKNskNxqMkZJqj9Hc7sbew,1047 +onnx/version_converter/adapters/sum_8_7.h,sha256=61O0cfKIyS8lLCmvnK-MUBMEkHgaRWjcwQxkVJdf8ac,1124 +onnx/version_converter/adapters/topk_9_10.h,sha256=maKiyMzSKmn1n0BLWh7bq2ZU1aVLJ7oAtHINkzB7EBM,1049 +onnx/version_converter/adapters/transformers.h,sha256=uqyMKYag4l_KuZQAs5dIn3fHJUCQQscIwSnrKOgUQsc,2220 +onnx/version_converter/adapters/type_restriction.h,sha256=wWtoHnkXLBRVm9ux1h56BEg5eksw_OzBwXlgSizYR0o,1718 
+onnx/version_converter/adapters/upsample_6_7.h,sha256=vHwjsHvCw3d5d4rE1UZBSI_EyGL0J7t9VgZTnoTinLw,1554
+onnx/version_converter/adapters/upsample_8_9.h,sha256=a3U4oqwTt8gJXR2bf1d0LgOjjOg0JobuinpE-QBC4Fg,1334
+onnx/version_converter/adapters/upsample_9_10.h,sha256=J5xRPJQ6mTpLCV1yp6LaEoTaXghP9FhTFS-IAxRi5rs,1112
+onnx/version_converter/adapters/upsample_9_8.h,sha256=miIzjt52ofR78p3TAi5V9mQWUG8EZy8ExKsilj5SSvY,2598
+onnx/version_converter/convert.cc,sha256=u4mMZw4_La0EC8gA3i3H42KK_GgAhLadKeYkIwhjMCY,5620
+onnx/version_converter/convert.h,sha256=xJB6cALvSbAUCSGSiYpUMfXqyJdOx8C3DiYzTAx8XV8,47761
+onnx/version_converter/helper.cc,sha256=ONLSawtzC-fiefOHOy_Xxt3zzfWTAOZaEfjLgT-kF6Y,2827
+onnx/version_converter/helper.h,sha256=WqdXOd__8tZPhB-cJK5DA4ZGG1kXIw6zV3vgHr9RCNQ,778
diff --git a/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/REQUESTED b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/REQUESTED
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/WHEEL b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..b7132af6d27aab726a7499fc58ccd63c206a0a33
--- /dev/null
+++ b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: false
+Tag: cp39-cp39-win_amd64
+
diff --git a/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/entry_points.txt b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e513d7dc56c333d0c1b6df78909609635e32d254
--- /dev/null
+++ b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/entry_points.txt
@@ -0,0 +1,4 @@
+[console_scripts]
+backend-test-tools = onnx.backend.test.cmd_tools:main
+check-model = onnx.bin.checker:check_model
+check-node = onnx.bin.checker:check_node
diff --git a/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/top_level.txt b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b20b937c09b8aa4d88413538c56c80739b73b041
--- /dev/null
+++ b/MLPY/Lib/site-packages/onnx-1.16.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+onnx
diff --git a/MLPY/Lib/site-packages/pettingzoo/__init__.py b/MLPY/Lib/site-packages/pettingzoo/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..662485069fa1f9f1ba4f23b8280ff9389f5d8a85
--- /dev/null
+++ b/MLPY/Lib/site-packages/pettingzoo/__init__.py
@@ -0,0 +1,16 @@
+import os
+import sys
+
+from pettingzoo.utils import AECEnv, ParallelEnv
+
+# Initializing pygame initializes audio connections through SDL. SDL uses alsa by default on all Linux systems
+# SDL connecting to alsa frequently creates these giant lists of warnings every time you import an environment using pygame
+# DSP is far more benign (and should probably be the default in SDL anyways)
+
+if sys.platform.startswith("linux"):
+
+    os.environ["SDL_AUDIODRIVER"] = "dsp"
+
+os.environ["PYGAME_HIDE_SUPPORT_PROMPT"] = "hide"
+
+__version__ = "1.15.0"
diff --git a/MLPY/Lib/site-packages/pettingzoo/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bfaaa0cb5e851616afc1c3573bdc7c94df998c57
Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/__pycache__/__init__.cpython-39.pyc differ
diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__init__.py b/MLPY/Lib/site-packages/pettingzoo/atari/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ded2c453b63215d5c2e1c2eb55d820238962a0fb
--- /dev/null
+++ b/MLPY/Lib/site-packages/pettingzoo/atari/__init__.py
@@ -0,0 +1,5 @@
+from pettingzoo.utils.deprecated_module import depricated_handler
+
+
+def __getattr__(env_name):
+    return depricated_handler(env_name, __path__, __name__)
diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e85cd591ed61861ba0b84f8649486ec94414719f
Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/__init__.cpython-39.pyc differ
diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/base_atari_env.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/base_atari_env.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f9c7ae37ba2bb133e30a19df3882705e37169d6a
Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/base_atari_env.cpython-39.pyc differ
diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/basketball_pong_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/basketball_pong_v2.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1134086b2bd40f569761a8ed216097b1481ae086
Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/basketball_pong_v2.cpython-39.pyc differ
diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/boxing_v1.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/boxing_v1.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..53ff91282c8263641e579eb7bd713fc7b88f0044
Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/boxing_v1.cpython-39.pyc differ
diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/combat_plane_v1.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/combat_plane_v1.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1756be40c52e3314bf95adb06268994d6d8d574a
Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/combat_plane_v1.cpython-39.pyc differ
diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/combat_tank_v1.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/combat_tank_v1.cpython-39.pyc
new file mode 100644 index
0000000000000000000000000000000000000000..23870ed5d0d088068824a26efb98c21bf52f281d Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/combat_tank_v1.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/double_dunk_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/double_dunk_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1e9be05f5e86967c0bd27023653f4e394c4e2c83 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/double_dunk_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/entombed_competitive_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/entombed_competitive_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7598d27c41fbd3e412e70f1c85838b0a2540ce18 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/entombed_competitive_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/entombed_cooperative_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/entombed_cooperative_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ce76937bbd22eb2f7f9be69c058f7ef2aa550aaf Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/entombed_cooperative_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/flag_capture_v1.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/flag_capture_v1.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d2ddedc5ad9d5104a1e43f730dbd190bd4b0a052 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/flag_capture_v1.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/foozpong_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/foozpong_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..963e7fd003f704a918a25a0d34d82c3001a13cae Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/foozpong_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/ice_hockey_v1.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/ice_hockey_v1.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1014909753f63855241a1f3ff59418a2ed8c826b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/ice_hockey_v1.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/joust_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/joust_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0318f202785d4d46c666d4e660d792b9ee71129 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/joust_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/mario_bros_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/mario_bros_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4db6b6385feb280c831cbe3752cf5ed1a802decc Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/mario_bros_v2.cpython-39.pyc differ diff --git 
a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/maze_craze_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/maze_craze_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ac56b1a692dd94ad7082d8ebf5fc20231de5de5a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/maze_craze_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/othello_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/othello_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3846d499c387bb1ba8b3d395f128925802f45de7 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/othello_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/pong_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/pong_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4836158772e92228e6bbec68554f8b00cc1d0637 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/pong_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/quadrapong_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/quadrapong_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..abb41eebdf91fe0e6747cd3e512b31c731defbd0 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/quadrapong_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/space_invaders_v1.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/space_invaders_v1.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..76dc95bd367066737287a1eece23370dd064e417 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/space_invaders_v1.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/space_war_v1.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/space_war_v1.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8fc910a73722c9cd4c62fe7064f1838da3f8ec8e Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/space_war_v1.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/surround_v1.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/surround_v1.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e78bc22479bbc52edccd6832b1ebe1eec89c0098 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/surround_v1.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/tennis_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/tennis_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fddd7f17a636134d41fab6c76ac82f341030eed8 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/tennis_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/video_checkers_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/video_checkers_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4070f7d0828a4492b516b6e9bed0f852f9c088c2 Binary files /dev/null and 
b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/video_checkers_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/volleyball_pong_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/volleyball_pong_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..47912d19ffa98d67fdc796c5f5784df963ae5eb0 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/volleyball_pong_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/warlords_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/warlords_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..85a6fdbe86c77e6ce50e928885cbcdc1d0dac3c6 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/warlords_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/wizard_of_wor_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/wizard_of_wor_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7a9925ca294de7f20ca956f44cfa6f51dc2ead89 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/atari/__pycache__/wizard_of_wor_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/base_atari_env.py b/MLPY/Lib/site-packages/pettingzoo/atari/base_atari_env.py new file mode 100644 index 0000000000000000000000000000000000000000..492f63c66e9a4abf1fc15422a3d9013e98ddfd12 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/base_atari_env.py @@ -0,0 +1,245 @@ +from pathlib import Path + +import gym +import multi_agent_ale_py +import numpy as np +from gym import spaces +from gym.utils import EzPickle, seeding + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers +from pettingzoo.utils.conversions import parallel_to_aec_wrapper, parallel_wrapper_fn +from pettingzoo.utils.env import ParallelEnv + + +def base_env_wrapper_fn(raw_env_fn): + def env_fn(**kwargs): + env = raw_env_fn(**kwargs) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + return env_fn + + +def BaseAtariEnv(**kwargs): + return parallel_to_aec_wrapper(ParallelAtariEnv(**kwargs)) + + +class ParallelAtariEnv(ParallelEnv, EzPickle): + def __init__( + self, + game, + num_players, + mode_num=None, + seed=None, + obs_type='rgb_image', + full_action_space=True, + env_name=None, + max_cycles=100000, + auto_rom_install_path=None): + """Frameskip should be either a tuple (indicating a random range to + choose from, with the top value exclude), or an int.""" + EzPickle.__init__( + self, + game, + num_players, + mode_num, + seed, + obs_type, + full_action_space, + env_name, + max_cycles, + auto_rom_install_path, + ) + + assert obs_type in ('ram', 'rgb_image', "grayscale_image"), "obs_type must either be 'ram' or 'rgb_image' or 'grayscale_image'" + self.obs_type = obs_type + self.full_action_space = full_action_space + self.num_players = num_players + self.max_cycles = max_cycles + if env_name is None: + env_name = "custom_" + game + self.metadata = {'render.modes': ['human', 'rgb_array'], + 'name': env_name, + 'video.frames_per_second': 60} + + multi_agent_ale_py.ALEInterface.setLoggerMode("error") + self.ale = multi_agent_ale_py.ALEInterface() + + self.ale.setFloat(b'repeat_action_probability', 0.) 
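+ # Descriptive note: 'repeat_action_probability' is ALE's sticky-actions probability; forcing it to 0.0 here disables sticky actions, so stepping is deterministic given the seed and the supplied actions.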
+ + if auto_rom_install_path is None: + start = Path(multi_agent_ale_py.__file__).parent + else: + start = Path(auto_rom_install_path).resolve() + + # start looking in local directory + final = start / f"{game}.bin" + if not final.exists(): + # if that doesn't work, look in 'roms' + final = start / "roms" / f"{game}.bin" + + if not final.exists(): + # use old AutoROM install path as backup + final = start / "ROM" / game / f"{game}.bin" + + if not final.exists(): + raise OSError(f"rom {game} is not installed. Please install roms using AutoROM tool (https://github.com/Farama-Foundation/AutoROM) " + "or specify and double-check the path to your Atari rom using the `rom_path` argument.") + + self.rom_path = str(final) + self.ale.loadROM(self.rom_path) + + all_modes = self.ale.getAvailableModes(num_players) + + if mode_num is None: + mode = all_modes[0] + else: + mode = mode_num + assert mode in all_modes, f"mode_num parameter is wrong. Mode {mode_num} selected, only {list(all_modes)} modes are supported" + + self.mode = mode + self.ale.setMode(self.mode) + assert num_players == self.ale.numPlayersActive() + + if full_action_space: + action_size = 18 + action_mapping = np.arange(action_size) + else: + action_mapping = self.ale.getMinimalActionSet() + action_size = len(action_mapping) + + self.action_mapping = action_mapping + + if obs_type == 'ram': + observation_space = gym.spaces.Box(low=0, high=255, dtype=np.uint8, shape=(128,)) + else: + (screen_width, screen_height) = self.ale.getScreenDims() + if obs_type == 'rgb_image': + num_channels = 3 + elif obs_type == 'grayscale_image': + num_channels = 1 + observation_space = spaces.Box(low=0, high=255, shape=(screen_height, screen_width, num_channels), dtype=np.uint8) + + player_names = ["first", "second", "third", "fourth"] + self.agents = [f"{player_names[n]}_0" for n in range(num_players)] + self.possible_agents = self.agents[:] + + self.action_spaces = {agent: gym.spaces.Discrete(action_size) for agent in self.possible_agents} + self.observation_spaces = {agent: observation_space for agent in self.possible_agents} + + self._screen = None + self.seed(seed) + + def seed(self, seed=None): + if seed is None: + seed = seeding.create_seed(seed, max_bytes=4) + self.ale.setInt(b"random_seed", seed) + self.ale.loadROM(self.rom_path) + self.ale.setMode(self.mode) + + def reset(self): + self.ale.reset_game() + self.agents = self.possible_agents[:] + self.dones = {agent: False for agent in self.possible_agents} + self.frame = 0 + + obs = self._observe() + return {agent: obs for agent in self.agents} + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def _observe(self): + if self.obs_type == 'ram': + bytes = self.ale.getRAM() + return bytes + elif self.obs_type == 'rgb_image': + return self.ale.getScreenRGB() + elif self.obs_type == 'grayscale_image': + return self.ale.getScreenGrayscale() + + def step(self, action_dict): + actions = np.zeros(self.max_num_agents, dtype=np.int32) + for i, agent in enumerate(self.possible_agents): + if agent in action_dict: + actions[i] = action_dict[agent] + + actions = self.action_mapping[actions] + rewards = self.ale.act(actions) + self.frame += 1 + if self.ale.game_over() or self.frame >= self.max_cycles: + dones = {agent: True for agent in self.agents} + else: + lives = self.ale.allLives() + # an inactive agent in ale gets a -1 life. 
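+ # Descriptive note: the comprehension below therefore marks any agent whose reported life count has gone negative as done.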
+ dones = {agent: int(life) < 0 for agent, life in zip(self.possible_agents, lives) if agent in self.agents} + + obs = self._observe() + observations = {agent: obs for agent in self.agents} + rewards = {agent: rew for agent, rew in zip(self.possible_agents, rewards) if agent in self.agents} + infos = {agent: {} for agent in self.possible_agents if agent in self.agents} + self.agents = [agent for agent in self.agents if not dones[agent]] + return observations, rewards, dones, infos + + def render(self, mode="human"): + (screen_width, screen_height) = self.ale.getScreenDims() + image = self.ale.getScreenRGB() + if mode == "human": + import os + + import pygame + zoom_factor = 4 + if self._screen is None: + pygame.init() + self._screen = pygame.display.set_mode((screen_width * zoom_factor, screen_height * zoom_factor)) + + myImage = pygame.image.fromstring(image.tobytes(), image.shape[:2][::-1], "RGB") + + myImage = pygame.transform.scale(myImage, (screen_width * zoom_factor, screen_height * zoom_factor)) + + self._screen.blit(myImage, (0, 0)) + + pygame.display.flip() + elif mode == "rgb_array": + return image + else: + raise ValueError("bad value for render mode") + + def close(self): + if self._screen is not None: + import pygame + pygame.quit() + self._screen = None + + def clone_state(self): + """Clone emulator state w/o system state. Restoring this state will + *not* give an identical environment. For complete cloning and restoring + of the full state, see `{clone,restore}_full_state()`.""" + state_ref = self.ale.cloneState() + state = self.ale.encodeState(state_ref) + self.ale.deleteState(state_ref) + return state + + def restore_state(self, state): + """Restore emulator state w/o system state.""" + state_ref = self.ale.decodeState(state) + self.ale.restoreState(state_ref) + self.ale.deleteState(state_ref) + + def clone_full_state(self): + """Clone emulator state w/ system state including pseudorandomness. 
+ Restoring this state will give an identical environment.""" + state_ref = self.ale.cloneSystemState() + state = self.ale.encodeState(state_ref) + self.ale.deleteState(state_ref) + return state + + def restore_full_state(self, state): + """Restore emulator state w/ system state including pseudorandomness.""" + state_ref = self.ale.decodeState(state) + self.ale.restoreSystemState(state_ref) + self.ale.deleteState(state_ref) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/basketball_pong_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/basketball_pong_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..a4d77f11c6c2df26beba440f7f5f54fd9ace873f --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/basketball_pong_v2.py @@ -0,0 +1,14 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(num_players=2, **kwargs): + assert num_players == 2 or num_players == 4, "pong only supports 2 or 4 players" + mode_mapping = {2: 45, 4: 49} + mode = mode_mapping[num_players] + return BaseAtariEnv(game="pong", num_players=num_players, mode_num=mode, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/boxing_v1.py b/MLPY/Lib/site-packages/pettingzoo/atari/boxing_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..d3eef0bb6281e9340fd14a621237adcbdd160eff --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/boxing_v1.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="boxing", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/combat_plane_v1.py b/MLPY/Lib/site-packages/pettingzoo/atari/combat_plane_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..2f6aa7345037e8f84a4011669d06cc3badebfaac --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/combat_plane_v1.py @@ -0,0 +1,19 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + +avaliable_versions = { + "bi-plane": 15, + "jet": 21, +} + + +def raw_env(game_version="bi-plane", guided_missile=True, **kwargs): + assert game_version in avaliable_versions, "game_version must be either 'jet' or 'bi-plane'" + mode = avaliable_versions[game_version] + (0 if guided_missile else 1) + + return BaseAtariEnv(game="combat", num_players=2, mode_num=mode, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/combat_tank_v1.py b/MLPY/Lib/site-packages/pettingzoo/atari/combat_tank_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..53dd0582243f8d0d1ced240d863848991468159c --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/combat_tank_v1.py @@ -0,0 +1,22 @@ +import os +import warnings + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(has_maze=True, is_invisible=False, billiard_hit=True, **kwargs): + if has_maze is False and is_invisible is False and billiard_hit is False: + warnings.warn("combat_tank has interesting parameters to consider overriding 
including is_invisible, billiard_hit and has_maze") + start_mapping = { + (False, False): 1, + (False, True): 8, + (True, False): 10, + (True, True): 13, + } + mode = start_mapping[(is_invisible, billiard_hit)] + has_maze + + return BaseAtariEnv(game="combat", num_players=2, mode_num=mode, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/double_dunk_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/double_dunk_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..384a7c21eb01fe226b1f014444ec20df675d5d08 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/double_dunk_v2.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="double_dunk", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/entombed_competitive_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/entombed_competitive_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..8d98d795d41a0e4b0e3074c05ea213000ab0caea --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/entombed_competitive_v2.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="entombed", num_players=2, mode_num=2, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/entombed_cooperative_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/entombed_cooperative_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..43eba7b519492a9f8c135b9aad8f10ac0f75c3c9 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/entombed_cooperative_v2.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="entombed", num_players=2, mode_num=3, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/flag_capture_v1.py b/MLPY/Lib/site-packages/pettingzoo/atari/flag_capture_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..dabc61e9a08551b58dcaf51922da4d0d7a930270 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/flag_capture_v1.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="flag_capture", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/foozpong_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/foozpong_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..b3dab019a73bac4ebc882fa8353b92cfd927ebce --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/foozpong_v2.py @@ -0,0 +1,14 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, 
parallel_wrapper_fn + + +def raw_env(num_players=4, **kwargs): + assert num_players == 2 or num_players == 4, "pong only supports 2 or 4 players" + mode_mapping = {2: 19, 4: 21} + mode = mode_mapping[num_players] + return BaseAtariEnv(game="pong", num_players=num_players, mode_num=mode, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/ice_hockey_v1.py b/MLPY/Lib/site-packages/pettingzoo/atari/ice_hockey_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..ef0c8fa06eee0536072ec8e81994633146a74b04 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/ice_hockey_v1.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="ice_hockey", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/joust_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/joust_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..40be9e52f2b4e4e886bb85661e3420b51dc4f88a --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/joust_v2.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="joust", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/mario_bros_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/mario_bros_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..38160e36082e2989a245a3eafd688324ad5eb8ee --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/mario_bros_v2.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="mario_bros", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/maze_craze_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/maze_craze_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..848cc537769e7f8452c7950782a1364e0f5023cd --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/maze_craze_v2.py @@ -0,0 +1,24 @@ +import os +import warnings + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + +avaliable_versions = { + "robbers": 2, + "race": 1, + "capture": 12, +} + + +def raw_env(game_version="robbers", visibilty_level=0, **kwargs): + if game_version == "robbers" and visibilty_level == 0: + warnings.warn("maze_craze has different versions of the game via the `game_version` argument, consider overriding.") + assert game_version in avaliable_versions, f"`game_version` parameter must be one of {avaliable_versions.keys()}" + assert 0 <= visibilty_level < 4, "visibility level must be between 0 and 4, where 0 is 100% visibility and 3 is 0% visibility" + base_mode = (avaliable_versions[game_version] - 1) * 4 + mode = base_mode + visibilty_level + return BaseAtariEnv(game="maze_craze", num_players=2, 
mode_num=mode, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/othello_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/othello_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..82159b90783716c8b0c62091f37ae1348646af32 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/othello_v2.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="othello", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/pong_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/pong_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..4ef13f0b3ef4e76bb9f255fb8ff65e36e12146bf --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/pong_v2.py @@ -0,0 +1,37 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + +avaliable_2p_versions = { + "classic": 4, + "two_paddles": 10, + "soccer": 14, + "foozpong": 19, + "hockey": 27, + "handball": 35, + "volleyball": 39, + "basketball": 45, +} +avaliable_4p_versions = { + "classic": 6, + "two_paddles": 11, + "soccer": 16, + "foozpong": 21, + "hockey": 29, + "quadrapong": 33, + "handball": 37, + "volleyball": 41, + "basketball": 49, +} + + +def raw_env(num_players=2, game_version="classic", **kwargs): + assert num_players == 2 or num_players == 4, "pong only supports 2 or 4 players" + versions = avaliable_2p_versions if num_players == 2 else avaliable_4p_versions + assert game_version in versions, f"pong version {game_version} not supported for number of players {num_players}. 
Available options are {list(versions)}" + mode = versions[game_version] + return BaseAtariEnv(game="pong", num_players=num_players, mode_num=mode, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/quadrapong_v3.py b/MLPY/Lib/site-packages/pettingzoo/atari/quadrapong_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..96943babd9233b79fd03eaae6d45e97d2d520e24 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/quadrapong_v3.py @@ -0,0 +1,13 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + mode = 33 + num_players = 4 + return BaseAtariEnv(game="pong", num_players=num_players, mode_num=mode, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/space_invaders_v1.py b/MLPY/Lib/site-packages/pettingzoo/atari/space_invaders_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..cb350ebab290a355c7e384f7ddb64f688e96248f --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/space_invaders_v1.py @@ -0,0 +1,18 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(alternating_control=False, moving_shields=True, zigzaging_bombs=False, fast_bomb=False, invisible_invaders=False, **kwargs): + mode = 33 + ( + moving_shields * 1 + + zigzaging_bombs * 2 + + fast_bomb * 4 + + invisible_invaders * 8 + + alternating_control * 16 + ) + return BaseAtariEnv(game="space_invaders", num_players=2, mode_num=mode, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/space_war_v1.py b/MLPY/Lib/site-packages/pettingzoo/atari/space_war_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..91c7ba704605b0b3898d86e48065eb7522c1332a --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/space_war_v1.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="space_war", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/surround_v1.py b/MLPY/Lib/site-packages/pettingzoo/atari/surround_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..7ed3d390a3e487b90d0878c960035562a1abead9 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/surround_v1.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="surround", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/tennis_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/tennis_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..45cdf9065597610e4f4427e057a6ece915cba5bc --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/tennis_v2.py @@ -0,0 +1,11 @@ +import os + 
+from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="tennis", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/video_checkers_v3.py b/MLPY/Lib/site-packages/pettingzoo/atari/video_checkers_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..903f7913c55d1e3764115b637a5dd9b917aaa294 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/video_checkers_v3.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="video_checkers", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/volleyball_pong_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/volleyball_pong_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..eb2cedf1538762343bc2b62573e6b7d9bc952eec --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/volleyball_pong_v2.py @@ -0,0 +1,14 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(num_players=4, **kwargs): + assert num_players == 2 or num_players == 4, "pong only supports 2 or 4 players" + mode_mapping = {2: 39, 4: 41} + mode = mode_mapping[num_players] + return BaseAtariEnv(game="pong", num_players=num_players, mode_num=mode, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/warlords_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/warlords_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..ce7e44930581edfdbf58d342f1b23bfbcdf80a4f --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/warlords_v2.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="warlords", num_players=4, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/atari/wizard_of_wor_v2.py b/MLPY/Lib/site-packages/pettingzoo/atari/wizard_of_wor_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..5b199591b8285d6100463ebab3ffa6437ea61614 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/atari/wizard_of_wor_v2.py @@ -0,0 +1,11 @@ +import os + +from .base_atari_env import BaseAtariEnv, base_env_wrapper_fn, parallel_wrapper_fn + + +def raw_env(**kwargs): + return BaseAtariEnv(game="wizard_of_wor", num_players=2, mode_num=None, env_name=os.path.basename(__file__)[:-3], **kwargs) + + +env = base_env_wrapper_fn(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/__init__.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ded2c453b63215d5c2e1c2eb55d820238962a0fb --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/__init__.py @@ -0,0 +1,5 @@ +from pettingzoo.utils.deprecated_module import 
depricated_handler + + +def __getattr__(env_name): + return depricated_handler(env_name, __path__, __name__) diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b67e9d7528e7b36155470af1ddd117a3f33bef1f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/cooperative_pong_v5.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/cooperative_pong_v5.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe8fa51cd5525ce3fb023d59e95e2eb3fb9dcee1 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/cooperative_pong_v5.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/knights_archers_zombies_v8.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/knights_archers_zombies_v8.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aecde2ce1cd5115f2251f97334314c3248097a76 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/knights_archers_zombies_v8.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/pistonball_v6.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/pistonball_v6.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3de72aab3b321bb9653d9eeea2c62420defe3e71 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/pistonball_v6.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/prison_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/prison_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f2affa52109f4b0cf61752998deb95dddc288915 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/prison_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/prospector_v4.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/prospector_v4.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7e7acb67958f7a1267d324c77a71ad4e076d562e Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/__pycache__/prospector_v4.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__init__.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9b760bb4e4c4260ea3714db17d7fbb8f76bba994 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/ball.cpython-39.pyc 
b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/ball.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c594e09a229e3dff039f67d20fe4d68e13ef1161 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/ball.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/cake_paddle.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/cake_paddle.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5d7d921fdf89582c2a6b8c51974d990631fb6404 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/cake_paddle.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/cooperative_pong.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/cooperative_pong.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c5618912db807fd2f09966216475d382a35b736f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/cooperative_pong.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/manual_control.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/manual_control.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..732b9d0dc877aa0d616d9134f3944b5f14410845 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/manual_control.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/paddle.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/paddle.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c9447c63bc97919414951c85ed30c9d7fa0a22bc Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/__pycache__/paddle.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/ball.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/ball.py new file mode 100644 index 0000000000000000000000000000000000000000..3c9970f7a17e5b1e80d108b3c9559d1a1eb3d957 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/ball.py @@ -0,0 +1,62 @@ +import numpy as np +import pygame + + +def get_small_random_value(randomizer): + # generates a small random value between [0, 1/100) + return (1 / 100) * randomizer.rand() + + +class Ball(pygame.sprite.Sprite): + def __init__(self, randomizer, dims, speed, bounce_randomness=False): + self.surf = pygame.Surface(dims) + self.rect = self.surf.get_rect() + self.speed_val = speed + self.speed = [int(self.speed_val * np.cos(np.pi / 4)), int(self.speed_val * np.sin(np.pi / 4))] + self.bounce_randomness = bounce_randomness + self.done = False + self.hit = False + self.randomizer = randomizer + + def update2(self, area, p0, p1): + # move ball rect + self.rect.x += self.speed[0] + self.rect.y += self.speed[1] + + if not area.contains(self.rect): + # bottom wall + if self.rect.bottom > area.bottom: + self.rect.bottom = area.bottom + self.speed[1] = -self.speed[1] + # top wall + elif self.rect.top < area.top: + self.rect.top = area.top + self.speed[1] = -self.speed[1] + # right or left walls + 
else: + return True + self.speed[0] = -self.speed[0] + + else: + # Do ball and bat collide? + # add some randomness + r_val = 0 + if self.bounce_randomness: + r_val = get_small_random_value(self.randomizer) + + # ball in left half of screen + if self.rect.center[0] < area.center[0]: + is_collision, self.rect, self.speed = p0.process_collision(self.rect, self.speed, 1) + if is_collision: + self.speed = [self.speed[0] + np.sign(self.speed[0]) * r_val, self.speed[1] + np.sign(self.speed[1]) * r_val] + # ball in right half + else: + is_collision, self.rect, self.speed = p1.process_collision(self.rect, self.speed, 2) + if is_collision: + self.speed = [self.speed[0] + np.sign(self.speed[0]) * r_val, self.speed[1] + np.sign(self.speed[1]) * r_val] + + return False + + def draw(self, screen): + # screen.blit(self.surf, self.rect) + pygame.draw.rect(screen, (255, 255, 255), self.rect) diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/cake_paddle.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/cake_paddle.py new file mode 100644 index 0000000000000000000000000000000000000000..18c127a2f687aa5cb422a01b719981296ee23027 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/cake_paddle.py @@ -0,0 +1,93 @@ +import os + +import pygame + + +class CakePaddle(pygame.sprite.Sprite): + + def __init__(self, speed=12, render_ratio=2): + self.render_ratio = render_ratio + # surf is the right-most (largest) tier of the cake + self.surf = pygame.Surface((30 // render_ratio, 120 // render_ratio)) + self.rect = self.surf.get_rect() + self.surf2 = pygame.Surface((30 // render_ratio, 80 // render_ratio)) + self.rect2 = self.surf2.get_rect() + self.surf3 = pygame.Surface((30 // render_ratio, 40 // render_ratio)) + self.rect3 = self.surf3.get_rect() + self.surf4 = pygame.Surface((30 // render_ratio, 10 // render_ratio)) + self.rect4 = self.surf4.get_rect() + + self.speed = speed + + def reset(self): + # self.rect is set from env class + self.rect2.midright = self.rect.midleft + self.rect3.midright = self.rect2.midleft + self.rect4.midright = self.rect3.midleft + + def draw(self, screen): + pygame.draw.rect(screen, (255, 255, 255), self.rect) + pygame.draw.rect(screen, (255, 255, 255), self.rect2) + pygame.draw.rect(screen, (255, 255, 255), self.rect3) + pygame.draw.rect(screen, (255, 255, 255), self.rect4) + + def update(self, area, action): + # action: 1 - up, 2 - down + movepos = [0, 0] + if action == 1: + movepos[1] = movepos[1] - self.speed + elif action == 2: + movepos[1] = movepos[1] + self.speed + + newpos = self.rect.move(movepos) + if area.contains(newpos): + self.rect = newpos + # move other rects too + self.rect2 = self.rect2.move(movepos) + self.rect3 = self.rect3.move(movepos) + self.rect4 = self.rect4.move(movepos) + + def _process_collision_with_rect(self, rect, b_rect, b_speed, paddle_type): + # handle collision from top + if b_rect.bottom > rect.top and b_rect.top - b_speed[1] < rect.top and b_speed[1] > 0: + b_rect.bottom = rect.top + if b_speed[1] > 0: + b_speed[1] *= -1 + # handle collision from bottom + elif b_rect.top < rect.bottom and b_rect.bottom - b_speed[1] > rect.bottom and b_speed[1] < 0: + b_rect.top = rect.bottom + if b_speed[1] < 0: + b_speed[1] *= -1 + # handle collision from left + if b_rect.right > rect.left: + b_rect.right = rect.left + if b_speed[0] > 0: + b_speed[0] *= -1 + return True, b_rect, b_speed + + def process_collision(self, b_rect, b_speed, paddle_type): + ''' + + Parameters + ---------- + b_rect : Ball 
rect + dx, dy : Ball speed along single axis + b_speed : Ball speed + ignore paddle type + + Returns + ------- + is_collision: 1 if ball collides with paddle + b_rect: new ball rect + b_speed: new ball speed + + ''' + if self.rect4.colliderect(b_rect): + return self._process_collision_with_rect(self.rect4, b_rect, b_speed, paddle_type) + elif self.rect3.colliderect(b_rect): + return self._process_collision_with_rect(self.rect3, b_rect, b_speed, paddle_type) + elif self.rect2.colliderect(b_rect): + return self._process_collision_with_rect(self.rect2, b_rect, b_speed, paddle_type) + elif self.rect.colliderect(b_rect): + return self._process_collision_with_rect(self.rect, b_rect, b_speed, paddle_type) + return False, b_rect, b_speed diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/cooperative_pong.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/cooperative_pong.py new file mode 100644 index 0000000000000000000000000000000000000000..b67e0e5d0ad76a91e8a7539bed9612589043f793 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/cooperative_pong.py @@ -0,0 +1,310 @@ +import os + +import gym +import numpy as np +import pygame +from gym.utils import EzPickle, seeding + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from .ball import Ball +from .cake_paddle import CakePaddle +from .paddle import Paddle + +FPS = 15 + + +def deg_to_rad(deg): + return deg * np.pi / 180 + + +def get_flat_shape(width, height, kernel_window_length=2): + return int(width * height / (kernel_window_length * kernel_window_length)) + + +def original_obs_shape(screen_width, screen_height, kernel_window_length=2): + return (int(screen_height * 2 / kernel_window_length), int(screen_width * 2 / (kernel_window_length)), 1) + + +def get_valid_angle(randomizer): + # generates an angle in [0, 2*np.pi) that + # excludes (90 +- ver_deg_range), (270 +- ver_deg_range), (0 +- hor_deg_range), (180 +- hor_deg_range) + # (65, 115), (245, 295), (170, 190), (0, 10), (350, 360) + ver_deg_range = 25 + hor_deg_range = 10 + a1 = deg_to_rad(90 - ver_deg_range) + b1 = deg_to_rad(90 + ver_deg_range) + a2 = deg_to_rad(270 - ver_deg_range) + b2 = deg_to_rad(270 + ver_deg_range) + c1 = deg_to_rad(180 - hor_deg_range) + d1 = deg_to_rad(180 + hor_deg_range) + c2 = deg_to_rad(360 - hor_deg_range) + d2 = deg_to_rad(0 + hor_deg_range) + + angle = 0 + while ((angle > a1 and angle < b1) or (angle > a2 and angle < b2) or (angle > c1 and angle < d1) or (angle > c2) or (angle < d2)): + angle = 2 * np.pi * randomizer.rand() + + return angle + + +class CooperativePong: + def __init__(self, randomizer, ball_speed=9, left_paddle_speed=12, right_paddle_speed=12, cake_paddle=True, max_cycles=900, bounce_randomness=False, max_reward=100, off_screen_penalty=-10, render_ratio=2, kernel_window_length=2): + super().__init__() + + pygame.init() + self.num_agents = 2 + + self.render_ratio = render_ratio + self.kernel_window_length = kernel_window_length + + # Display screen + self.s_width, self.s_height = 960 // render_ratio, 560 // render_ratio + self.screen = pygame.Surface((self.s_width, self.s_height)) # (960, 720) # (640, 480) # (100, 200) + self.area = self.screen.get_rect() + self.max_reward = max_reward + self.off_screen_penalty = off_screen_penalty + + # define action and observation spaces + self.action_space = [gym.spaces.Discrete(3) for _ in 
range(self.num_agents)] + original_shape = original_obs_shape(self.s_width, self.s_height, kernel_window_length=kernel_window_length) + original_color_shape = (original_shape[0], original_shape[1], 3) + self.observation_space = [gym.spaces.Box(low=0, high=255, shape=(original_color_shape), dtype=np.uint8) for _ in range(self.num_agents)] + # define the global space of the environment or state + self.state_space = gym.spaces.Box(low=0, high=255, shape=((self.s_height, self.s_width, 3)), dtype=np.uint8) + + self.renderOn = False + + # set speed + self.speed = [ball_speed, left_paddle_speed, right_paddle_speed] + + self.max_cycles = max_cycles + + # paddles + self.p0 = Paddle((20 // render_ratio, 80 // render_ratio), left_paddle_speed) + if cake_paddle: + self.p1 = CakePaddle(right_paddle_speed, render_ratio=render_ratio) + else: + self.p1 = Paddle((20 // render_ratio, 100 // render_ratio), right_paddle_speed) + + self.agents = ["paddle_0", "paddle_1"] # list(range(self.num_agents)) + + # ball + self.ball = Ball(randomizer, (20 // render_ratio, 20 // render_ratio), ball_speed, bounce_randomness) + self.randomizer = randomizer + + self.reinit() + + def reinit(self): + self.rewards = dict(zip(self.agents, [0.0] * len(self.agents))) + self.dones = dict(zip(self.agents, [False] * len(self.agents))) + self.infos = dict(zip(self.agents, [{}] * len(self.agents))) + self.score = 0 + + def reset(self): + # reset ball and paddle init conditions + self.ball.rect.center = self.area.center + # set the direction to an angle between [0, 2*np.pi) + angle = get_valid_angle(self.randomizer) + # angle = deg_to_rad(89) + self.ball.speed = [int(self.ball.speed_val * np.cos(angle)), int(self.ball.speed_val * np.sin(angle))] + + self.p0.rect.midleft = self.area.midleft + self.p1.rect.midright = self.area.midright + self.p0.reset() + self.p1.reset() + self.p0.speed = self.speed[1] + self.p1.speed = self.speed[2] + + self.done = False + + self.num_frames = 0 + + self.reinit() + + self.draw() + + def close(self): + if self.renderOn: + pygame.event.pump() + pygame.display.quit() + self.renderOn = False + + def enable_render(self): + self.screen = pygame.display.set_mode(self.screen.get_size()) + self.renderOn = True + self.draw() + + def render(self, mode='human'): + if not self.renderOn and mode == "human": + # sets self.renderOn to true and initializes display + self.enable_render() + + observation = np.array(pygame.surfarray.pixels3d(self.screen)) + if mode == "human": + pygame.display.flip() + return np.transpose(observation, axes=(1, 0, 2)) if mode == "rgb_array" else None + + def observe(self): + observation = np.array(pygame.surfarray.pixels3d(self.screen)) + observation = np.rot90(observation, k=3) # now the obs is laid out as H, W as rows and cols + observation = np.fliplr(observation) # laid out in the correct order + return observation + + def state(self): + ''' + Returns an observation of the global environment + ''' + state = pygame.surfarray.pixels3d(self.screen).copy() + state = np.rot90(state, k=3) + state = np.fliplr(state) + return state + + def draw(self): + pygame.draw.rect(self.screen, (0, 0, 0), self.area) + self.p0.draw(self.screen) + self.p1.draw(self.screen) + self.ball.draw(self.screen) + + def step(self, action, agent): + + # update p0, p1 accordingly + # action: 0: do nothing, + # action: 1: p[i] move up + # action: 2: p[i] move down + if agent == self.agents[0]: + self.rewards = {a: 0 for a in self.agents} + self.p0.update(self.area, action) + elif agent == self.agents[1]: + 
self.p1.update(self.area, action) + + # do the rest if not done + if not self.done: + # update ball position + self.done = self.ball.update2(self.area, self.p0, self.p1) + + # do the miscellaneous stuff after the last agent has moved + # reward is the length of time ball is in play + reward = 0 + # ball is out-of-bounds + if self.done: + reward = self.off_screen_penalty + self.score += reward + if not self.done: + self.num_frames += 1 + reward = self.max_reward / self.max_cycles + self.score += reward + if self.num_frames == self.max_cycles: + self.done = True + + for ag in self.agents: + self.rewards[ag] = reward + self.dones[ag] = self.done + self.infos[ag] = {} + + if self.renderOn: + pygame.event.pump() + self.draw() + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +parallel_env = parallel_wrapper_fn(env) + + +class raw_env(AECEnv, EzPickle): + # class env(MultiAgentEnv): + metadata = { + 'render.modes': ['human', "rgb_array"], + 'name': "cooperative_pong_v5", + 'is_parallelizable': True, + 'video.frames_per_second': FPS + } + + def __init__(self, **kwargs): + EzPickle.__init__(self, **kwargs) + self._kwargs = kwargs + + self.seed() + + self.agents = self.env.agents[:] + self.possible_agents = self.agents[:] + self._agent_selector = agent_selector(self.agents) + self.agent_selection = self._agent_selector.reset() + # spaces + self.action_spaces = dict(zip(self.agents, self.env.action_space)) + self.observation_spaces = dict(zip(self.agents, self.env.observation_space)) + self.state_space = self.env.state_space + # dicts + self.observations = {} + self.rewards = self.env.rewards + self.dones = self.env.dones + self.infos = self.env.infos + + self.score = self.env.score + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + # def convert_to_dict(self, list_of_list): + # return dict(zip(self.agents, list_of_list)) + def seed(self, seed=None): + self.randomizer, seed = seeding.np_random(seed) + self.env = CooperativePong(self.randomizer, **self._kwargs) + + def reset(self): + self.env.reset() + self.agents = self.possible_agents[:] + self.agent_selection = self._agent_selector.reset() + self.rewards = self.env.rewards + self._cumulative_rewards = {a: 0 for a in self.agents} + self.dones = self.env.dones + self.infos = self.env.infos + + def observe(self, agent): + obs = self.env.observe() + return obs + + def state(self): + state = self.env.state() + return state + + def close(self): + self.env.close() + + def render(self, mode='human'): + return self.env.render(mode) + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + agent = self.agent_selection + if not self.action_spaces[agent].contains(action): + raise Exception('Action for agent {} must be in Discrete({}).' 
+ 'It is currently {}'.format(agent, self.action_spaces[agent].n, action)) + + self.env.step(action, agent) + # select next agent and observe + self.agent_selection = self._agent_selector.next() + self.rewards = self.env.rewards + self.dones = self.env.dones + self.infos = self.env.infos + + self.score = self.env.score + + self._cumulative_rewards[agent] = 0 + self._accumulate_rewards() + +# This was originally created, in full, by Ananth Hari in a different repo, and was +# added in by J K Terry (which is why they're shown as the creator in the git history) diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/manual_control.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/manual_control.py new file mode 100644 index 0000000000000000000000000000000000000000..05b3c8a1ac2c337cb9101bc5acb58aceb5101034 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/manual_control.py @@ -0,0 +1,63 @@ +import os + +import numpy as np +import pygame + + +def manual_control(**kwargs): + from .cooperative_pong import env as _env + env = _env(**kwargs) + env.reset() + + quit_loop = 0 + + pygame.key.set_repeat(20, 0) + clock = pygame.time.Clock() + + total_reward = 0 + initial_iteration = {agent: True for agent in env.agents} + dones = {agent: False for agent in env.agents} + done = all(dones.values()) + + while not done: + clock.tick(15) + action_dict = {agent: 0 for agent in env.agents} # do nothing + for event in pygame.event.get(): + if event.type == pygame.QUIT: + quit_loop = 1 + break + elif event.type == pygame.KEYDOWN: + # Quit if ESC is pressed + if event.key == pygame.K_ESCAPE: + quit_loop = 1 + break + if event.key == pygame.K_BACKSPACE: + env.reset() + total_reward = 0 + if event.key == pygame.K_w: + # player1.moveup() + action_dict[env.agents[0]] = 1 + if event.key == pygame.K_s: + # player1.movedown() + action_dict[env.agents[0]] = 2 + if event.key == pygame.K_UP: + # player2.moveup() + action_dict[env.agents[1]] = 1 + if event.key == pygame.K_DOWN: + # player2.movedown() + action_dict[env.agents[1]] = 2 + if quit_loop: + break + + for _ in env.agents: + agent = env.agent_selection + obs, reward, dones[agent], _ = env.last() + total_reward += reward + initial_iteration[agent] = False + env.step(action_dict[agent]) + done = all(env.dones.values()) + + env.render() + pygame.event.pump() + + env.close() diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/paddle.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/paddle.py new file mode 100644 index 0000000000000000000000000000000000000000..99d4a189dfeeb3661517bf1eb3ee6bdf52ef13e0 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong/paddle.py @@ -0,0 +1,67 @@ +import pygame + + +class Paddle(pygame.sprite.Sprite): + def __init__(self, dims, speed): + self.surf = pygame.Surface(dims) + self.rect = self.surf.get_rect() + self.speed = speed + + def reset(self): + pass + + def draw(self, screen): + pygame.draw.rect(screen, (255, 255, 255), self.rect) + + def update(self, area, action): + # action: 1 - up, 2 - down + movepos = [0, 0] + if action > 0: + if action == 1: + movepos[1] = movepos[1] - self.speed + elif action == 2: + movepos[1] = movepos[1] + self.speed + + # make sure the players stay inside the screen + newpos = self.rect.move(movepos) + if area.contains(newpos): + self.rect = newpos + + def process_collision(self, b_rect, b_speed, paddle_type): + ''' + + Parameters + ---------- + b_rect : Ball rect + dx, dy : Ball 
speed along single axis + b_speed : Ball speed + + Returns + ------- + is_collision: 1 if ball collides with paddle + b_rect: new ball rect + b_speed: new ball speed + + ''' + if not self.rect.colliderect(b_rect): + return False, b_rect, b_speed + # handle collision from left or right + if paddle_type == 1 and b_rect.left < self.rect.right: + b_rect.left = self.rect.right + if b_speed[0] < 0: + b_speed[0] *= -1 + elif paddle_type == 2 and b_rect.right > self.rect.left: + b_rect.right = self.rect.left + if b_speed[0] > 0: + b_speed[0] *= -1 + # handle collision from top + if b_rect.bottom > self.rect.top and b_rect.top - b_speed[1] < self.rect.top and b_speed[1] > 0: + b_rect.bottom = self.rect.top + if b_speed[1] > 0: + b_speed[1] *= -1 + # handle collision from bottom + elif b_rect.top < self.rect.bottom and b_rect.bottom - b_speed[1] > self.rect.bottom and b_speed[1] < 0: + b_rect.top = self.rect.bottom - 1 + if b_speed[1] < 0: + b_speed[1] *= -1 + return True, b_rect, b_speed diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong_v5.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong_v5.py new file mode 100644 index 0000000000000000000000000000000000000000..65d597070ad22cc958450e11cf3e2f8eae3d2cb3 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/cooperative_pong_v5.py @@ -0,0 +1,2 @@ +from .cooperative_pong import manual_control +from .cooperative_pong.cooperative_pong import env, parallel_env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/__init__.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f876375e90587b987d9240a569f2ea3ae841a95c Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/__pycache__/knights_archers_zombies.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/__pycache__/knights_archers_zombies.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..09d82e2a06d5970b407e615f6b40e71fdab45323 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/__pycache__/knights_archers_zombies.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/__pycache__/manual_control.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/__pycache__/manual_control.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..abcdb2393075680090de8bd2c251922d6c64750f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/__pycache__/manual_control.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/archer.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/archer.png new file mode 100644 index 0000000000000000000000000000000000000000..171310377a5070cad2370fdf65e80585598f150a Binary files 
/dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/archer.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/arrow.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/arrow.png new file mode 100644 index 0000000000000000000000000000000000000000..29e8e097aa0fd29536fe878fc2a96c97c4c63bc5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/arrow.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/knight.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/knight.png new file mode 100644 index 0000000000000000000000000000000000000000..c307907fe0784681a61454c81e1c0c0c78b5c82d Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/knight.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/left_wall.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/left_wall.png new file mode 100644 index 0000000000000000000000000000000000000000..8af6f3eaab471d6edf120073897341c1014df4ce Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/left_wall.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/mace.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/mace.png new file mode 100644 index 0000000000000000000000000000000000000000..3f735b9d9f4e1f9013f8f799a9639ced9bfeed07 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/mace.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch1.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch1.png new file mode 100644 index 0000000000000000000000000000000000000000..5e2466bce300b52f28005cf79f6723b7c46c867e Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch1.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch2.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch2.png new file mode 100644 index 0000000000000000000000000000000000000000..fad38ab9af61c65ff2542fab2ed0738213a7f99f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch2.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch3.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch3.png new file mode 100644 index 0000000000000000000000000000000000000000..80a0b9ce21285c6422f43efc66bd851b514bb617 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch3.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch4.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch4.png new file mode 100644 index 0000000000000000000000000000000000000000..20595ff91c675cde2b4996f9de1f5605b649cd64 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/patch4.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/right_wall.png 
b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/right_wall.png new file mode 100644 index 0000000000000000000000000000000000000000..eecaf7b32873d6a653aa0168257a7b7b0572af62 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/right_wall.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/zombie.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/zombie.png new file mode 100644 index 0000000000000000000000000000000000000000..1ef77a3f666fd26dbaa2833d891e875d6b694ddc Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/img/zombie.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/knights_archers_zombies.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/knights_archers_zombies.py new file mode 100644 index 0000000000000000000000000000000000000000..a0871b9780c6ddc45fc68ffb05997360b00318da --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/knights_archers_zombies.py @@ -0,0 +1,526 @@ +import os +import sys + +import numpy as np +import pygame +import pygame.gfxdraw +from gym.spaces import Box, Discrete +from gym.utils import EzPickle, seeding + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from .manual_control import manual_control +from .src import constants as const +from .src.img import get_image +from .src.players import Archer, Knight +from .src.weapons import Arrow, Sword +from .src.zombie import Zombie + +sys.dont_write_bytecode = True + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +parallel_env = parallel_wrapper_fn(env) + + +class raw_env(AECEnv, EzPickle): + + metadata = { + "render.modes": ["human", "rgb_array"], + "name": "knights_archers_zombies_v8", + "is_parallelizable": True, + "video.frames_per_second": const.FPS, + } + + def __init__( + self, + spawn_rate=20, + num_archers=2, + num_knights=2, + killable_knights=True, + killable_archers=True, + pad_observation=True, + line_death=False, + max_cycles=900, + ): + EzPickle.__init__( + self, + spawn_rate, + num_archers, + num_knights, + killable_knights, + killable_archers, + pad_observation, + line_death, + max_cycles, + ) + + # Game Status + self.frames = 0 + self.closed = False + self.has_reset = False + self.render_on = False + + # Game Constants + self.seed() + self.spawn_rate = spawn_rate + self.max_cycles = max_cycles + self.pad_observation = pad_observation + self.killable_knights = killable_knights + self.killable_archers = killable_archers + self.line_death = line_death + self.num_archers = num_archers + self.num_knights = num_knights + + # Represents agents to remove at end of cycle + self.kill_list = [] + self.agent_list = [] + self.agents = [] + self.dead_agents = [] + + self.agent_name_mapping = {} + a_count = 0 + for i in range(self.num_archers): + a_name = "archer_" + str(i) + self.agents.append(a_name) + self.agent_name_mapping[a_name] = a_count + a_count += 1 + for i in range(self.num_knights): + k_name = "knight_" + str(i) + self.agents.append(k_name) + self.agent_name_mapping[k_name] = a_count + a_count += 1 + + self.observation_spaces = dict( + zip( + self.agents, + [ + Box(low=0, high=255, shape=(512, 512, 3), 
dtype=np.uint8) + for _ in enumerate(self.agents) + ], + ) + ) + self.action_spaces = dict( + zip(self.agents, [Discrete(6) for _ in enumerate(self.agents)]) + ) + self.state_space = Box( + low=0, + high=255, + shape=(const.SCREEN_HEIGHT, const.SCREEN_WIDTH, 3), + dtype=np.uint8, + ) + self.possible_agents = self.agents + + # Initializing Pygame + pygame.init() + # self.WINDOW = pygame.display.set_mode([self.WIDTH, self.HEIGHT]) + self.WINDOW = pygame.Surface((const.SCREEN_WIDTH, const.SCREEN_HEIGHT)) + pygame.display.set_caption("Knights, Archers, Zombies") + self.left_wall = get_image(os.path.join("img", "left_wall.png")) + self.right_wall = get_image(os.path.join("img", "right_wall.png")) + self.right_wall_rect = self.right_wall.get_rect() + self.right_wall_rect.left = const.SCREEN_WIDTH - self.right_wall_rect.width + self.floor_patch1 = get_image(os.path.join("img", "patch1.png")) + self.floor_patch2 = get_image(os.path.join("img", "patch2.png")) + self.floor_patch3 = get_image(os.path.join("img", "patch3.png")) + self.floor_patch4 = get_image(os.path.join("img", "patch4.png")) + + self._agent_selector = agent_selector(self.agents) + self.reinit() + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def seed(self, seed=None): + self.np_random, seed = seeding.np_random(seed) + + # Spawn Zombies at Random Location at every 100 iterations + def spawn_zombie(self): + self.zombie_spawn_rate += 1 + zombie = Zombie(self.np_random) + + if self.zombie_spawn_rate >= self.spawn_rate: + zombie.rect.x = self.np_random.randint(0, const.SCREEN_WIDTH) + zombie.rect.y = 5 + + self.zombie_list.add(zombie) + self.zombie_spawn_rate = 0 + + # Spawn Swords for Players + def action_sword(self, action, agent): + if ( + action == 5 + and agent.is_knight + and agent.weapon_timeout > const.SWORD_TIMEOUT + ): + # make sure that the current knight doesn't have a sword already + for sword in self.sword_list: + if sword.knight == agent: + return + + self.sword_list.add(Sword(agent)) + + # Spawn Arrows for Players + def action_arrow(self, action, agent): + if ( + action == 5 + and agent.is_archer + and agent.weapon_timeout > const.ARROW_TIMEOUT + ): + self.arrow_list.add(Arrow(agent)) + + # Stab the Sword + def move_sword(self): + for sword in self.sword_list: + sword_active = sword.update() + if not sword_active: + self.sword_list.remove(sword) + + # Zombie Kills the Knight (also remove the sword) + def zombit_hit_knight(self): + for zombie in self.zombie_list: + zombie_knight_list = pygame.sprite.spritecollide( + zombie, self.knight_list, True + ) + + for knight in zombie_knight_list: + knight.alive = False + self.knight_list.remove(knight) + if knight.agent_name not in self.kill_list: + self.kill_list.append(knight.agent_name) + + for sword in self.sword_list: + if sword.knight == knight: + self.sword_list.remove(sword) + + # Zombie Kills the Archer + def zombie_hit_archer(self): + for zombie in self.zombie_list: + zombie_archer_list = pygame.sprite.spritecollide( + zombie, self.archer_list, True + ) + + for archer in zombie_archer_list: + archer.alive = False + self.archer_list.remove(archer) + if archer.agent_name not in self.kill_list: + self.kill_list.append(archer.agent_name) + + # Zombie Kills the Sword + def sword_hit(self): + for sword in self.sword_list: + zombie_sword_list = pygame.sprite.spritecollide( + sword, self.zombie_list, True + ) + + # For each zombie hit, remove the sword and add to the score + 
for zombie in zombie_sword_list: + self.zombie_list.remove(zombie) + sword.knight.score += 1 + + # Zombie Kills the Arrow + def arrow_hit(self): + for arrow in self.arrow_list: + zombie_arrow_list = pygame.sprite.spritecollide( + arrow, self.zombie_list, True + ) + + # For each zombie hit, remove the arrow, zombie and add to the score + for zombie in zombie_arrow_list: + self.arrow_list.remove(arrow) + self.zombie_list.remove(zombie) + # score += 1 + arrow.archer.score += 1 + + # Remove the arrow if it flies up off the screen + if arrow.rect.y < 0: + self.arrow_list.remove(arrow) + + # Zombie reaches the End of the Screen + def zombie_endscreen(self, run, zombie_list): + for zombie in zombie_list: + if zombie.rect.y > 690: + run = False + return run + + # Zombie Kills all Players + def zombie_all_players(self, knight_list, archer_list, run): + if not knight_list and not archer_list: + run = False + return run + + def observe(self, agent): + screen = pygame.surfarray.pixels3d(self.WINDOW) + + i = self.agent_name_mapping[agent] + agent_obj = self.agent_list[i] + agent_position = (agent_obj.rect.x, agent_obj.rect.y) + + if not agent_obj.alive: + cropped = np.zeros((512, 512, 3), dtype=np.uint8) + else: + min_x = agent_position[0] - 256 + max_x = agent_position[0] + 256 + min_y = agent_position[1] - 256 + max_y = agent_position[1] + 256 + lower_y_bound = max(min_y, 0) + upper_y_bound = min(max_y, const.SCREEN_HEIGHT) + lower_x_bound = max(min_x, 0) + upper_x_bound = min(max_x, const.SCREEN_WIDTH) + startx = lower_x_bound - min_x + starty = lower_y_bound - min_y + endx = 512 + upper_x_bound - max_x + endy = 512 + upper_y_bound - max_y + cropped = np.zeros_like(self.observation_spaces[agent].low) + cropped[startx:endx, starty:endy, :] = screen[ + lower_x_bound:upper_x_bound, lower_y_bound:upper_y_bound, : + ] + + return np.swapaxes(cropped, 1, 0) + + def state(self): + """ + Returns an observation of the global environment + """ + state = pygame.surfarray.pixels3d(self.WINDOW).copy() + state = np.rot90(state, k=3) + state = np.fliplr(state) + return state + + def step(self, action): + # check if the particular agent is done + if self.dones[self.agent_selection]: + return self._was_done_step(action) + + # agent_list : list of agent instance indexed by number + # agent_name_mapping: dict of {str, idx} for agent index and name + # agent_selection : str representing the agent name + # agent: agent instance + agent = self.agent_list[self.agent_name_mapping[self.agent_selection]] + + # this is... so whacky... but all actions here are index with 1 so... 
ok + action = action + 1 + out_of_bounds = agent.update(action) + + # check for out of bounds death + if self.line_death and out_of_bounds: + agent.alive = False + if agent in self.archer_list: + self.archer_list.remove(agent) + else: + self.knight_list.remove(agent) + self.kill_list.append(agent.agent_name) + + # actuate the sword + self.action_sword(action, agent) + + # actuate the arrow + self.action_arrow(action, agent) + + # Do these things once per cycle + if self._agent_selector.is_last(): + + # Stab the Sword + self.move_sword() + + # Zombie Kills the Arrow + self.arrow_hit() + + # Zombie Kills the Sword + self.sword_hit() + + # Zombie Kills the Archer + if self.killable_archers: + self.zombie_hit_archer() + + # Zombie Kills the Knight + if self.killable_knights: + self.zombit_hit_knight() + + # update some zombies + for zombie in self.zombie_list: + zombie.update() + + # update some arrows + for arrow in self.arrow_list: + arrow.update() + + # Spawning Zombies at Random Location at every 100 iterations + self.spawn_zombie() + + self.draw() + + self.check_game_end() + self.frames += 1 + + self._clear_rewards() + self.rewards[self.agent_selection] = agent.score + agent.score = 0 + done = not self.run or self.frames >= self.max_cycles + self.dones = {a: done for a in self.possible_agents} + + # manage the kill list + if self._agent_selector.is_last(): + for k in self.kill_list: + self.agents.remove(k) + self.dead_agents.append(k) + + for k in self.dead_agents: + self.dones[k] = True + + # reset the kill list + self.kill_list = [] + + # reinit the agent selector with existing agents + self._agent_selector.reinit(self.agents) + + # if there still exist agents, get the next one + if len(self._agent_selector.agent_order): + self.agent_selection = self._agent_selector.next() + + self._cumulative_rewards[agent.agent_name] = 0 + self._accumulate_rewards() + self._dones_step_first() + + def enable_render(self): + self.WINDOW = pygame.display.set_mode([const.SCREEN_WIDTH, const.SCREEN_HEIGHT]) + # self.WINDOW = pygame.Surface((const.SCREEN_WIDTH, const.SCREEN_HEIGHT)) + self.render_on = True + self.draw() + + def draw(self): + self.WINDOW.fill((66, 40, 53)) + self.WINDOW.blit(self.left_wall, self.left_wall.get_rect()) + self.WINDOW.blit(self.right_wall, self.right_wall_rect) + self.WINDOW.blit(self.floor_patch1, (500, 500)) + self.WINDOW.blit(self.floor_patch2, (900, 30)) + self.WINDOW.blit(self.floor_patch3, (150, 430)) + self.WINDOW.blit(self.floor_patch4, (300, 50)) + self.WINDOW.blit(self.floor_patch1, (1000, 250)) + + # draw all the sprites + self.zombie_list.draw(self.WINDOW) + self.arrow_list.draw(self.WINDOW) + self.sword_list.draw(self.WINDOW) + self.archer_list.draw(self.WINDOW) + self.knight_list.draw(self.WINDOW) + + def render(self, mode="human"): + if not self.render_on and mode == "human": + # sets self.render_on to true and initializes display + self.enable_render() + + observation = np.array(pygame.surfarray.pixels3d(self.WINDOW)) + if mode == "human": + pygame.display.flip() + return ( + np.transpose(observation, axes=(1, 0, 2)) if mode == "rgb_array" else None + ) + + def close(self): + if not self.closed: + self.closed = True + if self.render_on: + # self.WINDOW = pygame.display.set_mode([const.SCREEN_WIDTH, const.SCREEN_HEIGHT]) + self.WINDOW = pygame.Surface((const.SCREEN_WIDTH, const.SCREEN_HEIGHT)) + self.render_on = False + pygame.event.pump() + pygame.display.quit() + + def check_game_end(self): + # Zombie reaches the End of the Screen + self.run = 
self.zombie_endscreen(self.run, self.zombie_list) + + # Zombie Kills all Players + self.run = self.zombie_all_players(self.knight_list, self.archer_list, self.run) + + def reinit(self): + # Dictionaries for holding new players and their weapons + self.archer_dict = {} + self.knight_dict = {} + + # Game Variables + self.score = 0 + self.run = True + self.zombie_spawn_rate = 0 + self.knight_player_num = self.archer_player_num = 0 + + # Creating Sprite Groups + self.zombie_list = pygame.sprite.Group() + self.arrow_list = pygame.sprite.Group() + self.sword_list = pygame.sprite.Group() + self.archer_list = pygame.sprite.Group() + self.knight_list = pygame.sprite.Group() + + # agent_list is a list of instances + # agents is a list of strings + self.agent_list = [] + self.agents = [] + self.dead_agents = [] + + for i in range(self.num_archers): + name = "archer_" + str(i) + self.archer_dict[f"archer{self.archer_player_num}"] = Archer( + agent_name=name + ) + self.archer_dict[f"archer{self.archer_player_num}"].offset(i * 50, 0) + self.archer_list.add(self.archer_dict[f"archer{self.archer_player_num}"]) + self.agent_list.append(self.archer_dict[f"archer{self.archer_player_num}"]) + if i != self.num_archers - 1: + self.archer_player_num += 1 + + for i in range(self.num_knights): + name = "knight_" + str(i) + self.knight_dict[f"knight{self.knight_player_num}"] = Knight( + agent_name=name + ) + self.knight_dict[f"knight{self.knight_player_num}"].offset(i * 50, 0) + self.knight_list.add(self.knight_dict[f"knight{self.knight_player_num}"]) + self.agent_list.append(self.knight_dict[f"knight{self.knight_player_num}"]) + if i != self.num_knights - 1: + self.knight_player_num += 1 + + self.agent_name_mapping = {} + a_count = 0 + for i in range(self.num_archers): + a_name = "archer_" + str(i) + self.agents.append(a_name) + self.agent_name_mapping[a_name] = a_count + a_count += 1 + for i in range(self.num_knights): + k_name = "knight_" + str(i) + self.agents.append(k_name) + self.agent_name_mapping[k_name] = a_count + a_count += 1 + + self.draw() + self.frames = 0 + + def reset(self): + self.has_reset = True + self.agents = self.possible_agents + self._agent_selector.reinit(self.agents) + self.agent_selection = self._agent_selector.next() + self.rewards = dict(zip(self.agents, [0 for _ in self.agents])) + self._cumulative_rewards = {a: 0 for a in self.agents} + self.dones = dict(zip(self.agents, [False for _ in self.agents])) + self.infos = dict(zip(self.agents, [{} for _ in self.agents])) + self.reinit() + + +# The original code for this game, which was added by J K Terry, was +# created by Dipam Patel in a different repository (hence the git history) + +# Game art purchased from https://finalbossblues.itch.io/time-fantasy-monsters +# and https://finalbossblues.itch.io/icons diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/manual_control.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/manual_control.py new file mode 100644 index 0000000000000000000000000000000000000000..529beaa89f072b71c0b011dec12a7862ca4d3c93 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/manual_control.py @@ -0,0 +1,62 @@ +import os +import time + +import pygame + + +def manual_control(**kwargs): + from .knights_archers_zombies import env as _env + env = _env(**kwargs) + env.reset() + done = False + FPS = 15 + clock = pygame.time.Clock() + + cur_agent = 0 + frame_count = 0 + # frame_limit = 500 + quit_game = 0 + + while not done: + 
clock.tick(FPS) + # while frame_count < frame_limit: # Uncomment this if you want the game to run for fame_limit amount of frames instead of ending by normal game conditions (useful for testing purposes) + agents = env.agents + frame_count += 1 + actions = [5 for x in range(len(env.agents))] # If you want to do manual input + # 5 is do nothing, 0 is up, 1 is down, 2 is turn CW, 3 is CCW, 4 is attack + for event in pygame.event.get(): + if event.type == pygame.KEYDOWN: + if event.key == pygame.K_ESCAPE: + quit_game = 1 + break + if event.key == pygame.K_BACKSPACE: + # Backspace to reset + env.reset() + # totalReward = 0 + if event.key == pygame.K_a: + cur_agent -= 1 + if cur_agent < 0: + cur_agent = len(agents) - 1 + if event.key == pygame.K_d: + cur_agent += 1 + if cur_agent > len(agents) - 1: + cur_agent = 0 + if event.key == pygame.K_q: + actions[cur_agent] = 2 + if event.key == pygame.K_e: + actions[cur_agent] = 3 + if event.key == pygame.K_w: + actions[cur_agent] = 0 + if event.key == pygame.K_s: + actions[cur_agent] = 1 + if event.key == pygame.K_f: + actions[cur_agent] = 4 + + if quit_game: + break + for a in actions: + env.step(a) + env.render() + done = any(env.dones.values()) + + env.close() diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__init__.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..453d3de5f14ca0e52f6e71ea2d6aef347c16bcf3 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/constants.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/constants.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6dd46678949ebea390c999b88e6586576d64cdec Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/constants.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/img.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/img.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a607acce6ce1d7842ef2ba69cf4954f26e539a02 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/img.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/players.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/players.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0d87d9cc0cb06bca4938bf60b94e0a1f9be0a566 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/players.cpython-39.pyc differ diff --git 
a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/weapons.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/weapons.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..750dd01b1a91fac69e5b61c84911abffdb441e09 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/weapons.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/zombie.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/zombie.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0334fb70fef09fabf51b310b86087001b8328cb3 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/__pycache__/zombie.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/constants.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..b8f3a1f168d6436bb2ddf38e580032c67b8fe2ca --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/constants.py @@ -0,0 +1,88 @@ +# video options +FPS = 15 +SCREEN_WIDTH = 1280 +SCREEN_HEIGHT = 720 +SCREEN_UNITS = 15 + +# zombie speeds +ZOMBIE_Y_SPEED = 5 +ZOMBIE_Y_SPEED = ZOMBIE_Y_SPEED * 15.0 / FPS +if ZOMBIE_Y_SPEED % 1.0 != 0.0: + raise ValueError( + f"FPS of {FPS} leads to decimal place value of {ZOMBIE_Y_SPEED} for zombie_y_speed." + ) +ZOMBIE_Y_SPEED = int(ZOMBIE_Y_SPEED) + +ZOMBIE_X_SPEED = 30 +ZOMBIE_X_SPEED = ZOMBIE_X_SPEED * 15.0 / FPS +if ZOMBIE_X_SPEED % 1.0 != 0.0: + raise ValueError( + f"FPS of {FPS} leads to decimal place value of {ZOMBIE_X_SPEED} for zombie_x_speed." + ) +ZOMBIE_X_SPEED = int(ZOMBIE_X_SPEED) + +# player rotation rate +PLAYER_ANG_RATE = 10 +PLAYER_ANG_RATE = PLAYER_ANG_RATE * 15.0 / FPS +if PLAYER_ANG_RATE % 1.0 != 0.0: + raise ValueError( + f"FPS of {FPS} leads to decimal place value of {PLAYER_ANG_RATE} for angle_rate." + ) +PLAYER_ANG_RATE = int(PLAYER_ANG_RATE) + +# archer stuff +ARCHER_X, ARCHER_Y = 400, 610 + +ARCHER_SPEED = 25 +ARCHER_SPEED = ARCHER_SPEED * 15.0 / FPS +if ARCHER_SPEED % 1.0 != 0.0: + raise ValueError( + f"FPS of {FPS} leads to decimal place value of {ARCHER_SPEED} for archer_speed." + ) +ARCHER_SPEED = int(ARCHER_SPEED) + +# knight stuff +KNIGHT_X, KNIGHT_Y = 800, 610 + +KNIGHT_SPEED = 25 +KNIGHT_SPEED = KNIGHT_SPEED * 15.0 / FPS +if KNIGHT_SPEED % 1.0 != 0.0: + raise ValueError( + f"FPS of {FPS} leads to decimal place value of {KNIGHT_SPEED} for knight_speed." + ) +KNIGHT_SPEED = int(KNIGHT_SPEED) + +# arrow stuff +ARROW_SPEED = 45 +ARROW_SPEED = ARROW_SPEED * 15.0 / FPS +if ARROW_SPEED % 1.0 != 0.0: + raise ValueError( + f"FPS of {FPS} leads to decimal place value of {ARROW_SPEED} for arrow_speed." + ) +ARROW_SPEED = int(ARROW_SPEED) + +# sword stuff +SWORD_SPEED = 20 +SWORD_SPEED = SWORD_SPEED * 15.0 / FPS +if SWORD_SPEED % 1.0 != 0.0: + raise ValueError( + f"FPS of {FPS} leads to decimal place value of {SWORD_SPEED} for sword_speed." + ) +SWORD_SPEED = int(SWORD_SPEED) + +MIN_PHASE = -3 / 15.0 * FPS +if MIN_PHASE % 1.0 != 0.0: + raise ValueError( + f"FPS of {FPS} leads to decimal place value of {MIN_PHASE} for min_phase." 
+ ) +MIN_PHASE = int(MIN_PHASE) + +MAX_PHASE = 3 / 15.0 * FPS +if MAX_PHASE % 1.0 != 0.0: + raise ValueError( + f"FPS of {FPS} leads to decimal place value of {MAX_PHASE} for max_phase." + ) +MAX_PHASE = int(MAX_PHASE) + +ARROW_TIMEOUT = 3 +SWORD_TIMEOUT = 0 diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/img.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/img.py new file mode 100644 index 0000000000000000000000000000000000000000..590dda442d833573d39af05845f837922d3ebd96 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/img.py @@ -0,0 +1,11 @@ +from os import path as os_path + +import pygame + + +def get_image(path): + cwd = os_path.dirname(os_path.dirname(__file__)) + image = pygame.image.load(cwd + "/" + path) + sfc = pygame.Surface(image.get_size(), flags=pygame.SRCALPHA) + sfc.blit(image, (0, 0)) + return sfc diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/players.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/players.py new file mode 100644 index 0000000000000000000000000000000000000000..2368282c433b38421b70f9fd2441bd51939403b9 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/players.py @@ -0,0 +1,108 @@ +import math +import os + +import pygame + +from . import constants as const +from .img import get_image + + +class Player(pygame.sprite.Sprite): + def __init__(self): + super().__init__() + self.agent_name = None + + self.rect = pygame.Rect(0.0, 0.0, 0.0, 0.0) + self.image = None + self.org_image = None + + self.angle = 0 + self.pos = pygame.Vector2(self.rect.center) + self.direction = pygame.Vector2(0, -1) + + self.alive = True + self.score = 0 + + self.is_archer = False + self.is_knight = False + + self.speed = 0 + self.ang_rate = 0 + + self.action = 6 + self.attacking = False + self.weapon_timeout = 99 + + def update(self, action): + self.action = action + went_out_of_bounds = False + + if not self.attacking: + move_angle = math.radians(self.angle + 90) + # Up and Down movement + if action == 1 and self.rect.y > 20: + self.rect.x += math.cos(move_angle) * self.speed + self.rect.y -= math.sin(move_angle) * self.speed + elif action == 2 and self.rect.y < const.SCREEN_HEIGHT - 40: + self.rect.x += math.cos(move_angle) * self.speed + self.rect.y += math.sin(move_angle) * self.speed + # Turn CCW & CW + elif action == 3: + self.angle += self.ang_rate + elif action == 4: + self.angle -= self.ang_rate + # weapon and do nothing + elif action == 5 and self.alive: + pass + elif action == 6: + pass + + # Clamp to stay inside the screen + if self.rect.y < 0 or self.rect.y > (const.SCREEN_HEIGHT - 40): + went_out_of_bounds = True + + self.rect.x = max(min(self.rect.x, const.SCREEN_WIDTH - 132), 100) + self.rect.y = max(min(self.rect.y, const.SCREEN_HEIGHT - 40), 0) + + # add to weapon timeout when we know we're not attacking + self.weapon_timeout += 1 + else: + self.weapon_timeout = 0 + + self.direction = pygame.Vector2(0, -1).rotate(-self.angle) + self.image = pygame.transform.rotate(self.org_image, self.angle) + self.rect = self.image.get_rect(center=self.rect.center) + return went_out_of_bounds + + def offset(self, x_offset, y_offset): + self.rect.x += x_offset + self.rect.y += y_offset + + def is_done(self): + return not self.alive + + +class Archer(Player): + def __init__(self, agent_name): + super().__init__() + self.agent_name = agent_name + self.image = 
get_image(os.path.join("img", "archer.png")) + self.rect = self.image.get_rect(center=(const.ARCHER_X, const.ARCHER_Y)) + self.org_image = self.image.copy() + self.pos = pygame.Vector2(self.rect.center) + self.is_archer = True + self.speed = const.ARCHER_SPEED + self.ang_rate = const.PLAYER_ANG_RATE + + +class Knight(Player): + def __init__(self, agent_name): + super().__init__() + self.agent_name = agent_name + self.image = get_image(os.path.join("img", "knight.png")) + self.rect = self.image.get_rect(center=(const.KNIGHT_X, const.KNIGHT_Y)) + self.org_image = self.image.copy() + self.pos = pygame.Vector2(self.rect.center) + self.is_knight = True + self.speed = const.KNIGHT_SPEED + self.ang_rate = const.PLAYER_ANG_RATE diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/weapons.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/weapons.py new file mode 100644 index 0000000000000000000000000000000000000000..7668c98f45d883550e469df9cacbac95a63fa95d --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/weapons.py @@ -0,0 +1,79 @@ +import math +import os + +import pygame + +from . import constants as const +from .img import get_image + + +class Arrow(pygame.sprite.Sprite): + def __init__(self, archer): + super().__init__() + self.archer = archer + self.image = get_image(os.path.join("img", "arrow.png")) + self.rect = self.image.get_rect(center=self.archer.pos) + self.image = pygame.transform.rotate(self.image, self.archer.angle) + + self.pos = pygame.Vector2(self.archer.rect.center) + self.fire_direction = self.archer.direction + + # reset the archer timeout when arrow fired + archer.weapon_timeout = 0 + + def update(self): + if self.archer.alive: + self.pos += self.fire_direction * const.ARROW_SPEED + self.rect.center = self.pos + else: + self.rect.x = -100 + + def is_active(self): + if not self.archer.alive: + return False + if self.rect.x < 0 or self.rect.y < 0: + return False + if self.rect.x > const.SCREEN_WIDTH or self.rect.y > const.SCREEN_HEIGHT: + return False + return True + + +class Sword(pygame.sprite.Sprite): + def __init__(self, knight): + # the sword is actually a mace, but we refer to it as sword everywhere + super().__init__() + self.knight = knight + self.image = get_image(os.path.join("img", "mace.png")) + self.rect = self.image.get_rect(center=self.knight.rect.center) + self.active = False + + # phase of the sword, starts at the left most part + self.phase = const.MAX_PHASE + + def update(self): + if self.knight.action == 5: + self.active = True + + if self.active and self.knight.alive: + # phase goes from max to min because + # it counts positive from CCW + if self.phase > const.MIN_PHASE: + self.phase -= 1 + self.knight.attacking = True + + angle = math.radians( + self.knight.angle + 90 + const.SWORD_SPEED * self.phase + ) + self.rect = self.image.get_rect(center=self.knight.rect.center) + self.rect.x += (math.cos(angle) * (self.rect.width / 2)) + ( + math.cos(angle) * (self.knight.rect.width / 2) + ) + self.rect.y -= (math.sin(angle) * (self.rect.height / 2)) + ( + math.sin(angle) * (self.knight.rect.height / 2) + ) + else: + self.phase = const.MAX_PHASE + self.active = False + self.knight.attacking = False + + return self.active diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/zombie.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/zombie.py new file mode 100644 index 
0000000000000000000000000000000000000000..81cdf521ee518a8be34de90bc3c9b4f2f21edb5a --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies/src/zombie.py @@ -0,0 +1,39 @@ +import os + +import pygame + +from . import constants as const +from .img import get_image + + +class Zombie(pygame.sprite.Sprite): + def __init__(self, randomizer): + super().__init__() + self.image = get_image(os.path.join("img", "zombie.png")) + self.rect = self.image.get_rect(center=(50, 50)) + self.randomizer = randomizer + + self.x_lims = [const.SCREEN_UNITS, const.SCREEN_WIDTH - const.SCREEN_UNITS] + + def update(self): + rand_x = self.randomizer.randint(0, 10) + + self.rect.y += const.ZOMBIE_Y_SPEED + + # Wobbling in X-Y Direction + if self.rect.y % const.SCREEN_UNITS == 0: + if self.rect.x > self.x_lims[0] and self.rect.x < self.x_lims[1]: + if rand_x in [1, 3, 6]: + self.rect.x += const.ZOMBIE_X_SPEED + elif rand_x in [2, 4, 5, 8]: + self.rect.x -= const.ZOMBIE_X_SPEED + + # Bringing the Zombies back on the Screen + else: + if self.rect.x <= self.x_lims[0]: + self.rect.x += 2 * const.ZOMBIE_X_SPEED + elif self.rect.x >= self.x_lims[1]: + self.rect.x -= 2 * const.ZOMBIE_X_SPEED + + # Clamp to stay inside the screen + self.rect.x = max(min(self.rect.x, const.SCREEN_WIDTH - 100), 100) diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies_v8.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies_v8.py new file mode 100644 index 0000000000000000000000000000000000000000..c157e0bf21f683191277526f2b8fe96065acc827 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/knights_archers_zombies_v8.py @@ -0,0 +1,2 @@ +from .knights_archers_zombies.knights_archers_zombies import (env, manual_control, parallel_env, + raw_env) diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/__init__.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b080274b55b603ddfe6a50f92e8fcf617a9976ab Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/__pycache__/manual_control.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/__pycache__/manual_control.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8d64d11530d633a26aae32be80999f791bd3d758 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/__pycache__/manual_control.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/__pycache__/pistonball.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/__pycache__/pistonball.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d89b2de8509fe95195ed4d5cec70a52870151dca Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/__pycache__/pistonball.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/background.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/background.png new file 
mode 100644 index 0000000000000000000000000000000000000000..e6fe5a9dc8861af115872f37726ed2209159ff08 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/background.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/manual_control.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/manual_control.py new file mode 100644 index 0000000000000000000000000000000000000000..521dec0eb9a9cd4eb5abc9319f53790b40152b22 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/manual_control.py @@ -0,0 +1,58 @@ +import os +import time + +import numpy as np +import pygame + + +def manual_control(**kwargs): + from .pistonball import env as _env + + # flatten_obs is True by default + env = _env(**kwargs) + env.reset() + # Use save_observation to save a dictionary of observations + # save_observation(obs_dict, reverse_colors=False) + # exit() + i = 19 + clock = pygame.time.Clock() + start = time.time() + done = False + quit_game = 0 + pygame.key.set_repeat(20, 0) + num_agents = len(env.agents) # 20 + while not done: + clock.tick(60) + action_list = np.zeros((num_agents,1),dtype="float32") + for event in pygame.event.get(): + if event.type == pygame.KEYDOWN: + if event.key == pygame.K_ESCAPE: + quit_game = 1 + break + if event.key == pygame.K_BACKSPACE: + # Backspace to reset + env.reset() + i = 19 + if event.key == pygame.K_a and time.time() - start > .1: + i = (i - 1) if (i != 0) else i + start = time.time() + if event.key == pygame.K_d and time.time() - start > .1: + i = (i + 1) if (i != num_agents - 1) else i + start = time.time() + if event.key == pygame.K_s: + action_list[i] = -1. + if event.key == pygame.K_w: + action_list[i] = 1. + + if quit_game: + break + # actions should be a dict of numpy arrays + for a in action_list: + env.step(a) + pygame.event.pump() + + env.render() + done = any(env.dones.values()) + # Uncomment next line to print FPS at which the game runs + + env.close() diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/piston.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/piston.png new file mode 100644 index 0000000000000000000000000000000000000000..a4de5bee233ef994008864b9de7f810bb3635bda Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/piston.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/piston_body.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/piston_body.png new file mode 100644 index 0000000000000000000000000000000000000000..f53c324c10b02c7399e00763b20e7f5f91152d44 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/piston_body.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/pistonball.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/pistonball.py new file mode 100644 index 0000000000000000000000000000000000000000..e06ead5b628013744a13283984e5b195a2606080 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball/pistonball.py @@ -0,0 +1,450 @@ +import math +import os + +import gym +import numpy as np +import pygame +import pymunk +import pymunk.pygame_util +from gym.utils import EzPickle, seeding + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from .manual_control import manual_control + +_image_library = {} + +FPS = 20 + +def get_image(path): + from os import path as os_path + cwd = 
os_path.dirname(__file__) + image = pygame.image.load(cwd + '/' + path) + sfc = pygame.Surface(image.get_size(), flags=pygame.SRCALPHA) + sfc.blit(image, (0, 0)) + return sfc + + +def env(**kwargs): + env = raw_env(**kwargs) + if env.continuous: + env = wrappers.ClipOutOfBoundsWrapper(env) + else: + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +parallel_env = parallel_wrapper_fn(env) + + +class raw_env(AECEnv, EzPickle): + + metadata = { + 'render.modes': ['human', "rgb_array"], + 'name': "pistonball_v6", + 'is_parallelizable': True, + 'video.frames_per_second': FPS + } + + def __init__(self, n_pistons=20, time_penalty=-0.1, continuous=True, random_drop=True, random_rotate=True, ball_mass=0.75, ball_friction=0.3, ball_elasticity=1.5, max_cycles=125): + EzPickle.__init__(self, n_pistons, time_penalty, continuous, random_drop, random_rotate, ball_mass, ball_friction, ball_elasticity, max_cycles) + self.dt = 1./FPS + self.n_pistons = n_pistons + self.piston_head_height = 11 + self.piston_width = 40 + self.piston_height = 40 + self.piston_body_height = 23 + self.piston_radius = 5 + self.wall_width = 40 + self.ball_radius = 40 + self.screen_width = (2 * self.wall_width) + (self.piston_width * self.n_pistons) + self.screen_height = 560 + y_high = self.screen_height - self.wall_width - self.piston_body_height + y_low = self.wall_width + obs_height = y_high - y_low + + assert self.piston_width == self.wall_width, "Wall width and piston width must be equal for observation calculation" + assert self.n_pistons > 1, "n_pistons must be greater than 1" + + self.agents = ["piston_" + str(r) for r in range(self.n_pistons)] + self.possible_agents = self.agents[:] + self.agent_name_mapping = dict(zip(self.agents, list(range(self.n_pistons)))) + self._agent_selector = agent_selector(self.agents) + + self.observation_spaces = dict( + zip(self.agents, [gym.spaces.Box(low=0, high=255, shape=(obs_height, self.piston_width * 3, 3), dtype=np.uint8)] * self.n_pistons)) + self.continuous = continuous + if self.continuous: + self.action_spaces = dict(zip(self.agents, [gym.spaces.Box(low=-1, high=1, shape=(1,))] * self.n_pistons)) + else: + self.action_spaces = dict(zip(self.agents, [gym.spaces.Discrete(3)] * self.n_pistons)) + self.state_space = gym.spaces.Box(low=0, high=255, shape=(self.screen_height, self.screen_width, 3), dtype=np.uint8) + + pygame.init() + pymunk.pygame_util.positive_y_is_up = False + + self.renderOn = False + self.screen = pygame.Surface((self.screen_width, self.screen_height)) + self.max_cycles = max_cycles + + self.piston_sprite = get_image('piston.png') + self.piston_body_sprite = get_image('piston_body.png') + self.background = get_image('background.png') + self.random_drop = random_drop + self.random_rotate = random_rotate + + self.pistonList = [] + self.pistonRewards = [] # Keeps track of individual rewards + self.recentFrameLimit = 20 # Defines what "recent" means in terms of number of frames. 
+ self.recentPistons = set() # Set of pistons that have touched the ball recently + self.time_penalty = time_penalty + self.local_ratio = 0 # TODO: this was a bad idea and the logic this uses should be removed at some point + self.ball_mass = ball_mass + self.ball_friction = ball_friction + self.ball_elasticity = ball_elasticity + + self.done = False + + self.pixels_per_position = 4 + self.n_piston_positions = 16 + + self.screen.fill((0, 0, 0)) + self.draw_background() + # self.screen.blit(self.background, (0, 0)) + + self.render_rect = pygame.Rect( + self.wall_width, # Left + self.wall_width, # Top + self.screen_width - (2 * self.wall_width), # Width + self.screen_height - (2 * self.wall_width) - self.piston_body_height # Height + ) + + # Blit background image if ball goes out of bounds. Ball radius is 40 + self.valid_ball_position_rect = pygame.Rect( + self.render_rect.left + self.ball_radius, # Left + self.render_rect.top + self.ball_radius, # Top + self.render_rect.width - (2 * self.ball_radius), # Width + self.render_rect.height - (2 * self.ball_radius) # Height + ) + + self.frames = 0 + + self.has_reset = False + self.closed = False + self.seed() + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def seed(self, seed=None): + self.np_random, seed = seeding.np_random(seed) + + def observe(self, agent): + observation = pygame.surfarray.pixels3d(self.screen) + i = self.agent_name_mapping[agent] + # Set x bounds to include 40px left and 40px right of piston + x_high = self.wall_width + self.piston_width * (i + 2) + x_low = self.wall_width + self.piston_width * (i - 1) + y_high = self.screen_height - self.wall_width - self.piston_body_height + y_low = self.wall_width + cropped = np.array(observation[x_low:x_high, y_low:y_high, :]) + observation = np.rot90(cropped, k=3) + observation = np.fliplr(observation) + return observation + + def state(self): + ''' + Returns an observation of the global environment + ''' + state = pygame.surfarray.pixels3d(self.screen).copy() + state = np.rot90(state, k=3) + state = np.fliplr(state) + return state + + def enable_render(self): + self.screen = pygame.display.set_mode((self.screen_width, self.screen_height)) + + self.renderOn = True + # self.screen.blit(self.background, (0, 0)) + self.draw_background() + self.draw() + + def close(self): + if not self.closed: + self.closed = True + if self.renderOn: + self.screen = pygame.Surface((self.screen_width, self.screen_height)) + self.renderOn = False + pygame.event.pump() + pygame.display.quit() + + def add_walls(self): + top_left = (self.wall_width, self.wall_width) + top_right = (self.screen_width - self.wall_width, self.wall_width) + bot_left = (self.wall_width, self.screen_height - self.wall_width) + bot_right = (self.screen_width - self.wall_width, self.screen_height - self.wall_width) + walls = [ + pymunk.Segment(self.space.static_body, top_left, top_right, 1), # Top wall + pymunk.Segment(self.space.static_body, top_left, bot_left, 1), # Left wall + pymunk.Segment(self.space.static_body, bot_left, bot_right, 1), # Bottom wall + pymunk.Segment(self.space.static_body, top_right, bot_right, 1) # Right + ] + for wall in walls: + wall.friction = .64 + self.space.add(wall) + + def add_ball(self, x, y, b_mass, b_friction, b_elasticity): + mass = b_mass + radius = 40 + inertia = pymunk.moment_for_circle(mass, 0, radius, (0, 0)) + body = pymunk.Body(mass, inertia) + body.position = x, y + # radians per second + 
if self.random_rotate: + body.angular_velocity = self.np_random.uniform(-6 * math.pi, 6 * math.pi) + shape = pymunk.Circle(body, radius, (0, 0)) + shape.friction = b_friction + shape.elasticity = b_elasticity + self.space.add(body, shape) + return body + + def add_piston(self, space, x, y): + piston = pymunk.Body(body_type=pymunk.Body.KINEMATIC) + piston.position = x, y + segment = pymunk.Segment(piston, (0, 0), (self.piston_width - (2 * self.piston_radius), 0), self.piston_radius) + segment.friction = .64 + segment.color = pygame.color.THECOLORS["blue"] + space.add(piston, segment) + return piston + + def move_piston(self, piston, v): + + def cap(y): + maximum_piston_y = self.screen_height - self.wall_width - (self.piston_height - self.piston_head_height) + if y > maximum_piston_y: + y = maximum_piston_y + elif y < maximum_piston_y - (self.n_piston_positions * self.pixels_per_position): + y = maximum_piston_y - (self.n_piston_positions * self.pixels_per_position) + return y + + piston.position = (piston.position[0], cap(piston.position[1] - v * self.pixels_per_position)) + + def reset(self): + self.space = pymunk.Space(threaded=False) + self.add_walls() + # self.space.threads = 2 + self.space.gravity = (0.0, 750.0) + self.space.collision_bias = .0001 + self.space.iterations = 10 # 10 is default in PyMunk + + self.pistonList = [] + maximum_piston_y = self.screen_height - self.wall_width - (self.piston_height - self.piston_head_height) + for i in range(self.n_pistons): + # Multiply by 0.5 to use only the lower half of possible positions + possible_y_displacements = np.arange(0, .5 * self.pixels_per_position * self.n_piston_positions, self.pixels_per_position) + piston = self.add_piston( + self.space, + self.wall_width + self.piston_radius + self.piston_width * i, # x position + maximum_piston_y - self.np_random.choice(possible_y_displacements) # y position + ) + piston.velocity = (0, 0) + self.pistonList.append(piston) + + self.horizontal_offset = 0 + self.vertical_offset = 0 + horizontal_offset_range = 30 + vertical_offset_range = 15 + if self.random_drop: + self.vertical_offset = self.np_random.randint(-vertical_offset_range, vertical_offset_range + 1) + self.horizontal_offset = self.np_random.randint(-horizontal_offset_range, horizontal_offset_range + 1) + ball_x = (self.screen_width + - self.wall_width + - self.ball_radius + - horizontal_offset_range + + self.horizontal_offset) + ball_y = (self.screen_height + - self.wall_width + - self.piston_body_height + - self.ball_radius + - (0.5 * self.pixels_per_position * self.n_piston_positions) + - vertical_offset_range + + self.vertical_offset) + + # Ensure ball starts somewhere right of the left wall + ball_x = max(ball_x, self.wall_width + self.ball_radius + 1) + + self.ball = self.add_ball(ball_x, ball_y, self.ball_mass, self.ball_friction, self.ball_elasticity) + self.ball.angle = 0 + self.ball.velocity = (0, 0) + if self.random_rotate: + self.ball.angular_velocity = self.np_random.uniform(-6 * math.pi, 6 * math.pi) + + self.lastX = int(self.ball.position[0] - self.ball_radius) + self.distance = self.lastX - self.wall_width + + self.draw_background() + self.draw() + + self.agents = self.possible_agents[:] + + self._agent_selector.reinit(self.agents) + self.agent_selection = self._agent_selector.next() + + self.has_reset = True + self.done = False + self.rewards = dict(zip(self.agents, [0 for _ in self.agents])) + self._cumulative_rewards = dict(zip(self.agents, [0 for _ in self.agents])) + self.dones = dict(zip(self.agents, [False for _ in 
self.agents])) + self.infos = dict(zip(self.agents, [{} for _ in self.agents])) + + self.frames = 0 + + def draw_background(self): + outer_walls = pygame.Rect( + 0, # Left + 0, # Top + self.screen_width, # Width + self.screen_height, # Height + ) + outer_wall_color = (58, 64, 65) + pygame.draw.rect(self.screen, outer_wall_color, outer_walls) + inner_walls = pygame.Rect( + self.wall_width / 2, # Left + self.wall_width / 2, # Top + self.screen_width - self.wall_width, # Width + self.screen_height - self.wall_width, # Height + ) + inner_wall_color = (68, 76, 77) + pygame.draw.rect(self.screen, inner_wall_color, inner_walls) + self.draw_pistons() + + def draw_pistons(self): + piston_color = (65, 159, 221) + x_pos = self.wall_width + for piston in self.pistonList: + self.screen.blit(self.piston_body_sprite, (x_pos, self.screen_height - self.wall_width - self.piston_body_height)) + # Height is the size of the blue part of the piston. 6 is the piston base height (the gray part at the bottom) + height = self.screen_height - self.wall_width - self.piston_body_height - (piston.position[1] + self.piston_radius) + (self.piston_body_height - 6) + body_rect = pygame.Rect( + piston.position[0] + self.piston_radius + 1, # +1 to match up to piston graphics + piston.position[1] + self.piston_radius + 1, + 18, + height + ) + pygame.draw.rect(self.screen, piston_color, body_rect) + x_pos += self.piston_width + + def draw(self): + # redraw the background image if ball goes outside valid position + if not self.valid_ball_position_rect.collidepoint(self.ball.position): + # self.screen.blit(self.background, (0, 0)) + self.draw_background() + + ball_x = int(self.ball.position[0]) + ball_y = int(self.ball.position[1]) + + color = (255, 255, 255) + pygame.draw.rect(self.screen, color, self.render_rect) + color = (65, 159, 221) + pygame.draw.circle(self.screen, color, (ball_x, ball_y), self.ball_radius) + + line_end_x = ball_x + (self.ball_radius - 1) * np.cos(self.ball.angle) + line_end_y = ball_y + (self.ball_radius - 1) * np.sin(self.ball.angle) + color = (58, 64, 65) + pygame.draw.line(self.screen, color, (ball_x, ball_y), (line_end_x, line_end_y), 3) # 39 because it kept sticking over by 1 at 40 + + for piston in self.pistonList: + self.screen.blit(self.piston_sprite, (piston.position[0] - self.piston_radius, piston.position[1] - self.piston_radius)) + self.draw_pistons() + + def get_nearby_pistons(self): + # first piston = leftmost + nearby_pistons = [] + ball_pos = int(self.ball.position[0] - self.ball_radius) + closest = abs(self.pistonList[0].position.x - ball_pos) + closest_piston_index = 0 + for i in range(self.n_pistons): + next_distance = abs(self.pistonList[i].position.x - ball_pos) + if next_distance < closest: + closest = next_distance + closest_piston_index = i + + if closest_piston_index > 0: + nearby_pistons.append(closest_piston_index - 1) + nearby_pistons.append(closest_piston_index) + if closest_piston_index < self.n_pistons - 1: + nearby_pistons.append(closest_piston_index + 1) + + return nearby_pistons + + def get_local_reward(self, prev_position, curr_position): + local_reward = .5 * (prev_position - curr_position) + return local_reward + + def render(self, mode="human"): + if mode == 'human' and not self.renderOn: + # sets self.renderOn to true and initializes display + self.enable_render() + + self.draw_background() + self.draw() + + observation = np.array(pygame.surfarray.pixels3d(self.screen)) + if mode == 'human': + pygame.display.flip() + return np.transpose(observation, axes=(1, 0, 2)) 
if mode == "rgb_array" else None + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + + action = np.asarray(action) + agent = self.agent_selection + if self.continuous: + self.move_piston(self.pistonList[self.agent_name_mapping[agent]], action) + else: + self.move_piston(self.pistonList[self.agent_name_mapping[agent]], action - 1) + + self.space.step(self.dt) + if self._agent_selector.is_last(): + ball_min_x = int(self.ball.position[0] - self.ball_radius) + ball_next_x = self.ball.position[0] - self.ball_radius + self.ball.velocity[0] * self.dt + if ball_next_x <= self.wall_width + 1: + self.done = True + # ensures that the ball can't pass through the wall + ball_min_x = max(self.wall_width, ball_min_x) + self.draw() + local_reward = self.get_local_reward(self.lastX, ball_min_x) + # Opposite order due to moving right to left + global_reward = (100 / self.distance) * (self.lastX - ball_min_x) + if not self.done: + global_reward += self.time_penalty + total_reward = [global_reward * (1 - self.local_ratio)] * self.n_pistons # start with global reward + local_pistons_to_reward = self.get_nearby_pistons() + for index in local_pistons_to_reward: + total_reward[index] += local_reward * self.local_ratio + self.rewards = dict(zip(self.agents, total_reward)) + self.lastX = ball_min_x + self.frames += 1 + else: + self._clear_rewards() + + if self.frames >= self.max_cycles: + self.done = True + # Clear the list of recent pistons for the next reward cycle + if self.frames % self.recentFrameLimit == 0: + self.recentPistons = set() + if self._agent_selector.is_last(): + self.dones = dict(zip(self.agents, [self.done for _ in self.agents])) + + self.agent_selection = self._agent_selector.next() + self._cumulative_rewards[agent] = 0 + self._accumulate_rewards() + +# Game art created by J K Terry diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball_v6.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball_v6.py new file mode 100644 index 0000000000000000000000000000000000000000..368b8cae68c5fa001ad3c622f628db835a878f6d --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/pistonball_v6.py @@ -0,0 +1 @@ +from .pistonball.pistonball import env, manual_control, parallel_env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/__init__.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a76b05f7a92fbf1a35d345b630527c8391075729 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/__pycache__/manual_control.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/__pycache__/manual_control.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..08571e448143ad750ee1adb5831752237d95167f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/__pycache__/manual_control.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/__pycache__/prison.cpython-39.pyc 
b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/__pycache__/prison.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..53bdf4dcb30394533bb6c09b385b2478ad959e62 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/__pycache__/prison.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/background.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/background.png new file mode 100644 index 0000000000000000000000000000000000000000..4f0d8d0ef6f8babcd61cbffb9e26ef571ceaca9b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/background.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/background_append.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/background_append.png new file mode 100644 index 0000000000000000000000000000000000000000..d42c404cb65a498562762ffc6cd53f07824c909c Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/background_append.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/blit_background.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/blit_background.png new file mode 100644 index 0000000000000000000000000000000000000000..082f55873153bf9c314aac1378105a1504b1012a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/blit_background.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/blit_background_append.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/blit_background_append.png new file mode 100644 index 0000000000000000000000000000000000000000..975fa83aca1e985beed6c49a336b6051a454ff0e Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/blit_background_append.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/manual_control.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/manual_control.py new file mode 100644 index 0000000000000000000000000000000000000000..d1c9b865dffb2e6ef120950b793b7a7b58f4b9f5 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/manual_control.py @@ -0,0 +1,59 @@ +import os + +import numpy as np +import pygame + + +def manual_control(**kwargs): + from .prison import env as _env + env = _env(**kwargs) + env.reset() + clock = pygame.time.Clock() + x = 0 + y = 0 + prisoner_mapping = {} + for prisoner in env.agents: + prisoner_mapping[env.infos[prisoner]['map_tuple']] = prisoner + while True: + clock.tick(15) + agent_actions = {agent: 1 for agent in env.agents} + num_actions = 0 + test_done = False + for event in pygame.event.get(): + # wasd to switch prisoner, jk to move left and right + if event.type == pygame.KEYDOWN: + if event.key == pygame.K_a: + x = 0 + elif event.key == pygame.K_d: + x = 1 + elif event.key == pygame.K_w: + y = max(0, y - 1) + elif event.key == pygame.K_s: + y = min(3, y + 1) + elif event.key == pygame.K_j: + num_actions += 1 + agent_actions[prisoner_mapping[ + (x, y)]] = 0 + elif event.key == pygame.K_k: + num_actions += 1 + agent_actions[prisoner_mapping[ + (x, y)]] = 2 + elif event.key == pygame.K_ESCAPE: + test_done = True + for i in env.agents: + observation, reward, done, info = env.last() + # if reward != 0: + # print("Agent {} was reward {}".format(i, reward)) + if done: + test_done = True + action = agent_actions[i] + env.step(action) + env.render() + + if test_done: + break + env.close() + + +if __name__ == "__main__": + manual_control() diff --git 
a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/prison.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/prison.py new file mode 100644 index 0000000000000000000000000000000000000000..d016e01557712ae531b6b27dd77a16f67527767c --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/prison.py @@ -0,0 +1,398 @@ +import os + +import numpy as np +import pygame +from gym import spaces +from gym.utils import EzPickle, seeding + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from .manual_control import manual_control + +FPS = 15 +PRISONER_VELOCITY = 24 + +def get_image(path): + from os import path as os_path + cwd = os_path.dirname(__file__) + image = pygame.image.load(cwd + '/' + path) + sfc = pygame.Surface(image.get_size(), flags=pygame.SRCALPHA) + sfc.blit(image, (0, 0)) + return sfc + + +def within(a, b, diff): + return abs(a - b) <= diff + + +class Prisoner: + def __init__(self, p, l, r, w, name): + self.position = p + self.left_bound = l + self.right_bound = r + self.window = w + self.name = name + self.first_touch = -1 # rewarded on touching bound != first_touch + self.last_touch = -1 # to track last touched wall + self.still_sprite = None + self.left_sprite = None + self.right_sprite = None + self.state = 0 + self.sprite_timer_on = False + self.last_sprite_movement = 0 + self.sprite_timer = 0 # if the agent hasn't moved left or right in X frames, set sprite to "still" + + def set_sprite(self, s): + self.still_sprite = get_image(s + "_still.png") + self.left_sprite = get_image(s + "_left.png") + self.right_sprite = get_image(s + "_right.png") + self.right_bound -= self.right_sprite.get_width() + + def set_state(self, st): + self.state = st + + def get_sprite(self): + if self.last_sprite_movement == 0: + return self.still_sprite + elif self.last_sprite_movement > 0: + return self.right_sprite + elif self.last_sprite_movement < 0: + return self.left_sprite + + def update_sprite(self, movement): + if movement != 0: + self.sprite_timer_on = True + self.sprite_timer = 0 + self.last_sprite_movement = movement + if self.sprite_timer_on: + self.sprite_timer += 1 + if self.sprite_timer > 2: + self.sprite_timer = 0 + self.sprite_timer_on = False + self.last_sprite_movement = 0 + + +def env(**kwargs): + env = raw_env(**kwargs) + + if env.continuous: + env = wrappers.ClipOutOfBoundsWrapper(env) + else: + env = wrappers.AssertOutOfBoundsWrapper(env) + + env = wrappers.OrderEnforcingWrapper(env) + return env + + +parallel_env = parallel_wrapper_fn(env) + + +class raw_env(AECEnv, EzPickle): + + def __init__(self, continuous=False, vector_observation=False, max_cycles=150, num_floors=4, synchronized_start=False, identical_aliens=False, random_aliens=False): + EzPickle.__init__(self, continuous, vector_observation, max_cycles, num_floors, synchronized_start, identical_aliens, random_aliens) + num_agents = 2 * num_floors + self.agents = ["prisoner_" + str(s) for s in range(0, num_agents)] + self.possible_agents = self.agents[:] + self._agent_selector = agent_selector(self.agents) + self.sprite_list = ["sprites/alien", "sprites/drone", "sprites/glowy", "sprites/reptile", "sprites/ufo", "sprites/bunny", "sprites/robot", "sprites/tank"] + self.sprite_img_heights = [40, 40, 46, 48, 32, 54, 48, 53] + self.metadata = { + 'render.modes': ['human', "rgb_array"], + 'name': "prison_v3", + 'is_parallelizable': True, + 'video.frames_per_second': FPS, + } + self.infos = {} + self.rendering = 
False + self.max_cycles = max_cycles + pygame.init() + self.clock = pygame.time.Clock() + self.num_frames = 0 + self.done_val = False + self.num_floors = num_floors + self.background = get_image('background.png') + self.background_append = get_image('background_append.png') + self.dynamic_background = get_image('blit_background.png') + self.dynamic_background_append = get_image('blit_background_append.png') + self.velocity = PRISONER_VELOCITY * 15. / FPS + if self.velocity % 1. != 0: + raise ValueError(f'FPS of {FPS} leads to decimal place value of {self.velocity} for velocity.') + self.velocity = int(self.velocity) + self.continuous = continuous + self.vector_obs = vector_observation + self.synchronized_start = synchronized_start + self.identical_aliens = identical_aliens + if (self.identical_aliens): + self.random_aliens = False + else: + self.random_aliens = random_aliens + self.seed() + self.closed = False + + self.action_spaces = {} + if continuous: + for a in self.agents: + self.action_spaces[a] = spaces.Box(low=-PRISONER_VELOCITY, high=PRISONER_VELOCITY, shape=(1,), dtype=np.float32) + else: + for a in self.agents: + self.action_spaces[a] = spaces.Discrete(3) + + self.observation_spaces = {} + self.last_observation = {} + for a in self.agents: + self.last_observation[a] = None + if vector_observation: + self.observation_spaces[a] = spaces.Box(low=-300, high=300, shape=(1,), dtype=np.float32) + else: + self.observation_spaces[a] = spaces.Box(low=0, high=255, shape=(100, 300, 3), dtype=np.uint8) + self.state_space = spaces.Box(low=0, high=255, shape=(650, 750, 3), dtype=np.uint8) + + self.walls = [] + self.create_walls(num_floors) + + self.spawn_prisoners() + self.has_reset = False + + self.reinit() + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def seed(self, seed=None): + self.np_random, seed = seeding.np_random(seed) + + def create_walls(self, num_floors): + self.walls = [(0, 0, 50, 700), (350, 0, 50, 700), + (700, 0, 50, 700)] + # roof of prison + self.walls.append((50, 0, 300, 50)) + self.walls.append((400, 0, 300, 50)) + for i in range(num_floors): + y = 150 * (i + 1) + self.walls.append((50, y, 300, 50)) + self.walls.append((400, y, 300, 50)) + + def spawn_prisoners(self): + + chosen_sprites_imgs = [] + chosen_sprites_heights = [] + # possible sprite configurations are, identical_aliens, random_aliens or neither + if self.identical_aliens: + # randomly chosen sprite used for all aliens + sprite_id = self.np_random.randint(0, len(self.sprite_list)) + for s in range(self.num_agents): + chosen_sprites_imgs.append(self.sprite_list[sprite_id]) + chosen_sprites_heights.append(self.sprite_img_heights[sprite_id]) + elif self.random_aliens: + # randomly choose sprite for each agent + for s in range(self.num_agents): + sprite_id = self.np_random.randint(0, len(self.sprite_list)) + chosen_sprites_imgs.append(self.sprite_list[sprite_id]) + chosen_sprites_heights.append(self.sprite_img_heights[sprite_id]) + else: + # cycle through each sprite and assign to agent + p = 0 + for s in range(self.num_agents): + chosen_sprites_imgs.append(self.sprite_list[p]) + chosen_sprites_heights.append(self.sprite_img_heights[p]) + p = (p + 1) % len(self.sprite_list) + + self.prisoners = {} + prisoner_spawn_locs = [] + self.prisoner_mapping = {} + map_count = 0 + for f in range(self.num_floors): + spawn_y = 150 * (f + 1) + first_view_window = (50, 50 + 150 * f, 350, 150 + 150 * f) + second_view_window = 
(400, 50 + 150 * f, 700, 150 + 150 * f) + prisoner_spawn_locs.append((200, spawn_y, 50, 350, first_view_window)) + prisoner_spawn_locs.append((550, spawn_y, 400, 700, second_view_window)) + map_tuple_0 = (0, f) + map_tuple_1 = (1, f) + self.prisoner_mapping[map_tuple_0] = map_count + self.prisoner_mapping[map_tuple_1] = map_count + 1 + prisoner0_name = "prisoner_" + str(map_count) + prisoner1_name = "prisoner_" + str(map_count + 1) + self.infos[prisoner0_name] = {"map_tuple": map_tuple_0} + self.infos[prisoner1_name] = {"map_tuple": map_tuple_1} + map_count += 2 + p_count = 0 + for p in prisoner_spawn_locs: + agent_name = self.agents[p_count] + x_pos, y_pos, l_bound, r_bound, view_window = p + x_noise = 0 + if not self.synchronized_start: + x_noise = self.np_random.randint(-20, 20 + 1) + self.prisoners[agent_name] = self.create_prisoner( + x_pos + x_noise, y_pos - chosen_sprites_heights[p_count], l_bound, r_bound, view_window, agent_name) + self.prisoners[agent_name].set_sprite(chosen_sprites_imgs[p_count]) + p_count += 1 + + def create_prisoner(self, x, y, l, r, u, nam): + return Prisoner((x, y), l, r, u, nam) + + def reward(self): + return dict(zip(self.agents, self.last_rewards)) + + # returns reward of hitting both sides of room, 0 if not + def move_prisoner(self, prisoner_id, movement): + prisoner = self.prisoners[prisoner_id] + if not np.isscalar(movement): + movement = movement[0] + prisoner.update_sprite(movement) + if self.continuous: + prisoner.position = ( + prisoner.position[0] + movement * 15. / FPS, prisoner.position[1]) + else: + prisoner.position = ( + prisoner.position[0] + movement * self.velocity, prisoner.position[1]) + reward = 0 + if prisoner.position[0] < prisoner.left_bound: + prisoner.position = (prisoner.left_bound, prisoner.position[1]) + if prisoner.first_touch == -1: + prisoner.first_touch = prisoner.left_bound + if prisoner.first_touch != prisoner.left_bound and prisoner.last_touch == prisoner.right_bound: + reward = 1 + prisoner.last_touch = prisoner.left_bound + if prisoner.position[0] > prisoner.right_bound: + prisoner.position = (prisoner.right_bound, prisoner.position[1]) + if prisoner.first_touch == -1: + prisoner.first_touch = prisoner.right_bound + if prisoner.first_touch != prisoner.right_bound and prisoner.last_touch == prisoner.left_bound: + reward = 1 + prisoner.last_touch = prisoner.right_bound + return reward + + def convert_coord_to_prisoner_id(self, c): + return self.prisoner_mapping[c] + + def close(self): + if not self.closed: + self.closed = True + if self.rendering: + pygame.event.pump() + pygame.display.quit() + pygame.quit() + + def draw(self): + for k in range(self.num_floors): + h = 50 + 150 * k + self.screen.blit(self.dynamic_background_append, (50, h)) + for p in self.prisoners: + self.screen.blit(self.prisoners[p].get_sprite(), self.prisoners[p].position) + + def observe(self, agent): + if self.vector_obs: + p = self.prisoners[agent] + x = p.position[0] + obs = [x - p.left_bound] + return np.array(obs, dtype=np.float32) + else: + capture = pygame.surfarray.pixels3d(self.screen) + p = self.prisoners[agent] + x1, y1, x2, y2 = p.window + sub_screen = np.array(capture[x1:x2, y1:y2, :]) + sub_screen = np.rot90(sub_screen, k=3) + sub_screen = np.fliplr(sub_screen) + return sub_screen + + def state(self): + ''' + Returns an observation of the global environment + ''' + state = pygame.surfarray.pixels3d(self.screen).copy() + state = np.rot90(state, k=3) + state = np.fliplr(state) + return state + + def reinit(self): + self.done_val = False + 
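+        # reset per-episode counters and rebuild the off-screen drawing surface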
self.num_frames = 0 + self.last_rewards = [0 for _ in self.agents] + self.frames = 0 + self.rendering = False + self.screen = pygame.Surface((750, 50 + 150 * self.num_floors)) + self.screen.blit(self.background, (0, 0)) + self.rendering = False + + def reset(self): + self.has_reset = True + self.agents = self.possible_agents[:] + self.rewards = dict(zip(self.agents, [0 for _ in self.agents])) + self._cumulative_rewards = dict(zip(self.agents, [0 for _ in self.agents])) + self.dones = dict(zip(self.agents, [False for _ in self.agents])) + self.infos = dict(zip(self.agents, [{} for _ in self.agents])) + self._agent_selector.reinit(self.agents) + self.agent_selection = self._agent_selector.next() + self.num_frames = 0 + self.reinit() + self.spawn_prisoners() + self.draw() + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + # move prisoners, -1 = move left, 0 = do nothing and 1 is move right + if not isinstance(action, int): + action = np.asarray(action) + agent = self.agent_selection + reward = 0 + if self.continuous: + reward = self.move_prisoner(agent, action) + else: + reward = self.move_prisoner(agent, action - 1) + + # set the sprite state to action normalized + if action != 0: + self.prisoners[agent].set_state(action / abs(action)) + else: + self.prisoners[agent].set_state(0) + + self._clear_rewards() + self.rewards[agent] = reward + + if self._agent_selector.is_last(): + self.num_frames += 1 + + if (self.num_frames >= self.max_cycles): + self.done_val = True + for d in self.dones: + self.dones[d] = True + if self._agent_selector.is_last(): + self.draw() + if self.rendering: + pygame.event.pump() + + self.agent_selection = self._agent_selector.next() + self._cumulative_rewards[agent] = 0 + self._accumulate_rewards() + + def render(self, mode='human'): + if not self.rendering and mode == "human": + pygame.display.init() + old_screen = self.screen + self.screen = pygame.display.set_mode((750, 50 + 150 * self.num_floors)) + self.screen.blit(old_screen, (0, 0)) + self.screen.blit(self.background, (0, 0)) + if self.num_floors > 4: + min_rows = self.num_floors - 4 + for k in range(min_rows): + h = 650 + 150 * k + self.screen.blit(self.background_append, (0, h)) + self.rendering = True + + observation = np.array(pygame.surfarray.pixels3d(self.screen)) + if mode == "human": + pygame.display.flip() + return np.transpose(observation, axes=(1, 0, 2)) if mode == "rgb_array" else None + + +# Sprites other than bunny and tank purchased from https://nebelstern.itch.io/futura-seven +# Tank and bunny sprites commissioned from https://www.fiverr.com/jeimansutrisman +# Art other than sprites created by Niall Williams diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/prisoner.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/prisoner.png new file mode 100644 index 0000000000000000000000000000000000000000..a1f7b7de495b348a6c3543313036f7a2cdf7a4d1 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/prisoner.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/alien_left.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/alien_left.png new file mode 100644 index 0000000000000000000000000000000000000000..c56bbbf45aa28a81c0228e21f7d1d5e7cd041c7a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/alien_left.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/alien_right.png 
b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/alien_right.png new file mode 100644 index 0000000000000000000000000000000000000000..deaf4f353dc779e66de47ee71bd339cca08ce8e5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/alien_right.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/alien_still.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/alien_still.png new file mode 100644 index 0000000000000000000000000000000000000000..02fd8ef21d729b76af52c58193ffe9e53bfedb35 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/alien_still.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/bunny_left.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/bunny_left.png new file mode 100644 index 0000000000000000000000000000000000000000..02e6f10833e6fdb9ca0ffe855ed812b49a44f47a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/bunny_left.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/bunny_right.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/bunny_right.png new file mode 100644 index 0000000000000000000000000000000000000000..c6e0b84ad2ccbdaee20115c90af080b82e4c3763 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/bunny_right.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/bunny_still.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/bunny_still.png new file mode 100644 index 0000000000000000000000000000000000000000..9348af753b7910a04770316e35061d24e2d2f3bd Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/bunny_still.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/drone_left.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/drone_left.png new file mode 100644 index 0000000000000000000000000000000000000000..1d54216918f5690661d5a98c5ce4c87d24f69aa0 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/drone_left.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/drone_right.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/drone_right.png new file mode 100644 index 0000000000000000000000000000000000000000..2cce9e3d11ba947a1a68b0d1ee96896669f5e699 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/drone_right.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/drone_still.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/drone_still.png new file mode 100644 index 0000000000000000000000000000000000000000..c08c7b4793bdd54cfe2e297936bc8969effb87fc Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/drone_still.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/glowy_left.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/glowy_left.png new file mode 100644 index 0000000000000000000000000000000000000000..a681c399255dfb85a79f75ddf2a7d1af23cdd8ea Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/glowy_left.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/glowy_right.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/glowy_right.png new file 
mode 100644 index 0000000000000000000000000000000000000000..a153e7a5f05a120fa145f5ef730a81c4f2f81f01 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/glowy_right.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/glowy_still.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/glowy_still.png new file mode 100644 index 0000000000000000000000000000000000000000..7f7cfc89fb42a54b844022364e67db4c7e763d05 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/glowy_still.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/rabbit_left.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/rabbit_left.png new file mode 100644 index 0000000000000000000000000000000000000000..38f4e084d69f8fe41d4c9d3c6b1436d2900493d8 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/rabbit_left.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/rabbit_right.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/rabbit_right.png new file mode 100644 index 0000000000000000000000000000000000000000..97da084572a71040233826928d9c7ffc304156e5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/rabbit_right.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/rabbit_still.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/rabbit_still.png new file mode 100644 index 0000000000000000000000000000000000000000..7c8607317af8b74bb60a71fbfd840785bb16a868 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/rabbit_still.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/reptile_left.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/reptile_left.png new file mode 100644 index 0000000000000000000000000000000000000000..66ae543c087fce3048f3bd1979a0d5bb34397178 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/reptile_left.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/reptile_right.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/reptile_right.png new file mode 100644 index 0000000000000000000000000000000000000000..3b3a232db9ba79bba14b361b3d43e40e18bbc64b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/reptile_right.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/reptile_still.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/reptile_still.png new file mode 100644 index 0000000000000000000000000000000000000000..0d4cb9e0dc576d4e94971d0e2df6e5373e4aa728 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/reptile_still.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/robot_left.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/robot_left.png new file mode 100644 index 0000000000000000000000000000000000000000..eea033a32d4435578a9d0d8d57df9a31840267c0 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/robot_left.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/robot_right.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/robot_right.png new file mode 100644 index 
0000000000000000000000000000000000000000..7800f67db4aff60ef44836d4191aa42d53b599b5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/robot_right.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/robot_still.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/robot_still.png new file mode 100644 index 0000000000000000000000000000000000000000..ddec05349a9cd324bbf1d978d08656ca6058def6 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/robot_still.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/tank_left.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/tank_left.png new file mode 100644 index 0000000000000000000000000000000000000000..db62113daa8ae016de18c8736fc95918fd31fb92 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/tank_left.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/tank_right.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/tank_right.png new file mode 100644 index 0000000000000000000000000000000000000000..b09ca2895e2d5e6ebdfa15986f49ceb4fbb1bda9 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/tank_right.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/tank_still.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/tank_still.png new file mode 100644 index 0000000000000000000000000000000000000000..1f52ce2040fd7958aef8ee39d2a71b732e06896a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/tank_still.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/ufo_left.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/ufo_left.png new file mode 100644 index 0000000000000000000000000000000000000000..0b8c43d610f24c1aba8c82bf3bbc53786b1d0c4f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/ufo_left.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/ufo_right.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/ufo_right.png new file mode 100644 index 0000000000000000000000000000000000000000..4c40ec8e57b18aefdea098b041a3e485e5b421c0 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/ufo_right.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/ufo_still.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/ufo_still.png new file mode 100644 index 0000000000000000000000000000000000000000..f473db2a2a1c3bdde773396afd59c8b122cd33a8 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison/sprites/ufo_still.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prison_v3.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..0d6132bc23b80635515673417c7962c5431b0fd6 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/prison_v3.py @@ -0,0 +1 @@ +from .prison.prison import env, manual_control, parallel_env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__init__.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6c7ca41d4beb871eeeb155c2c5175be45db92e3b
Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/__init__.cpython-39.pyc differ
diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/constants.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/constants.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ccd6a90a6f416874755ebfb371ffa9cc75fd1536
Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/constants.cpython-39.pyc differ
diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/manual_control.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/manual_control.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7fb724a01d9dde001077ecf7c3b00820fb2db075
Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/manual_control.cpython-39.pyc differ
diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/prospector.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/prospector.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..60ea4bdcd038a2231bb93f9ddea1f0e24ccf9432
Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/prospector.cpython-39.pyc differ
diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/utils.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/utils.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..58862d3167f175ea538c883dad9da5369b850254
Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/__pycache__/utils.cpython-39.pyc differ
diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/constants.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..4406bb3b1808eb52ab34f4a6dbb7cdfe4272c998
--- /dev/null
+++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/constants.py
@@ -0,0 +1,95 @@
+import math
+
+SCREEN_SIZE = SCREEN_WIDTH, SCREEN_HEIGHT = (1280, 720)
+BACKGROUND_COLOR = (217, 151, 106)
+
+FPS = 15
+STEPS_PER_FRAME = 10
+SPACE_STEP_DELTA = 1 / (FPS * STEPS_PER_FRAME)
+
+TILE_SIZE = 50
+FENCE_WIDTH = 22
+WATER_HEIGHT = 100
+
+AGENT_RADIUS = 15
+AGENT_DIAMETER = AGENT_RADIUS * 2
+GOLD_RADIUS = 6
+# The 3 is for RGB values
+PROSPEC_OBSERV_SIDE_LEN = 5 * AGENT_DIAMETER
+PROSPEC_OBSERV_SHAPE = (PROSPEC_OBSERV_SIDE_LEN, PROSPEC_OBSERV_SIDE_LEN, 3)
+# Slightly bigger for bankers - 2 pixels bigger on each side
+BANKER_OBSERV_SIDE_LEN = PROSPEC_OBSERV_SIDE_LEN + 4
+BANKER_OBSERV_SHAPE = (BANKER_OBSERV_SIDE_LEN, BANKER_OBSERV_SIDE_LEN, 3)
+
+MAX_SPRITE_ROTATION = math.pi / 4. / 15. * FPS
+
+NUM_PROSPECTORS = 4
+NUM_BANKERS = 3
+NUM_AGENTS = NUM_PROSPECTORS + NUM_BANKERS
+
+PROSPECTOR_SPEED = 1800 * 80 / 15. * FPS
+BANKER_SPEED = 1200 * 80 / 15. * FPS
+BANKER_HANDOFF_TOLERANCE = math.pi / 4
+TWO_PI = math.pi * 2.0
+
+if PROSPECTOR_SPEED % 1.
!= 0.: + raise ValueError(f'FPS of {FPS} leads to decimal place value of {PROSPECTOR_SPEED} for PROSPECTOR_SPEED.') +if BANKER_SPEED % 1. != 0.: + raise ValueError(f'FPS of {FPS} leads to decimal place value of {BANKER_SPEED} for BANKER_SPEED.') +PROSPECTOR_SPEED = int(PROSPECTOR_SPEED) +BANKER_SPEED = int(BANKER_SPEED) + +FENCE_COLLISION_BUFFER = AGENT_DIAMETER +VERT_FENCE_HEIGHT = SCREEN_HEIGHT - WATER_HEIGHT + +# For the left and right fences +FENCE_VERT_VERTICES = ( + (0, 0), + (FENCE_WIDTH + FENCE_COLLISION_BUFFER, 0), + (FENCE_WIDTH + FENCE_COLLISION_BUFFER, VERT_FENCE_HEIGHT), + (0, VERT_FENCE_HEIGHT), +) + +# For the top fence +FENCE_HORIZ_VERTICES = ( + (0, 0), + (SCREEN_WIDTH, 0), + (SCREEN_WIDTH, FENCE_WIDTH + FENCE_COLLISION_BUFFER), + (0, FENCE_WIDTH + FENCE_COLLISION_BUFFER), +) + +FENCE_INFO = [ + ("left", [0, 0], [0, 0], FENCE_VERT_VERTICES), # left boundary + ("top", [0, 0], [0, 0], FENCE_HORIZ_VERTICES), # top boundary + ( + "right", + [SCREEN_WIDTH - FENCE_WIDTH, 0], + [SCREEN_WIDTH - (FENCE_WIDTH + FENCE_COLLISION_BUFFER), 0], + FENCE_VERT_VERTICES, + ), +] + +BANK_SIZE = BANK_WIDTH, BANK_HEIGHT = 184, 100 + +BANK_VERTS = ( + (0, 0), + (BANK_WIDTH, 0), + (BANK_WIDTH, BANK_HEIGHT), + (0, BANK_HEIGHT), +) + +BANK_INFO = [ + ([184 * 1, 50], BANK_VERTS), + ([184 * 3, 50], BANK_VERTS), + ([184 * 5, 50], BANK_VERTS), +] + +WATER_INFO = [ + (0, SCREEN_HEIGHT - WATER_HEIGHT), # position + ( # vertices + (0, 0), + (SCREEN_WIDTH, 0), + (SCREEN_WIDTH, WATER_HEIGHT), + (0, WATER_HEIGHT), + ), +] diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bank.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bank.png new file mode 100644 index 0000000000000000000000000000000000000000..ab3a8df29437f5b050950eb9acbe5d8cb1bc4481 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bank.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bankers/0.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bankers/0.png new file mode 100644 index 0000000000000000000000000000000000000000..e0af308f03a8916740f283b5fd300b6b0fb3637b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bankers/0.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bankers/1.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bankers/1.png new file mode 100644 index 0000000000000000000000000000000000000000..287aae1446538c7f1fb134830856bf503f284815 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bankers/1.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bankers/2.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bankers/2.png new file mode 100644 index 0000000000000000000000000000000000000000..2a88ee75375e304d4c52694acff8c0b3dac75d18 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/bankers/2.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/0.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/0.png new file mode 100644 index 0000000000000000000000000000000000000000..bd974f90a5a3da031f9254928df2f9bddce86822 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/0.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/1.png 
b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/1.png new file mode 100644 index 0000000000000000000000000000000000000000..75391f5fc34111c51476f4a5eaebe3246b3243be Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/1.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/2.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/2.png new file mode 100644 index 0000000000000000000000000000000000000000..bd171ae01c58e9c01b2c6013b84dd8607f13cd52 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/2.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/3.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/3.png new file mode 100644 index 0000000000000000000000000000000000000000..4bc4e18fafe1e28a4156a4d6bec14c6db8251bc4 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/3.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/seaweed_water.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/seaweed_water.png new file mode 100644 index 0000000000000000000000000000000000000000..296d207aacb384bbd37b49239d82f94b9eaa5e28 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/debris/seaweed_water.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/fence_horiz_tile.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/fence_horiz_tile.png new file mode 100644 index 0000000000000000000000000000000000000000..6d1b01c2253a35b611ba549a89ea6e25095ea8fa Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/fence_horiz_tile.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/fence_vert_tile.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/fence_vert_tile.png new file mode 100644 index 0000000000000000000000000000000000000000..b64be451f1ff2bbfc2a95826dd58f67df67221df Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/fence_vert_tile.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/gold.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/gold.png new file mode 100644 index 0000000000000000000000000000000000000000..6c7114330a563b8d9fba846791832cfa2300fbc1 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/gold.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/prospector.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/prospector.png new file mode 100644 index 0000000000000000000000000000000000000000..b6566c215a3afbb52cbe3c176d03a1054928de1a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/prospector.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/river_tile.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/river_tile.png new file mode 100644 index 0000000000000000000000000000000000000000..5085fb98d8bac4869b7f200940c3fb9ffbf002a0 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/river_tile.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/river_to_sand_tile.png 
b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/river_to_sand_tile.png new file mode 100644 index 0000000000000000000000000000000000000000..09c60cf60d8f1cdc913c3837a57f46d96205ebf5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/river_to_sand_tile.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/sand_tile.png b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/sand_tile.png new file mode 100644 index 0000000000000000000000000000000000000000..ae98d27811fa2f88a9291b828f72d948dfa3fdcb Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/data/sand_tile.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/manual_control.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/manual_control.py new file mode 100644 index 0000000000000000000000000000000000000000..a623b3c34262dfc924f7634d983ff9558a787cce --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/manual_control.py @@ -0,0 +1,65 @@ +import os + +import numpy as np +import pygame + +from . import constants as const + + +def manual_control(**kwargs): + from .prospector import env as _env + + env = _env(**kwargs) + clock = pygame.time.Clock() + env.reset() + default_scalar = 1 + agent = 0 + done = False + quit_while = False + + while not done: + clock.tick(const.FPS) + agent_actions = ( + [np.array([0, 0, 0], dtype=np.float32) for _ in range(const.NUM_PROSPECTORS)] + + [np.array([0, 0], dtype=np.float32) for _ in range(const.NUM_BANKERS)] + ) + for event in pygame.event.get(): + # Use left/right arrow keys to switch between agents + # Use WASD to control bankers + # Use WASD and QE to control prospectors + # Note: QE while selecting a banker has no effect. 
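+            # Per the arrays above: prospector agents (0-3) take a 3-vector action [forward/backward, left/right, rotation]; banker agents (4-6) take a 2-vector [up/down, left/right]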
+ if event.type == pygame.KEYDOWN: + # Agent selection + if event.key == pygame.K_LEFT: + agent = (agent - 1) % const.NUM_AGENTS + elif event.key == pygame.K_RIGHT: + agent = (agent + 1) % const.NUM_AGENTS + # Forward/backward or up/down movement + elif event.key == pygame.K_w: + agent_actions[agent][0] = default_scalar + elif event.key == pygame.K_s: + agent_actions[agent][0] = -default_scalar + # left/right movement + elif event.key == pygame.K_a: + agent_actions[agent][1] = -default_scalar + elif event.key == pygame.K_d: + agent_actions[agent][1] = default_scalar + # rotation + elif event.key == pygame.K_q: + if 0 <= agent <= 3: + agent_actions[agent][2] = default_scalar + elif event.key == pygame.K_e: + if 0 <= agent <= 3: + agent_actions[agent][2] = -default_scalar + elif event.key == pygame.K_ESCAPE: + done = True + quit_while = True + if quit_while: + break + for a in agent_actions: + env.step(a) + env.render() + + done = any(env.dones.values()) + + env.close() diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/prospector.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/prospector.py new file mode 100644 index 0000000000000000000000000000000000000000..da87c79c3bfa705a9f3233ad38af685638a5e765 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/prospector.py @@ -0,0 +1,896 @@ +import itertools as it +import math +import os +from enum import IntEnum, auto + +import numpy as np +import pygame as pg +import pymunk as pm +from gym import spaces +from gym.utils import EzPickle, seeding +from pymunk import Vec2d + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from . import constants as const +from . import utils +from .manual_control import manual_control + + +class CollisionTypes(IntEnum): + PROSPECTOR = auto() + BOUNDARY = auto() + WATER = auto() + BANK = auto() + GOLD = auto() + BANKER = auto() + + +class Prospector(pg.sprite.Sprite): + def __init__(self, pos, space, num, *sprite_groups): + super().__init__(sprite_groups) + self.image = utils.load_image(["prospector.png"]) + self.id = num + + self.rect = self.image.get_rect(center=pos) + self.orig_image = self.image.copy() + + # Create the physics body and shape of this object. + moment = pm.moment_for_circle(1, 0, const.AGENT_RADIUS) + + self.body = pm.Body(1, moment, body_type=pm.Body.DYNAMIC) + self.body.nugget = None + self.body.sprite_type = "prospector" + + self.shape = pm.Circle(self.body, const.AGENT_RADIUS) + self.shape.elasticity = 0.0 + self.shape.collision_type = CollisionTypes.PROSPECTOR + + self.body.position = utils.flipy(pos) + # Add them to the Pymunk space. 
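+        # The PROSPECTOR collision_type assigned above is what the environment's collision handlers key on (e.g. the prospector/water handler that spawns gold nuggets)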
+ self.space = space + self.space.add(self.body, self.shape) + + def reset(self, pos): + self.body.angle = 0 + self.body.angular_velocity = 0 + self.image = pg.transform.rotozoom(self.orig_image, 0, 1) + self.rect = self.image.get_rect(center=pos) + self.body.position = utils.flipy(pos) + self.body.velocity = Vec2d(0.0, 0.0) + self.body.force = Vec2d(0.0, 0.0) + self.body.nugget = None + + @property + def center(self): + return self.rect.center + + def update(self, action): + # These actions are performed with the agent's angle in mind + # forward/backward action + y_vel = action[0] * const.PROSPECTOR_SPEED + # left/right action + x_vel = action[1] * const.PROSPECTOR_SPEED + + delta_angle = action[2] * const.MAX_SPRITE_ROTATION + + self.body.angle += delta_angle + self.body.angular_velocity = 0 + + move = pm.Vec2d(x_vel, y_vel) + self.body.apply_force_at_local_point(move, point=(0, 0)) + + def synchronize_center(self): + self.rect.center = utils.flipy(self.body.position) + self.image = pg.transform.rotate(self.orig_image, math.degrees(self.body.angle)) + self.rect = self.image.get_rect(center=self.rect.center) + + def update_gold(self): + if self.body.nugget is not None: + self.body.nugget.update(self.body.position, self.body.angle, False) + + def convert_img(self): + self.image = self.image.convert_alpha() + + def __str__(self): + return f"prospector_{self.id}" + + def __repr__(self): + return self.__str__() + + +class Banker(pg.sprite.Sprite): + def __init__(self, pos, space, num, *sprite_groups): + super().__init__(sprite_groups) + self.image = utils.load_image(["bankers", f"{num}.png"]) + self.id = num + + self.rect = self.image.get_rect(center=pos) + self.orig_image = self.image.copy() + + moment = pm.moment_for_circle(1, 0, const.AGENT_RADIUS) + + self.body = pm.Body(1, moment, body_type=pm.Body.DYNAMIC) + self.body.nugget = None + self.body.sprite_type = "banker" + + self.shape = pm.Circle(self.body, const.AGENT_RADIUS) + self.shape.collision_type = CollisionTypes.BANKER + + self.body.position = utils.flipy(pos) + # Add them to the Pymunk space. + self.space = space + self.space.add(self.body, self.shape) + + def reset(self, pos): + self.body.angle = 0 + self.image = pg.transform.rotozoom(self.orig_image, 0, 1) + self.rect = self.image.get_rect(center=pos) + self.body.position = utils.flipy(pos) + self.body.velocity = Vec2d(0.0, 0.0) + self.body.nugget = None + + @property + def center(self): + return self.rect.center + + def update(self, action): + # up/down action + y_vel = action[0] * const.BANKER_SPEED + # left/right action + x_vel = action[1] * const.BANKER_SPEED + + # Subtract math.pi / 2 because sprite starts off with math.pi / 2 rotated + angle_radians = math.atan2(y_vel, x_vel) - (math.pi / 2) + + # Angle is determined only by current trajectory. 
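+        # atan2(y_vel, x_vel) gives the heading of the requested velocity; an all-zero action keeps the previous heading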
+ if not all(a == 0 for a in action): + self.body.angle = angle_radians + self.body.angular_velocity = 0 + + # rotate movement backwards with a magnitude of self.body.angle + # so that sprite moves forward in chosen direction + move = pm.Vec2d(x_vel, y_vel).rotated(-self.body.angle) + self.body.apply_force_at_local_point(move, point=(0, 0)) + + def synchronize_center(self): + self.rect.center = utils.flipy(self.body.position) + self.image = pg.transform.rotate(self.orig_image, math.degrees(self.body.angle)) + self.rect = self.image.get_rect(center=self.rect.center) + + def update_gold(self): + if self.body.nugget is not None: + self.body.nugget.update( + self.body.position, self.body.angle + (math.pi / 2), True + ) + + def convert_img(self): + self.image = self.image.convert_alpha() + + def __str__(self): + return f"banker_{self.id}" + + def __repr__(self): + return self.__str__() + + +class Fence(pg.sprite.Sprite): + def __init__(self, w_type, sprite_pos, body_pos, verts, space, *sprite_groups): + super().__init__(sprite_groups) + + self.rects = [] + if w_type == "top": + self.tile = utils.load_image(["fence_horiz_tile.png"]) + size = self.tile.get_rect().size + + x = 15 + y = 0 + while x <= 1230: + rect = pg.Rect(x, y, *size) + self.rects.append(rect) + x += 50 + + elif w_type in ["right", "left"]: + self.tile = utils.load_image(["fence_vert_tile.png"]) + size = self.tile.get_rect().size + + x = 6 if w_type == "left" else 1265 + y = 0 + while y <= const.VERT_FENCE_HEIGHT: + rect = pg.Rect(x, y, *size) + self.rects.append(rect) + y += 33 + else: + raise ValueError("Fence image not found! Check the spelling") + + self.body = pm.Body(body_type=pm.Body.STATIC) + + # Transform pygame vertices to fit Pymunk body + invert_verts = utils.invert_y(verts) + self.shape = pm.Poly(self.body, invert_verts) + self.shape.elasticity = 0.0 + self.shape.collision_type = CollisionTypes.BOUNDARY + + self.body.position = utils.flipy(body_pos) + space.add(self.body, self.shape) + + def full_draw(self, screen): + for rect in self.rects: + screen.blit(self.tile, rect) + + def convert_img(self): + self.tile = self.tile.convert_alpha() + + +class Bank(pg.sprite.Sprite): + def __init__(self, pos, verts, space, *sprite_groups): + super().__init__(sprite_groups) + + self.image = utils.load_image(["bank.png"]) + self.rect = self.image.get_rect(topleft=pos) + + self.body = pm.Body(body_type=pm.Body.STATIC) + + invert_verts = utils.invert_y(verts) + self.shape = pm.Poly(self.body, invert_verts) + self.shape.collision_type = CollisionTypes.BANK + + self.body.position = utils.flipy(pos) + self.space = space + self.space.add(self.body, self.shape) + + def convert_img(self): + self.image = self.image.convert_alpha() + + +class Gold(pg.sprite.Sprite): + ids = it.count(0) + + def __init__(self, pos, body, space, *sprite_groups): + super().__init__(sprite_groups) + self.id = next(self.ids) + + self.image = utils.load_image(["gold.png"]) + self.orig_image = self.image + + self.rect = self.image.get_rect() + + self.moment = pm.moment_for_circle(1, 0, const.GOLD_RADIUS) + self.body = pm.Body(1, self.moment, body_type=pm.Body.KINEMATIC) + self.body.position = body.position + + self.shape = pm.Circle(self.body, const.GOLD_RADIUS) + self.shape.collision_type = CollisionTypes.GOLD + # only triggers collision callbacks, doesn't create real collisions + self.shape.sensor = True + self.shape.id = self.id + + self.space = space + self.space.add(self.body, self.shape) + + self.initial_angle = body.angle - Vec2d(0, -1).angle + 
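+        # parent_body is the agent body currently carrying this nugget; it is reassigned when the nugget is handed off to a banker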
self.parent_body = body + + def update(self, pos, angle, banker: bool): + if banker: + new_angle = angle + else: + new_angle = angle - self.initial_angle + new_pos = pos + Vec2d(const.AGENT_RADIUS + 9, 0).rotated(new_angle) + + self.body.position = new_pos + self.body.angular_velocity = 0 + self.rect.center = utils.flipy(self.body.position) + self.image = pg.transform.rotozoom( + self.orig_image, math.degrees(self.body.angle), 1 + ) + self.rect = self.image.get_rect(center=self.rect.center) + + def convert_img(self): + self.image = self.image.convert_alpha() + + +class Water: + def __init__(self, pos, verts, space, rng): + self.num_cols = math.ceil(const.SCREEN_WIDTH / const.TILE_SIZE) + self.num_rows = math.ceil(const.WATER_HEIGHT / const.TILE_SIZE) + + self.top_tile = utils.load_image(["river_to_sand_tile.png"]) + self.tile = utils.load_image(["river_tile.png"]) + + self.debris_tile = utils.load_image(["debris", "seaweed_water.png"]) + tile_size = self.tile.get_size() + + self.rects = [] + for row in range(self.num_rows): + new_row = [] + for col in range(self.num_cols): + rect = pg.Rect( + col * const.TILE_SIZE, pos[1] + (row * const.TILE_SIZE), *tile_size + ) + new_row.append(rect) + self.rects.append(new_row) + + self.body = pm.Body(body_type=pm.Body.STATIC) + + # Transform pygame vertices to fit Pymunk body + invert_verts = utils.invert_y(verts) + self.shape = pm.Poly(self.body, invert_verts) + self.shape.collision_type = CollisionTypes.WATER + + self.body.position = utils.flipy(pos) + self.space = space + self.space.add(self.body, self.shape) + + def generate_debris(self, rng): + self.debris = [] + for col in range(1, self.num_cols - 1, 3): + if rng.random_sample() >= 0.5: + y = rng.randint(0, 2) + x = col + rng.randint(0, 3) + rect = self.rects[y][x].copy() + rect.x += 3 + rect.y += 9 + self.debris.append([self.debris_tile, rect]) + + def full_draw(self, screen): + for rect in self.rects[0]: + screen.blit(self.top_tile, rect) + + for rect in self.rects[1]: + screen.blit(self.tile, rect) + + for pair in self.debris: + screen.blit(pair[0], pair[1]) + + def draw(self, screen): + self.full_draw() + + def convert_img(self): + self.top_tile = self.top_tile.convert_alpha() + self.tile = self.tile.convert_alpha() + self.debris_tile = self.debris_tile.convert_alpha() + + +class Background: + def __init__(self, rng): + self.num_cols = math.ceil(const.SCREEN_WIDTH / const.TILE_SIZE) + self.num_rows = ( + math.ceil((const.SCREEN_HEIGHT - const.WATER_HEIGHT) / const.TILE_SIZE) + 1 + ) + + self.tile = utils.load_image(["sand_tile.png"]) + + self.debris_tiles = { + 0: utils.load_image(["debris", "0.png"]), + 1: utils.load_image(["debris", "1.png"]), + 2: utils.load_image(["debris", "2.png"]), + 3: utils.load_image(["debris", "3.png"]), + } + + # Used when updating environment and drawing + self.dirty_rects = [] + + self.rects = [] + # same as (const.TILE_SIZE, const.TILE_SIZE) + tile_size = self.tile.get_size() + for row in range(self.num_rows): + new_row = [] + for col in range(self.num_cols): + rect = pg.Rect(col * const.TILE_SIZE, row * const.TILE_SIZE, *tile_size) + new_row.append(rect) + self.rects.append(new_row) + + def generate_debris(self, rng): + self.debris = {} + for row in range(1, self.num_rows - 1, 3): + for col in range(1, self.num_cols - 1, 3): + y = row + rng.randint(0, 3) + if y == self.num_rows - 2: + y += -1 + x = col + rng.randint(0, 3) + choice = rng.randint(0, 4) + self.debris[self.rects[y][x].topleft] = self.debris_tiles[choice] + + def full_draw(self, screen): + for row 
in self.rects: + for rect in row: + screen.blit(self.tile, rect) + debris = self.debris.get(rect.topleft, None) + if debris is not None: + screen.blit(debris, rect) + + def draw(self, screen): + # self.full_draw(screen) + for rect in self.dirty_rects: + screen.blit(self.tile, rect) + debris = self.debris.get(rect.topleft, None) + if debris is not None: + screen.blit(debris, rect) + + self.dirty_rects.clear() + + def update(self, sprite_rect: pg.Rect): + top_y = int(sprite_rect.top // const.TILE_SIZE) + bottom_y = int(sprite_rect.bottom // const.TILE_SIZE) + left_x = int(sprite_rect.left // const.TILE_SIZE) + right_x = int(sprite_rect.right // const.TILE_SIZE) + + self.dirty_rects.append(self.rects[top_y][left_x]) + self.dirty_rects.append(self.rects[top_y][right_x]) + self.dirty_rects.append(self.rects[bottom_y][left_x]) + self.dirty_rects.append(self.rects[bottom_y][right_x]) + + def convert_img(self): + self.tile = self.tile.convert_alpha() + + for i in self.debris_tiles: + self.debris_tiles[i].convert_alpha() + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.ClipOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +parallel_env = parallel_wrapper_fn(env) + + +class raw_env(AECEnv, EzPickle): + def __init__( + self, + ind_reward=0.8, + group_reward=0.1, + other_group_reward=0.1, + prospec_find_gold_reward=1, + prospec_handoff_gold_reward=1, + banker_receive_gold_reward=1, + banker_deposit_gold_reward=1, + max_cycles=450, + ): + EzPickle.__init__( + self, + ind_reward, + group_reward, + other_group_reward, + prospec_find_gold_reward, + prospec_handoff_gold_reward, + banker_receive_gold_reward, + banker_deposit_gold_reward, + max_cycles, + ) + + total_reward_factor = ind_reward + group_reward + other_group_reward + if not math.isclose(total_reward_factor, 1.0, rel_tol=1e-09): + raise ValueError( + "The sum of the individual reward, group reward, and other " + "group reward should add up to approximately 1.0" + ) + + self.agents = [] + + self.sprite_list = [ + "bankers/0.png", + "bankers/1.png", + "bankers/2.png", + "prospector.png", + ] + self.max_cycles = max_cycles + + pg.init() + self.seed() + self.closed = False + + self.background = Background(self.rng) + + self.space = pm.Space() + self.space.gravity = Vec2d(0.0, 0.0) + self.space.iterations = 20 # for decreasing bounciness + self.space.damping = 0.0 + + self.all_sprites = pg.sprite.RenderUpdates() + self.gold = [] + + self.water = Water( + const.WATER_INFO[0], const.WATER_INFO[1], self.space, self.rng + ) + + # Generate random positions for each prospector agent + prospector_info = [ + (i, utils.rand_pos("prospector", self.rng)) + for i in range(const.NUM_PROSPECTORS) + ] + self.prospectors = {} + for num, pos in prospector_info: + prospector = Prospector(pos, self.space, num, self.all_sprites) + identifier = f"prospector_{num}" + self.prospectors[identifier] = prospector + self.agents.append(identifier) + + banker_info = [ + (i, utils.rand_pos("banker", self.rng)) for i in range(const.NUM_BANKERS) + ] + self.bankers = {} + for num, pos in banker_info: + banker = Banker(pos, self.space, num, self.all_sprites) + identifier = f"banker_{num}" + self.bankers[identifier] = banker + self.agents.append(identifier) + + self.banks = [] + for pos, verts in const.BANK_INFO: + self.banks.append(Bank(pos, verts, self.space, self.all_sprites)) + + self.fences = [] + for w_type, s_pos, b_pos, verts in const.FENCE_INFO: + f = Fence(w_type, s_pos, b_pos, verts, self.space) + self.fences.append(f) + + 
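+        # Standard PettingZoo metadata: render modes, environment name, parallel-API compatibility flag and video frame rate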
self.metadata = { + "render.modes": ["human", "rgb_array"], + 'name': "prospector_v4", + 'is_parallelizable': True, + 'video.frames_per_second': const.FPS, + } + + self.action_spaces = {} + for p in self.prospectors: + self.action_spaces[p] = spaces.Box( + low=np.float32(-1.0), high=np.float32(1.0), shape=(3,) + ) + + for b in self.bankers: + self.action_spaces[b] = spaces.Box( + low=np.float32(-1.0), high=np.float32(1.0), shape=(2,) + ) + + self.observation_spaces = {} + self.last_observation = {} + + for p in self.prospectors: + self.last_observation[p] = None + self.observation_spaces[p] = spaces.Box( + low=0, high=255, shape=const.PROSPEC_OBSERV_SHAPE, dtype=np.uint8 + ) + + for b in self.bankers: + self.last_observation[b] = None + self.observation_spaces[b] = spaces.Box( + low=0, high=255, shape=const.BANKER_OBSERV_SHAPE, dtype=np.uint8 + ) + + self.state_space = spaces.Box(low=0, high=255, shape=((const.SCREEN_HEIGHT, const.SCREEN_WIDTH, 3)), dtype=np.uint8) + + self.possible_agents = self.agents[:] + self._agent_selector = agent_selector(self.agents) + self.agent_selection = self._agent_selector.next() + self.reset() + + # Collision Handler Functions -------------------------------------------- + # Water to Prospector + def add_gold(arbiter, space, data): + prospec_shape = arbiter.shapes[0] + prospec_body = prospec_shape.body + + for k, v in self.prospectors.items(): + if v.body is prospec_body: + self.rewards[k] += ind_reward * prospec_find_gold_reward + else: + self.rewards[k] += group_reward * prospec_find_gold_reward + + for k in self.bankers: + self.rewards[k] += other_group_reward * prospec_find_gold_reward + + if prospec_body.nugget is None: + position = arbiter.contact_point_set.points[0].point_a + + gold = Gold(position, prospec_body, self.space, self.all_sprites) + self.gold.append(gold) + prospec_body.nugget = gold + + return True + + # Prospector to banker + def handoff_gold_handler(arbiter, space, data): + banker_shape, gold_shape = arbiter.shapes + + gold_sprite = None + for g in self.gold: + if g.id == gold_shape.id: + gold_sprite = g + + # gold_sprite is None if gold was handed off to the bank right before + # calling this collision handler + # This collision handler is only for prospector -> banker gold handoffs + if ( + gold_sprite is None + or gold_sprite.parent_body.sprite_type != "prospector" + ): + return True + + banker_body = banker_shape.body + prospec_body = gold_sprite.parent_body + + normal = arbiter.contact_point_set.normal + # Correct the angle because banker's head is rotated pi/2 + corrected = utils.normalize_angle(banker_body.angle + (math.pi / 2)) + normalized_normal = utils.normalize_angle(normal.angle) + if ( + corrected - const.BANKER_HANDOFF_TOLERANCE + <= normalized_normal + <= corrected + const.BANKER_HANDOFF_TOLERANCE + ): + + # transfer gold + gold_sprite.parent_body.nugget = None + gold_sprite.parent_body = banker_body + banker_body.nugget = gold_sprite + + for k, v in self.prospectors.items(): + self.rewards[k] += other_group_reward * banker_receive_gold_reward + if v.body is prospec_body: + self.rewards[k] += ind_reward * prospec_handoff_gold_reward + else: + self.rewards[k] += group_reward * prospec_handoff_gold_reward + + for k, v in self.bankers.items(): + self.rewards[k] += other_group_reward * prospec_handoff_gold_reward + if v.body is banker_body: + self.rewards[k] += ind_reward * banker_receive_gold_reward + else: + self.rewards[k] += group_reward * banker_receive_gold_reward + + return True + + # Banker to bank + def 
gold_score_handler(arbiter, space, data): + gold_shape, _ = arbiter.shapes + + for g in self.gold: + if g.id == gold_shape.id: + gold_class = g + + if gold_class.parent_body.sprite_type == "banker": + self.space.remove(gold_shape, gold_shape.body) + gold_class.parent_body.nugget = None + banker_body = gold_class.parent_body + + for k, v in self.bankers.items(): + if v.body is banker_body: + self.rewards[k] += ind_reward * banker_deposit_gold_reward + else: + self.rewards[k] += group_reward * banker_deposit_gold_reward + + for k in self.prospectors: + self.rewards[k] += other_group_reward * banker_deposit_gold_reward + + self.gold.remove(gold_class) + self.all_sprites.remove(gold_class) + + return False + + # Create the collision event generators + gold_dispenser = self.space.add_collision_handler( + CollisionTypes.PROSPECTOR, CollisionTypes.WATER + ) + + gold_dispenser.begin = add_gold + + handoff_gold = self.space.add_collision_handler( + CollisionTypes.BANKER, CollisionTypes.GOLD + ) + + handoff_gold.begin = handoff_gold_handler + + gold_score = self.space.add_collision_handler( + CollisionTypes.GOLD, CollisionTypes.BANK + ) + + gold_score.begin = gold_score_handler + + def seed(self, seed=None): + self.rng, seed = seeding.np_random(seed) + + def observe(self, agent): + capture = pg.surfarray.pixels3d(self.screen) + if agent in self.prospectors: + ag = self.prospectors[agent] + side_len = const.PROSPEC_OBSERV_SIDE_LEN + else: + ag = self.bankers[agent] + side_len = const.BANKER_OBSERV_SIDE_LEN + + delta = side_len // 2 + x, y = ag.center # Calculated property added to prospector and banker classes + sub_screen = np.array( + capture[ + max(0, x - delta): min(const.SCREEN_WIDTH, x + delta), + max(0, y - delta): min(const.SCREEN_HEIGHT, y + delta), :, + ], + dtype=np.uint8, + ) + + s_x, s_y, _ = sub_screen.shape + pad_x = side_len - s_x + + if x > const.SCREEN_WIDTH - delta: # Right side of the screen + sub_screen = np.pad( + sub_screen, pad_width=((0, pad_x), (0, 0), (0, 0)), mode="constant" + ) + elif x < 0 + delta: + sub_screen = np.pad( + sub_screen, pad_width=((pad_x, 0), (0, 0), (0, 0)), mode="constant" + ) + + pad_y = side_len - s_y + + if y > const.SCREEN_HEIGHT - delta: # Bottom of the screen + sub_screen = np.pad( + sub_screen, pad_width=((0, 0), (0, pad_y), (0, 0)), mode="constant" + ) + elif y < 0 + delta: + sub_screen = np.pad( + sub_screen, pad_width=((0, 0), (pad_y, 0), (0, 0)), mode="constant" + ) + + sub_screen = np.rot90(sub_screen, k=3) + sub_screen = np.fliplr(sub_screen).astype(np.uint8) + self.last_observation[agent] = sub_screen + + return sub_screen + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def state(self): + ''' + Returns an observation of the global environment + ''' + state = pg.surfarray.pixels3d(self.screen).copy() + state = np.rot90(state, k=3) + state = np.fliplr(state) + return state + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + agent_id = self.agent_selection + all_agents_updated = self._agent_selector.is_last() + self.rewards = {agent: 0 for agent in self.agents} + + if agent_id in self.prospectors: + agent = self.prospectors[agent_id] + else: + agent = self.bankers[agent_id] + + self.background.update(agent.rect) + + nugget = agent.body.nugget + if nugget is not None: + self.background.update(nugget.rect) + + agent.update(action) + + # Only take next step in game if all agents have 
received an action + if all_agents_updated: + for _ in range(const.STEPS_PER_FRAME): + self.space.step(const.SPACE_STEP_DELTA) + + for pr in self.prospectors.values(): + pr.synchronize_center() + pr.update_gold() + self.background.update(pr.rect) + + nugget = pr.body.nugget + if nugget is not None: + self.background.update(nugget.rect) + + for b in self.bankers.values(): + b.synchronize_center() + b.update_gold() + self.background.update(b.rect) + + nugget = b.body.nugget + if nugget is not None: + self.background.update(nugget.rect) + + self.draw() + + self.frame += 1 + # If we reached max frames, we're done + if self.frame == self.max_cycles: + self.dones = dict(zip(self.agents, [True for _ in self.agents])) + + if self.rendering: + pg.event.pump() + + self.agent_selection = self._agent_selector.next() + self._cumulative_rewards[agent_id] = 0 + self._accumulate_rewards() + + def reset(self): + self.screen = pg.Surface(const.SCREEN_SIZE) + self.done = False + + self.background.generate_debris(self.rng) + self.water.generate_debris(self.rng) + + for p in self.prospectors.values(): + p.reset(utils.rand_pos("prospector", self.rng)) + + for b in self.bankers.values(): + b.reset(utils.rand_pos("banker", self.rng)) + + for g in self.gold: + self.space.remove(g.shape, g.body) + self.all_sprites.remove(g) + + self.gold = [] + + self.agents = self.possible_agents[:] + self.rewards = dict(zip(self.agents, [0 for _ in self.agents])) + self._cumulative_rewards = dict(zip(self.agents, [0 for _ in self.agents])) + self.dones = dict(zip(self.agents, [False for _ in self.agents])) + self.infos = dict(zip(self.agents, [{} for _ in self.agents])) + self.rendering = False + self.frame = 0 + self.background.dirty_rects.clear() + + self._agent_selector.reinit(self.agents) + self.agent_selection = self._agent_selector.next() + self.full_draw() + + def render(self, mode="human"): + if mode == "human": + if not self.rendering: + self.rendering = True + pg.display.init() + self.screen = pg.display.set_mode(const.SCREEN_SIZE) + self.background.convert_img() + self.water.convert_img() + for f in self.fences: + f.convert_img() + for s in self.all_sprites.sprites(): + s.convert_img() + self.full_draw() + + pg.display.flip() + + elif mode == "rgb_array": # no display, return whole screen as array + observation = np.array(pg.surfarray.pixels3d(self.screen)) + transposed = np.transpose(observation, axes=(1, 0, 2)) + return transposed + + def full_draw(self): + """ Called to draw everything when first rendering """ + self.background.full_draw(self.screen) + for f in self.fences: + f.full_draw(self.screen) + self.water.full_draw(self.screen) + self.all_sprites.draw(self.screen) + + def draw(self): + """ Called after each frame, all agents updated """ + self.background.draw(self.screen) + for f in self.fences: + f.full_draw(self.screen) + self.water.full_draw(self.screen) + self.all_sprites.draw(self.screen) + + def close(self): + if not self.closed: + self.closed = True + if self.rendering: + pg.event.pump() + pg.display.quit() + pg.quit() + + +# Art by Keira Wentworth diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/utils.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..16e844e0d4742c7f9da5b4df610f8f44337116e4 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector/utils.py @@ -0,0 +1,39 @@ +import os + +import pygame as pg +from pymunk import Vec2d + +from . 
import constants as const + + +def load_image(path: list) -> pg.Surface: # All images stored in data/ + cwd = os.path.dirname(__file__) + img = pg.image.load(os.path.join(cwd, "data", *path)) + sfc = pg.Surface(img.get_size(), flags=pg.SRCALPHA) + sfc.blit(img, (0, 0)) + return sfc + + +# Convert chipmunk coords to pymunk coords, flipping and offsetting y-coordinate +def flipy(point): + return Vec2d(point[0], const.SCREEN_HEIGHT - point[1]) + + +def invert_y(points): + return [(x, -y) for x, y in points] + + +def rand_pos(sprite, rng): + x = rng.randint(100, const.SCREEN_WIDTH - 100) + if sprite == "banker": + return x, rng.randint(170, 300) + elif sprite == "prospector": + return x, rng.randint(350, const.SCREEN_HEIGHT - (const.WATER_HEIGHT + 30)) + + +def normalize_angle(angle): + if angle > const.TWO_PI: + return angle - const.TWO_PI + elif angle < 0.0: + return angle + const.TWO_PI + return angle diff --git a/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector_v4.py b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..a754029d3fd7abe1771a578dbae59f062376ec92 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/butterfly/prospector_v4.py @@ -0,0 +1 @@ +from .prospector.prospector import env, manual_control, parallel_env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__init__.py b/MLPY/Lib/site-packages/pettingzoo/classic/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ded2c453b63215d5c2e1c2eb55d820238962a0fb --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/__init__.py @@ -0,0 +1,5 @@ +from pettingzoo.utils.deprecated_module import depricated_handler + + +def __getattr__(env_name): + return depricated_handler(env_name, __path__, __name__) diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4be65481c3ff7ca4da8292c1aa746037e068933f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/backgammon_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/backgammon_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3c4565389094fc3e05f44e41dcb08a90383a6932 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/backgammon_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/checkers_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/checkers_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e7414a5643041c8c30a07d1849dcd1655ba41fab Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/checkers_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/chess_v5.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/chess_v5.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..29e5aca29f3f08fcaf958124d21172d53856a4e0 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/chess_v5.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/connect_four_v3.cpython-39.pyc 
b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/connect_four_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..55c60adf9c05573a23a92c5ccaca396a2d93dbb9 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/connect_four_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/dou_dizhu_v4.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/dou_dizhu_v4.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b814e5822e3af85c400551a50e0f05f686945bad Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/dou_dizhu_v4.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/gin_rummy_v4.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/gin_rummy_v4.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d87c61e4aa157912207c8d55bba13efbc2b51122 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/gin_rummy_v4.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/go_v5.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/go_v5.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c6af5a828a49c3ceaa050e06a2f827f1e2952b9c Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/go_v5.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/hanabi_v4.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/hanabi_v4.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6ca3e147ad2329b98b2177816b84e9937defae2b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/hanabi_v4.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/leduc_holdem_v4.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/leduc_holdem_v4.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..36f43c3e9c02dafddfd9d02fc84055d04a31e108 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/leduc_holdem_v4.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/mahjong_v4.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/mahjong_v4.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a2caa6cf643cd47d735d39156858f820eca90fb1 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/mahjong_v4.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/rps_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/rps_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6ef7cc38a8086bba46774ba8efbad0629a836919 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/rps_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/texas_holdem_no_limit_v6.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/texas_holdem_no_limit_v6.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d21d60adb4d02f240a23159240b06dce2a3aa0c4 Binary files /dev/null and 
b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/texas_holdem_no_limit_v6.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/texas_holdem_v4.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/texas_holdem_v4.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..74841aab71836446b8fda9b93d5c2fc5d4a10c5b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/texas_holdem_v4.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/tictactoe_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/tictactoe_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..70034dac67b49b2a58ffc3fbf09c1b4969b44295 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/tictactoe_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/uno_v4.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/uno_v4.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b7b23c51a96ad178c9df8f85153b4ad10243faa Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/__pycache__/uno_v4.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__init__.py b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e2c208e68497c79e5336197ab79771247e525a8 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/backgammon.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/backgammon.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e6f97a85902cdb969eb46e4d2d688cd65767a6ce Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/backgammon.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/backgammon_env.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/backgammon_env.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..807fedb898e39dca42716ced48869ec1285920f1 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/backgammon_env.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/bg_utils.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/bg_utils.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..40900b4d8a85019b19cc751e8c006514058ae040 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/__pycache__/bg_utils.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/backgammon.py b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/backgammon.py new file mode 100644 index 
0000000000000000000000000000000000000000..52564f002ec852fc0a464275970470e9b975a8b9 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/backgammon.py @@ -0,0 +1,1529 @@ +import itertools +from collections import namedtuple + +# Code based on: https://github.com/dellalibera/gym-backgammon + +WHITE = 0 +BLACK = 1 +NUM_POINTS = 24 +BAR = "bar" +OFF = 'off' +TOKEN = {WHITE: "X", BLACK: "O"} +COLORS = {WHITE: "White", BLACK: 'Black'} + +BackgammonState = namedtuple('BackgammonState', ['board', 'bar', 'off', 'players_positions']) + + +# c = checker +# t = target +# s = source +# p = position + + +def clamp(target): + return -1 if target < 0 else 24 if target > 23 else target + + +def comp1(player, x, y): + return x < abs(y) if player == WHITE else x > (23 - abs(y)) + + +def comp2(player, x, y): + return x > y if player == WHITE else x < y + + +def comp3(player, x, y): + return x >= y if player == WHITE else x <= y + + +def highest(player, positions): + return max(positions, default=None) if player == WHITE else min(positions, default=None) + + +def init_board(): + board = [(0, None)] * NUM_POINTS + board[0] = (2, BLACK) + board[11] = (5, BLACK) + board[16] = (3, BLACK) + board[18] = (5, BLACK) + board[5] = (5, WHITE) + board[7] = (3, WHITE) + board[12] = (5, WHITE) + board[23] = (2, WHITE) + return board + + +class Backgammon: + def __init__(self): + self.board = init_board() + self.bar = [0, 0] + self.off = [0, 0] + self.players_home_positions = {WHITE: [5, 4, 3, 2, 1, 0], BLACK: [18, 19, 20, 21, 22, 23]} + self.players_positions = self.get_players_positions() + self.state = self.save_state() + + def can_bear_off(self, player): + tot = [self.board[position][0] for position in self.players_home_positions[player] if player == self.board[position][1]] + return sum(tot) == 15 - self.off[player] + + def could_bear_off(self, player, roll): + # I could bear off if I use one roll to move the only checkers outside my home board, to my home board. 
Then I can bear off with the other roll + tot = sum(self.board[position][0] for position in self.players_home_positions[player] if player == self.board[position][1]) + threshold = 15 - 1 if len(roll) == 2 else 15 - 3 + return tot >= (threshold - self.off[player]) + + def can_move_to(self, player, target): + if target < 0 or target > 23: + return self.can_bear_off(player) + return self.board[target][0] < 2 or (self.board[target][0] > 1 and self.board[target][1] == player) + + def is_valid(self, player, target): + if 0 <= target < NUM_POINTS: + return self.board[target][0] < 2 or (self.board[target][0] > 1 and self.board[target][1] == player) + return False + + # ================================================================================= + # NORMAL PLAYS ==================================================================== + # ================================================================================= + def get_normal_plays(self, player, roll): + # Generate normal legal plays (not bear off moves) + plays = set() + + positions = self.players_positions[player] + combinations_positions = list(itertools.combinations(positions, 2)) + + for s in positions: + if self.board[s][0] > 1: + combinations_positions.append((s, s)) + + if self.is_valid(player, s + roll[0]) and self.is_valid(player, s + roll[0] + roll[1]): + plays.add(((s, s + roll[0]), (s + roll[0], s + roll[0] + roll[1]))) + + if self.is_valid(player, s + roll[1]) and self.is_valid(player, s + roll[0] + roll[1]): + plays.add(((s, s + roll[1]), (s + roll[1], s + roll[0] + roll[1]))) + + for (s1, s2) in combinations_positions: + t1 = s1 + roll[0] + t2 = s1 + roll[1] + t3 = s2 + roll[0] + t4 = s2 + roll[1] + t_far1 = s1 + roll[0] + roll[1] + t_far2 = s2 + roll[0] + roll[1] + + if self.is_valid(player, t1) and self.is_valid(player, t4): + plays.add(((s1, t1), (s2, t4))) + + if s1 != s2 and self.is_valid(player, t2) and self.is_valid(player, t3): # if (s1 == s2) => (target1 == target3 and target2 == target4). Same move as before, but swapped + plays.add(((s1, t2), (s2, t3))) + + if self.is_valid(player, t1) and self.is_valid(player, t_far1): + plays.add(((s1, t1), (t1, t_far1))) + + if self.is_valid(player, t2) and self.is_valid(player, t_far1): + plays.add(((s1, t2), (t2, t_far1))) + + if s1 != s2 and self.is_valid(player, t3) and self.is_valid(player, t_far2): # if (s1 == s2) => (target_far1 == target_far2) + plays.add(((s2, t3), (t3, t_far2))) + + if s1 != s2 and self.is_valid(player, t4) and self.is_valid(player, t_far2): # if (s1 == s2) => (target_far1 == target_far2) + plays.add(((s2, t4), (t4, t_far2))) + + if len(plays) == 0: + # https://bkgm.com/faq/BasicRules.html#what_if_i_can_only_play_one_number_ + # "If you can play one number but not both, then you must play the higher one." 
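+            # Annotation (not in the original source): the rolls stored for WHITE are
+            # negative (WHITE moves toward lower point indices), so min(roll) selects the
+            # die with the larger absolute value -- the "higher" number required by the
+            # rule quoted above -- while max(roll) does the same for BLACK.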
+ r = min(roll) if player == WHITE else max(roll) + single_moves = self.get_single_moves(player, r) + + if len(single_moves) == 0: + # get the other roll + r = max(roll) if player == WHITE else min(roll) + single_moves = self.get_single_moves(player, r) + + for move in single_moves: + plays.add((move,)) + + return plays + + # NORMAL PLAYS - DOUBLE =========================================================== + def get_normal_plays_double(self, player, roll): + plays = set() + r = roll[0] + + sources = { + 1: [p for p in self.players_positions[player] if self.board[p][0] > 0], + 2: [p for p in self.players_positions[player] if self.board[p][0] > 1], + 3: [p for p in self.players_positions[player] if self.board[p][0] > 2], + 4: [p for p in self.players_positions[player] if self.board[p][0] > 3], + } + + combo2 = set(itertools.combinations(sources[1], 2)) + combo3 = set(itertools.combinations(sources[1], 3)) + + for s1 in sources[4]: + if self.is_valid(player, s1 + r): + plays.add(((s1, s1 + r), (s1, s1 + r), (s1, s1 + r), (s1, s1 + r))) + + for s1 in sources[3]: + if self.is_valid(player, s1 + r): + plays.add(((s1, s1 + r), (s1, s1 + r), (s1, s1 + r))) + + for s2 in sources[1]: + if s1 != s2 and self.is_valid(player, s2 + r): + plays.add(((s1, s1 + r), (s1, s1 + r), (s1, s1 + r), (s2, s2 + r))) + + target_far = s1 + r + r + if self.is_valid(player, target_far): + plays.add(((s1, s1 + r), (s1, s1 + r), (s1, s1 + r), (s1 + r, target_far))) + + for s1 in sources[2]: + if self.is_valid(player, s1 + r): + plays.add(((s1, s1 + r), (s1, s1 + r))) + + for (s2, s3) in combo2: + if s1 != s2 and s1 != s3 and self.is_valid(player, s2 + r) and self.is_valid(player, s3 + r): + plays.add(((s1, s1 + r), (s1, s1 + r), (s2, s2 + r), (s3, s3 + r))) + + for s2 in sources[2]: + if s1 != s2 and self.is_valid(player, s2 + r): + plays.add(((s1, s1 + r), (s1, s1 + r), (s2, s2 + r), (s2, s2 + r))) + + for s2 in sources[1]: + if s1 != s2 and self.is_valid(player, s2 + r): + plays.add(((s1, s1 + r), (s1, s1 + r), (s2, s2 + r))) + + target_far = s1 + r + r + if self.is_valid(player, target_far): + plays.add(((s1, s1 + r), (s1, s1 + r), (s1 + r, target_far), (s2, s2 + r))) + + target_far = s2 + r + r + if self.is_valid(player, target_far): + plays.add(((s1, s1 + r), (s1, s1 + r), (s2, s2 + r), (s2 + r, target_far))) + + target_far = s1 + r + r + if self.is_valid(player, target_far): + plays.add(((s1, s1 + r), (s1, s1 + r), (s1 + r, target_far), (s1 + r, target_far))) + + target_far2 = s1 + r + r + r + if self.is_valid(player, target_far2): + plays.add(((s1, s1 + r), (s1, s1 + r), (s1 + r, target_far), (target_far, target_far2))) + + for s1 in sources[1]: + if self.is_valid(player, s1 + r): + plays.add(((s1, s1 + r),)) + + target_far1 = s1 + r + r + target_far2 = s1 + r + r + r + target_far3 = s1 + r + r + r + r + + if self.is_valid(player, target_far1): + plays.add(((s1, s1 + r), (s1 + r, target_far1))) + + if self.is_valid(player, target_far2): + plays.add(((s1, s1 + r), (s1 + r, target_far1), (target_far1, target_far2))) + + if self.is_valid(player, target_far3): + plays.add(((s1, s1 + r), (s1 + r, target_far1), (target_far1, target_far2), (target_far2, target_far3))) + + for s2 in sources[1]: + if s2 != s1 and self.is_valid(player, s2 + r): + plays.add(((s1, s1 + r), (s1 + r, target_far1), (target_far1, target_far2), (s2, s2 + r))) + + for s2 in sources[1]: + if s1 != s2 and self.is_valid(player, s2 + r): + plays.add(((s1, s1 + r), (s1 + r, target_far1), (s2, s2 + r))) + + s2_target_far1 = s2 + r + r + + if 
self.is_valid(player, s2_target_far1): + plays.add(((s1, s1 + r), (s1 + r, target_far1), (s2, s2 + r), (s2 + r, s2_target_far1))) + + for s2 in sources[1]: + if s1 != s2 and self.is_valid(player, s2 + r): + plays.add(((s1, s1 + r), (s2, s2 + r))) + + for (s2, s3, s4) in combo3: + if s1 != s2 and s1 != s3 and s1 != s4 \ + and self.is_valid(player, s2 + r) and self.is_valid(player, s3 + r) and self.is_valid(player, s4 + r): + plays.add(((s1, s1 + r), (s2, s2 + r), (s3, s3 + r), (s4, s4 + r))) + + for (s2, s3) in combo2: + if s1 != s2 and s1 != s3 and self.is_valid(player, s2 + r) and self.is_valid(player, s3 + r): + plays.add(((s1, s1 + r), (s2, s2 + r), (s3, s3 + r))) + + if self.is_valid(player, target_far1): + plays.add(((s1, s1 + r), (s1 + r, target_far1), (s2, s2 + r), (s3, s3 + r))) + return plays + + # ================================================================================= + # BEAR OFF PLAYS ================================================================== + # ================================================================================= + def move_to_go_home(self, player, r, out_of_home): + # I try to move all the checkers to home board, so I can start to bear off. I can use at most 3 rolls, so I can use the last one for a bear off move + move = () + + home_positions = set(self.players_home_positions[player]) + + if len(out_of_home) == 1: # there is 1 position where there is/are checker(s) out of home + out = out_of_home[0] + + if self.board[out][0] <= 3: + t1 = out + r + t2 = t1 + r + t3 = t2 + r + + move1 = (out, clamp(t1)) + move2 = (t1, clamp(t2)) + move3 = (t2, clamp(t3)) + + if self.is_valid(player, t1): + if t1 in home_positions: # I used 1 die to move 1 checker from the only position out of home board, to home board + if self.board[out][0] == 1: + move = (move1,) + elif self.board[out][0] == 2: + move = (move1, move1) + elif self.board[out][0] == 3: + move = (move1, move1, move1) + else: + if self.is_valid(player, t2) and self.board[out][0] == 1: + if t2 in home_positions: # I used 2 dice to move 1 checker from the only position out of home board, to home board + move = (move1, move2) + else: + if self.is_valid(player, t3): + if t3 in home_positions: # I used 3 dice to move 1 checker from the only position out of home board to home board + move = (move1, move2, move3) + + elif len(out_of_home) == 2: # there are 2 positions where there is/are checker(s) out of home + out1 = out_of_home[0] + out2 = out_of_home[1] + + if (self.board[out1][0] + self.board[out2][0]) <= 3: + + t11 = out1 + r + t21 = t11 + r + + t12 = out2 + r + t22 = t12 + r + + move11 = (out1, clamp(t11)) + move21 = (t11, clamp(t21)) + + move12 = (out2, clamp(t12)) + move22 = (t12, clamp(t22)) + + if self.is_valid(player, t11) and self.is_valid(player, t12): + + if t11 in home_positions and t12 in home_positions: + move = (move11, move12) + + if self.board[out1][0] == 1 and self.board[out2][0] == 2: + move = (move11, move12, move12) + elif self.board[out2][0] == 1 and self.board[out1][0] == 2: + move = (move11, move11, move12) + + elif t11 in home_positions and self.board[out1][0] == 1 and self.board[out2][0] == 1: + if self.is_valid(player, t22): + if t22 in home_positions: + move = (move11, move12, move22) + + elif t12 in home_positions and self.board[out1][0] == 1 and self.board[out2][0] == 1: + if self.is_valid(player, t21): + if t21 in home_positions: + move = (move12, move11, move21) + + elif len(out_of_home) == 3: + out1 = out_of_home[0] + out2 = out_of_home[1] + out3 = out_of_home[2] + + if 
self.is_valid(player, out1 + r) and self.is_valid(player, out2 + r) and self.is_valid(player, out3 + r) \ + and self.board[out1][0] == 1 and self.board[out2][0] == 1 and self.board[out3][0] == 1 \ + and (out1 + r) in home_positions and (out2 + r) in home_positions and (out3 + r) in home_positions: + move = ((out1, out1 + r), (out2, out2 + r), (out3, out3 + r)) + + return move + + def get_bear_off_plays(self, player, roll): + """ + http://usbgf.org/learn-backgammon/backgammon-rules-and-terms/rules-of-backgammon/ + A player bears off a checker by rolling a number that corresponds to the point on which the checker resides, and then removing that checker from the board. + Thus, rolling a 6 permits the player to remove a checker from the six point. If there is no checker on the point indicated by the roll, the player must make a legal move using a checker on a higher-numbered point. + If there are no checkers on higher-numbered points, the player is permitted (and required) to remove a checker from the highest point on which one of his checkers resides. + A player is under no obligation to bear off if he can make an otherwise legal move. + + https://en.wikipedia.org/wiki/Backgammon#Bearing_off + When all of a player's checkers are in that player's home board, that player may start removing them; this is called "bearing off". + A roll of 1 may be used to bear off a checker from the 1-point, a 2 from the 2-point, and so on. + If all of a player's checkers are on points lower than the number showing on a particular die, the player may use that die to bear off one checker from the highest occupied point + """ + plays = set() + reverse = player != WHITE + r1, r2 = sorted(roll, reverse=reverse) # for WHITE player the rolls are negative (i.e (-3,-2)) + + active_positions = set(self.players_positions[player]) + home_positions = set(self.players_home_positions[player]) + + # Point on which the checker resides from which I can bear off + s1 = abs(r1) - 1 if player == WHITE else NUM_POINTS - abs(r1) + s2 = abs(r2) - 1 if player == WHITE else NUM_POINTS - abs(r2) + + if self.can_bear_off(player): + + if s1 in active_positions: + move = (s1, s1 + r1) + plays.add((move,)) + src = [s for s in active_positions if s1 != s or not (s1 == s and self.board[s1][0] == 1)] + plays.update([(move, single) for single in self.get_single_moves(player, roll=r2, player_src=src)]) + + if s2 in active_positions: + move = (s2, clamp(s2 + r2)) + plays.add((move,)) + src = [s for s in active_positions if s2 != s or not (s2 == s and self.board[s2][0] == 1)] + plays.update([(move, single) for single in self.get_single_moves(player, roll=r1, player_src=src)]) + + if s1 in active_positions and s2 in active_positions: + plays.add(((s1, clamp(s1 + r1)), (s2, clamp(s2 + r2)))) + + for s in active_positions: + # Check if I can be in a valid bear off position (s1 or s2) + t1 = s + r1 + t2 = s + r2 + if self.is_valid(player, t1) and t1 == s2: + plays.add(((s, t1), (s2, clamp(s2 + r2)))) + if self.is_valid(player, t2) and t2 == s1: + plays.add(((s, t2), (s1, clamp(s1 + r1)))) + + # checkers that are higher than the maximum roll (i.e. if the roll is (2,3), the highest checkers are the checkers in position 4,5,6,... (if player WHITE is the current player)) + highest_src = {s for s in active_positions if comp2(player, s, s1)} + + # If there are no checkers on higher-numbered points,... + if len(highest_src) == 0: + # ... the player is permitted (and required) to remove a checker from the highest point on which one of his checkers resides. 
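+                # Annotation (not in the original source): comp1(player, s, r1) keeps the
+                # occupied points whose remaining pip distance is no greater than abs(r1);
+                # highest() then returns the farthest such point from the bear-off edge,
+                # i.e. "the highest point on which one of his checkers resides" as in the
+                # rule quoted above.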
+ s1_best = highest(player, [s for s in active_positions if comp1(player, s, r1)]) + + if s1_best is not None: + move = (s1_best, clamp(s1_best + r1)) + plays.add((move,)) + + for s in active_positions: + if self.is_valid(player, s + r2) and not (s == s1_best and self.board[s1_best][0] == 1): + plays.add((move, (s, s + r2))) + + if self.board[s1_best][0] == 1: + active_positions -= {s1_best} + + s2_best = highest(player, [s for s in active_positions if comp1(player, s, r1)]) + + if s2_best is not None and self.can_move_to(player, s2_best + r2): + plays.add((move, (s2_best, clamp(s2_best + r2)))) + + if s2 in active_positions: + plays.add((move, (s2, clamp(s2 + r2)))) + + # move from the highest position with the lower roll, and then check if I can bear off with the other roll + if self.is_valid(player, s1_best + r2) and self.board[s1_best][0] == 1: + move = (s1_best, (s1_best + r2)) + + active_positions.add(s1_best + r2) + + s2_best = highest(player, [s for s in active_positions if comp1(player, s, r1)]) + + if s2_best is not None and self.can_move_to(player, s2_best + r1): + plays.add((move, (s2_best, clamp(s2_best + r1)))) + + elif len(highest_src) == 1: + # Here I have only one position that is higher than than the maximum roll + s = list(highest_src)[0] + + if self.is_valid(player, s + r2) and self.board[s][0] == 1: + + if comp3(player, s1, s + r2): + tmp = set(list(active_positions)[:]) + tmp -= {s} + tmp.add(s + r2) + + s1_best = highest(player, [s for s in tmp if comp1(player, s, r1)]) + move = (s, clamp(s + r2)) + + if s1_best is not None: + + higher_src1 = {s for s in tmp if comp2(player, s, s1_best)} + + if len(higher_src1) == 0: + plays.add((move, (s1_best, clamp(s1_best + r1)))) + + if self.is_valid(player, s + r1) and self.board[s][0] == 1: + + if comp3(player, s2, s + r1): + tmp = set(list(active_positions)[:]) + tmp -= {s} + tmp.add(s + r1) + + s1_best = highest(player, [s for s in tmp if comp1(player, s, r2)]) + move = (s, clamp(s + r1)) + + if s1_best is not None: + + higher_src1 = {s for s in tmp if comp2(player, s, s1_best)} + + if len(higher_src1) == 0: + plays.add((move, (s1_best, clamp(s1_best + r2)))) + + else: + candidate_src = [s for s in active_positions if s not in home_positions] + assert len(candidate_src) == 1, print(f"Should be 1 instead of {candidate_src}") + candidate_src = candidate_src[0] + t1 = candidate_src + r1 + t2 = candidate_src + r2 + + if self.is_valid(player, t1) and t1 in home_positions: + tmp = set(list(active_positions)[:]) + tmp -= {candidate_src} + tmp.add(t1) + highest_src = {s for s in tmp if comp2(player, s, s2)} + + if s2 in tmp: + plays.add(((candidate_src, t1), (s2, clamp(s2 + r2)))) + + elif comp3(player, s2, t1) and len(highest_src) == 0: + s_best = highest(player, [s for s in tmp if comp1(player, s, r2)]) + + if s_best is not None: + plays.add(((candidate_src, t1), (s_best, clamp(s_best + r2)))) + + if self.is_valid(player, t2) and t2 in home_positions: + tmp = set(list(active_positions)[:]) + tmp -= {candidate_src} + tmp.add(t2) + highest_src = {s for s in tmp if comp2(player, s, s1)} + + if s1 in tmp: + plays.add(((candidate_src, t2), (s1, clamp(s1 + r1)))) + + elif comp3(player, s1, t2): + s_best = highest(player, [s for s in tmp if comp1(player, s, r1)]) + + if s_best is not None and ((len(highest_src) == 1 and list(highest_src)[0] == s_best) or len(highest_src) == 0): + plays.add(((candidate_src, t2), (s_best, clamp(s_best + r1)))) + + return plays + + # BEAR OFF PLAYS - DOUBLE 
========================================================= + def get_bear_off_play_double(self, player, roll): + plays = set() + r = roll[0] + old_state = self.save_state() + + home_positions = set(self.players_home_positions[player]) + # position from which I could bear off + s1 = abs(r) - 1 if player == WHITE else NUM_POINTS - abs(r) + move = (s1, clamp(s1 + r)) + + out_of_home = [s for s in set(self.players_positions[player]) if s not in home_positions] + # Actions needed to move the checkers (if any) that are out of home, to home board + out_of_home_move = self.move_to_go_home(player, r, out_of_home) + if len(out_of_home_move) > 0: + # I execute the move needed to move all the checkers that are not in home board to home board (in order to update the board - I saved the state) + self.execute_play(player, out_of_home_move) + self.players_positions = self.get_players_positions() + assert len(list(out_of_home_move)) <= 3, print(f"Should be <= 3 instead of {out_of_home_move}") + # number of dice used to move all the checkers that are not in home board to home board (should be <= 3) + dice_used = len(out_of_home_move) + + if self.can_bear_off(player): + # All the checkers are in the home board + active_positions = set(self.players_positions[player]) + # checkers that are higher than position from which I could bear off + higher_src = {s for s in active_positions if comp2(player, s, s1)} + + single_moves = self.get_single_moves(player, roll=r) + double_moves = self.get_double_moves(player, roll=r, single_moves=single_moves) + triple_moves = self.get_triple_moves(player, roll=r, double_moves=double_moves) + + # I have a checker on the exact position from which I can bear off + if s1 in active_positions: + if dice_used == 0: + if self.board[s1][0] >= 1: + plays.add((move,)) + plays.update([(move, single) for single in single_moves]) + plays.update([(move, double[0], double[1]) for double in double_moves]) + plays.update([(move, triple[0], triple[1], triple[2]) for triple in triple_moves]) + + if self.board[s1][0] >= 2: + plays.add((move, move)) + plays.update([(move, move, single) for single in single_moves]) + plays.update([(move, move, double[0], double[1]) for double in double_moves]) + + if self.board[s1][0] >= 3: + plays.add((move, move, move)) + plays.update([(move, move, move, single) for single in single_moves]) + + if self.board[s1][0] >= 4: + plays.add((move, move, move, move)) + elif dice_used == 1: + if self.board[s1][0] >= 1: + plays.add(out_of_home_move + (move, )) + plays.update([(out_of_home_move + (move, ) + (single, )) for single in single_moves]) + plays.update([(out_of_home_move + (move, ) + (double[0], ) + (double[1], )) for double in double_moves]) + + if self.board[s1][0] >= 2: + plays.add(out_of_home_move + (move, ) + (move, )) + plays.update([(out_of_home_move + (move, ) + (move, ) + (single, )) for single in single_moves]) + + if self.board[s1][0] >= 3: + plays.add(out_of_home_move + (move, ) + (move, ) + (move, )) + elif dice_used == 2: + if self.board[s1][0] >= 1: + plays.add(out_of_home_move + (move, )) + plays.update([(out_of_home_move + (move, ) + (single, )) for single in single_moves]) + + if self.board[s1][0] >= 2: + plays.add(out_of_home_move + (move, ) + (move, )) + elif dice_used == 3: + if self.board[s1][0] >= 1: + plays.add(out_of_home_move + (move, )) + + for s in active_positions: + if dice_used == 0: + t1 = s + r + if self.is_valid(player, t1): + if t1 == s1: + plays.add((move, (s, t1))) + if self.board[s1][0] >= 1: + plays.add((move, move, (s, t1))) + 
plays.update([(move, move, (s, t1), single) for single in single_moves if single != (s, t1)]) + + if self.board[s1][0] >= 2: + plays.add((move, move, move, (s, t1))) + else: + t2 = t1 + r + if self.is_valid(player, t2): + if t2 == s1: + plays.add((move, (s, t1), (t1, t2))) + if self.board[s1][0] >= 1: + plays.add((move, move, (s, t1), (t1, t2))) + elif dice_used == 1: + t1 = s + r + if self.is_valid(player, t1): + if t1 == s1: + plays.add(out_of_home_move + (move,) + ((s, t1), )) + if self.board[s1][0] >= 1: + plays.add(out_of_home_move + (move, ) + (move, ) + ((s, t1), )) + else: + t2 = t1 + r + if self.is_valid(player, t2): + if t2 == s1: + plays.add(out_of_home_move + (move, ) + ((s, t1), ) + ((t1, t2), )) + elif dice_used == 2: + t1 = s + r + if self.is_valid(player, t1): + if t1 == s1: + plays.add(out_of_home_move + (move,) + ((s, t1),)) + + # I don't have a checker on the exact position from which I can bear off, but I try to move higher checkers to that position + for s in higher_src: + if dice_used == 0: + t1 = s + r + if self.is_valid(player, t1): + if t1 == s1: + if self.board[s][0] >= 1: + plays.add(((s, t1), move)) + if self.board[s][0] == 1: + tmp = active_positions - {s} # remove the source from active position so I can generate consistent single moves + tmp_single_moves = self.get_single_moves(player, roll=r, player_src=tmp) + tmp_double_moves = self.get_double_moves(player, roll=r, single_moves=tmp_single_moves) + plays.update([((s, t1), move, single) for single in tmp_single_moves]) + plays.update([((s, t1), move, double[0], double[1]) for double in tmp_double_moves]) + else: + plays.update([((s, t1), move, single) for single in single_moves]) + + if self.board[s][0] == 2: + tmp = active_positions - {s} # remove the source from active position so I can generate consistent single moves + tmp_single_moves = self.get_single_moves(player, roll=r, player_src=tmp) + tmp_double_moves = self.get_double_moves(player, roll=r, single_moves=tmp_single_moves) + plays.update([((s, t1), move, double[0], double[1]) for double in tmp_double_moves]) + else: + plays.update([((s, t1), move, double[0], double[1]) for double in double_moves]) + + if self.board[s][0] >= 2: + plays.add(((s, t1), (s, t1), move, move)) + if self.board[s][0] >= 3: + plays.add(((s, t1), (s, t1), (s, t1), move)) + else: + t2 = s + r + r + if self.is_valid(player, t2): + if t2 == s1: + if self.board[s][0] >= 1: + plays.add(((s, t1), (t1, t2), move)) + + if self.board[s][0] == 1: + tmp = active_positions - {s} + tmp_single_moves = self.get_single_moves(player, roll=r, player_src=tmp) + plays.update([((s, t1), (t1, t2), move, single) for single in tmp_single_moves]) + else: + plays.update([((s, t1), (t1, t2), move, single) for single in single_moves]) + + if self.board[s][0] >= 2: + plays.add(((s, t1), (s, t1), (t1, t2), move)) + else: + t3 = s + r + r + r + if self.is_valid(player, t3): + if t3 == s1: + if self.board[s][0] == 1: + plays.add(((s, t1), (t1, t2), (t2, t3), move)) + elif dice_used == 1: + t1 = s + r + if self.is_valid(player, t1): + if t1 == s1: + if self.board[s][0] >= 1: + plays.add(out_of_home_move + ((s, t1),) + (move,)) + + if self.board[s][0] == 1: + tmp = active_positions - {s} + tmp_single_moves = self.get_single_moves(player, roll=r, player_src=tmp) + plays.update([(out_of_home_move + ((s, t1),) + (move, ) + (single, )) for single in tmp_single_moves]) + else: + plays.update([(out_of_home_move + ((s, t1),) + (move, ) + (single, )) for single in single_moves]) + + if self.board[s][0] >= 2: + 
plays.add(out_of_home_move + ((s, t1),) + ((s, t1),) + (move,)) + else: + t2 = s + r + r + if self.is_valid(player, t2): + if t2 == s1: + if self.board[s][0] >= 1: + plays.add(out_of_home_move + ((s, t1),) + ((t1, t2),) + (move,)) + elif dice_used == 2: + t1 = s + r + if self.is_valid(player, t1): + if t1 == s1: + if self.board[s][0] >= 1: + plays.add(out_of_home_move + ((s, t1),) + (move,)) + if len(higher_src) == 0: + s1_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + active_positions -= {s1_highest} + + s2_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + active_positions -= {s2_highest} + + s3_highest = highest(player, [x for x in active_positions if comp1(player, x, r)]) + active_positions -= {s3_highest} + + s4_highest = highest(player, [x for x in active_positions if comp1(player, x, r)]) + + # I don't have higher checkers than the position from which I can bear off + if s1_highest is not None: + move1 = (s1_highest, clamp(s1_highest + r)) + + if dice_used == 0: + if self.board[s1_highest][0] == 1: + plays.add((move1,)) + + if s2_highest is not None: + move2 = (s2_highest, clamp(s2_highest + r)) + + if self.board[s2_highest][0] == 1: + plays.add((move1, move2)) + + if s3_highest is not None: + move3 = (s3_highest, clamp(s3_highest + r)) + + if self.board[s3_highest][0] == 1: + plays.add((move1, move2, move3)) + + if s4_highest is not None: + move4 = (s4_highest, clamp(s4_highest + r)) + plays.add((move1, move2, move3, move4)) + + elif self.board[s3_highest][0] >= 2: + plays.add((move1, move2, move3, move3)) + + elif self.board[s2_highest][0] == 2: + plays.add((move1, move2, move2)) + + if s3_highest is not None: + move3 = (s3_highest, clamp(s3_highest + r)) + plays.add((move1, move2, move2, move3)) + + elif self.board[s2_highest][0] >= 3: + plays.add((move1, move2, move2, move2)) + + elif self.board[s1_highest][0] == 2: + plays.add((move1, move1)) + + if s2_highest is not None: + move2 = (s2_highest, clamp(s2_highest + r)) + + if self.board[s2_highest][0] == 1: + plays.add((move1, move1, move2)) + + if s3_highest is not None: + move3 = (s3_highest, clamp(s3_highest + r)) + plays.add((move1, move1, move2, move3)) + + elif self.board[s2_highest][0] >= 2: + plays.add((move1, move1, move2, move2)) + + elif self.board[s1_highest][0] == 3: + plays.add((move1, move1, move1)) + + if s2_highest is not None: + move2 = (s2_highest, clamp(s2_highest + r)) + plays.add((move1, move1, move1, move2)) + + elif self.board[s1_highest][0] >= 4: + plays.add((move1, move1, move1, move1)) + + elif dice_used == 1: + if self.board[s1_highest][0] == 1: + plays.add(out_of_home_move + (move1,)) + + if s2_highest is not None: + move2 = (s2_highest, clamp(s2_highest + r)) + + if self.board[s2_highest][0] == 1: + plays.add(out_of_home_move + (move1,) + (move2, )) + + if s3_highest is not None: + move3 = (s3_highest, clamp(s3_highest + r)) + + if self.board[s3_highest][0] >= 1: + plays.add(out_of_home_move + (move1, ) + (move2, ) + (move3, )) + + elif self.board[s2_highest][0] >= 2: + plays.add(out_of_home_move + (move1, ) + (move2, ) + (move2, )) + + elif self.board[s1_highest][0] == 2: + plays.add(out_of_home_move + (move1,) + (move1, )) + + if s2_highest is not None: + move2 = (s2_highest, clamp(s2_highest + r)) + + if self.board[s2_highest][0] >= 1: + plays.add(out_of_home_move + (move1,) + (move1,) + (move2, )) + + elif self.board[s1_highest][0] >= 3: + plays.add(out_of_home_move + (move1,) + (move1,) + (move1, )) + + elif dice_used == 2: + if 
self.board[s1_highest][0] == 1: + plays.add(out_of_home_move + (move1,)) + + if s2_highest is not None: + move2 = (s2_highest, clamp(s2_highest + r)) + + if self.board[s2_highest][0] >= 1: + plays.add(out_of_home_move + (move1,) + (move2,)) + + elif self.board[s1_highest][0] >= 2: + plays.add(out_of_home_move + (move1,) + (move1,)) + + elif dice_used == 3: + plays.add(out_of_home_move + (move1,)) + + if len(higher_src) == 1: + if dice_used == 0: + + high_src = list(higher_src)[0] + active_positions -= {high_src} + + t1 = high_src + r + t2 = t1 + r + t3 = t2 + r + + move1 = (high_src, clamp(t1)) + move2 = (t1, clamp(t2)) + move3 = (t2, clamp(t3)) + + if self.is_valid(player, t1): + + if self.board[high_src][0] == 1: + + if comp3(player, s1, t1): + active_positions.add(t1) + s1_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_highest is not None: + if self.board[s1_highest][1] != player: + c1 = self.board[high_src][0] + else: + c1 = (self.board[s1_highest][0] + 1) if t1 == s1_highest else self.board[s1_highest][0] + + active_positions -= {s1_highest} + move_s1 = (s1_highest, clamp(s1_highest + r)) + + if c1 == 1: + s2_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + plays.add((move1, move_s1)) + + if s2_highest is not None: + if self.board[s2_highest][1] != player: + c2 = 1 + else: + c2 = (self.board[s2_highest][0] + 1) if t1 == s2_highest else self.board[s2_highest][0] + active_positions -= {s2_highest} + move_s2 = (s2_highest, clamp(s2_highest + r)) + + if c2 == 1: + s3_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + plays.add((move1, move_s1, move_s2)) + + if s3_highest is not None: + move_s3 = (s3_highest, clamp(s3_highest + r)) + plays.add((move1, move_s1, move_s2, move_s3)) + + elif c2 >= 2: + plays.add((move1, move_s1, move_s2, move_s2)) + + elif c1 == 2: + plays.add((move1, move_s1, move_s1)) + s2_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s2_highest is not None: + move_s2 = (s2_highest, clamp(s2_highest + r)) + plays.add((move1, move_s1, move_s1, move_s2)) + + elif c1 >= 3: + plays.add((move1, move_s1, move_s1, move_s1)) + + elif self.is_valid(player, t2): + + if comp3(player, s1, t2): + active_positions.add(t2) + s1_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_highest is not None: + if self.board[s1_highest][1] != player: + c1 = self.board[high_src][0] + else: + c1 = (self.board[s1_highest][0] + 1) if t2 == s1_highest else self.board[s1_highest][0] + active_positions -= {s1_highest} + move_s1 = (s1_highest, clamp(s1_highest + r)) + + if c1 == 1: + s2_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + plays.add((move1, move2, move_s1)) + + if s2_highest is not None: + move_s2 = (s2_highest, clamp(s2_highest + r)) + plays.add((move1, move2, move_s1, move_s2)) + + elif c1 >= 2: + plays.add((move1, move2, move_s1, move_s1)) + + elif self.is_valid(player, t3): + + if comp3(player, s1, t3): + active_positions.add(t3) + s1_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_highest is not None: + move_s1 = (s1_highest, clamp(s1_highest + r)) + plays.add((move1, move2, move3, move_s1)) + + elif self.board[high_src][0] == 2: + + if comp3(player, s1, t1): + active_positions.add(t1) + s1_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_highest is not None: + if self.board[s1_highest][1] != 
player: + c1 = self.board[high_src][0] + else: + c1 = (self.board[s1_highest][0] + 1) if t1 == s1_highest else self.board[s1_highest][0] + move_s1 = (s1_highest, clamp(s1_highest + r)) + + if c1 == 1: + active_positions -= {s1_highest} + s2_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + plays.add((move1, move1, move_s1)) + + if s2_highest is not None: + move_s2 = (s2_highest, clamp(s2_highest + r)) + plays.add((move1, move1, move_s1, move_s2)) + + elif c1 >= 2: + plays.add((move1, move1, move_s1, move_s1)) + + elif self.board[high_src][0] == 3: + + if comp3(player, s1, t1): + active_positions.add(t1) + s1_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_highest is not None: + move_s1 = (s1_highest, clamp(s1_highest + r)) + plays.add((move1, move1, move1, move_s1)) + + if dice_used == 1: + high_src = list(higher_src)[0] + active_positions -= {high_src} + + t1 = high_src + r + t2 = t1 + r + + move1 = (high_src, clamp(t1)) + move2 = (t1, clamp(t2)) + + if self.is_valid(player, t1): + + if self.board[high_src][0] == 1: + + if comp3(player, s1, t1): + active_positions.add(t1) + s1_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_highest is not None: + if self.board[s1_highest][1] != player: + c1 = self.board[high_src][0] + else: + c1 = (self.board[s1_highest][0] + 1) if t1 == s1_highest else self.board[s1_highest][0] + + active_positions -= {s1_highest} + move_s1 = (s1_highest, clamp(s1_highest + r)) + + if c1 == 1: + s2_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + plays.add(out_of_home_move + (move1,) + (move_s1, )) + + if s2_highest is not None: + if self.board[s2_highest][1] != player: + c2 = 1 + else: + c2 = (self.board[s2_highest][0] + 1) if t1 == s2_highest else self.board[s2_highest][0] + active_positions -= {s2_highest} + move_s2 = (s2_highest, clamp(s2_highest + r)) + + if c2 >= 1: + plays.add(out_of_home_move + (move1,) + (move_s1,) + (move_s2, )) + + elif c1 >= 2: + plays.add(out_of_home_move + (move1, ) + (move_s1, ) + (move_s1, )) + + elif self.is_valid(player, t2): + + if comp3(player, s1, t2): + active_positions.add(t2) + s1_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_highest is not None: + if self.board[s1_highest][1] != player: + c1 = self.board[high_src][0] + else: + c1 = (self.board[s1_highest][0] + 1) if t2 == s1_highest else self.board[s1_highest][0] + active_positions -= {s1_highest} + move_s1 = (s1_highest, clamp(s1_highest + r)) + + if c1 >= 1: + plays.add(out_of_home_move + (move1, ) + (move2,) + (move_s1, )) + + elif self.board[high_src][0] == 2: + if comp3(player, s1, t1): + active_positions.add(t1) + s1_highest = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_highest is not None: + if self.board[s1_highest][1] != player: + c1 = self.board[high_src][0] + else: + c1 = (self.board[s1_highest][0] + 1) if t1 == s1_highest else self.board[s1_highest][0] + move_s1 = (s1_highest, clamp(s1_highest + r)) + + if c1 >= 1: + active_positions -= {s1_highest} + plays.add(out_of_home_move + (move1,) + (move1,) + (move_s1, )) + + if dice_used == 2: + high_src = list(higher_src)[0] + active_positions -= {high_src} + + t1 = high_src + r + t2 = t1 + r + + move1 = (high_src, clamp(t1)) + + if self.is_valid(player, t1): + + if self.board[high_src][0] == 1: + + if comp3(player, s1, t1): + active_positions.add(t1) + s1_highest = highest(player, [s for s in 
active_positions if comp1(player, s, r)]) + + if s1_highest is not None: + if self.board[s1_highest][1] != player: + c1 = self.board[high_src][0] + else: + c1 = (self.board[s1_highest][0] + 1) if t1 == s1_highest else self.board[s1_highest][0] + + active_positions -= {s1_highest} + move_s1 = (s1_highest, clamp(s1_highest + r)) + + if c1 >= 1: + plays.add(out_of_home_move + (move1,) + (move_s1,)) + if len(higher_src) == 2: + if dice_used == 0: + + high_src1 = list(higher_src)[0] + high_src2 = list(higher_src)[1] + + if (self.board[high_src1][0] + self.board[high_src2][0]) <= 3: + + active_positions -= {high_src1} + active_positions -= {high_src2} + + t11 = high_src1 + r + t21 = t11 + r + + t12 = high_src2 + r + t22 = t12 + r + + move11 = (high_src1, clamp(t11)) + move21 = (t11, clamp(t21)) + + move12 = (high_src2, clamp(t12)) + move22 = (t12, clamp(t22)) + + if self.is_valid(player, t11) and self.is_valid(player, t12): + + if comp3(player, s1, t11) and comp3(player, s1, t12): + active_positions.add(t11) + active_positions.add(t12) + s1_best = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_best is not None: + if self.board[s1_best][1] != player: + c1 = 2 if s1_best == t11 and s1_best == t12 else 1 + else: + c1 = (self.board[s1_best][0] + 1) if t11 == s1_best else self.board[s1_best][0] + c1 = (c1 + 1) if t12 == s1_best else c1 + + active_positions -= {s1_best} + move_s1 = (s1_best, clamp(s1_best + r)) + + if self.board[high_src1][0] == 1 and self.board[high_src2][0] == 1: + + if c1 == 1: + s2_best = highest(player, [s for s in active_positions if comp1(player, s, r)]) + plays.add((move11, move12, move_s1)) + + if s2_best is not None: + move_s2 = (s2_best, clamp(s2_best + r)) + plays.add((move11, move12, move_s1, move_s2)) + + elif c1 >= 2: + plays.add((move11, move12, move_s1, move_s1)) + + elif self.board[high_src1][0] == 2 and self.board[high_src2][0] == 1: + plays.add((move11, move11, move12, move_s1)) + + elif self.board[high_src1][0] == 1 and self.board[high_src2][0] == 2: + plays.add((move12, move12, move11, move_s1)) + + elif comp3(player, s1, t11) and self.board[high_src2][0] < 2: + + if self.is_valid(player, t12) and comp3(player, s1, t22) and self.board[high_src1][0] < 2: + + if self.is_valid(player, t22): + active_positions.add(t11) + active_positions.add(t22) + s1_best = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_best is not None: + move_s1 = (s1_best, clamp(s1_best + r)) + plays.add((move11, move12, move22, move_s1)) + + elif comp3(player, s1, t12) and self.board[high_src1][0] < 2: + + if self.is_valid(player, t11) and comp3(player, s1, t21) and self.board[high_src2][0] < 2: + + if self.is_valid(player, t21): + active_positions.add(t12) + active_positions.add(t21) + s1_best = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_best is not None: + move_s1 = (s1_best, clamp(s1_best + r)) + plays.add((move12, move11, move21, move_s1)) + + if dice_used == 1: + + high_src1 = list(higher_src)[0] + high_src2 = list(higher_src)[1] + + if (self.board[high_src1][0] + self.board[high_src2][0]) <= 2: + + active_positions -= {high_src1} + active_positions -= {high_src2} + + t11 = high_src1 + r + + t12 = high_src2 + r + + move11 = (high_src1, clamp(t11)) + move12 = (high_src2, clamp(t12)) + + if self.is_valid(player, t11) and self.is_valid(player, t12): + + if comp3(player, s1, t11) and comp3(player, s1, t12): + active_positions.add(t11) + active_positions.add(t12) + s1_best = highest(player, [s 
for s in active_positions if comp1(player, s, r)]) + + if s1_best is not None: + if self.board[s1_best][1] != player: + c1 = 2 if s1_best == t11 and s1_best == t12 else 1 + else: + c1 = (self.board[s1_best][0] + 1) if t11 == s1_best else self.board[s1_best][0] + c1 = (c1 + 1) if t12 == s1_best else c1 + + active_positions -= {s1_best} + move_s1 = (s1_best, clamp(s1_best + r)) + + if self.board[high_src1][0] == 1 and self.board[high_src2][0] == 1: + + if c1 >= 1: + plays.add(out_of_home_move + (move11,) + (move12,) + (move_s1, )) + if len(higher_src) == 3: + if dice_used == 0: + high_src1 = list(higher_src)[0] + high_src2 = list(higher_src)[1] + high_src3 = list(higher_src)[2] + + t1 = high_src1 + r + t2 = high_src2 + r + t3 = high_src3 + r + + move1 = (high_src1, clamp(t1)) + move2 = (high_src2, clamp(t2)) + move3 = (high_src3, clamp(t3)) + + if (self.board[high_src1][0] + self.board[high_src2][0] + self.board[high_src3][0]) <= 3: + + if self.is_valid(player, t1) and self.is_valid(player, t2) and self.is_valid(player, t3): + + if comp3(player, s1, t1) and comp3(player, s1, t2) and comp3(player, s1, t3): + active_positions.add(t1) + active_positions.add(t2) + active_positions.add(t3) + s1_best = highest(player, [s for s in active_positions if comp1(player, s, r)]) + + if s1_best is not None: + move_s1 = (s1_best, clamp(s1_best + r)) + plays.add((move1, move2, move3, move_s1)) + + self.restore_state(old_state) + return plays + + # ================================================================================= + # BAR PLAYS ======================================================================= + # ================================================================================= + def get_single_moves(self, player, roll, other_move_target=None, player_src=None): + if player_src is not None: + moves = {(s, s + roll) for s in player_src if self.is_valid(player, s + roll)} + else: + moves = {(s, s + roll) for s in list(self.players_positions[player]) if self.is_valid(player, s + roll)} + + if other_move_target is not None and self.is_valid(player, other_move_target + roll): + moves.add((other_move_target, other_move_target + roll)) + + return moves + + def get_double_moves(self, player, roll, single_moves): + moves = set() + if len(single_moves) > 0: + moves = {((s, t), (t, t + roll)) for (s, t) in single_moves if self.is_valid(player, t + roll)} + moves.update(list(itertools.combinations(single_moves, 2))) + moves.update([((s, t), (s, t)) for (s, t) in single_moves if self.board[s][0] >= 2]) + return moves + + def get_triple_moves(self, player, roll, double_moves): + moves = set() + reverse = player == WHITE + if len(double_moves) > 0: + for (m1, m2) in double_moves: + s1, t1 = m1 + s2, t2 = m2 + + if self.is_valid(player, t1 + roll) and ((t1 != s2) or (self.board[t1][0] > 0 and self.board[t1][1] == player)): + moves.add((m1, (t1, t1 + roll), m2)) + + if self.is_valid(player, t2 + roll) and (t2 != s1): + moves.add((m1, m2, (t2, t2 + roll))) + + for s in self.players_positions[player]: + t = s + roll + if self.is_valid(player, t): + if (self.board[s][0] > 2 and ((s, t) == m1 or (s, t) == m2)) or ((s, t) != m1 and (s, t) != m2): + moves.add((m1, m2, (s, t))) + + if (m1 != m2) and self.board[s][0] > 1 and ((s, t) == m1 or (s, t) == m2): + moves.add((m1, m2, (s, t))) + + moves = {tuple(sorted(play, reverse=reverse)) for play in moves} + return moves + + def get_bar_plays(self, player, roll): + plays = set() + r1, r2 = roll + + t1 = NUM_POINTS - abs(r1) if player == WHITE else abs(r1) - 1 + t2 = 
NUM_POINTS - abs(r2) if player == WHITE else abs(r2) - 1 + + if self.can_move_to(player, t1) and self.can_move_to(player, t2): + # "If you can play one number but not both, then you must play the higher one." + t = min(t1, t2) if player == WHITE else max(t1, t2) + plays.add(((BAR, t),)) + + else: + if self.can_move_to(player, t1): + plays.add(((BAR, t1),)) + + if self.can_move_to(player, t2): + plays.add(((BAR, t2),)) + + if self.bar[player] >= 2: + if self.can_move_to(player, t1) and self.can_move_to(player, t2): + plays.add(((BAR, t1), (BAR, t2))) + + else: + if self.can_move_to(player, t1): + plays.update([((BAR, t1), move) for move in self.get_single_moves(player, r2, other_move_target=t1)]) + + if self.can_move_to(player, t2): + plays.update([((BAR, t2), move) for move in self.get_single_moves(player, r1, other_move_target=t2)]) + + return plays + + # BAR PLAYS - DOUBLE ============================================================== + def get_bar_plays_double(self, player, roll): + plays = set() + + r = roll[0] + t = NUM_POINTS - abs(r) if player == WHITE else abs(r) - 1 + + if self.is_valid(player, t): + old_state = self.save_state() + move = (BAR, t) + + if self.board[t][1] == player: + self.board[t] = (self.board[t][0] + self.bar[player], player) + else: + self.board[t] = (self.bar[player], player) + + self.players_positions = self.get_players_positions() + + if self.bar[player] == 1: + single_moves = self.get_single_moves(player, roll=r, other_move_target=t) + double_moves = self.get_double_moves(player, roll=r, single_moves=single_moves) + triple_moves = self.get_triple_moves(player, roll=r, double_moves=double_moves) + + plays.add((move,)) + plays.update([(move, single) for single in single_moves]) + plays.update([(move, double[0], double[1]) for double in double_moves]) + plays.update([(move, triple[0], triple[1], triple[2]) for triple in triple_moves]) + + elif self.bar[player] == 2: + single_moves = self.get_single_moves(player, roll=r, other_move_target=t) + double_moves = self.get_double_moves(player, roll=r, single_moves=single_moves) + + plays.add((move, move)) + plays.update([(move, move, single) for single in single_moves]) + plays.update([(move, move, double[0], double[1]) for double in double_moves]) + + elif self.bar[player] == 3: + single_moves = self.get_single_moves(player, roll=r, other_move_target=t) + + plays.add((move, move, move)) + plays.update([(move, move, move, single) for single in single_moves]) + + elif self.bar[player] >= 4: + plays.add((move, move, move, move)) + + self.restore_state(old_state) + + return plays + + def render(self): + points = [p[0] for p in self.board] + bottom_board = points[:12][::-1] + top_board = points[12:] + + colors = [TOKEN[WHITE] if p[1] == WHITE else TOKEN[BLACK] for p in self.board] + bottom_checkers_color = colors[:12][::-1] + top_checkers_color = colors[12:] + + assert len(bottom_board) + len(top_board) == 24 + assert len(bottom_checkers_color) + len(top_checkers_color) == 24 + + print("| 12 | 13 | 14 | 15 | 16 | 17 | BAR | 18 | 19 | 20 | 21 | 22 | 23 | OFF |") + print(f"|--------Outer Board----------| |-------P={TOKEN[BLACK]} Home Board--------| |") + self.print_half_board(top_board, top_checkers_color, WHITE, reversed_=1) + print("|-----------------------------| |-----------------------------| |") + self.print_half_board(bottom_board, bottom_checkers_color, BLACK, reversed_=-1) + print(f"|--------Outer Board----------| |-------P={TOKEN[WHITE]} Home Board--------| |") + print("| 11 | 10 | 9 | 8 | 7 | 6 | BAR | 5 | 4 | 3 
| 2 | 1 | 0 | OFF |\n") + + def print_half_board(self, half_board, checkers_color, player, reversed_=-1): + token = TOKEN[player] + max_length = max(max(half_board), max([self.bar[player], self.off[player]])) + for i in range(max_length)[::reversed_]: + row = [str(checkers_color[k]) if half_board[k] > i else " " for k in range(len(half_board))] + bar = [f"{token} " if self.bar[player] > i else " "] + off = [f"{token} " if self.off[player] > i else " "] + row = row[:6] + bar + row[6:] + off + print("| " + " | ".join(row) + " |") + + def get_players_positions(self): + player_positions = [[], []] + for key, (checkers, player) in enumerate(self.board): + if player is not None and key not in player_positions: + player_positions[player].append(key) + return player_positions + + def get_valid_plays(self, player, roll): + valid_plays = set() + top_valid_plays = set() + + normal_plays = set() + bear_off_plays = set() + bar_plays = set() + reverse = player == WHITE + + roll = (roll[0], roll[0], roll[0], roll[0]) if roll[0] == roll[1] else roll + + if self.bar[player]: + bar_plays = self.get_bar_plays(player, roll) if len(roll) <= 2 else self.get_bar_plays_double(player, roll) + else: + if self.could_bear_off(player, roll): + bear_off_plays = self.get_bear_off_plays(player, roll) if len(roll) <= 2 else self.get_bear_off_play_double(player, roll) + normal_plays = self.get_normal_plays(player, roll) if len(roll) <= 2 else self.get_normal_plays_double(player, roll) + + valid_plays.update(normal_plays) + valid_plays.update(bear_off_plays) + + valid_plays = {tuple(sorted(play, reverse=reverse)) for play in valid_plays} + + valid_plays.update(bar_plays) + + if len(valid_plays) > 0: + max_length_move = len(max(valid_plays, key=len)) # select the plays that use the most number of dice + top_valid_plays = {play for play in valid_plays if len(play) == max_length_move} + + return top_valid_plays + + def execute_play(self, current_player, action): + if action: + tmp = self.board[:] + for move in action: + if move: + src, target = move + if 0 <= target < NUM_POINTS: + checkers_on_target, player_on_target = self.board[target] + + if current_player != player_on_target and player_on_target is not None: + self.bar[player_on_target] += 1 + checkers_on_target = 0 + + if src == BAR: + self.bar[current_player] -= 1 + self.board[target] = (checkers_on_target + 1, current_player) + else: + checkers_on_src, player_on_src = self.board[src] + checkers_on_src -= 1 + + if checkers_on_src < 1: + player_on_src = None + + self.board[src] = (checkers_on_src, player_on_src) + self.board[target] = (checkers_on_target + 1, current_player) + else: # OFF BOARD MOVE + checkers_on_src, player_on_src = self.board[src] + checkers_on_src -= 1 + + if checkers_on_src < 1: + player_on_src = None + + self.board[src] = (checkers_on_src, player_on_src) + self.off[current_player] += 1 + + self.players_positions = self.get_players_positions() + + assert_board(action=action, board=self.board, bar=self.bar, off=self.off, game=self, old_board=tmp) + + def save_state(self): + return BackgammonState(board=self.board[:], bar=self.bar[:], off=self.off[:], players_positions=self.players_positions[:]) + + def restore_state(self, old_state): + self.board, self.bar, self.off, self.players_positions = old_state.board[:], old_state.bar[:], old_state.off[:], old_state.players_positions[:] + self.state = BackgammonState(board=self.board, bar=self.bar, off=self.off, players_positions=self.players_positions) + + def get_winner(self): + if self.off[WHITE] == 15: + 
return WHITE + elif self.off[BLACK] == 15: + return BLACK + return None + + def get_opponent(self, player): + return BLACK if player == WHITE else WHITE + + def get_board_features(self, current_player): + """ + - encode each point (24) with 4 units => 4 * 24 = 96 + - for each player => 96 * 2 = 192 + - 2 units indicating who is the current player + - 2 units for white and black bar checkers + - 2 units for white and block off checkers + - tot = 192 + 2 + 2 + 2 = 198 + """ + features_vector = [] + for p in [WHITE, BLACK]: + for point in range(0, NUM_POINTS): + checkers, player = self.board[point] + if player == p and checkers > 0: + if checkers == 1: + features_vector += [1.0, 0.0, 0.0, 0.0] + elif checkers == 2: + features_vector += [1.0, 1.0, 0.0, 0.0] + elif checkers >= 3: + features_vector += [1.0, 1.0, 1.0, (checkers - 3.0) / 2.0] + else: + features_vector += [0.0, 0.0, 0.0, 0.0] + + features_vector += [self.bar[p] / 2.0, self.off[p] / 15.0] + + if current_player == WHITE: + # features_vector += [0.0, 1.0] + features_vector += [1.0, 0.0] + else: + # features_vector += [1.0, 0.0] + features_vector += [0.0, 1.0] + assert len(features_vector) == 198, print(f"Should be 198 instead of {len(features_vector)}") + return features_vector + + +def assert_board(action, board, bar, off, game=None, old_board=None): + sum_white = 0 + sum_black = 0 + for (checkers, player) in board: + if player == WHITE: + sum_white += checkers + elif player == BLACK: + sum_black += checkers + + assert 0 <= sum_white <= 15 and 0 <= sum_black <= 15, print_assert(game, sum_white, sum_black, bar, off, action, old_board) + assert bar[WHITE] < 16 and bar[BLACK] < 16, print_assert(game, sum_white, sum_black, bar, off, action, old_board) + assert off[WHITE] < 16 and off[BLACK] < 16, print_assert(game, sum_white, sum_black, bar, off, action, old_board) + assert sum_white + bar[WHITE] + off[WHITE] == 15, print_assert(game, sum_white, sum_black, bar, off, action, old_board) + assert sum_black + bar[BLACK] + off[BLACK] == 15, print_assert(game, sum_white, sum_black, bar, off, action, old_board) + + +def print_assert(game, sum_white, sum_black, bar, off, action, old_board): + if game is not None: + game.render() + + if old_board is not None: + game.board = old_board + game.render() + print(f"sum_white={sum_white} | sum_black={sum_black} | bar={bar} | off={off} | action={action}") diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/backgammon_env.py b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/backgammon_env.py new file mode 100644 index 0000000000000000000000000000000000000000..8b68a6418989ac4f5933b62bb2580887b0e929c2 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/backgammon_env.py @@ -0,0 +1,163 @@ +import numpy as np +from gym import spaces + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers + +from . 
import bg_utils +from .backgammon import BLACK, COLORS, WHITE +from .backgammon import Backgammon as Game + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.CaptureStdoutWrapper(env) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(AECEnv): + metadata = { + "render.modes": ["human"], + "name": "backgammon_v3", + "is_parallelizable": False, + "video.frames_per_second": 2, + } + + def __init__(self): + super().__init__() + self.game = Game() + self.seed() + + self.agents = [f"player_{i}" for i in range(2)] + self.possible_agents = self.agents[:] + self._agent_order = list(self.agents) + self._agent_selector = agent_selector(self._agent_order) + self.infos = {i: {} for i in self.agents} + + self.action_spaces = {name: spaces.Discrete(26 * 26 * 2 + 1) for name in self.agents} + + low = np.zeros((198,)) + high = np.ones((198,)) + for i in range(3, 97, 4): + high[i] = 6.0 + high[96] = 7.5 + for i in range(101, 195, 4): + high[i] = 6.0 + high[194] = 7.5 + self.observation_spaces = { + i: spaces.Dict({'observation': spaces.Box(low=np.float32(low), high=np.float32(high), dtype=np.float32), + 'action_mask': spaces.Box(low=0, high=1, shape=(1353,), dtype=np.int8)}) + for i in self.agents} + self.double_roll = 0 + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def seed(self, seed=None): + self.np_random = np.random.RandomState(seed) + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + + if action != 26**2 * 2: + action = bg_utils.to_bg_format(action, self.roll) + self.game.execute_play(self.colors[self.agent_selection], action) + + winner = self.game.get_winner() + if winner is not None: + opp_agent = bg_utils.opp_agent(self, self.agent_selection) + if winner == self.colors[self.agent_selection]: + self.rewards[self.agent_selection] = 1 + self.rewards[opp_agent] = -1 + else: + self.rewards[self.agent_selection] = -1 + self.rewards[opp_agent] = 1 + self.dones = {i: True for i in self.agents} + else: + self._clear_rewards() + + if self.double_roll == 0: + self.agent_selection = self._agent_selector.next() + + roll = self.np_random.randint(1, 6), self.np_random.randint(1, 6) + if roll[0] == roll[1]: + self.double_roll = 2 + if(self.colors[self.agent_selection] == WHITE): + roll = (-roll[0], -roll[1]) + self.roll = roll + + self._accumulate_rewards() + + def observe(self, agent): + action_mask = np.zeros(1353, 'int8') + observation = np.array(self.game.get_board_features(agent), dtype=np.float32).reshape(198, ) + # only current agent can make legal moves + if agent == self.agent_selection: + valid_moves = bg_utils.get_valid_actions(self, self.roll) + + if self.double_roll > 0: + self.handle_double_roll() + valid_moves = bg_utils.double_roll(valid_moves) + self.double_roll -= 1 + + legal_moves = bg_utils.to_gym_format(valid_moves, self.roll) + if len(legal_moves) == 0: + legal_moves = [26**2 * 2] + else: + legal_moves = [] + + for i in legal_moves: + action_mask[i] = 1 + + return {'observation': observation, 'action_mask': action_mask} + + def reset(self): + self.agents = self.possible_agents[:] + self.dones = {i: False for i in self.agents} + self.infos = {i: {'legal_moves': []} for i in self.agents} + self._agent_order = list(self.agents) + self._agent_selector.reinit(self._agent_order) + 
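+ # The opening roll below assigns colors: the agent that rolls the higher die plays
+ # WHITE and its dice are negated, since in this board encoding WHITE moves toward
+ # lower point indices (its home board is points 0-5), while BLACK moves toward
+ # higher indices.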
self.agent_selection = self._agent_selector.reset() + self.rewards = {i: 0 for i in self.agents} + self._cumulative_rewards = {i: 0 for i in self.agents} + self.colors = {} + self.double_roll = 0 + self.game = Game() + + opp_agent = bg_utils.opp_agent(self, self.agent_selection) + + roll = self.np_random.randint(1, 6), self.np_random.randint(1, 6) + while roll[0] == roll[1]: + roll = self.np_random.randint(1, 6), self.np_random.randint(1, 6) + if roll[0] > roll[1]: + self.colors[self.agent_selection] = WHITE + self.colors[opp_agent] = BLACK + roll = (-roll[0], -roll[1]) + else: + self.colors[self.agent_selection] = BLACK + self.colors[opp_agent] = WHITE + self.roll = roll + + def render(self, mode='human'): + assert mode in ['human'], print(mode) + if mode == 'human': + self.game.render() + + def close(self): + pass + + def handle_double_roll(self): + if self.double_roll == 1: + a = self._agent_order[0] + self._agent_order[0] = self._agent_order[1] + self._agent_order[1] = a + self._agent_selector.reinit(self._agent_order) + if self.agent_selection == self._agent_order[0]: + self._agent_selector.next() diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/bg_utils.py b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/bg_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..162532ce22e74b1554559e6d5baf0497762b0c11 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon/bg_utils.py @@ -0,0 +1,122 @@ +def get_valid_actions(env, roll): + a = env.game.get_valid_plays(env.colors[env.agent_selection], roll) + return a + + +def to_bar(action, roll): + if action == 25: # bar + if roll < 0: # white + return ('bar', 24 - abs(roll)) + else: # black + return ('bar', abs(roll) - 1) + else: + if action + roll - 1 > 23: + return (action - 1, 24) + elif action + roll - 1 < 0: + return (action - 1, -1) + else: + return (action - 1, action + roll - 1) + + +def from_bar(action): + bears_off = False + if action[1] == -1 or action[1] == 24: + bears_off = True + if action[0] == 'bar': + if action[1] > 12: # white, top + return (25, -(24 - action[1]), bears_off) + else: # black, bottom + return (25, (action[1] + 1), bears_off) + else: + return (action[0] + 1, action[1] - action[0], bears_off) + + +# action goes from single number to a tuple +def to_bg_format(action, roll): + base = 26 + low_roll = min(roll) + high_roll = max(roll) + + if action == base**2 * 2: + return (()) + + if action < base**2: # Low roll first + dig1 = action % base + dig2 = action // base + a = to_bar(dig1, low_roll) + b = to_bar(dig2, high_roll) + if b[0] != 'bar' and b[0] > -1: + return (a, b) + else: + return (a,) + + else: # High roll first + action = action - base**2 + dig1 = action % base + dig2 = action // base + a = to_bar(dig1, high_roll) + b = to_bar(dig2, low_roll) + if b[0] != 'bar' and b[0] > -1: + return (a, b) + else: + return (a,) + + +# takes list of tuples and converts to a discrete value +def to_gym_format(actions, roll): + high_roll = max(roll) + low_roll = min(roll) + nums = [] + base = 26 + for act in actions: + if len(act) == 1: + a, diff1, bears_off = from_bar(act[0]) + if bears_off: + diff1 = high_roll if abs(diff1) > abs(low_roll) else low_roll + if abs(diff1) == abs(high_roll): # high first + a += base**2 + nums.append(a) + elif isinstance(act[0], int) or act[0] == 'bar': + a, diff1, bears_off = from_bar(act) + if bears_off: + diff1 = high_roll if abs(diff1) > abs(low_roll) else low_roll + if abs(diff1) == abs(high_roll): # high first + a += base**2 + 
nums.append(a) + elif len(act) == 2: + a, diff1, bears_off1 = from_bar(act[0]) + b, diff2, bears_off2 = from_bar(act[1]) + if bears_off1 or bears_off2: + if bears_off1 and not bears_off2: + if abs(diff2) == abs(high_roll): + diff1 = low_roll + else: + diff1 = high_roll + elif not bears_off1 and bears_off2: + if abs(diff1) == abs(high_roll): + diff2 = low_roll + else: + diff2 = high_roll + num = a + base * b + if diff1 > diff2: # high first + num += base**2 + nums.append(num) + return nums + + +def double_roll(moves): + out = [] + for move in moves: + if len(move) > 1: + out.append((move[0], move[1])) + else: + out.append(move[0]) + return out + + +def opp_agent(env, agent): + return env.agents[0] if agent == env.agents[1] else env.agents[1] + + +def valid_action(env, action): + return env.action_spaces[env.agent_selection].contains(action) diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/backgammon_v3.py b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..9c95b0369f54b429d1a26181d3aa5bf3e670ac39 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/backgammon_v3.py @@ -0,0 +1 @@ +from .backgammon.backgammon_env import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/checkers/__pycache__/checkers.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/checkers/__pycache__/checkers.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4d8d3997eadc1365fc95ed154e4e4bb9e51e8c44 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/checkers/__pycache__/checkers.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/checkers/checkers.py b/MLPY/Lib/site-packages/pettingzoo/classic/checkers/checkers.py new file mode 100644 index 0000000000000000000000000000000000000000..0349a1ae634fa454e53d5a11534ef0b9d2880600 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/checkers/checkers.py @@ -0,0 +1,639 @@ +import copy +import itertools +import warnings + +import numpy as np +from gym import spaces + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + + +def env(): + env = raw_env() + env = wrappers.CaptureStdoutWrapper(env) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(AECEnv): + + metadata = { + "render.modes": ["human"], + "name": "checkers_v3", + "is_parallelizable": False, + "video.frames_per_second": 2, + } + + move64_32 = { + 1: 0, + 3: 1, + 5: 2, + 7: 3, + 8: 4, + 10: 5, + 12: 6, + 14: 7, + 17: 8, + 19: 9, + 21: 10, + 23: 11, + 24: 12, + 26: 13, + 28: 14, + 30: 15, + 33: 16, + 35: 17, + 37: 18, + 39: 19, + 40: 20, + 42: 21, + 44: 22, + 46: 23, + 49: 24, + 51: 25, + 53: 26, + 55: 27, + 56: 28, + 58: 29, + 60: 30, + 62: 31, + } + move32_64 = {v: k for k, v in move64_32.items()} + move_to_action = {"player_0": {}, "player_1": {}} + + def __init__(self): + super().__init__() + + self.ch = CheckersRules() + num_agents = 2 + self.agents = [f"player_{i}" for i in range(num_agents)] + self.possible_agents = self.agents[:] + self.agent_order = list(self.agents) + + self.action_spaces = {name: spaces.Discrete(64 * 4) for name in self.agents} + self.observation_spaces = { + name: spaces.Dict({'observation': spaces.Box(low=0, high=1, shape=(8, 8, 4), dtype="float64"), + 'action_mask': spaces.Box(low=0, 
high=1, shape=(256,), dtype=np.int8)}) + for name in self.agents + } + self.observation = np.zeros((8, 8, 4)) + + self.reset() + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def observe(self, agent): + # Use self.ch.flatboard to update self.observation + board = self.ch.flat_board() + obs = np.zeros((8, 8, 4)) + for i, row in enumerate(board): + for j, sq in enumerate(row): + if sq > 0: + obs[i, j, sq - 1] = 1 + if agent == "player_1": + # Rotate last two planes (white pieces) to front two positions + obs = np.roll(obs, 2, axis=2) + # Rotate board to place black pieces at bottom + obs = np.rot90(obs, 2, axes=(0, 1)) + self.observation = np.array(obs) + + legal_moves = self.legal_moves() if agent == self.agent_selection else [] + action_mask = np.zeros(256, 'int8') + for i in legal_moves: + action_mask[i] = 1 + + return {'observation': self.observation, 'action_mask': action_mask} + + def reset(self): + self.ch = CheckersRules() + self.num_moves = 0 + self.agents = self.possible_agents[:] + self.agent_order = list(self.agents) + self.agent_selection = self.agent_order[0] + self.infos = {name: {} for name in self.agents} + self.observation = self.observe(self.agent_selection) + self.last_turn = "black" + self.rewards = {name: 0 for name in self.agents} + self._cumulative_rewards = {name: 0 for name in self.agents} + self.dones = {name: False for name in self.agents} + self.winner = -1 + + # Parse action from (256) action space into (32)x(32) action space + # Action validation is performed later by the gym environment + # Directions are from the player's perspective + def _parse_action(self, action): + + # Check if given move is a jump + def check_jump(pos): + opponent = ["white"] if self.agent_selection == "player_0" else ["black"] + return self.ch.check_occupancy(raw_env.move64_32[pos], by_players=opponent) + + direction = int(action / 64) + pos = action % 64 + + # From the current player's perspective directions are as follows: + # 3 _ 2 + # _ M _ + # 1 _ 0 + # Adjust action for current player + if self.agent_selection == "player_1": + direction = 3 - direction + pos = 63 - pos + + dest_pos = 0 + + if direction == 0: + # Move back-right + dest_pos = pos - 9 + + if check_jump(dest_pos): + dest_pos = dest_pos - 9 + elif direction == 1: + # Move back-left + dest_pos = pos - 7 + + if check_jump(dest_pos): + dest_pos = dest_pos - 7 + elif direction == 2: + # Move forward-right + dest_pos = pos + 7 + + if check_jump(dest_pos): + dest_pos = dest_pos + 7 + elif direction == 3: + # Move forward-left + dest_pos = pos + 9 + + if check_jump(dest_pos): + dest_pos = dest_pos + 9 + + # Cache action conversion + move = (raw_env.move64_32[pos], raw_env.move64_32[dest_pos]) + raw_env.move_to_action[self.agent_selection][move] = action + return move + + def legal_moves(self): + moves = self.ch.legal_moves() + legal_moves = [] + for move in moves: + if move in raw_env.move_to_action[self.agent_selection]: + legal_moves.append(raw_env.move_to_action[self.agent_selection][move]) + continue + + srcpos = raw_env.move32_64[move[0]] + destpos = raw_env.move32_64[move[1]] + + direction = -1 + if destpos == srcpos - 9 or destpos == srcpos - 18: + direction = 0 + elif destpos == srcpos - 7 or destpos == srcpos - 14: + direction = 1 + elif destpos == srcpos + 7 or destpos == srcpos + 14: + direction = 2 + elif destpos == srcpos + 9 or destpos == srcpos + 18: + direction = 3 + + # Adjust action for current player + 
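+ # For example, the 32-square move (8, 13) maps to 64-square positions 17 -> 26,
+ # a difference of +9, i.e. direction 3 (forward-left), so player_0 encodes it as
+ # action 17 + 64 * 3 = 209; after the player_1 adjustment below (direction 3 - 3 = 0,
+ # position 63 - 17 = 46) the same move is encoded as action 46.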
if self.agent_selection == "player_1": + direction = 3 - direction + srcpos = 63 - srcpos + + # Cache move conversion + action = srcpos + (64 * direction) + raw_env.move_to_action[self.agent_selection][move] = action + legal_moves.append(action) + + return legal_moves + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + if action not in self.legal_moves(): + warnings.warn( + "Bad checkers move made, game terminating with current player losing. \n env.infos[player]['legal_moves'] contains a list of all legal moves that can be chosen." + ) + winner = "white" if self.last_turn == "black" else "black" + else: + self.num_moves += 1 + action = self._parse_action(action) + self.board, turn, last_moved_piece, moves, winner = self.ch.move( + action[0], action[1] + ) + + self.agent_selection = "player_0" if turn == "black" else "player_1" + + if winner == "black": + self.winner = 0 + self.rewards[self.agents[0]] = 1 + self.rewards[self.agents[1]] = -1 + elif winner == "white": + self.winner = 1 + self.rewards[self.agents[0]] = -1 + self.rewards[self.agents[1]] = 1 + + self.dones[self.agent_order[0]] = winner is not None + self.dones[self.agent_order[1]] = winner is not None + + self._accumulate_rewards() + + def render(self, mode="human"): + board = self.ch.flat_board() + pieces = { + 1: "M", + 2: "K", + 3: "m", + 4: "k", + } + for row, line in enumerate(board): + for col, sq in enumerate(line): + if sq == 0: + if row % 2 == 0 and col % 2 == 1 or row % 2 == 1 and col % 2 == 0: + print("_", end=" ") + else: + print(" ", end=" ") + else: + print(pieces[sq], end=" ") + print("") + + def close(self): + pass + + +class CheckersRules: + + size = 8 + n_positions = int(size ** 2 // 2) + n_per_row = int(size // 2) + + # TODO change players to top/bottom players + all_players = ["black", "white"] + all_piece_types = ["men", "kings"] + + # Converting to a flat representation of the board + empty_square = 0 + black_man = 1 + black_king = 2 + white_man = 3 + white_king = 4 + + # Directions + pos2dir = ["sw", "se", "ne", "nw"] + dir2del = [(+1, -1), (+1, +1), (-1, +1), (-1, -1)] + + # The directions a piece is allowed to move in + legal_dirs = { + "black": { + "men": [0, 1], + "kings": [0, 1, 2, 3], + }, + "white": { + "men": [2, 3], + "kings": [0, 1, 2, 3], + }, + } + + def __init__( + self, board=None, turn="black", last_moved_piece=None, empty_corner=True + ): + """ + Args: + empty_corner : bool + If the upper left corner of the board should be used. Default to be False. + """ + # assert size == 8, 'Only supports size 8.' + # assert turn in CheckersRules.all_players, 'It must be either `black` or `white`\'s turn' + self.empty_corner = empty_corner + + # Game state + self._board = board or self.initial_board() + self._turn = turn + self._last_moved_piece = None + + # LUT for the neighboring 4 squares in each directions respectively. 
None for a missing neighbor + # XXX there is another way to find neighbors (consider [sq+4, sq+5, sq-4, sq-5]) + self.neighbors = {sq: [] for sq in range(self.n_positions)} + for sq in range(self.n_positions): + row, col = self.sq2pos(sq) + # For each direction + for di, (drow, dcol) in enumerate(CheckersRules.dir2del): + next_row, next_col = row + drow, col + dcol + # Out of bound + if not (0 <= next_row < self.size and 0 <= next_col < self.size): + self.neighbors[sq].append(None) + else: + self.neighbors[sq].append(self.pos2sq(next_row, next_col)) + + @staticmethod + def initial_board(): + """Returns the initial configuration of the board""" + # Black starts at the top of the board + board = { + "black": { + "men": set(range(12)), + "kings": set(), + }, + "white": { + "men": set(range(32 - 12, 32)), + "kings": set(), + }, + } + return board + + @staticmethod + def empty_board(): + board = { + "black": { + "men": set(), + "kings": set(), + }, + "white": { + "men": set(), + "kings": set(), + }, + } + return board + + @staticmethod + def immutable_board(board): + # TODO Bitboard representation? + pieces = ( + frozenset(board["black"]["men"]), + frozenset(board["black"]["kings"]), + frozenset(board["white"]["men"]), + frozenset(board["white"]["kings"]), + ) + return pieces + + @staticmethod + def board_equal(board1, board2): + return CheckersRules.immutable_board(board1) == CheckersRules.immutable_board( + board2 + ) + + @property + def board(self): + return self._board + + @property + def turn(self): + return self._turn + + @property + def last_moved_piece(self): + return self._last_moved_piece + + def move(self, from_sq, to_sq, skip_check=False): + """Update the game state after the current player moves its piece from `from_sq` to `to_sq`. Reference: https://en.wikipedia.org/wiki/English_draughts#Rules + Args: + skip_check : bool + If the move is chosen from results returned by `legal_moves()`, the legality check can be skipped for efficiency. Default to be False. + """ + if not skip_check: + # Reject illegal moves + assert (from_sq, to_sq) in self.legal_moves(), "The move is not legal." + + # The move is legal + switch_turn = True + # Move the piece + for type in ["men", "kings"]: + pieces = self._board[self._turn][type] + if from_sq in pieces: + pieces.remove(from_sq) + pieces.add(to_sq) + piece_type = type + self._last_moved_piece = to_sq + break + else: + assert False, "A friendly piece must be moved." + + # The move is a jump + if to_sq not in self.neighbors[from_sq]: + # Remove the captured piece + to_row, to_col = self.sq2pos(to_sq) + from_row, from_col = self.sq2pos(from_sq) + capture_row, capture_col = (from_row + to_row) / 2, (from_col + to_col) / 2 + capture_sq = self.pos2sq(capture_row, capture_col) + for type in ["men", "kings"]: + pieces = self._board[self.adversary][type] + if capture_sq in pieces: + pieces.remove(capture_sq) + break + else: + assert False, "An opposing piece must be captured." 
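+ # For example, with the default empty_corner=True layout, a jump from square 9
+ # (row 2, col 3) to square 18 (row 4, col 5) removes the piece on square 14
+ # (row 3, col 4), the diagonal midpoint computed above.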
+ # Check for new available jumps for the moved piece before crowning a king + jumps = self.available_jumps(self._turn, piece_type, to_sq) + # Switch the turn, if there is no more jumps for the current player + switch_turn = len(jumps) == 0 + + # Crowning a king (must end the turn) + if piece_type == "men": + # Kings row is at the bottom for black + if self._turn == "black" and self.n_positions - to_sq <= self.n_per_row: + self._board[self._turn]["men"].remove(to_sq) + self._board[self._turn]["kings"].add(to_sq) + # Kings row is at the top for white + if self._turn == "white" and to_sq < self.n_per_row: + self._board[self._turn]["men"].remove(to_sq) + self._board[self._turn]["kings"].add(to_sq) + + if switch_turn: + self._turn = self.adversary + self._last_moved_piece = None + + # Check win/loss, winner is None before the game ends + all_next_moves = self.legal_moves() + if len(all_next_moves) == 0: + winner = self.adversary + else: + winner = None + return self.board, self.turn, self.last_moved_piece, all_next_moves, winner + + @property + def adversary(self): + return "black" if self._turn == "white" else "white" + + def available_simple_moves(self, player, type, sq): + simple_moves = [] + for di in CheckersRules.legal_dirs[player][type]: + next_sq = self.neighbors[sq][di] + # There is a neighboring square + if next_sq is not None: + # Check its occupancy + if not self.check_occupancy(next_sq): + simple_moves.append(next_sq) + return simple_moves + + def check_occupancy(self, sq, by_players=all_players): + """ + Return : bool + True if `sq` is occupied. + """ + for player in by_players: + for type in ["men", "kings"]: + if sq in self._board[player][type]: + return True + return False + + def available_jumps(self, player, type, sq): + """Returns the available jumps of `player`'s piece of `type` at `sq`.""" + jumps = [] + adversary = "black" if player == "white" else "white" + for di in CheckersRules.legal_dirs[player][type]: + capture_sq = self.neighbors[sq][di] + # There is a neighboring square + if capture_sq is not None: + # The square is occupied by the adversary's piece + if self.check_occupancy(capture_sq, [adversary]): + # Must jump over two squares in a single direction + next_sq = self.neighbors[capture_sq][di] + if next_sq is not None: + # The square is not occupied + if not self.check_occupancy(next_sq): + jumps.append(next_sq) + return jumps + + def all_jumps(self): + if self._last_moved_piece is None: + jumps = [] + for type in ["men", "kings"]: + for sq in self._board[self._turn][type]: + jumps += itertools.product( + [sq], self.available_jumps(self._turn, type, sq) + ) + else: + piece_type = ( + "men" + if self._last_moved_piece in self._board[self._turn]["men"] + else "kings" + ) + jumps = itertools.product( + [self._last_moved_piece], + self.available_jumps(self._turn, piece_type, self._last_moved_piece), + ) + return list(jumps) + + def legal_moves(self): + """Returns all legal moves of the current `player`.""" + all_moves = self.all_jumps() + # Jumps are mandatory + if 0 < len(all_moves): + return all_moves + # No jumps available + for type in ["men", "kings"]: + for sq in self._board[self._turn][type]: + all_moves += itertools.product( + [sq], self.available_simple_moves(self._turn, type, sq) + ) + return all_moves + + def pos2sq(self, row, col): + if self.empty_corner and row % 2 == 0: + # Even rows starts with an empty square + col -= 1 + elif not self.empty_corner and row % 2 == 1: + # Odd rows starts with an empty square + col -= 1 + col /= 2 + return int(row * 
(self.size / 2) + col) + + def sq2pos(self, sq): + row, col = int(sq // (self.size / 2)), int(sq % (self.size / 2)) + col *= 2 + if self.empty_corner and row % 2 == 0: + # Even rows starts with an empty square + col += 1 + elif not self.empty_corner and row % 2 == 1: + # Odd rows starts with an empty square + col += 1 + return row, col + + def flat_board(self): + # Empty board + board = ( + np.ones((self.size, self.size), dtype="int") * CheckersRules.empty_square + ) + # Place the pieces + for sq in self._board["black"]["men"]: + row, col = self.sq2pos(sq) + board[row][col] = CheckersRules.black_man + for sq in self._board["black"]["kings"]: + row, col = self.sq2pos(sq) + board[row][col] = CheckersRules.black_king + for sq in self._board["white"]["men"]: + row, col = self.sq2pos(sq) + board[row][col] = CheckersRules.white_man + for sq in self._board["white"]["kings"]: + row, col = self.sq2pos(sq) + board[row][col] = CheckersRules.white_king + return board + + def print_board(self): + # Symbols + # print(self.flat_board()) + empty_square = "_" + empty_playable_square = "." + black_man = "b" + black_king = "B" + white_man = "w" + white_king = "W" + symbols = [empty_playable_square, black_man, black_king, white_man, white_king] + # Print board + for i, row in enumerate(self.flat_board()): + for j, col in enumerate(row): + if ((i + self.empty_corner) % 2 + j) % 2 == 1: + # Not playable squares + print(empty_square, end="") + else: + print(symbols[col], end="") + print() + + def print_empty_board(self): + """Display the standard representation of the board with squares: + __00__01__02__03 + 04__05__06__07__ + __08__09__10__11 + 12__13__14__15__ + __16__17__18__19 + 20__21__22__23__ + __24__25__26__27 + 28__29__30__31__ + """ + board = -1 * np.ones((self.size, self.size), dtype="int") + # Print board + for sq in range(self.n_positions): + board[self.sq2pos(sq)] = sq + for row in board: + for col in row: + print("__" if col < 0 else "%02i" % col, end="") + print() + + def save_state(self): + return copy.deepcopy(self.board), self.turn, self.last_moved_piece + + def restore_state(self, state): + board, turn, last_moved_piece = state + self._board = copy.deepcopy(board) + self._turn = turn + self._last_moved_piece = last_moved_piece + + +""" +# Human keyboard player +def keyboard_player_move(board, last_moved_piece): + '''A player that uses keyboard to select moves.''' + if last_moved_piece is None: + input_str = input('* move `from_square, to_square`: ') + else: + input_str = input('* move `%i, to_square`: ' % last_moved_piece) + from_sq, to_sq = map(int, input_str.strip().split(',')) + return from_sq, to_sq + +""" diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/checkers_v3.py b/MLPY/Lib/site-packages/pettingzoo/classic/checkers_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..50a6bd3aa22f89ace6ccc6a033141bf5851d28e6 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/checkers_v3.py @@ -0,0 +1 @@ +from .checkers.checkers import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/chess/__init__.py b/MLPY/Lib/site-packages/pettingzoo/classic/chess/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..47a800fb742a0943dc405525f7a19aa032424bd2 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/chess_env.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/chess_env.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a2725fe65897dd007a224f3663339796bad5a4a5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/chess_env.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/chess_utils.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/chess_utils.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..14902a59465378ad05d4a51c59da5197126e4e29 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/chess_utils.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/test_chess.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/test_chess.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a020bbd269212dedccbacd5a0cff7398fc577f91 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/chess/__pycache__/test_chess.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/chess/chess_env.py b/MLPY/Lib/site-packages/pettingzoo/classic/chess/chess_env.py new file mode 100644 index 0000000000000000000000000000000000000000..f07cf70040e466d111cb3819c3cf0e8ae4c6d5d1 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/chess/chess_env.py @@ -0,0 +1,131 @@ +import warnings + +import chess +import numpy as np +from gym import spaces + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + +from . 
import chess_utils + + +def env(): + env = raw_env() + env = wrappers.CaptureStdoutWrapper(env) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(AECEnv): + + metadata = { + 'render.modes': ['human'], + "name": "chess_v5", + "is_parallelizable": False, + "video.frames_per_second": 2, + } + + def __init__(self): + super().__init__() + + self.board = chess.Board() + + self.agents = [f"player_{i}" for i in range(2)] + self.possible_agents = self.agents[:] + + self._agent_selector = agent_selector(self.agents) + + self.action_spaces = {name: spaces.Discrete(8 * 8 * 73) for name in self.agents} + self.observation_spaces = {name: spaces.Dict({ + 'observation': spaces.Box(low=0, high=1, shape=(8, 8, 111), dtype=bool), + 'action_mask': spaces.Box(low=0, high=1, shape=(4672,), dtype=np.int8) + }) for name in self.agents} + + self.rewards = None + self.dones = None + self.infos = {name: {} for name in self.agents} + + self.agent_selection = None + + self.board_history = np.zeros((8, 8, 104), dtype=bool) + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def observe(self, agent): + observation = chess_utils.get_observation(self.board, self.possible_agents.index(agent)) + observation = np.dstack((observation[:, :, :7], self.board_history)) + legal_moves = chess_utils.legal_moves(self.board) if agent == self.agent_selection else [] + + action_mask = np.zeros(4672, 'int8') + for i in legal_moves: + action_mask[i] = 1 + + return {'observation': observation, 'action_mask': action_mask} + + def reset(self): + self.has_reset = True + + self.agents = self.possible_agents[:] + + self.board = chess.Board() + + self._agent_selector = agent_selector(self.agents) + self.agent_selection = self._agent_selector.reset() + + self.rewards = {name: 0 for name in self.agents} + self._cumulative_rewards = {name: 0 for name in self.agents} + self.dones = {name: False for name in self.agents} + self.infos = {name: {} for name in self.agents} + + self.board_history = np.zeros((8, 8, 104), dtype=bool) + + def set_game_result(self, result_val): + for i, name in enumerate(self.agents): + self.dones[name] = True + result_coef = 1 if i == 0 else -1 + self.rewards[name] = result_val * result_coef + self.infos[name] = {'legal_moves': []} + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + current_agent = self.agent_selection + current_index = self.agents.index(current_agent) + next_board = chess_utils.get_observation(self.board, current_agent) + self.board_history = np.dstack((next_board[:, :, 7:], self.board_history[:, :, :-13])) + self.agent_selection = self._agent_selector.next() + + chosen_move = chess_utils.action_to_move(self.board, action, current_index) + assert chosen_move in self.board.legal_moves + self.board.push(chosen_move) + + next_legal_moves = chess_utils.legal_moves(self.board) + + is_stale_or_checkmate = not any(next_legal_moves) + + # claim draw is set to be true to align with normal tournament rules + is_repetition = self.board.is_repetition(3) + is_50_move_rule = self.board.can_claim_fifty_moves() + is_claimable_draw = is_repetition or is_50_move_rule + game_over = is_claimable_draw or is_stale_or_checkmate + + if game_over: + result = self.board.result(claim_draw=True) + result_val = chess_utils.result_to_int(result) + 
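+ # result_to_int maps "1-0"/"0-1"/"1/2-1/2" to +1/-1/0 from player_0's (White's)
+ # point of view; set_game_result then credits +result_val to player_0,
+ # -result_val to player_1, and marks both agents done.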
self.set_game_result(result_val) + + self._accumulate_rewards() + + def render(self, mode='human'): + print(self.board) + + def close(self): + pass diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/chess/chess_utils.py b/MLPY/Lib/site-packages/pettingzoo/classic/chess/chess_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a8c778ae3ea9361d8d69fbfd99c3211cb04daf75 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/chess/chess_utils.py @@ -0,0 +1,319 @@ +import chess +import numpy as np + + +def boards_to_ndarray(boards): + arr64 = np.array(boards, dtype=np.uint64) + arr8 = arr64.view(dtype=np.uint8) + bits = np.unpackbits(arr8) + floats = bits.astype(bool) + boardstack = floats.reshape([len(boards), 8, 8]) + boardimage = np.transpose(boardstack, [1, 2, 0]) + return boardimage + + +def square_to_coord(s): + col = s % 8 + row = s // 8 + return (col, row) + + +def diff(c1, c2): + x1, y1 = c1 + x2, y2 = c2 + return (x2 - x1, y2 - y1) + + +def sign(v): + return -1 if v < 0 else (1 if v > 0 else 0) + + +def mirror_move(move): + return chess.Move(chess.square_mirror(move.from_square), chess.square_mirror(move.to_square), promotion=move.promotion) + + +def result_to_int(result_str): + if result_str == "1-0": + return 1 + elif result_str == "0-1": + return -1 + elif result_str == "1/2-1/2": + return 0 + else: + assert False, "bad result" + + +def get_queen_dir(diff): + dx, dy = diff + assert dx == 0 or dy == 0 or abs(dx) == abs(dy) + magnitude = max(abs(dx), abs(dy)) - 1 + + assert magnitude < 8 and magnitude >= 0 + counter = 0 + for x in range(-1, 1 + 1): + for y in range(-1, 1 + 1): + if x == 0 and y == 0: + continue + if x == sign(dx) and y == sign(dy): + return magnitude, counter + counter += 1 + assert False, "bad queen move inputted" + + +def get_queen_plane(diff): + NUM_COUNTERS = 8 + mag, counter = get_queen_dir(diff) + return mag * NUM_COUNTERS + counter + + +def get_knight_dir(diff): + dx, dy = diff + counter = 0 + for x in range(-2, 2 + 1): + for y in range(-2, 2 + 1): + if abs(x) + abs(y) == 3: + if dx == x and dy == y: + return counter + counter += 1 + assert False, "bad knight move inputted" + + +def is_knight_move(diff): + dx, dy = diff + return abs(dx) + abs(dy) == 3 and 1 <= abs(dx) <= 2 + + +def get_pawn_promotion_move(diff): + dx, dy = diff + assert dy == 1 + assert -1 <= dx <= 1 + return dx + 1 + + +def get_pawn_promotion_num(promotion): + assert promotion == chess.KNIGHT or promotion == chess.BISHOP or promotion == chess.ROOK + return 0 if promotion == chess.KNIGHT else (1 if promotion == chess.BISHOP else 2) + + +def move_to_coord(move): + return square_to_coord(move.from_square) + + +def get_move_plane(move): + source = move.from_square + dest = move.to_square + difference = diff(square_to_coord(source), square_to_coord(dest)) + + QUEEN_MOVES = 56 + KNIGHT_MOVES = 8 + QUEEN_OFFSET = 0 + KNIGHT_OFFSET = QUEEN_MOVES + UNDER_OFFSET = KNIGHT_OFFSET + KNIGHT_MOVES + + if is_knight_move(difference): + return KNIGHT_OFFSET + get_knight_dir(difference) + else: + if move.promotion is not None and move.promotion != chess.QUEEN: + return UNDER_OFFSET + 3 * get_pawn_promotion_move(difference) + get_pawn_promotion_num(move.promotion) + else: + return QUEEN_OFFSET + get_queen_plane(difference) + + +moves_to_actions = {} +actions_to_moves = {} + + +def action_to_move(board, action, player): + base_move = chess.Move.from_uci(actions_to_moves[action]) + + base_coord = square_to_coord(base_move.from_square) + mirr_move = mirror_move(base_move) if 
player else base_move + if mirr_move.promotion == chess.QUEEN: + mirr_move.promotion = None + if mirr_move.promotion is None and str(board.piece_at(mirr_move.from_square)).lower() == 'p' and base_coord[1] == 6: + mirr_move.promotion = chess.QUEEN + return mirr_move + + +def make_move_mapping(uci_move): + TOTAL = 73 + move = chess.Move.from_uci(uci_move) + source = move.from_square + + coord = square_to_coord(source) + panel = get_move_plane(move) + cur_action = (coord[0] * 8 + coord[1]) * TOTAL + panel + + moves_to_actions[uci_move] = cur_action + actions_to_moves[cur_action] = uci_move + + +def legal_moves(orig_board): + ''' + action space is a 8x8x73 dimensional array + Each of the 8×8 + positions identifies the square from which to “pick up” a piece. The first 56 planes encode + possible ‘queen moves’ for any piece: a number of squares [1..7] in which the piece will be + moved, along one of eight relative compass directions {N, NE, E, SE, S, SW, W, NW}. The + next 8 planes encode possible knight moves for that piece. The final 9 planes encode possible + underpromotions for pawn moves or captures in two possible diagonals, to knight, bishop or + rook respectively. Other pawn moves or captures from the seventh rank are promoted to a + queen + ''' + if orig_board.turn == chess.BLACK: # white is 1, black is 0 + board = orig_board.mirror() + else: + board = orig_board + + legal_moves = [] + for move in board.legal_moves: + uci_move = move.uci() + if uci_move in moves_to_actions: + legal_moves.append(moves_to_actions[move.uci()]) + else: + make_move_mapping(uci_move) + legal_moves.append(moves_to_actions[move.uci()]) + + return legal_moves + + +def get_observation(orig_board, player): + ''' + Observation is an 8x8x(P + L) dimensional array + P is going to be your pieces positions + your opponents pieces positions + L is going to be some metadata such as repetition count,, + ''' + board = orig_board + if player: + board = board.mirror() + else: + board = board + + all_squares = chess.SquareSet(chess.BB_ALL) + HISTORY_LEN = 1 + PLANES_PER_BOARD = 13 + AUX_SIZE = 7 + RESULT_SIZE = AUX_SIZE + HISTORY_LEN * PLANES_PER_BOARD + result = [chess.SquareSet(chess.BB_EMPTY) for _ in range(RESULT_SIZE)] + AUX_OFF = 0 + BASE = AUX_SIZE + + ''' // "Legacy" input planes with: + // - Plane 104 (0-based) filled with 1 if white can castle queenside. + // - Plane 105 filled with ones if white can castle kingside. + // - Plane 106 filled with ones if black can castle queenside. + // - Plane 107 filled with ones if white can castle kingside. + if (board.castlings().we_can_000()) result[kAuxPlaneBase + 0].SetAll(); + if (board.castlings().we_can_00()) result[kAuxPlaneBase + 1].SetAll(); + if (board.castlings().they_can_000()) { + result[kAuxPlaneBase + 2].SetAll(); + } + if (board.castlings().they_can_00()) result[kAuxPlaneBase + 3].SetAll(); + ''' + if board.castling_rights & chess.BB_H1: + result[AUX_OFF + 0] = all_squares + if board.castling_rights & chess.BB_A1: + result[AUX_OFF + 1] = all_squares + if board.castling_rights & chess.BB_H8: + result[AUX_OFF + 2] = all_squares + if board.castling_rights & chess.BB_A8: + result[AUX_OFF + 3] = all_squares + ''' + if (we_are_black) result[kAuxPlaneBase + 4].SetAll(); + result[kAuxPlaneBase + 5].Fill(history.Last().GetNoCaptureNoPawnPly()); + // Plane kAuxPlaneBase + 6 used to be movecount plane, now it's all zeros. + // Plane kAuxPlaneBase + 7 is all ones to help NN find board edges. 
+ result[kAuxPlaneBase + 7].SetAll(); + } + ''' + if player: + result[AUX_OFF + 4] = all_squares + result[AUX_OFF + 5].add(board.halfmove_clock // 2) + result[AUX_OFF + 6] = all_squares + ''' + bool flip = false; + int history_idx = history.GetLength() - 1; + for (int i = 0; i < std::min(history_planes, kMoveHistory); + ++i, --history_idx) { + const Position& position = + history.GetPositionAt(history_idx < 0 ? 0 : history_idx); + const ChessBoard& board = + flip ? position.GetThemBoard() : position.GetBoard(); + if (history_idx < 0 && fill_empty_history == FillEmptyHistory::NO) break; + // Board may be flipped so compare with position.GetBoard(). + if (history_idx < 0 && fill_empty_history == FillEmptyHistory::FEN_ONLY && + position.GetBoard() == ChessBoard::kStartposBoard) { + break; + } + + const int base = i * kPlanesPerBoard; + result[base + 0].mask = (board.ours() & board.pawns()).as_int(); + result[base + 1].mask = (board.our_knights()).as_int(); + result[base + 2].mask = (board.ours() & board.bishops()).as_int(); + result[base + 3].mask = (board.ours() & board.rooks()).as_int(); + result[base + 4].mask = (board.ours() & board.queens()).as_int(); + result[base + 5].mask = (board.our_king()).as_int(); + + result[base + 6].mask = (board.theirs() & board.pawns()).as_int(); + result[base + 7].mask = (board.their_knights()).as_int(); + result[base + 8].mask = (board.theirs() & board.bishops()).as_int(); + result[base + 9].mask = (board.theirs() & board.rooks()).as_int(); + result[base + 10].mask = (board.theirs() & board.queens()).as_int(); + result[base + 11].mask = (board.their_king()).as_int(); + + ''' + base = BASE + OURS = 0 + THEIRS = 1 + result[base + 0] = board.pieces(chess.PAWN, OURS) + result[base + 1] = board.pieces(chess.KNIGHT, OURS) + result[base + 2] = board.pieces(chess.BISHOP, OURS) + result[base + 3] = board.pieces(chess.ROOK, OURS) + result[base + 4] = board.pieces(chess.QUEEN, OURS) + result[base + 5] = board.pieces(chess.KING, OURS) + + result[base + 6] = board.pieces(chess.PAWN, THEIRS) + result[base + 7] = board.pieces(chess.KNIGHT, THEIRS) + result[base + 8] = board.pieces(chess.BISHOP, THEIRS) + result[base + 9] = board.pieces(chess.ROOK, THEIRS) + result[base + 10] = board.pieces(chess.QUEEN, THEIRS) + result[base + 11] = board.pieces(chess.KING, THEIRS) + + ''' + const int repetitions = position.GetRepetitions(); + if (repetitions >= 1) result[base + 12].SetAll(); + ''' + has_repeated = board.is_repetition(2) + if has_repeated >= 1: + result[base + 12] = all_squares + ''' + // If en passant flag is set, undo last pawn move by removing the pawn from + // the new square and putting into pre-move square. 
+ if (history_idx < 0 && !board.en_passant().empty()) { + const auto idx = GetLowestBit(board.en_passant().as_int()); + if (idx < 8) { // "Us" board + result[base + 0].mask += + ((0x0000000000000100ULL - 0x0000000001000000ULL) << idx); + } else { + result[base + 6].mask += + ((0x0001000000000000ULL - 0x0000000100000000ULL) << (idx - 56)); + } + } + if (history_idx > 0) flip = !flip; + } + ''' + # from 0-63 + square = board.ep_square + if square: + ours = square > 32 + row = square % 8 + dest_col_add = 8 * 7 if ours else 0 + dest_square = dest_col_add + row + if ours: + result[base + 0].remove(square - 8) + result[base + 0].add(dest_square) + else: + result[base + 6].remove(square + 8) + result[base + 6].add(dest_square) + + return boards_to_ndarray(result) diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/chess/test_chess.py b/MLPY/Lib/site-packages/pettingzoo/classic/chess/test_chess.py new file mode 100644 index 0000000000000000000000000000000000000000..7a9994097d995ce274ec1d69da52464d0e12224d --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/chess/test_chess.py @@ -0,0 +1,54 @@ +import chess +import chess_utils +import numpy as np + + +def assert_asserts(x): + try: + x() + except AssertionError: + return True + return False + + +assert chess_utils.move_to_coord(chess.Move.from_uci("a8b7")) == (0, 7) +assert chess_utils.move_to_coord(chess.Move.from_uci("g3b7")) == (6, 2) + +assert (chess_utils.get_knight_dir((2, 1)) == 7) +assert (chess_utils.get_knight_dir((-2, 1)) == 1) +assert assert_asserts(lambda: chess_utils.get_knight_dir((-1, 1))) + +assert chess_utils.get_queen_dir((5, -5)) == (4, 5) +assert chess_utils.get_queen_dir((8, 0)) == (7, 6) +assert chess_utils.get_queen_dir((0, -1)) == (0, 3) +assert assert_asserts(lambda: chess_utils.get_queen_dir((0, 0))) +assert assert_asserts(lambda: chess_utils.get_queen_dir((1, 2))) +assert assert_asserts(lambda: chess_utils.get_queen_dir((2, -8))) + +assert chess_utils.get_move_plane(chess.Move.from_uci("e1g1"), chess.KING) == chess_utils.get_queen_plane((2, 0)) # castles kingside +assert chess_utils.get_move_plane(chess.Move.from_uci("g1f3"), chess.KNIGHT) == 56 + chess_utils.get_knight_dir((-1, 2)) # castles kingside +assert chess_utils.get_move_plane(chess.Move.from_uci("f7f8q"), chess.PAWN) == chess_utils.get_queen_plane((0, 1)) +assert chess_utils.get_move_plane(chess.Move.from_uci("f7f8r"), chess.PAWN) == 56 + 8 + 2 + 1 * 3 +assert chess_utils.get_move_plane(chess.Move.from_uci("f7g8n"), chess.PAWN) == 56 + 8 + 0 + 2 * 3 + +assert str(chess_utils.mirror_move(chess.Move.from_uci("f7g8"))) == "f2g1" + +board = chess.Board() +board.push_san("e4") +print(chess_utils.sample_action(board, np.ones([8, 8, 73]))) +print(chess_utils.sample_action(board, np.ones([8, 8, 73]))) +test_action = np.ones([8, 8, 73]) * -100 +test_action[0, 1, 4] = 1 +assert str(chess_utils.sample_action(board, test_action)) == "a2a4" +board.push_san("c5") +obs = chess_utils.get_observation(board, player=1) +board.push_san("e5") +obs = chess_utils.get_observation(board, player=1) +board.push_san("d5") +obs = chess_utils.get_observation(board, player=1) +board.push_san("a3") +obs = chess_utils.get_observation(board, player=1) +board.push_san("d4") +obs = chess_utils.get_observation(board, player=1) +board.push_san("c4") +obs = chess_utils.get_observation(board, player=1) diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/chess_v5.py b/MLPY/Lib/site-packages/pettingzoo/classic/chess_v5.py new file mode 100644 index 
0000000000000000000000000000000000000000..bb3e0f61f85ab5167d5ffddd765e9cd9d2ce5420 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/chess_v5.py @@ -0,0 +1 @@ +from .chess.chess_env import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/__init__.py b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7289c2a78952fef53fece72d65d6b33519ac6c43 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/__pycache__/connect_four.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/__pycache__/connect_four.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d652d39e08af18a9da0ad9a180cd3853541e7aec Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/__pycache__/connect_four.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/connect_four.py b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/connect_four.py new file mode 100644 index 0000000000000000000000000000000000000000..86303b49f9767ba0f58b68c7e891770b0bb13e7b --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/connect_four.py @@ -0,0 +1,221 @@ +import os + +import numpy as np +import pygame +from gym import spaces + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + + +def get_image(path): + from os import path as os_path + + import pygame + cwd = os_path.dirname(__file__) + image = pygame.image.load(cwd + '/' + path) + sfc = pygame.Surface(image.get_size(), flags=pygame.SRCALPHA) + sfc.blit(image, (0, 0)) + return sfc + + +def env(): + env = raw_env() + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(AECEnv): + metadata = { + "render.modes": ["human", "rgb_array"], + "name": "connect_four_v3", + "is_parallelizable": False, + "video.frames_per_second": 2, + } + + def __init__(self): + super().__init__() + # 6 rows x 7 columns + # blank space = 0 + # agent 0 -- 1 + # agent 1 -- 2 + # flat representation in row major order + self.screen = None + + self.board = [0] * (6 * 7) + + self.agents = ['player_0', 'player_1'] + self.possible_agents = self.agents[:] + + self.action_spaces = {i: spaces.Discrete(7) for i in self.agents} + self.observation_spaces = {i: spaces.Dict({ + 'observation': spaces.Box(low=0, high=1, shape=(6, 7, 2), dtype=np.int8), + 'action_mask': spaces.Box(low=0, high=1, shape=(7,), dtype=np.int8) + }) for i in self.agents} + + # Key + # ---- + # blank space = 0 + # agent 0 = 1 + # agent 1 = 2 + # An observation is list of lists, where each list represents a row + # + # array([[0, 1, 1, 2, 0, 1, 0], + # [1, 0, 1, 2, 2, 2, 1], + # [0, 1, 0, 0, 1, 2, 1], + # [1, 0, 2, 0, 1, 1, 0], + # [2, 0, 0, 0, 1, 1, 0], + # [1, 1, 2, 1, 0, 1, 0]], dtype=int8) + def observe(self, agent): + board_vals = 
np.array(self.board).reshape(6, 7) + cur_player = self.possible_agents.index(agent) + opp_player = (cur_player + 1) % 2 + + cur_p_board = np.equal(board_vals, cur_player + 1) + opp_p_board = np.equal(board_vals, opp_player + 1) + + observation = np.stack([cur_p_board, opp_p_board], axis=2).astype(np.int8) + legal_moves = self._legal_moves() if agent == self.agent_selection else [] + + action_mask = np.zeros(7, 'int8') + for i in legal_moves: + action_mask[i] = 1 + + return {'observation': observation, 'action_mask': action_mask} + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def _legal_moves(self): + return [i for i in range(7) if self.board[i] == 0] + + # action in this case is a value from 0 to 6 indicating position to move on the flat representation of the connect4 board + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + # assert valid move + assert (self.board[0:7][action] == 0), "played illegal move." + + piece = self.agents.index(self.agent_selection) + 1 + for i in list(filter(lambda x: x % 7 == action, list(range(41, -1, -1)))): + if self.board[i] == 0: + self.board[i] = piece + break + + next_agent = self._agent_selector.next() + + winner = self.check_for_winner() + + # check if there is a winner + if winner: + self.rewards[self.agent_selection] += 1 + self.rewards[next_agent] -= 1 + self.dones = {i: True for i in self.agents} + # check if there is a tie + elif all(x in [1, 2] for x in self.board): + # once either play wins or there is a draw, game over, both players are done + self.dones = {i: True for i in self.agents} + else: + # no winner yet + self.agent_selection = next_agent + + self._accumulate_rewards() + + def reset(self): + # reset environment + self.board = [0] * (6 * 7) + + self.agents = self.possible_agents[:] + self.rewards = {i: 0 for i in self.agents} + self._cumulative_rewards = {name: 0 for name in self.agents} + self.dones = {i: False for i in self.agents} + self.infos = {i: {} for i in self.agents} + + self._agent_selector = agent_selector(self.agents) + + self.agent_selection = self._agent_selector.reset() + + def render(self, mode='human'): + screen_width = 1287 + screen_height = 1118 + if self.screen is None: + if mode == "human": + pygame.init() + self.screen = pygame.display.set_mode((screen_width, screen_height)) + else: + self.screen = pygame.Surface((screen_width, screen_height)) + if mode == "human": + pygame.event.get() + + # Load and scale all of the necessary images + tile_size = (screen_width * (91 / 99)) / 7 + + red_chip = get_image(os.path.join('img', 'C4RedPiece.png')) + red_chip = pygame.transform.scale(red_chip, (int(tile_size * (9 / 13)), int(tile_size * (9 / 13)))) + + black_chip = get_image(os.path.join('img', 'C4BlackPiece.png')) + black_chip = pygame.transform.scale(black_chip, (int(tile_size * (9 / 13)), int(tile_size * (9 / 13)))) + + board_img = get_image(os.path.join('img', 'Connect4Board.png')) + board_img = pygame.transform.scale(board_img, ((int(screen_width)), int(screen_height))) + + self.screen.blit(board_img, (0, 0)) + + # Blit the necessary chips and their positions + for i in range(0, 42): + if self.board[i] == 1: + self.screen.blit(red_chip, ((i % 7) * (tile_size) + (tile_size * (6 / 13)), int(i / 7) * (tile_size) + (tile_size * (6 / 13)))) + elif self.board[i] == 2: + self.screen.blit(black_chip, ((i % 7) * (tile_size) + (tile_size * (6 / 13)), int(i / 7) * 
(tile_size) + (tile_size * (6 / 13)))) + + if mode == "human": + pygame.display.update() + + observation = np.array(pygame.surfarray.pixels3d(self.screen)) + + return np.transpose(observation, axes=(1, 0, 2)) if mode == "rgb_array" else None + + def close(self): + if self.screen is not None: + import pygame + pygame.quit() + self.screen = None + + def check_for_winner(self): + board = np.array(self.board).reshape(6, 7) + piece = self.agents.index(self.agent_selection) + 1 + + # Check horizontal locations for win + column_count = 7 + row_count = 6 + + for c in range(column_count - 3): + for r in range(row_count): + if board[r][c] == piece and board[r][c + 1] == piece and board[r][c + 2] == piece and board[r][c + 3] == piece: + return True + + # Check vertical locations for win + for c in range(column_count): + for r in range(row_count - 3): + if board[r][c] == piece and board[r + 1][c] == piece and board[r + 2][c] == piece and board[r + 3][c] == piece: + return True + + # Check positively sloped diagonals + for c in range(column_count - 3): + for r in range(row_count - 3): + if board[r][c] == piece and board[r + 1][c + 1] == piece and board[r + 2][c + 2] == piece and board[r + 3][c + 3] == piece: + return True + + # Check negatively sloped diagonals + for c in range(column_count - 3): + for r in range(3, row_count): + if board[r][c] == piece and board[r - 1][c + 1] == piece and board[r - 2][c + 2] == piece and board[r - 3][c + 3] == piece: + return True + + return False diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/img/C4BlackPiece.png b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/img/C4BlackPiece.png new file mode 100644 index 0000000000000000000000000000000000000000..46cdd6904a2ae815e18c1c5e55a2943d0cd54832 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/img/C4BlackPiece.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/img/C4RedPiece.png b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/img/C4RedPiece.png new file mode 100644 index 0000000000000000000000000000000000000000..63a1c67a8ce89b42423ce96c2d5dc660de088c21 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/img/C4RedPiece.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/img/Connect4Board.png b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/img/Connect4Board.png new file mode 100644 index 0000000000000000000000000000000000000000..491fad6323a47891f0ddb7c63b99e8212b2c04c5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four/img/Connect4Board.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/connect_four_v3.py b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..68df48ceed07dcb59f90a213043cd9bea3d018ad --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/connect_four_v3.py @@ -0,0 +1 @@ +from .connect_four.connect_four import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/dou_dizhu_v4.py b/MLPY/Lib/site-packages/pettingzoo/classic/dou_dizhu_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..377c8755081b23b7fe4b70edb200494f0dc544ac --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/dou_dizhu_v4.py @@ -0,0 +1 @@ +from .rlcard_envs.dou_dizhu import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/gin_rummy_v4.py 
b/MLPY/Lib/site-packages/pettingzoo/classic/gin_rummy_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..9557197d6975617f500d80f33f30d78ad852253c --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/gin_rummy_v4.py @@ -0,0 +1 @@ +from .rlcard_envs.gin_rummy import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/__init__.py b/MLPY/Lib/site-packages/pettingzoo/classic/go/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9e379241dc6384feeabb5cb9c2e6bded4b8cc0cd Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/coords.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/coords.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..29ea00dd2cc16051f03b1bb8005c389928a4de3b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/coords.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/go.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/go.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..050f106602d03d16b4b7cb4d62ccf2978451b57b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/go.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/go_env.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/go_env.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6307af0fbe266711a9f88d8cba3c347e42f52e22 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/__pycache__/go_env.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/coords.py b/MLPY/Lib/site-packages/pettingzoo/classic/go/coords.py new file mode 100644 index 0000000000000000000000000000000000000000..221e1cd3127fc400a45c562167c459dee4adafd1 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/go/coords.py @@ -0,0 +1,95 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Code from: https://github.com/tensorflow/minigo + +"""Logic for dealing with coordinates. + +This introduces some helpers and terminology that are used throughout Minigo. + +Minigo Coordinate: This is a tuple of the form (row, column) that is indexed + starting out at (0, 0) from the upper-left. +Flattened Coordinate: this is a number ranging from 0 - N^2 (so N^2+1 + possible values). The extra value N^2 is used to mark a 'pass' move. +SGF Coordinate: Coordinate used for SGF serialization format. 
Coordinates use + two-letter pairs having the form (column, row) indexed from the upper-left + where 0, 0 = 'aa'. +GTP Coordinate: Human-readable coordinate string indexed from bottom left, with + the first character a capital letter for the column and the second a number + from 1-19 for the row. Note that GTP chooses to skip the letter 'I' due to + its similarity with 'l' (lowercase 'L'). +PYGTP Coordinate: Tuple coordinate indexed starting at 1,1 from bottom-left + in the format (column, row) + +So, for a 19x19, + +Coord Type upper_left upper_right pass +------------------------------------------------------- +minigo coord (0, 0) (0, 18) None +flat 0 18 361 +SGF 'aa' 'sa' '' +GTP 'A19' 'T19' 'pass' +""" + +from . import go + +# We provide more than 19 entries here in case of boards larger than 19 x 19. +_SGF_COLUMNS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' +_GTP_COLUMNS = 'ABCDEFGHJKLMNOPQRSTUVWXYZ' + + +def from_flat(flat): + """Converts from a flattened coordinate to a Minigo coordinate.""" + if flat == go.N * go.N: + return None + return divmod(flat, go.N) + + +def to_flat(coord): + """Converts from a Minigo coordinate to a flattened coordinate.""" + if coord is None: + return go.N * go.N + return go.N * coord[0] + coord[1] + + +def from_sgf(sgfc): + """Converts from an SGF coordinate to a Minigo coordinate.""" + if sgfc is None or sgfc == '' or (go.N <= 19 and sgfc == 'tt'): + return None + return _SGF_COLUMNS.index(sgfc[1]), _SGF_COLUMNS.index(sgfc[0]) + + +def to_sgf(coord): + """Converts from a Minigo coordinate to an SGF coordinate.""" + if coord is None: + return '' + return _SGF_COLUMNS[coord[1]] + _SGF_COLUMNS[coord[0]] + + +def from_gtp(gtpc): + """Converts from a GTP coordinate to a Minigo coordinate.""" + gtpc = gtpc.upper() + if gtpc == 'PASS': + return None + col = _GTP_COLUMNS.index(gtpc[0]) + row_from_bottom = int(gtpc[1:]) + return go.N - row_from_bottom, col + + +def to_gtp(coord): + """Converts from a Minigo coordinate to a GTP coordinate.""" + if coord is None: + return 'pass' + y, x = coord + return f'{_GTP_COLUMNS[x]}{go.N - y}' diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/go.py b/MLPY/Lib/site-packages/pettingzoo/classic/go/go.py new file mode 100644 index 0000000000000000000000000000000000000000..6c9d3e71842751979a507b18a0a74f0a62f074c2 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/go/go.py @@ -0,0 +1,544 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Code from: https://github.com/tensorflow/minigo + +""" +A board is a NxN numpy array. +A Coordinate is a tuple index into the board. +A Move is a (Coordinate c | None). +A PlayerMove is a (Color, Move) tuple + +(0, 0) is considered to be the upper left corner of the board, and (18, 0) is the lower left. +""" +import copy +import itertools +import os +from collections import namedtuple + +import numpy as np + +from . 
import coords + +N = int(os.environ.get('BOARD_SIZE', 19)) + +# Represent a board as a numpy array, with 0 empty, 1 is black, -1 is white. +# This means that swapping colors is as simple as multiplying array by -1. +WHITE, EMPTY, BLACK, FILL, KO, UNKNOWN = range(-1, 5) + +# Represents "group not found" in the LibertyTracker object +MISSING_GROUP_ID = -1 + +ALL_COORDS = [(i, j) for i in range(N) for j in range(N)] +EMPTY_BOARD = np.zeros([N, N], dtype=np.int8) + + +def _check_bounds(c): + return 0 <= c[0] < N and 0 <= c[1] < N + + +NEIGHBORS = {(x, y): list(filter(_check_bounds, [ + (x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)])) for x, y in ALL_COORDS} +DIAGONALS = {(x, y): list(filter(_check_bounds, [ + (x + 1, y + 1), (x + 1, y - 1), (x - 1, y + 1), (x - 1, y - 1)])) for x, y in ALL_COORDS} + + +class IllegalMove(Exception): + pass + + +class PlayerMove(namedtuple('PlayerMove', ['color', 'move'])): + pass + + +class PositionWithContext(namedtuple('SgfPosition', ['position', 'next_move', 'result'])): + pass + + +def place_stones(board, color, stones): + for s in stones: + board[s] = color + + +def replay_position(position, result): + """ + Wrapper for a go.Position which replays its history. + Assumes an empty start position! (i.e. no handicap, and history must be exhaustive.) + + Result must be passed in, since a resign cannot be inferred from position + history alone. + + for position_w_context in replay_position(position): + print(position_w_context.position) + """ + assert position.n == len(position.recent), "Position history is incomplete" + pos = Position(komi=position.komi) + for player_move in position.recent: + color, next_move = player_move + yield PositionWithContext(pos, next_move, result) + pos = pos.play_move(next_move, color=color) + + +def find_reached(board, c): + color = board[c] + chain = {c} + reached = set() + frontier = [c] + while frontier: + current = frontier.pop() + chain.add(current) + for n in NEIGHBORS[current]: + if board[n] == color and n not in chain: + frontier.append(n) + elif board[n] != color: + reached.add(n) + return chain, reached + + +def is_koish(board, c): + 'Check if c is surrounded on all sides by 1 color, and return that color' + if board[c] != EMPTY: + return None + neighbors = {board[n] for n in NEIGHBORS[c]} + if len(neighbors) == 1 and EMPTY not in neighbors: + return list(neighbors)[0] + else: + return None + + +def is_eyeish(board, c): + 'Check if c is an eye, for the purpose of restricting MC rollouts.' + # pass is fine. + if c is None: + return + color = is_koish(board, c) + if color is None: + return None + diagonal_faults = 0 + diagonals = DIAGONALS[c] + if len(diagonals) < 4: + diagonal_faults += 1 + for d in diagonals: + if not board[d] in (color, EMPTY): + diagonal_faults += 1 + if diagonal_faults > 1: + return None + else: + return color + + +class Group(namedtuple('Group', ['id', 'stones', 'liberties', 'color'])): + """ + stones: a frozenset of Coordinates belonging to this group + liberties: a frozenset of Coordinates that are empty and adjacent to this group. 
+ color: color of this group + """ + + def __eq__(self, other): + return self.stones == other.stones and self.liberties == other.liberties and self.color == other.color + + +class LibertyTracker(): + @staticmethod + def from_board(board): + board = np.copy(board) + curr_group_id = 0 + lib_tracker = LibertyTracker() + for color in (WHITE, BLACK): + while color in board: + curr_group_id += 1 + found_color = np.where(board == color) + coord = found_color[0][0], found_color[1][0] + chain, reached = find_reached(board, coord) + liberties = frozenset(r for r in reached if board[r] == EMPTY) + new_group = Group(curr_group_id, frozenset( + chain), liberties, color) + lib_tracker.groups[curr_group_id] = new_group + for s in chain: + lib_tracker.group_index[s] = curr_group_id + place_stones(board, FILL, chain) + + lib_tracker.max_group_id = curr_group_id + + liberty_counts = np.zeros([N, N], dtype=np.uint8) + for group in lib_tracker.groups.values(): + num_libs = len(group.liberties) + for s in group.stones: + liberty_counts[s] = num_libs + lib_tracker.liberty_cache = liberty_counts + + return lib_tracker + + def __init__(self, group_index=None, groups=None, liberty_cache=None, max_group_id=1): + # group_index: a NxN numpy array of group_ids. -1 means no group + # groups: a dict of group_id to groups + # liberty_cache: a NxN numpy array of liberty counts + self.group_index = group_index if group_index is not None else - \ + np.ones([N, N], dtype=np.int32) + self.groups = groups or {} + self.liberty_cache = liberty_cache if liberty_cache is not None else np.zeros([ + N, N], dtype=np.uint8) + self.max_group_id = max_group_id + + def __deepcopy__(self, memodict={}): + new_group_index = np.copy(self.group_index) + new_lib_cache = np.copy(self.liberty_cache) + # shallow copy + new_groups = copy.copy(self.groups) + return LibertyTracker(new_group_index, new_groups, liberty_cache=new_lib_cache, max_group_id=self.max_group_id) + + def add_stone(self, color, c): + assert self.group_index[c] == MISSING_GROUP_ID + captured_stones = set() + opponent_neighboring_group_ids = set() + friendly_neighboring_group_ids = set() + empty_neighbors = set() + + for n in NEIGHBORS[c]: + neighbor_group_id = self.group_index[n] + if neighbor_group_id != MISSING_GROUP_ID: + neighbor_group = self.groups[neighbor_group_id] + if neighbor_group.color == color: + friendly_neighboring_group_ids.add(neighbor_group_id) + else: + opponent_neighboring_group_ids.add(neighbor_group_id) + else: + empty_neighbors.add(n) + + new_group = self._merge_from_played( + color, c, empty_neighbors, friendly_neighboring_group_ids) + + # new_group becomes stale as _update_liberties and + # _handle_captures are called; must refetch with self.groups[new_group.id] + for group_id in opponent_neighboring_group_ids: + neighbor_group = self.groups[group_id] + if len(neighbor_group.liberties) == 1: + captured = self._capture_group(group_id) + captured_stones.update(captured) + else: + self._update_liberties(group_id, remove={c}) + + self._handle_captures(captured_stones) + + # suicide is illegal + if len(self.groups[new_group.id].liberties) == 0: + raise IllegalMove(f"Move at {c} would commit suicide!\n") + + return captured_stones + + def _merge_from_played(self, color, played, libs, other_group_ids): + stones = {played} + liberties = set(libs) + for group_id in other_group_ids: + other = self.groups.pop(group_id) + stones.update(other.stones) + liberties.update(other.liberties) + + if other_group_ids: + liberties.remove(played) + assert 
stones.isdisjoint(liberties) + self.max_group_id += 1 + result = Group( + self.max_group_id, + frozenset(stones), + frozenset(liberties), + color) + self.groups[result.id] = result + + for s in result.stones: + self.group_index[s] = result.id + self.liberty_cache[s] = len(result.liberties) + + return result + + def _capture_group(self, group_id): + dead_group = self.groups.pop(group_id) + for s in dead_group.stones: + self.group_index[s] = MISSING_GROUP_ID + self.liberty_cache[s] = 0 + return dead_group.stones + + def _update_liberties(self, group_id, add=set(), remove=set()): + group = self.groups[group_id] + new_libs = (group.liberties | add) - remove + self.groups[group_id] = Group( + group_id, group.stones, new_libs, group.color) + + new_lib_count = len(new_libs) + for s in self.groups[group_id].stones: + self.liberty_cache[s] = new_lib_count + + def _handle_captures(self, captured_stones): + for s in captured_stones: + for n in NEIGHBORS[s]: + group_id = self.group_index[n] + if group_id != MISSING_GROUP_ID: + self._update_liberties(group_id, add={s}) + + +class Position(): + def __init__(self, board=None, n=0, komi=7.5, caps=(0, 0), + lib_tracker=None, ko=None, recent=tuple(), + board_deltas=None, to_play=BLACK): + """ + board: a numpy array + n: an int representing moves played so far + komi: a float, representing points given to the second player. + caps: a (int, int) tuple of captures for B, W. + lib_tracker: a LibertyTracker object + ko: a Move + recent: a tuple of PlayerMoves, such that recent[-1] is the last move. + board_deltas: a np.array of shape (n, go.N, go.N) representing changes + made to the board at each move (played move and captures). + Should satisfy next_pos.board - next_pos.board_deltas[0] == pos.board + to_play: BLACK or WHITE + """ + assert type(recent) is tuple + self.board = board if board is not None else np.copy(EMPTY_BOARD) + # With a full history, self.n == len(self.recent) == num moves played + self.n = n + self.komi = komi + self.caps = caps + self.lib_tracker = lib_tracker or LibertyTracker.from_board(self.board) + self.ko = ko + self.recent = recent + self.board_deltas = board_deltas if board_deltas is not None else np.zeros([ + 0, N, N], dtype=np.int8) + self.to_play = to_play + + def __deepcopy__(self, memodict={}): + new_board = np.copy(self.board) + new_lib_tracker = copy.deepcopy(self.lib_tracker) + return Position(new_board, self.n, self.komi, self.caps, new_lib_tracker, self.ko, self.recent, self.board_deltas, self.to_play) + + def __str__(self, colors=True): + if colors: + pretty_print_map = { + WHITE: '\x1b[0;31;47mO', + EMPTY: '\x1b[0;31;43m.', + BLACK: '\x1b[0;31;40mX', + FILL: '#', + KO: '*', + } + else: + pretty_print_map = { + WHITE: 'O', + EMPTY: '.', + BLACK: 'X', + FILL: '#', + KO: '*', + } + board = np.copy(self.board) + captures = self.caps + if self.ko is not None: + place_stones(board, KO, [self.ko]) + raw_board_contents = [] + for i in range(N): + row = [' '] + for j in range(N): + appended = '<' if (self.recent and (i, j) + == self.recent[-1].move) else ' ' + row.append(pretty_print_map[board[i, j]] + appended) + if colors: + row.append('\x1b[0m') + + raw_board_contents.append(''.join(row)) + + row_labels = ['%2d' % i for i in range(N, 0, -1)] + annotated_board_contents = [''.join(r) for r in zip( + row_labels, raw_board_contents, row_labels)] + header_footer_rows = [ + ' ' + ' '.join('ABCDEFGHJKLMNOPQRST'[:N]) + ' '] + annotated_board = '\n'.join(itertools.chain( + header_footer_rows, annotated_board_contents, 
header_footer_rows)) + details = "\nMove: {}. Captures X: {} O: {}\n".format( + self.n, *captures) + return annotated_board + details + + def is_move_suicidal(self, move): + potential_libs = set() + for n in NEIGHBORS[move]: + neighbor_group_id = self.lib_tracker.group_index[n] + if neighbor_group_id == MISSING_GROUP_ID: + # at least one liberty after playing here, so not a suicide + return False + neighbor_group = self.lib_tracker.groups[neighbor_group_id] + if neighbor_group.color == self.to_play: + potential_libs |= neighbor_group.liberties + elif len(neighbor_group.liberties) == 1: + # would capture an opponent group if they only had one lib. + return False + # it's possible to suicide by connecting several friendly groups + # each of which had one liberty. + potential_libs -= {move} + return not potential_libs + + def is_move_legal(self, move): + 'Checks that a move is on an empty space, not on ko, and not suicide' + if move is None: + return True + if self.board[move] != EMPTY: + return False + if move == self.ko: + return False + if self.is_move_suicidal(move): + return False + + return True + + def all_legal_moves(self): + 'Returns a np.array of size go.N**2 + 1, with 1 = legal, 0 = illegal' + # by default, every move is legal + legal_moves = np.ones([N, N], dtype=np.int8) + # ...unless there is already a stone there + legal_moves[self.board != EMPTY] = 0 + # calculate which spots have 4 stones next to them + # padding is because the edge always counts as a lost liberty. + adjacent = np.ones([N + 2, N + 2], dtype=np.int8) + adjacent[1:-1, 1:-1] = np.abs(self.board) + num_adjacent_stones = (adjacent[:-2, 1:-1] + adjacent[1:-1, :-2] + + adjacent[2:, 1:-1] + adjacent[1:-1, 2:]) + # Surrounded spots are those that are empty and have 4 adjacent stones. + surrounded_spots = np.multiply( + (self.board == EMPTY), + (num_adjacent_stones == 4)) + # Such spots are possibly illegal, unless they are capturing something. + # Iterate over and manually check each spot. + for coord in np.transpose(np.nonzero(surrounded_spots)): + if self.is_move_suicidal(tuple(coord)): + legal_moves[tuple(coord)] = 0 + + # ...and retaking ko is always illegal + if self.ko is not None: + legal_moves[self.ko] = 0 + + # and pass is always legal + return np.concatenate([legal_moves.ravel(), [1]]) + + def pass_move(self, mutate=False): + pos = self if mutate else copy.deepcopy(self) + pos.n += 1 + pos.recent += (PlayerMove(pos.to_play, None),) + pos.board_deltas = np.concatenate(( + np.zeros([1, N, N], dtype=np.int8), + pos.board_deltas[:6])) + pos.to_play *= -1 + pos.ko = None + return pos + + def flip_playerturn(self, mutate=False): + pos = self if mutate else copy.deepcopy(self) + pos.ko = None + pos.to_play *= -1 + return pos + + def get_liberties(self): + return self.lib_tracker.liberty_cache + + def play_move(self, c, color=None, mutate=False): + # Obeys CGOS Rules of Play. In short: + # No suicides + # Chinese/area scoring + # Positional superko (this is very crudely approximate at the moment.) 
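+        # (Note: ko handling here is simple ko only -- `pos.ko` records the
+        #  single forbidden point created when exactly one stone is captured
+        #  from a ko-shaped ("koish") point, and is_move_legal() rejects an
+        #  immediate recapture there. Full positional-superko detection over
+        #  the whole board history is not performed, hence "crudely
+        #  approximate" above.)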
+ if color is None: + color = self.to_play + + pos = self if mutate else copy.deepcopy(self) + + if c is None: + pos = pos.pass_move(mutate=mutate) + return pos + + if not self.is_move_legal(c): + raise IllegalMove("{} move at {} is illegal: \n{}".format( + "Black" if self.to_play == BLACK else "White", + coords.to_gtp(c), self)) + + potential_ko = is_koish(self.board, c) + + place_stones(pos.board, color, [c]) + captured_stones = pos.lib_tracker.add_stone(color, c) + place_stones(pos.board, EMPTY, captured_stones) + + opp_color = color * -1 + + new_board_delta = np.zeros([N, N], dtype=np.int8) + new_board_delta[c] = color + place_stones(new_board_delta, color, captured_stones) + + if len(captured_stones) == 1 and potential_ko == opp_color: + new_ko = list(captured_stones)[0] + else: + new_ko = None + + if pos.to_play == BLACK: + new_caps = (pos.caps[0] + len(captured_stones), pos.caps[1]) + else: + new_caps = (pos.caps[0], pos.caps[1] + len(captured_stones)) + + pos.n += 1 + pos.caps = new_caps + pos.ko = new_ko + pos.recent += (PlayerMove(color, c),) + + # keep a rolling history of last 7 deltas - that's all we'll need to + # extract the last 8 board states. + pos.board_deltas = np.concatenate(( + new_board_delta.reshape(1, N, N), + pos.board_deltas[:6])) + pos.to_play *= -1 + return pos + + def is_game_over(self): + return (len(self.recent) >= 2 + and self.recent[-1].move is None + and self.recent[-2].move is None) + + def score(self): + 'Return score from B perspective. If W is winning, score is negative.' + working_board = np.copy(self.board) + while EMPTY in working_board: + unassigned_spaces = np.where(working_board == EMPTY) + c = unassigned_spaces[0][0], unassigned_spaces[1][0] + territory, borders = find_reached(working_board, c) + border_colors = {working_board[b] for b in borders} + X_border = BLACK in border_colors + O_border = WHITE in border_colors + if X_border and not O_border: + territory_color = BLACK + elif O_border and not X_border: + territory_color = WHITE + else: + territory_color = UNKNOWN # dame, or seki + place_stones(working_board, territory_color, territory) + + return np.count_nonzero(working_board == BLACK) - np.count_nonzero(working_board == WHITE) - self.komi + + def result(self): + score = self.score() + if score > 0: + return 1 + elif score < 0: + return -1 + else: + return 0 + + def result_string(self): + score = self.score() + if score > 0: + return 'B+' + '%.1f' % score + elif score < 0: + return 'W+' + '%.1f' % abs(score) + else: + return 'DRAW' diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/go_env.py b/MLPY/Lib/site-packages/pettingzoo/classic/go/go_env.py new file mode 100644 index 0000000000000000000000000000000000000000..c45130dd04995c53d283a227e891545cb0084993 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/go/go_env.py @@ -0,0 +1,231 @@ +import os + +import numpy as np +import pygame +from gym import spaces + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + +from . 
import coords, go + + +def get_image(path): + from os import path as os_path + + import pygame + cwd = os_path.dirname(__file__) + image = pygame.image.load(cwd + '/' + path) + sfc = pygame.Surface(image.get_size(), flags=pygame.SRCALPHA) + sfc.blit(image, (0, 0)) + return sfc + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(AECEnv): + + metadata = { + "render.modes": ["human", "rgb_array"], + "name": "go_v5", + "is_parallelizable": False, + "video.frames_per_second": 2, + } + + def __init__(self, board_size: int = 19, komi: float = 7.5): + # board_size: a int, representing the board size (board has a board_size x board_size shape) + # komi: a float, representing points given to the second player. + super().__init__() + + self._overwrite_go_global_variables(board_size=board_size) + self._komi = komi + + self.agents = ['black_0', 'white_0'] + self.possible_agents = self.agents[:] + self.has_reset = False + + self.screen = None + + self.observation_spaces = self._convert_to_dict( + [spaces.Dict({'observation': spaces.Box(low=0, high=1, shape=(self._N, self._N, 17), dtype=bool), + 'action_mask': spaces.Box(low=0, high=1, shape=((self._N * self._N) + 1,), dtype=np.int8)}) + for _ in range(self.num_agents)]) + + self.action_spaces = self._convert_to_dict([spaces.Discrete(self._N * self._N + 1) for _ in range(self.num_agents)]) + + self._agent_selector = agent_selector(self.agents) + + self.board_history = np.zeros((self._N, self._N, 16), dtype=bool) + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def _overwrite_go_global_variables(self, board_size: int): + self._N = board_size + go.N = self._N + go.ALL_COORDS = [(i, j) for i in range(self._N) for j in range(self._N)] + go.EMPTY_BOARD = np.zeros([self._N, self._N], dtype=np.int8) + go.NEIGHBORS = {(x, y): list(filter(self._check_bounds, [(x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)])) for x, y in go.ALL_COORDS} + go.DIAGONALS = {(x, y): list(filter(self._check_bounds, [(x + 1, y + 1), (x + 1, y - 1), (x - 1, y + 1), (x - 1, y - 1)])) for x, y in go.ALL_COORDS} + return + + def _check_bounds(self, c): + return 0 <= c[0] < self._N and 0 <= c[1] < self._N + + def _encode_player_plane(self, agent): + if agent == self.possible_agents[0]: + return np.zeros([self._N, self._N], dtype=bool) + else: + return np.ones([self._N, self._N], dtype=bool) + + def _encode_board_planes(self, agent): + agent_factor = go.BLACK if agent == self.possible_agents[0] else go.WHITE + current_agent_plane_idx = np.where(self._go.board == agent_factor) + opponent_agent_plane_idx = np.where(self._go.board == -agent_factor) + current_agent_plane = np.zeros([self._N, self._N], dtype=bool) + opponent_agent_plane = np.zeros([self._N, self._N], dtype=bool) + current_agent_plane[current_agent_plane_idx] = 1 + opponent_agent_plane[opponent_agent_plane_idx] = 1 + return current_agent_plane, opponent_agent_plane + + def _int_to_name(self, ind): + return self.possible_agents[ind] + + def _name_to_int(self, name): + return self.possible_agents.index(name) + + def _convert_to_dict(self, list_of_list): + return dict(zip(self.possible_agents, list_of_list)) + + def _encode_legal_actions(self, actions): + return np.where(actions == 1)[0] + + def _encode_rewards(self, result): + return [1, -1] if 
result == 1 else [-1, 1] + + def observe(self, agent): + current_agent_plane, opponent_agent_plane = self._encode_board_planes(agent) + player_plane = self._encode_player_plane(agent) + + observation = np.dstack((self.board_history, player_plane)) + + legal_moves = self.next_legal_moves if agent == self.agent_selection else [] + action_mask = np.zeros((self._N * self._N) + 1, 'int8') + for i in legal_moves: + action_mask[i] = 1 + + return {'observation': observation, 'action_mask': action_mask} + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + self._go = self._go.play_move(coords.from_flat(action)) + self._last_obs = self.observe(self.agent_selection) + current_agent_plane, opponent_agent_plane = self._encode_board_planes(self.agent_selection) + self.board_history = np.dstack((current_agent_plane, opponent_agent_plane, self.board_history[:, :, :-2])) + next_player = self._agent_selector.next() + if self._go.is_game_over(): + self.dones = self._convert_to_dict([True for _ in range(self.num_agents)]) + self.rewards = self._convert_to_dict(self._encode_rewards(self._go.result())) + self.next_legal_moves = [self._N * self._N] + else: + self.next_legal_moves = self._encode_legal_actions(self._go.all_legal_moves()) + self.agent_selection = next_player if next_player else self._agent_selector.next() + self._accumulate_rewards() + + def reset(self): + self.has_reset = True + self._go = go.Position(board=None, komi=self._komi) + + self.agents = self.possible_agents[:] + self._agent_selector.reinit(self.agents) + self.agent_selection = self._agent_selector.reset() + self._cumulative_rewards = self._convert_to_dict(np.array([0.0, 0.0])) + self.rewards = self._convert_to_dict(np.array([0.0, 0.0])) + self.dones = self._convert_to_dict([False for _ in range(self.num_agents)]) + self.infos = self._convert_to_dict([{} for _ in range(self.num_agents)]) + self.next_legal_moves = self._encode_legal_actions(self._go.all_legal_moves()) + self._last_obs = self.observe(self.agents[0]) + self.board_history = np.zeros((self._N, self._N, 16), dtype=bool) + + def render(self, mode='human'): + screen_width = 1026 + screen_height = 1026 + + if self.screen is None: + if mode == "human": + pygame.init() + self.screen = pygame.display.set_mode((screen_width, screen_height)) + else: + self.screen = pygame.Surface((screen_width, screen_height)) + if mode == "human": + pygame.event.get() + + size = go.N + + # Load and scale all of the necessary images + tile_size = (screen_width) / size + + black_stone = get_image(os.path.join('img', 'GoBlackPiece.png')) + black_stone = pygame.transform.scale(black_stone, (int(tile_size * (5 / 6)), int(tile_size * (5 / 6)))) + + white_stone = get_image(os.path.join('img', 'GoWhitePiece.png')) + white_stone = pygame.transform.scale(white_stone, (int(tile_size * (5 / 6)), int(tile_size * (5 / 6)))) + + tile_img = get_image(os.path.join('img', 'GO_Tile0.png')) + tile_img = pygame.transform.scale(tile_img, ((int(tile_size * (7 / 6))), int(tile_size * (7 / 6)))) + + # blit board tiles + for i in range(1, size - 1): + for j in range(1, size - 1): + self.screen.blit(tile_img, ((i * (tile_size)), int(j) * (tile_size))) + + for i in range(1, 9): + tile_img = get_image(os.path.join('img', 'GO_Tile' + str(i) + '.png')) + tile_img = pygame.transform.scale(tile_img, ((int(tile_size * (7 / 6))), int(tile_size * (7 / 6)))) + for j in range(1, size - 1): + if i == 1: + self.screen.blit(tile_img, (0, int(j) * (tile_size))) + elif i == 2: + 
self.screen.blit(tile_img, ((int(j) * (tile_size)), 0)) + elif i == 3: + self.screen.blit(tile_img, ((size - 1) * (tile_size), int(j) * (tile_size))) + elif i == 4: + self.screen.blit(tile_img, ((int(j) * (tile_size)), (size - 1) * (tile_size))) + if i == 5: + self.screen.blit(tile_img, (0, 0)) + elif i == 6: + self.screen.blit(tile_img, ((size - 1) * (tile_size), 0)) + elif i == 7: + self.screen.blit(tile_img, ((size - 1) * (tile_size), (size - 1) * (tile_size))) + elif i == 8: + self.screen.blit(tile_img, (0, (size - 1) * (tile_size))) + + offset = tile_size * (1 / 6) + # Blit the necessary chips and their positions + for i in range(0, size): + for j in range(0, size): + if self._go.board[i][j] == go.BLACK: + self.screen.blit(black_stone, ((i * (tile_size) + offset), int(j) * (tile_size) + offset)) + elif self._go.board[i][j] == go.WHITE: + self.screen.blit(white_stone, ((i * (tile_size) + offset), int(j) * (tile_size) + offset)) + + if mode == "human": + pygame.display.update() + + observation = np.array(pygame.surfarray.pixels3d(self.screen)) + + return np.transpose(observation, axes=(1, 0, 2)) if mode == "rgb_array" else None + + def close(self): + pass diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile0.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile0.png new file mode 100644 index 0000000000000000000000000000000000000000..c7ebeeb49e43c3228e4fddd8a73720beabb85443 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile0.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile1.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile1.png new file mode 100644 index 0000000000000000000000000000000000000000..4b0e3e54e0a1df66c5498967ee48e982667591e5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile1.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile2.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile2.png new file mode 100644 index 0000000000000000000000000000000000000000..921f890937bab091b28f02337bd9a9af326f7f1b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile2.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile3.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile3.png new file mode 100644 index 0000000000000000000000000000000000000000..35001fcb368cee821d9f284e1cb3498756caa4cd Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile3.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile4.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile4.png new file mode 100644 index 0000000000000000000000000000000000000000..e6bab729926b738de41e791e181ae8e5f0c3d738 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile4.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile5.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile5.png new file mode 100644 index 0000000000000000000000000000000000000000..a2603c6cc6387c38047dab3fb4995e38f5d77182 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile5.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile6.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile6.png new file mode 100644 index 0000000000000000000000000000000000000000..176dd3067ceb5d463e07e867a43557d44b63178a Binary files /dev/null and 
b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile6.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile7.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile7.png new file mode 100644 index 0000000000000000000000000000000000000000..d09fa7055c416c3ad95cb1c0092004a5d8621b24 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile7.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile8.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile8.png new file mode 100644 index 0000000000000000000000000000000000000000..b3ccab535411ddca73bf75822b42df634012b84a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GO_Tile8.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GoBlackPiece.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GoBlackPiece.png new file mode 100644 index 0000000000000000000000000000000000000000..7720a0376640ff6326e332d14d3d6337f3e29606 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GoBlackPiece.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GoBoard.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GoBoard.png new file mode 100644 index 0000000000000000000000000000000000000000..5d8605075a15eaab67a3ee612db6c37d5308cb2e Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GoBoard.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GoWhitePiece.png b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GoWhitePiece.png new file mode 100644 index 0000000000000000000000000000000000000000..d440f0a6e1266bdbd480c64d0ee2921f35bcdb3d Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/go/img/GoWhitePiece.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/go_v5.py b/MLPY/Lib/site-packages/pettingzoo/classic/go_v5.py new file mode 100644 index 0000000000000000000000000000000000000000..5808abd909a88f8cc88589110e740ca53a629173 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/go_v5.py @@ -0,0 +1 @@ +from .go.go_env import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/hanabi/__init__.py b/MLPY/Lib/site-packages/pettingzoo/classic/hanabi/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/hanabi/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/hanabi/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..73828a97f42303d3bf70d8edcdf24ba28c76bed7 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/hanabi/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/hanabi/__pycache__/hanabi.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/hanabi/__pycache__/hanabi.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c3de91d61622fd69f17a82e19a57c43fc88ee258 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/hanabi/__pycache__/hanabi.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/hanabi/hanabi.py b/MLPY/Lib/site-packages/pettingzoo/classic/hanabi/hanabi.py new file mode 100644 index 0000000000000000000000000000000000000000..55f3bb266fa1635601f7ea3c1000b192cc70eb37 --- /dev/null +++ 
b/MLPY/Lib/site-packages/pettingzoo/classic/hanabi/hanabi.py @@ -0,0 +1,339 @@ +from typing import Dict, List, Optional, Union + +import numpy as np +from gym import spaces +from gym.utils import EzPickle, seeding + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers + +# importing Hanabi and throw error message if pypi package is not installed correctly. +try: + from hanabi_learning_environment.rl_env import HanabiEnv, make + +except ModuleNotFoundError: + raise ImportError( + ( + "Hanabi is not installed.\n", + "Run ´pip3 install hanabi_learning_environment´ from within your project environment.\n", + "Consult hanabi/README.md for detailed information." + ) + ) +""" +Wrapper class around Deepmind's Hanabi Learning Environment. +""" + + +class HanabiScorePenalty: + def __init__(self, env): + self.env = env + + def __float__(self): + return -float(self.env.hanabi_env.state.score()) + + +def env(**kwargs): + env = r_env = raw_env(**kwargs) + env = wrappers.CaptureStdoutWrapper(env) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=HanabiScorePenalty(r_env)) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(AECEnv, EzPickle): + """This class capsules endpoints provided within deepmind/hanabi-learning-environment/rl_env.py.""" + + metadata = { + "render.modes": ["human"], + "name": "hanabi_v4", + "is_parallelizable": False, + "video.frames_per_second": 2, + } + + # set of all required params + required_keys: set = { + 'colors', + 'ranks', + 'players', + 'hand_size', + 'max_information_tokens', + 'max_life_tokens', + 'observation_type', + 'random_start_player', + } + + def __init__(self, + colors: int = 5, + ranks: int = 5, + players: int = 2, + hand_size: int = 5, + max_information_tokens: int = 8, + max_life_tokens: int = 3, + observation_type: int = 1, + random_start_player: bool = False, + ): + + """ + Parameter descriptions : + - colors: int, Number of colors in [2,5]. + - ranks: int, Number of ranks in [2,5]. + - players: int, Number of players in [2,5]. + - hand_size: int, Hand size in [2,5]. + - max_information_tokens: int, Number of information tokens (>=0). + - max_life_tokens: int, Number of life tokens (>=1). + - observation_type: int. + 0: Minimal observation. + 1: First-order common knowledge observation. + - random_start_player: bool, Random start player. + + Common game configurations: + Hanabi-Full (default) : { + "colors": 5, + "ranks": 5, + "players": 2, + "max_information_tokens": 8, + "max_life_tokens": 3, + "hand_size": (4 if players >= 4 else 5) + "observation_type": 1, + "hand_size": 2 + } + + Hanabi-Small : { + "colors": 2, + "ranks": 5, + "players": 2, + "max_information_tokens": 3 + "hand_size": 2, + "max_life_tokens": 1 + "observation_type": 1} + + Hanabi-Very-Small : { + "colors": 1, + "ranks": 5, + "players": 2, + "max_information_tokens": 3 + "hand_size": 2, + "max_life_tokens": 1 + "observation_type": 1} + + """ + EzPickle.__init__( + self, + colors, + ranks, + players, + hand_size, + max_information_tokens, + max_life_tokens, + observation_type, + random_start_player, + ) + + # ToDo: Starts + # Check if all possible dictionary values are within a certain ranges. 
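+        # (Bounds enforced by the validator below: colors, ranks, players and
+        #  hand_size must each lie in [2, 5], players <= colors,
+        #  max_information_tokens >= 0, max_life_tokens >= 1, and
+        #  observation_type in {0, 1}.)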
+ self._raise_error_if_config_values_out_of_range(colors, + ranks, + players, + hand_size, + max_information_tokens, + max_life_tokens, + observation_type, + random_start_player) + + self._config = { + 'colors': colors, + 'ranks': ranks, + 'players': players, + 'hand_size': hand_size, + 'max_information_tokens': max_information_tokens, + 'max_life_tokens': max_life_tokens, + 'observation_type': observation_type, + 'random_start_player': random_start_player, + } + self.hanabi_env: HanabiEnv = HanabiEnv(config=self._config) + + # List of agent names + self.agents = [f"player_{i}" for i in range(self.hanabi_env.players)] + self.possible_agents = self.agents[:] + + self.agent_selection: str + + # Sets hanabi game to clean state and updates all internal dictionaries + self.reset() + + # Set action_spaces and observation_spaces based on params in hanabi_env + self.action_spaces = {name: spaces.Discrete(self.hanabi_env.num_moves()) for name in self.agents} + self.observation_spaces = {player_name: spaces.Dict({'observation': spaces.Box(low=0, + high=1, + shape=( + self.hanabi_env.vectorized_observation_shape()[ + 0],), + dtype=np.float32), + 'action_mask': spaces.Box(low=0, high=1, shape=( + self.hanabi_env.num_moves(),), dtype=np.int8)}) + for player_name in self.agents} + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def seed(self, seed=None): + config = dict(seed=seed, **self._config) + self.hanabi_env = HanabiEnv(config=config) + + @staticmethod + def _raise_error_if_config_values_out_of_range(colors, ranks, players, hand_size, max_information_tokens, + max_life_tokens, observation_type, random_start_player): + + if not (2 <= colors <= 5): + raise ValueError(f'Config parameter {colors} is out of bounds. See description in hanabi.py.') + + elif not (2 <= ranks <= 5): + raise ValueError(f'Config parameter {ranks} is out of bounds. See description in hanabi.py.') + + elif not (2 <= players <= 5): + raise ValueError(f'Config parameter {players} is out of bounds. See description in hanabi.py.') + + elif not (players <= colors): + raise ValueError(f'Config parameter colors: {colors} is smaller than players: {players}, which is not allowed. See description in hanabi.py.') + + elif not (2 <= hand_size <= 5): + raise ValueError(f'Config parameter {hand_size} is out of bounds. See description in hanabi.py.') + + elif not (0 <= max_information_tokens): + raise ValueError( + f'Config parameter {max_information_tokens} is out of bounds. See description in hanabi.py.') + + elif not (1 <= max_life_tokens): + raise ValueError(f'Config parameter {max_life_tokens} is out of bounds. See description in hanabi.py.') + + elif not (0 <= observation_type <= 1): + raise ValueError(f'Config parameter {observation_type} is out of bounds. See description in hanabi.py.') + + @property + def observation_vector_dim(self): + return self.hanabi_env.vectorized_observation_shape() + + @property + def legal_moves(self) -> List[int]: + return self.infos[self.agent_selection]['legal_moves'] + + @property + def all_moves(self) -> List[int]: + return list(range(0, self.hanabi_env.num_moves())) + + # ToDo: Fix Return value + def reset(self): + """ Resets the environment for a new game and returns observations of current player as List of ints + + Returns: + observation: Optional list of integers of length self.observation_vector_dim, describing observations of + current agent (agent_selection). 
+ """ + + self.agents = self.possible_agents[:] + # Reset underlying hanabi reinforcement learning environment + obs = self.hanabi_env.reset() + + # Reset agent and agent_selection + self._reset_agents(player_number=obs['current_player']) + + self.rewards = {agent: 0 for agent in self.agents} + self._cumulative_rewards = {name: 0 for name in self.agents} + # Reset internal state + self._process_latest_observations(obs=obs) + + def _reset_agents(self, player_number: int): + """ Rearrange self.agents as pyhanabi starts a different player after each reset(). """ + + # Shifts self.agents list as long order starting player is not according to player_number + while not self.agents[0] == 'player_' + str(player_number): + self.agents = self.agents[1:] + [self.agents[0]] + + # Agent order list, on which the agent selector operates on. + self._agent_selector = agent_selector(self.agents) + + # Reset agent_selection + self.agent_selection = self._agent_selector.reset() + + def _step_agents(self): + self.agent_selection = self._agent_selector.next() + + def step(self, action: int, observe: bool = True, as_vector: bool = True) -> Optional[Union[np.ndarray, + List[List[dict]]]]: + """ Advances the environment by one step. Action must be within self.legal_moves, otherwise throws error. + + Returns: + observation: Optional List of new observations of agent at turn after the action step is performed. + By default a list of integers, describing the logic state of the game from the view of the agent. + Can be a returned as a descriptive dictionary, if as_vector=False. + """ + if self.dones[self.agent_selection]: + return self._was_done_step(action) + action = int(action) + + agent_on_turn = self.agent_selection + + if action not in self.legal_moves: + raise ValueError('Illegal action. 
Please choose between legal actions, as documented in dict self.infos') + + else: + # Iterate agent_selection + self._step_agents() + + # Apply action + all_observations, reward, done, _ = self.hanabi_env.step(action=action) + + # Update internal state + self._process_latest_observations(obs=all_observations, reward=reward, done=done) + + # sets current reward for 0 to initialize reward accumulation + self._cumulative_rewards[agent_on_turn] = 0 + self._accumulate_rewards() + + def observe(self, agent_name: str): + observation = np.array(self.infos[agent_name]['observations_vectorized'], + np.float32) if agent_name in self.infos else np.zeros_like( + self.observation_spaces[agent_name].low) + + legal_moves = self.infos[agent_name]['legal_moves'] + action_mask = np.zeros(self.hanabi_env.num_moves(), 'int8') + for i in legal_moves: + action_mask[i] = 1 + + return {'observation': observation, 'action_mask': action_mask} + + def _process_latest_observations(self, obs: Dict, reward: Optional[float] = 0, done: Optional[bool] = False): + """Updates internal state""" + + self.latest_observations = obs + self.rewards = {a: reward for a in self.agents} + self.dones = {player_name: done for player_name in self.agents} + + # Here we have to deal with the player index with offset = 1 + self.infos = {player_name: dict( + legal_moves=self.latest_observations['player_observations'][int(player_name[-1])]['legal_moves_as_int'], + # legal_moves_as_dict=self.latest_observations['player_observations'][int(player_name[-1])]['legal_moves'], + observations_vectorized=self.latest_observations['player_observations'][int(player_name[-1])]['vectorized'], + # observations=self.latest_observations['player_observations'][int(player_name[-1]) + ) + for player_name in self.agents} + + def render(self, mode='human'): + """ Supports console print only. Prints player's data. 
+ + Example: + """ + player_data = self.latest_observations['player_observations'] + print("Active player:", self.possible_agents[player_data[0]['current_player_offset']]) + for i, d in enumerate(player_data): + print(self.possible_agents[i]) + print("========") + print(d['pyhanabi']) + print() + + def close(self): + pass diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/hanabi_v4.py b/MLPY/Lib/site-packages/pettingzoo/classic/hanabi_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..3b1b0a1ab6681e48924d6a43047dee98ecc62e89 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/hanabi_v4.py @@ -0,0 +1 @@ +from .hanabi.hanabi import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/leduc_holdem_v4.py b/MLPY/Lib/site-packages/pettingzoo/classic/leduc_holdem_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..f925117945a776c3f4db573e7884c24d4ca8251e --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/leduc_holdem_v4.py @@ -0,0 +1 @@ +from .rlcard_envs.leduc_holdem import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/mahjong_v4.py b/MLPY/Lib/site-packages/pettingzoo/classic/mahjong_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..72ef376becb502d6c44fa1c765542dbb8a4bc052 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/mahjong_v4.py @@ -0,0 +1 @@ +from .rlcard_envs.mahjong import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__init__.py b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bebc10845cf3dede6d8510ba40447540958f599a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/dou_dizhu.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/dou_dizhu.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7466ebda43b35c72a9a2205e8ea917454687f1ff Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/dou_dizhu.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/gin_rummy.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/gin_rummy.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e1f864179efd0c3dc79b67143fe2b4f7dd1ddabc Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/gin_rummy.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/leduc_holdem.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/leduc_holdem.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..becbaffa232b4fa23460e654fc75247f5762c41d Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/leduc_holdem.cpython-39.pyc differ diff --git 
a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/mahjong.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/mahjong.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7debab8ff823e9bfdbf2b5be6446538abc07f90d Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/mahjong.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/rlcard_base.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/rlcard_base.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a3488dc01867dfe36691ed850d51250d344bc5ae Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/rlcard_base.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/texas_holdem.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/texas_holdem.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b22485d65be54214ce7a2ebd61836cc6d597966 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/texas_holdem.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/texas_holdem_no_limit.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/texas_holdem_no_limit.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c4c160e0a45145517e78708dd311694c19e3601f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/texas_holdem_no_limit.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/uno.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/uno.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..017e6d288079779d091c0cf720c3dbf4a7ce5d29 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/__pycache__/uno.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/dou_dizhu.py b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/dou_dizhu.py new file mode 100644 index 0000000000000000000000000000000000000000..bd4a13fa3e9e59262fd965f6c52ec35ac9401f18 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/dou_dizhu.py @@ -0,0 +1,69 @@ +import random + +import numpy as np +import rlcard +from gym import spaces + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + +from .rlcard_base import RLCardBase + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.CaptureStdoutWrapper(env) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(RLCardBase): + + metadata = { + "render.modes": ["human"], + "name": "dou_dizhu_v4", + "is_parallelizable": False, + "video.frames_per_second": 1, + } + + def __init__(self, opponents_hand_visible=False): + self._opponents_hand_visible = opponents_hand_visible + self.agents = ['landlord_0', 'peasant_0', 'peasant_1'] + obs_dimension = 901 if self._opponents_hand_visible else 847 + super().__init__("doudizhu", 3, (obs_dimension, )) + self.observation_spaces = 
self._convert_to_dict([spaces.Dict( + {'observation': spaces.Box(low=0.0, high=1.0, shape=(obs_dimension - 111, ) + if agent == 'landlord_0' else (obs_dimension, ), dtype=self._dtype), + 'action_mask': spaces.Box(low=0, high=1, shape=(self.env.num_actions,), dtype=np.int8)}) + for agent in self.agents]) + + def _scale_rewards(self, reward): + # Maps 1 to 1 and 0 to -1 + return 2 * reward - 1 + + def observe(self, agent): + obs = self.env.get_state(self._name_to_int(agent)) + if self._opponents_hand_visible: + observation = obs['obs'].astype(self._dtype) + else: + observation = np.delete(obs['obs'], range(54, 108)).astype(self._dtype) + + legal_moves = self.next_legal_moves + action_mask = np.zeros(27472, 'int8') + for i in legal_moves: + action_mask[i] = 1 + + return {'observation': observation, 'action_mask': action_mask} + + def render(self, mode='human'): + for player in self.possible_agents: + state = self.env.game.get_state(self._name_to_int(player)) + print(f"\n===== {player}'s Hand =====") + print(state['current_hand']) + print('\n=========== Last 3 Actions ===========') + for action in state['trace'][:-4:-1]: + print(f'{self._int_to_name(action[0])}: {action[1]}') + print('\n') diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/font/Minecraft.ttf b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/font/Minecraft.ttf new file mode 100644 index 0000000000000000000000000000000000000000..85c14725a3bf6d67aaf0f03292f9b763c1654f07 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/font/Minecraft.ttf differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/gin_rummy.py b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/gin_rummy.py new file mode 100644 index 0000000000000000000000000000000000000000..c2e8cd09a789057ee45b8f93f887d0fb4228051c --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/gin_rummy.py @@ -0,0 +1,85 @@ +import random + +import numpy as np +import rlcard +from gym import spaces +from gym.utils import EzPickle +from rlcard.games.gin_rummy.player import GinRummyPlayer +from rlcard.games.gin_rummy.utils import melding as melding +from rlcard.games.gin_rummy.utils import utils +from rlcard.games.gin_rummy.utils.action_event import GinAction, KnockAction +from rlcard.utils.utils import print_card + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + +from .rlcard_base import RLCardBase + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.CaptureStdoutWrapper(env) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(RLCardBase, EzPickle): + + metadata = { + "render.modes": ["human"], + "name": "gin_rummy_v4", + "is_parallelizable": False, + "video.frames_per_second": 1, + } + + def __init__(self, knock_reward: float = 0.5, gin_reward: float = 1.0, opponents_hand_visible=False): + EzPickle.__init__(self, knock_reward, gin_reward) + self._opponents_hand_visible = opponents_hand_visible + num_planes = 5 if self._opponents_hand_visible else 4 + RLCardBase.__init__(self, "gin-rummy", 2, (num_planes, 52)) + self._knock_reward = knock_reward + self._gin_reward = gin_reward + + self.env.game.judge.scorer.get_payoff = self._get_payoff + + def _get_payoff(self, player: GinRummyPlayer, game) -> float: + going_out_action = game.round.going_out_action + going_out_player_id 
= game.round.going_out_player_id + if going_out_player_id == player.player_id and type(going_out_action) is KnockAction: + payoff = self._knock_reward + elif going_out_player_id == player.player_id and type(going_out_action) is GinAction: + payoff = self._gin_reward + else: + hand = player.hand + best_meld_clusters = melding.get_best_meld_clusters(hand=hand) + best_meld_cluster = [] if not best_meld_clusters else best_meld_clusters[0] + deadwood_count = utils.get_deadwood_count(hand, best_meld_cluster) + payoff = -deadwood_count / 100 + return payoff + + def observe(self, agent): + obs = self.env.get_state(self._name_to_int(agent)) + if self._opponents_hand_visible: + observation = obs['obs'].astype(self._dtype) + else: + observation = obs['obs'][0:4, :].astype(self._dtype) + + legal_moves = self.next_legal_moves + action_mask = np.zeros(110, 'int8') + for i in legal_moves: + action_mask[i] = 1 + + return {'observation': observation, 'action_mask': action_mask} + + def render(self, mode='human'): + for player in self.possible_agents: + state = self.env.game.round.players[self._name_to_int(player)].hand + print(f"\n===== {player}'s Hand =====") + print_card([c.__str__()[::-1] for c in state]) + state = self.env.game.get_state(0) + print("\n==== Top Discarded Card ====") + print_card([c.__str__() for c in state['top_discard']] if state else None) + print('\n') diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C2.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C2.png new file mode 100644 index 0000000000000000000000000000000000000000..56c446d4a4b812a919468a3f68c0cbc5ab3c0285 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C2.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C3.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C3.png new file mode 100644 index 0000000000000000000000000000000000000000..f496d19c20d15ae9dc4acafd8febcb7b74a904c1 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C3.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C4.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C4.png new file mode 100644 index 0000000000000000000000000000000000000000..d8f10f216f27b2f7fc597b05d0c6a540e7b7227a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C4.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C5.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C5.png new file mode 100644 index 0000000000000000000000000000000000000000..3382e23027b456f5fdc12605399f89f58eea7565 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C5.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C6.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C6.png new file mode 100644 index 0000000000000000000000000000000000000000..ab19dc3f3c75bbb6fbf0b5af41e2f9240772fd01 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C6.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C7.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C7.png new file mode 100644 index 0000000000000000000000000000000000000000..19bf11c591f3f9cf54b08bf8d867818924068e80 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C7.png differ diff --git 
a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C8.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C8.png new file mode 100644 index 0000000000000000000000000000000000000000..1fd6f80dbfbd76294c12c3a0ac61904852f50b4f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C8.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C9.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C9.png new file mode 100644 index 0000000000000000000000000000000000000000..41a6431f96aaba6e390d07dce2012a373d3b081d Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/C9.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CA.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CA.png new file mode 100644 index 0000000000000000000000000000000000000000..006f9841e142156db8f5ccf7f71b91a984ad5708 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CA.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CJ.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CJ.png new file mode 100644 index 0000000000000000000000000000000000000000..b948a0f07125ed2885c88d9c2e5943e723060eb2 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CJ.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CK.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CK.png new file mode 100644 index 0000000000000000000000000000000000000000..9db8c772efed93bba85a267e43886c578470cecb Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CK.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CQ.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CQ.png new file mode 100644 index 0000000000000000000000000000000000000000..fc23d8538499c950d70644682e29a50bb119a2fd Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CQ.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CT.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CT.png new file mode 100644 index 0000000000000000000000000000000000000000..bde8478dc498e9f5d22fcdad3cdb227ddf08218f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/CT.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/Card.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/Card.png new file mode 100644 index 0000000000000000000000000000000000000000..04ec431da076dd52267d801ec8e471f0e02c4463 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/Card.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipBlack.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipBlack.png new file mode 100644 index 0000000000000000000000000000000000000000..338e093f614db06e106961696921d07a9346cdcd Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipBlack.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipBlue.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipBlue.png new file mode 100644 index 0000000000000000000000000000000000000000..53e99a2ae9c7708b887d3184ae9a81cb9e52a0be Binary files /dev/null and 
b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipBlue.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipGreen.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipGreen.png new file mode 100644 index 0000000000000000000000000000000000000000..cf0c1fa210ead2ff1719cc2528eeed9f62575d46 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipGreen.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipLightBlue.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipLightBlue.png new file mode 100644 index 0000000000000000000000000000000000000000..c1bb461ba0eb849d2a2313a0ac00b1b3e3e92134 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipLightBlue.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipOrange.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipOrange.png new file mode 100644 index 0000000000000000000000000000000000000000..b3b0c2474fca4a754995ea11da54ec74dd52c480 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipOrange.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipPink.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipPink.png new file mode 100644 index 0000000000000000000000000000000000000000..04d963934e0e257db2a216bb7bc9287928fba2f5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipPink.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipRed.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipRed.png new file mode 100644 index 0000000000000000000000000000000000000000..d8f0428195e85e248bd486c5f2aca5446c250b23 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipRed.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipWhite.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipWhite.png new file mode 100644 index 0000000000000000000000000000000000000000..da6a0dfd80132cc757c2c7105951d0d42b116e85 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipWhite.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipYellow.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipYellow.png new file mode 100644 index 0000000000000000000000000000000000000000..79d490743548c34f3700dd33afddcae58acecc95 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ChipYellow.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D2.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D2.png new file mode 100644 index 0000000000000000000000000000000000000000..2b4babadfbd0c7b7c58442093d1b025c38a2c359 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D2.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D3.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D3.png new file mode 100644 index 0000000000000000000000000000000000000000..25e2e2e03ea493f6b29880a65b4b6bfbac6f0df2 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D3.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D4.png 
b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D4.png new file mode 100644 index 0000000000000000000000000000000000000000..f33e33dd6f67dc44cf030effa910066f22867ae4 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D4.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D5.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D5.png new file mode 100644 index 0000000000000000000000000000000000000000..dce581381a4f917521c28e87883398fae43493f7 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D5.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D6.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D6.png new file mode 100644 index 0000000000000000000000000000000000000000..59fa18072e651b17594df667597fd6b2817d80c6 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D6.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D7.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D7.png new file mode 100644 index 0000000000000000000000000000000000000000..35b733bce1e20f3f0f6b02d1c98c9769e2263710 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D7.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D8.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D8.png new file mode 100644 index 0000000000000000000000000000000000000000..ee777f97a564462a50bf71af030d3b09073ffc1f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D8.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D9.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D9.png new file mode 100644 index 0000000000000000000000000000000000000000..5b3936889b3075a827e8ef09af3a427a668e867d Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/D9.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DA.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DA.png new file mode 100644 index 0000000000000000000000000000000000000000..23b58115875883a8f1a234cee344ac1a4f9f8447 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DA.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DJ.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DJ.png new file mode 100644 index 0000000000000000000000000000000000000000..6ca59058feae030a89f62d848872dcb20cb3a103 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DJ.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DK.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DK.png new file mode 100644 index 0000000000000000000000000000000000000000..25d4acedcd9bb603a7cae392c15b301387433508 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DK.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DQ.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DQ.png new file mode 100644 index 0000000000000000000000000000000000000000..a56f982e974582f5caf74409c82712b006782439 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DQ.png differ diff --git 
a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DT.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DT.png new file mode 100644 index 0000000000000000000000000000000000000000..c94858b0216a80388e7d7e6cd77c421cac03255d Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/DT.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H2.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H2.png new file mode 100644 index 0000000000000000000000000000000000000000..466c0f3520a52b3d2535c9e322f4f3bdef072c05 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H2.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H3.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H3.png new file mode 100644 index 0000000000000000000000000000000000000000..66b71ccacedba8ae33cfa9f63bd5c721b35c0727 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H3.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H4.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H4.png new file mode 100644 index 0000000000000000000000000000000000000000..57ff831f7b6deb39f336a59d2208587b1691c3c2 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H4.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H5.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H5.png new file mode 100644 index 0000000000000000000000000000000000000000..64e37dceb091ccfacffa5663b58b6de50f8e8039 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H5.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H6.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H6.png new file mode 100644 index 0000000000000000000000000000000000000000..61a1ea4ff6bd8d06fe6592c214f897a0c744e58b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H6.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H7.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H7.png new file mode 100644 index 0000000000000000000000000000000000000000..346d0a5721286fc30aeac63129fe8778b65b27b3 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H7.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H8.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H8.png new file mode 100644 index 0000000000000000000000000000000000000000..dc8dca2f68f7c09283d08c0cb2da5e4d8e1159af Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H8.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H9.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H9.png new file mode 100644 index 0000000000000000000000000000000000000000..3f92346e0861bdb7f845f4290e16beb9768dc991 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/H9.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HA.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HA.png new file mode 100644 index 0000000000000000000000000000000000000000..2f8854de2979123898549d172389af182028e843 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HA.png differ diff 
--git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HJ.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HJ.png new file mode 100644 index 0000000000000000000000000000000000000000..3930ff3f03dc84b25d425d7c45141bbb9b390894 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HJ.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HK.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HK.png new file mode 100644 index 0000000000000000000000000000000000000000..ed169c8d25b33ebc2dcd523ec98c6daae3d45dd8 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HK.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HQ.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HQ.png new file mode 100644 index 0000000000000000000000000000000000000000..06388f6fab80656acf69ab1b5b229e07169949eb Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HQ.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HT.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HT.png new file mode 100644 index 0000000000000000000000000000000000000000..02724791e28428e415c4437005d0efe594b02a1e Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/HT.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S2.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S2.png new file mode 100644 index 0000000000000000000000000000000000000000..09eaccc3ee0e3ce4208fd787faae21bd7622ed15 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S2.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S3.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S3.png new file mode 100644 index 0000000000000000000000000000000000000000..ba094d54954408371109794fade303ee7a7dbba6 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S3.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S4.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S4.png new file mode 100644 index 0000000000000000000000000000000000000000..4090e11973a309e5ec21670a7bffa49a715026fd Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S4.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S5.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S5.png new file mode 100644 index 0000000000000000000000000000000000000000..230db548f94dbbf0f28a5b250eec9f39d326fd20 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S5.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S6.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S6.png new file mode 100644 index 0000000000000000000000000000000000000000..2189d5b9930ca9dd7eb10c7a71371363487c7a5e Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S6.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S7.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S7.png new file mode 100644 index 0000000000000000000000000000000000000000..5b859dfed4900fe200ddb14864a9537d3e0ab87e Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S7.png differ 
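Editor's note: the card PNGs added above follow a Suit+Rank naming scheme (C/D/H/S for the suit, T for ten), and together with the Chip*.png images and font/Minecraft.ttf they are the assets consumed by the pygame-based render() methods of texas_holdem.py and texas_holdem_no_limit.py later in this diff. A minimal sketch of how one such asset can be located relative to the package and scaled to the renderer's tile size; the helper name and the example sizes are illustrative assumptions, not code from this diff:

import os
import pygame

def load_card_image(card_code, tile_size):
    # Resolve the PNG relative to this module, mirroring get_image() in texas_holdem.py
    cwd = os.path.dirname(__file__)
    img = pygame.image.load(os.path.join(cwd, 'img', card_code + '.png'))
    # The renderers keep the cards' 142x197 aspect ratio when scaling to the tile size
    return pygame.transform.scale(img, (int(tile_size * 142 / 197), int(tile_size)))

# e.g. load_card_image('SA', 200) would load img/SA.png (ace of spades) scaled to a 200px tile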
diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S8.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S8.png new file mode 100644 index 0000000000000000000000000000000000000000..26e6b7138160fdee98de23877b7959b43829b9f2 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S8.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S9.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S9.png new file mode 100644 index 0000000000000000000000000000000000000000..d69a6835077aa5c126ac03095b54b569b5312cca Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/S9.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SA.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SA.png new file mode 100644 index 0000000000000000000000000000000000000000..bd136074b0963e7723c87d6c0fbc751bd8178e1c Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SA.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SJ.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SJ.png new file mode 100644 index 0000000000000000000000000000000000000000..3adc3a90bc546e01b2bd4ec1b23476c34605798e Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SJ.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SK.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SK.png new file mode 100644 index 0000000000000000000000000000000000000000..5948ae521b4c1edccd0e45a0b86fce70ea34124b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SK.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SQ.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SQ.png new file mode 100644 index 0000000000000000000000000000000000000000..9fb5037700fef10dff68bbf694efd508b88c408f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/SQ.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ST.png b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ST.png new file mode 100644 index 0000000000000000000000000000000000000000..7e4a12a0fb0a762c5b86fccbaeb11082d646e98c Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/img/ST.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/leduc_holdem.py b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/leduc_holdem.py new file mode 100644 index 0000000000000000000000000000000000000000..6510a26ff9e7a9b72e168c339a045e13ae85c8a2 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/leduc_holdem.py @@ -0,0 +1,44 @@ +import random + +import numpy as np +import rlcard +from gym import spaces +from rlcard.utils.utils import print_card + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + +from .rlcard_base import RLCardBase + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.CaptureStdoutWrapper(env) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(RLCardBase): + + metadata = { + "render.modes": ["human"], + "name": "leduc_holdem_v4", + "is_parallelizable": False, + 
"video.frames_per_second": 1, + } + + def __init__(self, num_players=2): + super().__init__("leduc-holdem", num_players, (36,)) + + def render(self, mode='human'): + for player in self.possible_agents: + state = self.env.game.get_state(self._name_to_int(player)) + print(f"\n=============== {player}'s Hand ===============") + print_card(state['hand']) + print("\n{}'s Chips: {}".format(player, state['my_chips'])) + print('\n================= Public Cards =================') + print_card(state['public_card']) if state['public_card'] is not None else print('No public cards.') + print('\n') diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/mahjong.py b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/mahjong.py new file mode 100644 index 0000000000000000000000000000000000000000..51325da538170ad4a1dcd3a110f7a3d164ad70b3 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/mahjong.py @@ -0,0 +1,42 @@ +import random + +import numpy as np +import rlcard +from gym import spaces + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers + +from .rlcard_base import RLCardBase + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.CaptureStdoutWrapper(env) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(RLCardBase): + + metadata = { + "render.modes": ["human"], + "name": "mahjong_v4", + "is_parallelizable": False, + "video.frames_per_second": 1, + } + + def __init__(self): + super().__init__("mahjong", 4, (6, 34, 4)) + + def render(self, mode='human'): + for player in self.possible_agents: + state = self.env.game.get_state(self._name_to_int(player)) + print(f"\n======== {player}'s Hand ========") + print(', '.join([c.get_str() for c in state['current_hand']])) + print(f"\n{player}'s Piles: ", ', '.join([c.get_str() for pile in state['players_pile'][self._name_to_int(player)] for c in pile])) + print("\n======== Tiles on Table ========") + print(', '.join([c.get_str() for c in state['table']])) + print('\n') diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/rlcard_base.py b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/rlcard_base.py new file mode 100644 index 0000000000000000000000000000000000000000..b2c6fe1cddc1885ecd6fa1232221180542c9a767 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/rlcard_base.py @@ -0,0 +1,107 @@ +import random + +import numpy as np +import rlcard +from gym import spaces + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers + + +class RLCardBase(AECEnv): + def __init__(self, name, num_players, obs_shape): + super().__init__() + self.name = name + self.num_players = num_players + config = {'allow_step_back': False, + 'seed': None, + 'game_num_players': num_players} + + self.env = rlcard.make(name, config) + self.screen = None + if not hasattr(self, "agents"): + self.agents = [f'player_{i}' for i in range(num_players)] + self.possible_agents = self.agents[:] + + dtype = self.env.reset()[0]['obs'].dtype + if dtype == np.dtype(np.int64): + self._dtype = np.dtype(np.int8) + elif dtype == np.dtype(np.float64): + self._dtype = np.dtype(np.float32) + else: + self._dtype = dtype + + self.observation_spaces = self._convert_to_dict( + [spaces.Dict({'observation': spaces.Box(low=0.0, high=1.0, shape=obs_shape, dtype=self._dtype), + 'action_mask': spaces.Box(low=0, high=1, shape=(self.env.num_actions,), + 
dtype=np.int8)}) for _ in range(self.num_agents)]) + self.action_spaces = self._convert_to_dict([spaces.Discrete(self.env.num_actions) for _ in range(self.num_agents)]) + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def seed(self, seed=None): + config = {'allow_step_back': False, + 'seed': seed, + 'game_num_players': self.num_players} + self.env = rlcard.make(self.name, config) + + def _scale_rewards(self, reward): + return reward + + def _int_to_name(self, ind): + return self.possible_agents[ind] + + def _name_to_int(self, name): + return self.possible_agents.index(name) + + def _convert_to_dict(self, list_of_list): + return dict(zip(self.possible_agents, list_of_list)) + + def observe(self, agent): + obs = self.env.get_state(self._name_to_int(agent)) + observation = obs['obs'].astype(self._dtype) + + legal_moves = self.next_legal_moves + action_mask = np.zeros(self.env.num_actions, 'int8') + for i in legal_moves: + action_mask[i] = 1 + + return {'observation': observation, 'action_mask': action_mask} + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + obs, next_player_id = self.env.step(action) + next_player = self._int_to_name(next_player_id) + self._last_obs = self.observe(self.agent_selection) + if self.env.is_over(): + self.rewards = self._convert_to_dict(self._scale_rewards(self.env.get_payoffs())) + self.next_legal_moves = [] + self.dones = self._convert_to_dict([True if self.env.is_over() else False for _ in range(self.num_agents)]) + else: + self.next_legal_moves = obs['legal_actions'] + self._cumulative_rewards[self.agent_selection] = 0 + self.agent_selection = next_player + self._accumulate_rewards() + self._dones_step_first() + + def reset(self): + obs, player_id = self.env.reset() + self.agents = self.possible_agents[:] + self.agent_selection = self._int_to_name(player_id) + self.rewards = self._convert_to_dict([0 for _ in range(self.num_agents)]) + self._cumulative_rewards = self._convert_to_dict([0 for _ in range(self.num_agents)]) + self.dones = self._convert_to_dict([False for _ in range(self.num_agents)]) + self.infos = self._convert_to_dict([{'legal_moves': []} for _ in range(self.num_agents)]) + self.next_legal_moves = list(sorted(obs['legal_actions'])) + self._last_obs = obs['obs'] + + def render(self, mode='human'): + raise NotImplementedError() + + def close(self): + pass diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/texas_holdem.py b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/texas_holdem.py new file mode 100644 index 0000000000000000000000000000000000000000..9a89e9b8e8194a02f49cf481f8b5c06c1ebd21f8 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/texas_holdem.py @@ -0,0 +1,165 @@ +import os +import random + +import numpy as np +import pygame +import rlcard +from gym import spaces +from numpy.lib.shape_base import tile +from rlcard.utils.utils import print_card + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + +from .rlcard_base import RLCardBase + + +def get_image(path): + from os import path as os_path + cwd = os_path.dirname(__file__) + image = pygame.image.load(cwd + '/' + path) + return image + + +def get_font(path, size): + from os import path as os_path + cwd = os_path.dirname(__file__) + font = pygame.font.Font((cwd + '/' + path), size) + return font + + +def 
env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(RLCardBase): + + metadata = { + "render.modes": ["human", "rgb_array"], + "name": "texas_holdem_v4", + "is_parallelizable": False, + "video.frames_per_second": 1, + } + + def __init__(self, num_players=2): + super().__init__("limit-holdem", num_players, (72,)) + + def render(self, mode='human'): + + def calculate_width(self, screen_width, i): + return int((screen_width / (np.ceil(len(self.possible_agents) / 2) + 1) * np.ceil((i + 1) / 2)) + (tile_size * 31 / 616)) + + def calculate_offset(hand, j, tile_size): + return int((len(hand) * (tile_size * 23 / 56)) - ((j) * (tile_size * 23 / 28))) + + def calculate_height(screen_height, divisor, multiplier, tile_size, offset): + return int(multiplier * screen_height / divisor + tile_size * offset) + + screen_height = 1000 + screen_width = int(screen_height * (1 / 20) + np.ceil(len(self.possible_agents) / 2) * (screen_height * 1 / 2)) + + if self.screen is None: + if mode == "human": + pygame.init() + self.screen = pygame.display.set_mode((screen_width, screen_height)) + else: + pygame.font.init() + self.screen = pygame.Surface((screen_width, screen_height)) + if mode == "human": + pygame.event.get() + + # Setup dimensions for card size and setup for colors + tile_size = screen_height * 2 / 10 + + bg_color = (7, 99, 36) + white = (255, 255, 255) + self.screen.fill(bg_color) + + chips = {0: {'value': 10000, 'img': 'ChipOrange.png', 'number': 0}, + 1: {'value': 5000, 'img': 'ChipPink.png', 'number': 0}, + 2: {'value': 1000, 'img': 'ChipYellow.png', 'number': 0}, + 3: {'value': 100, 'img': 'ChipBlack.png', 'number': 0}, + 4: {'value': 50, 'img': 'ChipBlue.png', 'number': 0}, + 5: {'value': 25, 'img': 'ChipGreen.png', 'number': 0}, + 6: {'value': 10, 'img': 'ChipLightBlue.png', 'number': 0}, + 7: {'value': 5, 'img': 'ChipRed.png', 'number': 0}, + 8: {'value': 1, 'img': 'ChipWhite.png', 'number': 0}} + + # Load and blit all images for each card in each player's hand + for i, player in enumerate(self.possible_agents): + state = self.env.game.get_state(self._name_to_int(player)) + for j, card in enumerate(state['hand']): + # Load specified card + card_img = get_image(os.path.join('img', card + '.png')) + card_img = pygame.transform.scale(card_img, (int(tile_size * (142 / 197)), int(tile_size))) + # Players with even id go above public cards + if i % 2 == 0: + self.screen.blit(card_img, ((calculate_width(self, screen_width, i) - calculate_offset(state['hand'], j, tile_size)), calculate_height(screen_height, 4, 1, tile_size, -1))) + # Players with odd id go below public cards + else: + self.screen.blit(card_img, ((calculate_width(self, screen_width, i) - calculate_offset(state['hand'], j, tile_size)), calculate_height(screen_height, 4, 3, tile_size, 0))) + + # Load and blit text for player name + font = get_font(os.path.join('font', 'Minecraft.ttf'), 36) + text = font.render("Player " + str(i + 1), True, white) + textRect = text.get_rect() + if i % 2 == 0: + textRect.center = ((screen_width / (np.ceil(len(self.possible_agents) / 2) + 1) * np.ceil((i + 1) / 2)), calculate_height(screen_height, 4, 1, tile_size, -(22 / 20))) + else: + textRect.center = ((screen_width / (np.ceil(len(self.possible_agents) / 2) + 1) * np.ceil((i + 1) / 2)), calculate_height(screen_height, 4, 3, tile_size, (23 / 20))) + self.screen.blit(text, textRect) + + 
# Load and blit number of poker chips for each player + font = get_font(os.path.join('font', 'Minecraft.ttf'), 24) + text = font.render(str(state['my_chips']), True, white) + textRect = text.get_rect() + + # Calculate number of each chip + total = (state['my_chips']) + height = 0 + for key in chips: + num = total / chips[key]['value'] + chips[key]['number'] = int(num) + total %= chips[key]['value'] + + chip_img = get_image(os.path.join('img', chips[key]['img'])) + chip_img = pygame.transform.scale(chip_img, (int(tile_size / 2), int(tile_size * 16 / 45))) + + # Blit poker chip img + for j in range(0, int(chips[key]['number'])): + if i % 2 == 0: + self.screen.blit(chip_img, ((calculate_width(self, screen_width, i) + tile_size * (8 / 10)), calculate_height(screen_height, 4, 1, tile_size, -1 / 2) - ((j + height) * tile_size / 15))) + else: + self.screen.blit(chip_img, ((calculate_width(self, screen_width, i) + tile_size * (8 / 10)), calculate_height(screen_height, 4, 3, tile_size, 1 / 2) - ((j + height) * tile_size / 15))) + height += chips[key]['number'] + + # Blit text number + if i % 2 == 0: + textRect.center = ((calculate_width(self, screen_width, i) + tile_size * (21 / 20)), calculate_height(screen_height, 4, 1, tile_size, -1 / 2) - ((height + 1) * tile_size / 15)) + else: + textRect.center = ((calculate_width(self, screen_width, i) + tile_size * (21 / 20)), calculate_height(screen_height, 4, 3, tile_size, 1 / 2) - ((height + 1) * tile_size / 15)) + self.screen.blit(text, textRect) + + # Load and blit public cards + for i, card in enumerate(state['public_cards']): + card_img = get_image(os.path.join('img', card + '.png')) + card_img = pygame.transform.scale(card_img, (int(tile_size * (142 / 197)), int(tile_size))) + if len(state['public_cards']) <= 3: + self.screen.blit(card_img, (((((screen_width / 2) + (tile_size * 31 / 616)) - calculate_offset(state['public_cards'], i, tile_size)), calculate_height(screen_height, 2, 1, tile_size, -(1 / 2))))) + else: + if i <= 2: + self.screen.blit(card_img, (((((screen_width / 2) + (tile_size * 31 / 616)) - calculate_offset(state['public_cards'][:3], i, tile_size)), calculate_height(screen_height, 2, 1, tile_size, -21 / 20)))) + else: + self.screen.blit(card_img, (((((screen_width / 2) + (tile_size * 31 / 616)) - calculate_offset(state['public_cards'][3:], i - 3, tile_size)), calculate_height(screen_height, 2, 1, tile_size, 1 / 20)))) + + if mode == "human": + pygame.display.update() + + observation = np.array(pygame.surfarray.pixels3d(self.screen)) + + return np.transpose(observation, axes=(1, 0, 2)) if mode == "rgb_array" else None diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/texas_holdem_no_limit.py b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/texas_holdem_no_limit.py new file mode 100644 index 0000000000000000000000000000000000000000..6d3793a636cabad79fc80608ed9c0202a8b890dd --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/texas_holdem_no_limit.py @@ -0,0 +1,167 @@ +import os +import random + +import numpy as np +import pygame +import rlcard +from gym import spaces +from rlcard.utils.utils import print_card + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + +from .rlcard_base import RLCardBase + + +def get_image(path): + from os import path as os_path + cwd = os_path.dirname(__file__) + image = pygame.image.load(cwd + '/' + path) + return image + + +def get_font(path, size): + from os import path as os_path + 
cwd = os_path.dirname(__file__) + font = pygame.font.Font((cwd + '/' + path), size) + return font + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(RLCardBase): + + metadata = { + "render.modes": ["human", "rgb_array"], + "name": "texas_holdem_no_limit_v6", + "is_parallelizable": False, + "video.frames_per_second": 1, + } + + def __init__(self, num_players=2): + super().__init__("no-limit-holdem", num_players, (54,)) + self.observation_spaces = self._convert_to_dict([spaces.Dict( + {'observation': spaces.Box(low=np.zeros(54, ), high=np.append(np.ones(52, ), [100, 100]), dtype=np.float64), + 'action_mask': spaces.Box(low=0, high=1, shape=(self.env.num_actions,), dtype=np.int8)}) for _ in range(self.num_agents)]) + + def render(self, mode='human'): + + def calculate_width(self, screen_width, i): + return int((screen_width / (np.ceil(len(self.possible_agents) / 2) + 1) * np.ceil((i + 1) / 2)) + (tile_size * 31 / 616)) + + def calculate_offset(hand, j, tile_size): + return int((len(hand) * (tile_size * 23 / 56)) - ((j) * (tile_size * 23 / 28))) + + def calculate_height(screen_height, divisor, multiplier, tile_size, offset): + return int(multiplier * screen_height / divisor + tile_size * offset) + + screen_height = 1000 + screen_width = int(screen_height * (1 / 20) + np.ceil(len(self.possible_agents) / 2) * (screen_height * 1 / 2)) + + if self.screen is None: + if mode == "human": + pygame.init() + self.screen = pygame.display.set_mode((screen_width, screen_height)) + else: + pygame.font.init() + self.screen = pygame.Surface((screen_width, screen_height)) + if mode == "human": + pygame.event.get() + + # Setup dimensions for card size and setup for colors + tile_size = screen_height * 2 / 10 + + bg_color = (7, 99, 36) + white = (255, 255, 255) + self.screen.fill(bg_color) + + chips = {0: {'value': 10000, 'img': 'ChipOrange.png', 'number': 0}, + 1: {'value': 5000, 'img': 'ChipPink.png', 'number': 0}, + 2: {'value': 1000, 'img': 'ChipYellow.png', 'number': 0}, + 3: {'value': 100, 'img': 'ChipBlack.png', 'number': 0}, + 4: {'value': 50, 'img': 'ChipBlue.png', 'number': 0}, + 5: {'value': 25, 'img': 'ChipGreen.png', 'number': 0}, + 6: {'value': 10, 'img': 'ChipLightBlue.png', 'number': 0}, + 7: {'value': 5, 'img': 'ChipRed.png', 'number': 0}, + 8: {'value': 1, 'img': 'ChipWhite.png', 'number': 0}} + + # Load and blit all images for each card in each player's hand + for i, player in enumerate(self.possible_agents): + state = self.env.game.get_state(self._name_to_int(player)) + for j, card in enumerate(state['hand']): + # Load specified card + card_img = get_image(os.path.join('img', card + '.png')) + card_img = pygame.transform.scale(card_img, (int(tile_size * (142 / 197)), int(tile_size))) + # Players with even id go above public cards + if i % 2 == 0: + self.screen.blit(card_img, ((calculate_width(self, screen_width, i) - calculate_offset(state['hand'], j, tile_size)), calculate_height(screen_height, 4, 1, tile_size, -1))) + # Players with odd id go below public cards + else: + self.screen.blit(card_img, ((calculate_width(self, screen_width, i) - calculate_offset(state['hand'], j, tile_size)), calculate_height(screen_height, 4, 3, tile_size, 0))) + + # Load and blit text for player name + font = get_font(os.path.join('font', 'Minecraft.ttf'), 36) + text = font.render("Player " + str(i + 1), True, white) + textRect 
= text.get_rect() + if i % 2 == 0: + textRect.center = ((screen_width / (np.ceil(len(self.possible_agents) / 2) + 1) * np.ceil((i + 1) / 2)), calculate_height(screen_height, 4, 1, tile_size, -(22 / 20))) + else: + textRect.center = ((screen_width / (np.ceil(len(self.possible_agents) / 2) + 1) * np.ceil((i + 1) / 2)), calculate_height(screen_height, 4, 3, tile_size, (23 / 20))) + self.screen.blit(text, textRect) + + # Load and blit number of poker chips for each player + font = get_font(os.path.join('font', 'Minecraft.ttf'), 24) + text = font.render(str(state['my_chips']), True, white) + textRect = text.get_rect() + + # Calculate number of each chip + total = (state['my_chips']) + height = 0 + for key in chips: + num = total / chips[key]['value'] + chips[key]['number'] = int(num) + total %= chips[key]['value'] + + chip_img = get_image(os.path.join('img', chips[key]['img'])) + chip_img = pygame.transform.scale(chip_img, (int(tile_size / 2), int(tile_size * 16 / 45))) + + # Blit poker chip img + for j in range(0, int(chips[key]['number'])): + if i % 2 == 0: + self.screen.blit(chip_img, ((calculate_width(self, screen_width, i) + tile_size * (8 / 10)), calculate_height(screen_height, 4, 1, tile_size, -1 / 2) - ((j + height) * tile_size / 15))) + else: + self.screen.blit(chip_img, ((calculate_width(self, screen_width, i) + tile_size * (8 / 10)), calculate_height(screen_height, 4, 3, tile_size, 1 / 2) - ((j + height) * tile_size / 15))) + height += chips[key]['number'] + + # Blit text number + if i % 2 == 0: + textRect.center = ((calculate_width(self, screen_width, i) + tile_size * (21 / 20)), calculate_height(screen_height, 4, 1, tile_size, -1 / 2) - ((height + 1) * tile_size / 15)) + else: + textRect.center = ((calculate_width(self, screen_width, i) + tile_size * (21 / 20)), calculate_height(screen_height, 4, 3, tile_size, 1 / 2) - ((height + 1) * tile_size / 15)) + self.screen.blit(text, textRect) + + # Load and blit public cards + for i, card in enumerate(state['public_cards']): + card_img = get_image(os.path.join('img', card + '.png')) + card_img = pygame.transform.scale(card_img, (int(tile_size * (142 / 197)), int(tile_size))) + if len(state['public_cards']) <= 3: + self.screen.blit(card_img, (((((screen_width / 2) + (tile_size * 31 / 616)) - calculate_offset(state['public_cards'], i, tile_size)), calculate_height(screen_height, 2, 1, tile_size, -(1 / 2))))) + else: + if i <= 2: + self.screen.blit(card_img, (((((screen_width / 2) + (tile_size * 31 / 616)) - calculate_offset(state['public_cards'][:3], i, tile_size)), calculate_height(screen_height, 2, 1, tile_size, -21 / 20)))) + else: + self.screen.blit(card_img, (((((screen_width / 2) + (tile_size * 31 / 616)) - calculate_offset(state['public_cards'][3:], i - 3, tile_size)), calculate_height(screen_height, 2, 1, tile_size, 1 / 20)))) + + if mode == "human": + pygame.display.update() + + observation = np.array(pygame.surfarray.pixels3d(self.screen)) + + return np.transpose(observation, axes=(1, 0, 2)) if mode == "rgb_array" else None diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/uno.py b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/uno.py new file mode 100644 index 0000000000000000000000000000000000000000..2064ef1264776d153e6f0af2ec177f4af5f9f3da --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/rlcard_envs/uno.py @@ -0,0 +1,62 @@ +import random + +import numpy as np +import rlcard +from gym import spaces +from rlcard.games.uno.card import UnoCard + +from pettingzoo import AECEnv +from 
pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + +from .rlcard_base import RLCardBase + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.CaptureStdoutWrapper(env) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(RLCardBase): + + metadata = { + "render.modes": ["human"], + "name": "uno_v4", + "is_parallelizable": False, + "video.frames_per_second": 2, + } + + def __init__(self, opponents_hand_visible=False): + self._opponents_hand_visible = opponents_hand_visible + num_planes = 7 if self._opponents_hand_visible else 4 + super().__init__("uno", 2, (num_planes, 4, 15)) + + def observe(self, agent): + obs = self.env.get_state(self._name_to_int(agent)) + if self._opponents_hand_visible: + observation = obs['obs'].astype(self._dtype) + else: + observation = obs['obs'][0:4, :, :].astype(self._dtype) + + legal_moves = self.next_legal_moves + action_mask = np.zeros(61, 'int8') + for i in legal_moves: + action_mask[i] = 1 + + return {'observation': observation, 'action_mask': action_mask} + + def render(self, mode='human'): + for player in self.possible_agents: + state = self.env.game.get_state(self._name_to_int(player)) + print(f"\n\n=============== {player}'s Hand ===============") + UnoCard.print_cards(state['hand']) + print('\n\n================= Target Card =================') + UnoCard.print_cards(state['target'], wild_color=True) + print('\n') + + def close(self): + pass diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rps/__init__.py b/MLPY/Lib/site-packages/pettingzoo/classic/rps/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rps/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/rps/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4ca62a84984e9463229cdce679ae624a91e614b3 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rps/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rps/__pycache__/rps.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/rps/__pycache__/rps.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c6f9fcbb4e9201dffd5f3e813f821b1f465714c5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rps/__pycache__/rps.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rps/font/Minecraft.ttf b/MLPY/Lib/site-packages/pettingzoo/classic/rps/font/Minecraft.ttf new file mode 100644 index 0000000000000000000000000000000000000000..85c14725a3bf6d67aaf0f03292f9b763c1654f07 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rps/font/Minecraft.ttf differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Lizard.png b/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Lizard.png new file mode 100644 index 0000000000000000000000000000000000000000..3f26e4905966e8cbaf0d75d24451d8ad2db35256 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Lizard.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Paper.png b/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Paper.png new file mode 100644 index 
0000000000000000000000000000000000000000..73241568e1c7dff1cc7f19fe91e2e941494269ef Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Paper.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Rock.png b/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Rock.png new file mode 100644 index 0000000000000000000000000000000000000000..509fc5e2901073814c2f1e09628347ba5df739e9 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Rock.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Scissors.png b/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Scissors.png new file mode 100644 index 0000000000000000000000000000000000000000..9911c3a3f167066cce195b5d4f26d269b1f6c257 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Scissors.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Spock.png b/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Spock.png new file mode 100644 index 0000000000000000000000000000000000000000..20034330bb57bf7c3bfb0d4b80eb82737208aa20 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/rps/img/Spock.png differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rps/rps.py b/MLPY/Lib/site-packages/pettingzoo/classic/rps/rps.py new file mode 100644 index 0000000000000000000000000000000000000000..04bc0d797a36d5fa0cda1aa3118484c91362789f --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/rps/rps.py @@ -0,0 +1,242 @@ +import os + +import numpy as np +import pygame +from gym.spaces import Discrete + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers +from pettingzoo.utils.conversions import parallel_wrapper_fn + + +def get_image(path): + from os import path as os_path + + import pygame + cwd = os_path.dirname(__file__) + image = pygame.image.load(cwd + '/' + path) + sfc = pygame.Surface(image.get_size(), flags=pygame.SRCALPHA) + sfc.blit(image, (0, 0)) + return sfc + + +def get_font(path, size): + from os import path as os_path + cwd = os_path.dirname(__file__) + font = pygame.font.Font((cwd + '/' + path), size) + return font + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +parallel_env = parallel_wrapper_fn(env) + + +class raw_env(AECEnv): + """Two-player environment for rock paper scissors. + Expandable environment to rock paper scissors lizard spock action_6 action_7 ... + The observation is simply the last opponent action.""" + + metadata = { + "render.modes": ["human", "rgb_array"], + "name": "rps_v2", + "is_parallelizable": True, + "video.frames_per_second": 2, + } + + def __init__(self, num_actions=3, max_cycles=15): + self.max_cycles = max_cycles + + # number of actions must be odd and greater than 3 + assert num_actions > 2, "The number of actions must be equal or greater than 3." + assert num_actions % 2 != 0, "The number of actions must be an odd number." 
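+ # Editorial note (comment not in upstream PettingZoo, added here for clarity): the two
+ # assertions above follow from the win rule in step() further below -- equal choices tie,
+ # equal parity means the lower action index wins, and opposite parity means the higher
+ # index wins, so with an odd action count every action beats exactly half of the others.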
+ self._moves = ["ROCK", "PAPER", "SCISSORS"] + if num_actions > 3: + # expand to lizard, spock for first extra action pair + self._moves.extend(("SPOCK", "LIZARD")) + for action in range(num_actions - 5): + self._moves.append("ACTION_"f'{action + 6}') + # none is last possible action, to satisfy discrete action space + self._moves.append("None") + self._none = num_actions + + self.agents = ["player_" + str(r) for r in range(2)] + self.possible_agents = self.agents[:] + self.agent_name_mapping = dict(zip(self.agents, list(range(self.num_agents)))) + self.action_spaces = {agent: Discrete(num_actions) for agent in self.agents} + self.observation_spaces = {agent: Discrete(1 + num_actions) for agent in self.agents} + + self.screen = None + self.history = [0] * (2 * 5) + + self.reinit() + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def reinit(self): + self.agents = self.possible_agents[:] + self._agent_selector = agent_selector(self.agents) + self.agent_selection = self._agent_selector.next() + self.rewards = {agent: 0 for agent in self.agents} + self._cumulative_rewards = {agent: 0 for agent in self.agents} + self.dones = {agent: False for agent in self.agents} + self.infos = {agent: {} for agent in self.agents} + + self.state = {agent: self._none for agent in self.agents} + self.observations = {agent: self._none for agent in self.agents} + + self.num_moves = 0 + + def render(self, mode="human"): + def offset(i, size, offset=0): + if i == 0: + return -(size) - offset + else: + return offset + + screen_height = 350 + screen_width = int(screen_height * 5 / 14) + + if self.screen is None: + if mode == "human": + pygame.init() + self.screen = pygame.display.set_mode((screen_width, screen_height)) + else: + pygame.font.init() + self.screen = pygame.Surface((screen_width, screen_height)) + if mode == "human": + pygame.event.get() + + # Load and all of the necessary images + paper = get_image(os.path.join('img', 'Paper.png')) + rock = get_image(os.path.join('img', 'Rock.png')) + scissors = get_image(os.path.join('img', 'Scissors.png')) + spock = get_image(os.path.join('img', 'Spock.png')) + lizard = get_image(os.path.join('img', 'Lizard.png')) + + # Scale images in history + paper = pygame.transform.scale(paper, (int(screen_height / 9), int(screen_height / 9 * (14 / 12)))) + rock = pygame.transform.scale(rock, (int(screen_height / 9), int(screen_height / 9 * (10 / 13)))) + scissors = pygame.transform.scale(scissors, (int(screen_height / 9), int(screen_height / 9 * (14 / 13)))) + spock = pygame.transform.scale(spock, (int(screen_height / 9), int(screen_height / 9))) + lizard = pygame.transform.scale(lizard, (int(screen_height / 9 * (9 / 18)), int(screen_height / 9))) + + # Set background color + bg = (255, 255, 255) + self.screen.fill(bg) + + # Set font properties + black = (0, 0, 0) + font = get_font((os.path.join('font', 'Minecraft.ttf')), int(screen_height / 25)) + + for i, move in enumerate(self.history[0:10]): + # Blit move history + if move == 'ROCK': + self.screen.blit(rock, ((screen_width / 2) + offset((i + 1) % 2, screen_height / 9, screen_height * 7 / 126), (screen_height * 7 / 24) + ((screen_height / 7) * np.floor(i / 2)))) + elif move == 'PAPER': + self.screen.blit(paper, ((screen_width / 2) + offset((i + 1) % 2, screen_height / 9, screen_height * 7 / 126), (screen_height * 7 / 24) + ((screen_height / 7) * np.floor(i / 2)))) + elif move == 'SCISSORS': + self.screen.blit(scissors, 
((screen_width / 2) + offset((i + 1) % 2, screen_height / 9, screen_height * 7 / 126), (screen_height * 7 / 24) + ((screen_height / 7) * np.floor(i / 2)))) + elif move == 'SPOCK': + self.screen.blit(spock, ((screen_width / 2) + offset((i + 1) % 2, screen_height / 9, screen_height * 7 / 126), (screen_height * 7 / 24) + ((screen_height / 7) * np.floor(i / 2)))) + elif move == 'LIZARD': + self.screen.blit(lizard, ((screen_width / 2) + offset((i + 1) % 2, screen_height / 9, screen_height * 7 / 126), (screen_height * 7 / 24) + ((screen_height / 7) * np.floor(i / 2)))) + + # Scale images in current game + paper = pygame.transform.scale(paper, (int(screen_height / 7), int(screen_height / 7 * (14 / 12)))) + rock = pygame.transform.scale(rock, (int(screen_height / 7), int(screen_height / 7 * (10 / 13)))) + scissors = pygame.transform.scale(scissors, (int(screen_height / 7), int(screen_height / 7 * (14 / 13)))) + spock = pygame.transform.scale(spock, (int(screen_height / 7), int(screen_height / 7))) + lizard = pygame.transform.scale(lizard, (int(screen_height / 7 * (9 / 18)), int(screen_height / 7))) + + if len(self.agents) > 1: + for i in range(0, 2): + # Text for each agent + text = font.render('Agent ' + str(i + 1), True, black) + textRect = text.get_rect() + textRect.center = ((screen_width / 2) + offset(i, 0, screen_width * 11 / 40), screen_height / 40) + self.screen.blit(text, textRect) + + # Blit agent action + if self._moves[self.state[self.agents[i]]] == 'ROCK': + self.screen.blit(rock, ((screen_width / 2) + offset(i, screen_height / 7, screen_height / 42), screen_height / 12)) + elif self._moves[self.state[self.agents[i]]] == 'PAPER': + self.screen.blit(paper, ((screen_width / 2) + offset(i, screen_height / 7, screen_height / 42), screen_height / 12)) + elif self._moves[self.state[self.agents[i]]] == 'SCISSORS': + self.screen.blit(scissors, ((screen_width / 2) + offset(i, screen_height / 7, screen_height / 42), screen_height / 12)) + elif self._moves[self.state[self.agents[i]]] == 'SPOCK': + self.screen.blit(spock, ((screen_width / 2) + offset(i, screen_height / 7, screen_height / 42), screen_height / 12)) + elif self._moves[self.state[self.agents[i]]] == 'LIZARD': + self.screen.blit(lizard, ((screen_width / 2) + offset(i, screen_height / 7, screen_height / 42), screen_height / 12)) + if self._moves[self.state[self.agents[1]]] != 'None': + self.history = [self._moves[self.state[self.agents[i]]]] + self.history[:-1] + + if mode == "human": + pygame.display.update() + + observation = np.array(pygame.surfarray.pixels3d(self.screen)) + + return np.transpose(observation, axes=(1, 0, 2)) if mode == "rgb_array" else None + + def observe(self, agent): + # observation of one agent is the previous state of the other + return np.array(self.observations[agent]) + + def close(self): + pass + + def reset(self): + self.reinit() + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + agent = self.agent_selection + + self.state[self.agent_selection] = action + + # collect reward if it is the last agent to act + if self._agent_selector.is_last(): + + # same action => 0 reward each agent + if self.state[self.agents[0]] == self.state[self.agents[1]]: + rewards = (0, 0) + else: + # same action parity => lower action number wins + if (self.state[self.agents[0]] + self.state[self.agents[1]]) % 2 == 0: + if self.state[self.agents[0]] > self.state[self.agents[1]]: + rewards = (-1, 1) + else: + rewards = (1, -1) + # different action parity => higher action number 
wins + else: + if self.state[self.agents[0]] > self.state[self.agents[1]]: + rewards = (1, -1) + else: + rewards = (-1, 1) + self.rewards[self.agents[0]], self.rewards[self.agents[1]] = rewards + + self.num_moves += 1 + + self.dones = {agent: self.num_moves >= self.max_cycles for agent in self.agents} + + # observe the current state + for i in self.agents: + self.observations[i] = self.state[self.agents[1 - self.agent_name_mapping[i]]] + else: + self.state[self.agents[1 - self.agent_name_mapping[agent]]] = self._none + + self._clear_rewards() + + self._cumulative_rewards[self.agent_selection] = 0 + self.agent_selection = self._agent_selector.next() + self._accumulate_rewards() diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/rps_v2.py b/MLPY/Lib/site-packages/pettingzoo/classic/rps_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..5406f02b3d2a8f30c0592f1295dd11b8db403977 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/rps_v2.py @@ -0,0 +1 @@ +from .rps.rps import env, parallel_env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/texas_holdem_no_limit_v6.py b/MLPY/Lib/site-packages/pettingzoo/classic/texas_holdem_no_limit_v6.py new file mode 100644 index 0000000000000000000000000000000000000000..27c784742dd6f63d4c2ff1db43cffd9d873046ed --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/texas_holdem_no_limit_v6.py @@ -0,0 +1 @@ +from .rlcard_envs.texas_holdem_no_limit import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/texas_holdem_v4.py b/MLPY/Lib/site-packages/pettingzoo/classic/texas_holdem_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..6049cd4c52134a332eb2a35dd91720475bd650f5 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/texas_holdem_v4.py @@ -0,0 +1 @@ +from .rlcard_envs.texas_holdem import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/__init__.py b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ba99a6ace025cdd996ccf2b47c074f6a8251f1c1 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/__pycache__/board.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/__pycache__/board.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..85f3d256c49ca8e42985e4dd5d1679c20db36c54 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/__pycache__/board.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/__pycache__/tictactoe.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/__pycache__/tictactoe.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b08918ed7b85a48e3d1295ff4c867cdddc479109 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/__pycache__/tictactoe.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/board.py b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/board.py new file mode 100644 
index 0000000000000000000000000000000000000000..6d4cffbabd9e3390b45299bce65bc95d52f54dad --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/board.py @@ -0,0 +1,75 @@ +class Board: + def __init__(self): + # internally self.board.squares holds a flat representation of tic tac toe board + # where an empty board is [0, 0, 0, 0, 0, 0, 0, 0, 0] + # where indexes are column wise order + # 0 3 6 + # 1 4 7 + # 2 5 8 + + # empty -- 0 + # player 0 -- 1 + # player 1 -- 2 + self.squares = [0] * 9 + + # precommute possible winning combinations + self.calculate_winners() + + def setup(self): + self.calculate_winners() + + def play_turn(self, agent, pos): + # if spot is empty + if self.squares[pos] != 0: + return + if agent == 0: + self.squares[pos] = 1 + elif agent == 1: + self.squares[pos] = 2 + return + + def calculate_winners(self): + winning_combinations = [] + indices = [x for x in range(0, 9)] + + # Vertical combinations + winning_combinations += ([tuple(indices[i:(i + 3)]) for i in range(0, len(indices), 3)]) + + # Horizontal combinations + winning_combinations += [tuple(indices[x] for x in range(y, len(indices), 3)) for y in range(0, 3)] + + # Diagonal combinations + winning_combinations.append(tuple(x for x in range(0, len(indices), 4))) + winning_combinations.append(tuple(x for x in range(2, len(indices) - 1, 2))) + + self.winning_combinations = winning_combinations + + # returns: + # -1 for no winner + # 0 -- agent 0 wins + # 1 -- agent 1 wins + def check_for_winner(self): + winner = -1 + for combination in self.winning_combinations: + states = [] + for index in combination: + states.append(self.squares[index]) + if all(x == 1 for x in states): + winner = 1 + if all(x == 2 for x in states): + winner = 2 + return winner + + def check_game_over(self): + winner = self.check_for_winner() + + if winner == -1 and all(square in [1, 2] for square in self.squares): + # tie + return True + elif winner in [1, 2]: + return True + else: + return False + + def __str__(self): + return str(self.squares) diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/tictactoe.py b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/tictactoe.py new file mode 100644 index 0000000000000000000000000000000000000000..e98158a6162ae7d47ff51e64e2412aeb17b5f3cb --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe/tictactoe.py @@ -0,0 +1,161 @@ +import warnings + +import numpy as np +from gym import spaces + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers + +from .board import Board + + +def env(): + env = raw_env() + env = wrappers.CaptureStdoutWrapper(env) + env = wrappers.TerminateIllegalWrapper(env, illegal_reward=-1) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +class raw_env(AECEnv): + metadata = { + "render.modes": ["human"], + "name": "tictactoe_v3", + "is_parallelizable": False, + "video.frames_per_second": 1, + } + + def __init__(self): + super().__init__() + self.board = Board() + + self.agents = ["player_1", "player_2"] + self.possible_agents = self.agents[:] + + self.action_spaces = {i: spaces.Discrete(9) for i in self.agents} + self.observation_spaces = {i: spaces.Dict({ + 'observation': spaces.Box(low=0, high=1, shape=(3, 3, 2), dtype=np.int8), + 'action_mask': spaces.Box(low=0, high=1, shape=(9,), dtype=np.int8) + }) for i in self.agents} + + self.rewards = {i: 0 for i in self.agents} + self.dones = {i: False for i in self.agents} + self.infos = {i: 
{'legal_moves': list(range(0, 9))} for i in self.agents} + + self._agent_selector = agent_selector(self.agents) + self.agent_selection = self._agent_selector.reset() + + # Key + # ---- + # blank space = 0 + # agent 0 = 1 + # agent 1 = 2 + # An observation is list of lists, where each list represents a row + # + # [[0,0,2] + # [1,2,1] + # [2,1,0]] + def observe(self, agent): + board_vals = np.array(self.board.squares).reshape(3, 3) + cur_player = self.possible_agents.index(agent) + opp_player = (cur_player + 1) % 2 + + cur_p_board = np.equal(board_vals, cur_player + 1) + opp_p_board = np.equal(board_vals, opp_player + 1) + + observation = np.stack([cur_p_board, opp_p_board], axis=2).astype(np.int8) + legal_moves = self._legal_moves() if agent == self.agent_selection else [] + + action_mask = np.zeros(9, 'int8') + for i in legal_moves: + action_mask[i] = 1 + + return {'observation': observation, 'action_mask': action_mask} + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def _legal_moves(self): + return [i for i in range(len(self.board.squares)) if self.board.squares[i] == 0] + + # action in this case is a value from 0 to 8 indicating position to move on tictactoe board + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + # check if input action is a valid move (0 == empty spot) + assert (self.board.squares[action] == 0), "played illegal move" + # play turn + self.board.play_turn(self.agents.index(self.agent_selection), action) + + # update infos + # list of valid actions (indexes in board) + # next_agent = self.agents[(self.agents.index(self.agent_selection) + 1) % len(self.agents)] + next_agent = self._agent_selector.next() + + if self.board.check_game_over(): + winner = self.board.check_for_winner() + + if winner == -1: + # tie + pass + elif winner == 1: + # agent 0 won + self.rewards[self.agents[0]] += 1 + self.rewards[self.agents[1]] -= 1 + else: + # agent 1 won + self.rewards[self.agents[1]] += 1 + self.rewards[self.agents[0]] -= 1 + + # once either play wins or there is a draw, game over, both players are done + self.dones = {i: True for i in self.agents} + + # Switch selection to next agents + self._cumulative_rewards[self.agent_selection] = 0 + self.agent_selection = next_agent + + self._accumulate_rewards() + + def reset(self): + # reset environment + self.board = Board() + + self.agents = self.possible_agents[:] + self.rewards = {i: 0 for i in self.agents} + self._cumulative_rewards = {i: 0 for i in self.agents} + self.dones = {i: False for i in self.agents} + self.infos = {i: {} for i in self.agents} + # selects the first agent + self._agent_selector.reinit(self.agents) + self._agent_selector.reset() + self.agent_selection = self._agent_selector.reset() + + def render(self, mode='human'): + def getSymbol(input): + if input == 0: + return '-' + elif input == 1: + return 'X' + else: + return 'O' + + board = list(map(getSymbol, self.board.squares)) + + print(" " * 5 + "|" + " " * 5 + "|" + " " * 5) + print(f" {board[0]} " + "|" + f" {board[3]} " + "|" + f" {board[6]} ") + print("_" * 5 + "|" + "_" * 5 + "|" + "_" * 5) + + print(" " * 5 + "|" + " " * 5 + "|" + " " * 5) + print(f" {board[1]} " + "|" + f" {board[4]} " + "|" + f" {board[7]} ") + print("_" * 5 + "|" + "_" * 5 + "|" + "_" * 5) + + print(" " * 5 + "|" + " " * 5 + "|" + " " * 5) + print(f" {board[2]} " + "|" + f" {board[5]} " + "|" + f" {board[8]} ") + print(" " * 5 + 
"|" + " " * 5 + "|" + " " * 5) + + def close(self): + pass diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe_v3.py b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..b8b236b4696752c4c1362f84238ef47d66fa5324 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/tictactoe_v3.py @@ -0,0 +1 @@ +from .tictactoe.tictactoe import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/classic/uno_v4.py b/MLPY/Lib/site-packages/pettingzoo/classic/uno_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..6b6fd43ac590fee3757570b3538b7734a0c80096 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/classic/uno_v4.py @@ -0,0 +1 @@ +from .rlcard_envs.uno import env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/__init__.py b/MLPY/Lib/site-packages/pettingzoo/magent/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ded2c453b63215d5c2e1c2eb55d820238962a0fb --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/magent/__init__.py @@ -0,0 +1,5 @@ +from pettingzoo.utils.deprecated_module import depricated_handler + + +def __getattr__(env_name): + return depricated_handler(env_name, __path__, __name__) diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..573f6c9cfee827c5af518afa81911f1a09baf4a6 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/adversarial_pursuit_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/adversarial_pursuit_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d67b0a27431d3554df27f1d04222d5e4c4545837 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/adversarial_pursuit_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/battle_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/battle_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aec46a4a2aef77e2132f07d930d898cbc485321f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/battle_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/battlefield_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/battlefield_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9c2618c529f75a8ec1b54ca0053c658c309af24a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/battlefield_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/combined_arms_v5.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/combined_arms_v5.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6020b32d937e030fa7085cade46a1389e7e1a272 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/combined_arms_v5.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/gather_v4.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/gather_v4.cpython-39.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..379e5460092e3f6b89da66fbd271ad4133f76cc7 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/gather_v4.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/magent_env.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/magent_env.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..995dca97d0deecd4939aeb363c305f644a8fb3de Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/magent_env.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/render.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/render.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7b19b27567292d4ad9e5029979e45b0d657564d8 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/render.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/tiger_deer_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/tiger_deer_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..290ffcb0ee068a199009691781a6d81bf195ab6b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/magent/__pycache__/tiger_deer_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/adversarial_pursuit_v3.py b/MLPY/Lib/site-packages/pettingzoo/magent/adversarial_pursuit_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..13e2ff09649820f7bc1d46362b8ae403f6ce29e2 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/magent/adversarial_pursuit_v3.py @@ -0,0 +1,97 @@ +import math +import warnings + +import magent +import numpy as np +from gym.spaces import Box, Discrete +from gym.utils import EzPickle + +from pettingzoo import AECEnv +from pettingzoo.magent.render import Renderer +from pettingzoo.utils import agent_selector +from pettingzoo.utils.conversions import parallel_to_aec_wrapper, parallel_wrapper_fn + +from .magent_env import magent_parallel_env, make_env + +default_map_size = 45 +max_cycles_default = 500 +minimap_mode_default = False +default_reward_args = dict(tag_penalty=-0.2) + + +def parallel_env(map_size=default_map_size, max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **reward_args): + env_reward_args = dict(**default_reward_args) + env_reward_args.update(reward_args) + return _parallel_env(map_size, minimap_mode, env_reward_args, max_cycles, extra_features) + + +def raw_env(map_size=default_map_size, max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **reward_args): + return parallel_to_aec_wrapper(parallel_env(map_size, max_cycles, minimap_mode, extra_features, **reward_args)) + + +env = make_env(raw_env) + + +def get_config(map_size, minimap_mode, tag_penalty): + gw = magent.gridworld + cfg = gw.Config() + + cfg.set({"map_width": map_size, "map_height": map_size}) + cfg.set({"minimap_mode": minimap_mode}) + cfg.set({"embedding_size": 10}) + + options = { + 'width': 2, 'length': 2, 'hp': 1, 'speed': 1, + 'view_range': gw.CircleRange(5), 'attack_range': gw.CircleRange(2), + 'attack_penalty': tag_penalty + } + predator = cfg.register_agent_type( + "predator", + options + ) + + options = { + 'width': 1, 'length': 1, 'hp': 1, 'speed': 1.5, + 'view_range': gw.CircleRange(4), 'attack_range': gw.CircleRange(0) + } + prey = cfg.register_agent_type( 
+ "prey", + options + ) + + predator_group = cfg.add_group(predator) + prey_group = cfg.add_group(prey) + + a = gw.AgentSymbol(predator_group, index='any') + b = gw.AgentSymbol(prey_group, index='any') + + cfg.add_reward_rule(gw.Event(a, 'attack', b), receiver=[a, b], value=[1, -1]) + + return cfg + + +class _parallel_env(magent_parallel_env, EzPickle): + metadata = { + "render.modes": ["human", "rgb_array"], + 'name': "adversarial_pursuit_v3", + "video.frames_per_second": 5, + } + + def __init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features): + EzPickle.__init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features) + assert map_size >= 7, "size of map must be at least 7" + env = magent.GridWorld(get_config(map_size, minimap_mode, **reward_args), map_size=map_size) + + handles = env.get_handles() + reward_vals = np.array([1, -1, -1, -1, -1] + list(reward_args.values())) + reward_range = [np.minimum(reward_vals, 0).sum(), np.maximum(reward_vals, 0).sum()] + names = ["predator", "prey"] + super().__init__(env, handles, names, map_size, max_cycles, reward_range, minimap_mode, extra_features) + + def generate_map(self): + env, map_size = self.env, self.map_size + handles = env.get_handles() + + env.add_walls(method="random", n=map_size * map_size * 0.03) + env.add_agents(handles[0], method="random", n=map_size * map_size * 0.0125) + env.add_agents(handles[1], method="random", n=map_size * map_size * 0.025) diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/battle_v3.py b/MLPY/Lib/site-packages/pettingzoo/magent/battle_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..4590ee5aed3bfc17e1030189813bb5ddf9e05165 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/magent/battle_v3.py @@ -0,0 +1,116 @@ +import math +import warnings + +import magent +import numpy as np +from gym.spaces import Box, Discrete +from gym.utils import EzPickle + +from pettingzoo import AECEnv +from pettingzoo.magent.render import Renderer +from pettingzoo.utils import agent_selector +from pettingzoo.utils.conversions import parallel_to_aec_wrapper, parallel_wrapper_fn + +from .magent_env import magent_parallel_env, make_env + +default_map_size = 45 +max_cycles_default = 1000 +KILL_REWARD = 5 +minimap_mode_default = False +default_reward_args = dict(step_reward=-0.005, dead_penalty=-0.1, attack_penalty=-0.1, attack_opponent_reward=0.2) + + +def parallel_env(map_size=default_map_size, max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **reward_args): + env_reward_args = dict(**default_reward_args) + env_reward_args.update(reward_args) + return _parallel_env(map_size, minimap_mode, env_reward_args, max_cycles, extra_features) + + +def raw_env(map_size=default_map_size, max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **reward_args): + return parallel_to_aec_wrapper(parallel_env(map_size, max_cycles, minimap_mode, extra_features, **reward_args)) + + +env = make_env(raw_env) + + +def get_config(map_size, minimap_mode, step_reward, dead_penalty, attack_penalty, attack_opponent_reward): + gw = magent.gridworld + cfg = gw.Config() + + cfg.set({"map_width": map_size, "map_height": map_size}) + cfg.set({"minimap_mode": minimap_mode}) + cfg.set({"embedding_size": 10}) + + options = { + 'width': 1, 'length': 1, 'hp': 10, 'speed': 2, + 'view_range': gw.CircleRange(6), 'attack_range': gw.CircleRange(1.5), + 'damage': 2, 'kill_reward': KILL_REWARD, 'step_recover': 0.1, + 'step_reward': 
step_reward, 'dead_penalty': dead_penalty, 'attack_penalty': attack_penalty + } + small = cfg.register_agent_type( + "small", + options + ) + + g0 = cfg.add_group(small) + g1 = cfg.add_group(small) + + a = gw.AgentSymbol(g0, index='any') + b = gw.AgentSymbol(g1, index='any') + + # reward shaping to encourage attack + cfg.add_reward_rule(gw.Event(a, 'attack', b), receiver=a, value=attack_opponent_reward) + cfg.add_reward_rule(gw.Event(b, 'attack', a), receiver=b, value=attack_opponent_reward) + + return cfg + + +class _parallel_env(magent_parallel_env, EzPickle): + metadata = { + "render.modes": ["human", "rgb_array"], + 'name': "battle_v3", + "video.frames_per_second": 5, + } + + def __init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features): + EzPickle.__init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features) + assert map_size >= 12, "size of map must be at least 12" + env = magent.GridWorld(get_config(map_size, minimap_mode, **reward_args), map_size=map_size) + self.leftID = 0 + self.rightID = 1 + reward_vals = np.array([KILL_REWARD] + list(reward_args.values())) + reward_range = [np.minimum(reward_vals, 0).sum(), np.maximum(reward_vals, 0).sum()] + names = ["red", "blue"] + super().__init__(env, env.get_handles(), names, map_size, max_cycles, reward_range, minimap_mode, extra_features) + + def generate_map(self): + env, map_size, handles = self.env, self.map_size, self.handles + """ generate a map, which consists of two squares of agents""" + width = height = map_size + init_num = map_size * map_size * 0.04 + gap = 3 + + self.leftID, self.rightID = self.rightID, self.leftID + + # left + n = init_num + side = int(math.sqrt(n)) * 2 + pos = [] + for x in range(width // 2 - gap - side, width // 2 - gap - side + side, 2): + for y in range((height - side) // 2, (height - side) // 2 + side, 2): + if 0 < x < width - 1 and 0 < y < height - 1: + pos.append([x, y, 0]) + team1_size = len(pos) + env.add_agents(handles[self.leftID], method="custom", pos=pos) + + # right + n = init_num + side = int(math.sqrt(n)) * 2 + pos = [] + for x in range(width // 2 + gap, width // 2 + gap + side, 2): + for y in range((height - side) // 2, (height - side) // 2 + side, 2): + if 0 < x < width - 1 and 0 < y < height - 1: + pos.append([x, y, 0]) + + pos = pos[:team1_size] + env.add_agents(handles[self.rightID], method="custom", pos=pos) diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/battlefield_v3.py b/MLPY/Lib/site-packages/pettingzoo/magent/battlefield_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..f4eca0c08ee5b3b5abf78b676a239e2be45d74d0 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/magent/battlefield_v3.py @@ -0,0 +1,113 @@ +import math +import warnings + +import magent +import numpy as np +from gym.spaces import Box, Discrete +from gym.utils import EzPickle + +from pettingzoo import AECEnv +from pettingzoo.magent.render import Renderer +from pettingzoo.utils import agent_selector, wrappers +from pettingzoo.utils.conversions import parallel_to_aec_wrapper, parallel_wrapper_fn + +from .battle_v3 import KILL_REWARD, get_config +from .magent_env import magent_parallel_env, make_env + +default_map_size = 80 +max_cycles_default = 1000 +minimap_mode_default = False +default_reward_args = dict(step_reward=-0.005, dead_penalty=-0.1, attack_penalty=-0.1, attack_opponent_reward=0.2) + + +def parallel_env(map_size=default_map_size, max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **reward_args): + 
env_reward_args = dict(**default_reward_args) + env_reward_args.update(reward_args) + return _parallel_env(map_size, minimap_mode, env_reward_args, max_cycles, extra_features) + + +def raw_env(map_size=default_map_size, max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **reward_args): + return parallel_to_aec_wrapper(parallel_env(map_size, max_cycles, minimap_mode, extra_features, **reward_args)) + + +env = make_env(raw_env) + + +class _parallel_env(magent_parallel_env, EzPickle): + metadata = { + "render.modes": ["human", "rgb_array"], + 'name': "battlefield_v3", + "video.frames_per_second": 5, + } + + def __init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features): + EzPickle.__init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features) + assert map_size >= 46, "size of map must be at least 46" + env = magent.GridWorld(get_config(map_size, minimap_mode, **reward_args), map_size=map_size) + self.leftID = 0 + self.rightID = 1 + reward_vals = np.array([KILL_REWARD] + list(reward_args.values())) + reward_range = [np.minimum(reward_vals, 0).sum(), np.maximum(reward_vals, 0).sum()] + names = ["red", "blue"] + super().__init__(env, env.get_handles(), names, map_size, max_cycles, reward_range, minimap_mode, extra_features) + + def generate_map(self): + env, map_size, handles = self.env, self.map_size, self.handles + """ generate a map, which consists of two squares of agents""" + width = height = map_size + init_num = map_size * map_size * 0.04 + gap = 3 + + width = map_size + height = map_size + + init_num = 20 + + gap = 3 + leftID, rightID = 0, 1 + + # left + pos = [] + for y in range(10, 45): + pos.append((width / 2 - 5, y)) + pos.append((width / 2 - 4, y)) + for y in range(50, height // 2 + 25): + pos.append((width / 2 - 5, y)) + pos.append((width / 2 - 4, y)) + + for y in range(height // 2 - 25, height - 50): + pos.append((width / 2 + 5, y)) + pos.append((width / 2 + 4, y)) + for y in range(height - 45, height - 10): + pos.append((width / 2 + 5, y)) + pos.append((width / 2 + 4, y)) + + for x, y in pos: + if not (0 < x < width - 1 and 0 < y < height - 1): + assert False + env.add_walls(pos=pos, method="custom") + + n = init_num + side = int(math.sqrt(n)) * 2 + pos = [] + for x in range(width // 2 - gap - side, width // 2 - gap - side + side, 2): + for y in range((height - side) // 2, (height - side) // 2 + side, 2): + pos.append([x, y, 0]) + + for x, y, _ in pos: + if not (0 < x < width - 1 and 0 < y < height - 1): + assert False + env.add_agents(handles[leftID], method="custom", pos=pos) + + # right + n = init_num + side = int(math.sqrt(n)) * 2 + pos = [] + for x in range(width // 2 + gap, width // 2 + gap + side, 2): + for y in range((height - side) // 2, (height - side) // 2 + side, 2): + pos.append([x, y, 0]) + + for x, y, _ in pos: + if not (0 < x < width - 1 and 0 < y < height - 1): + assert False + env.add_agents(handles[rightID], method="custom", pos=pos) diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/combined_arms_v5.py b/MLPY/Lib/site-packages/pettingzoo/magent/combined_arms_v5.py new file mode 100644 index 0000000000000000000000000000000000000000..b7b4ba27ad736fd446bb1a705fa587ce25e2513b --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/magent/combined_arms_v5.py @@ -0,0 +1,164 @@ +import math +import warnings + +import magent +import numpy as np +from gym.spaces import Box, Discrete +from gym.utils import EzPickle + +from pettingzoo import AECEnv +from pettingzoo.magent.render import Renderer 
+from pettingzoo.utils import agent_selector +from pettingzoo.utils.conversions import parallel_to_aec_wrapper, parallel_wrapper_fn + +from .magent_env import magent_parallel_env, make_env + +default_map_size = 45 +max_cycles_default = 1000 +KILL_REWARD = 5 +minimap_mode_default = False +default_reward_args = dict(step_reward=-0.005, dead_penalty=-0.1, attack_penalty=-0.1, attack_opponent_reward=0.2) + + +def parallel_env(map_size=default_map_size, max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **reward_args): + env_reward_args = dict(**default_reward_args) + env_reward_args.update(reward_args) + return _parallel_env(map_size, minimap_mode, env_reward_args, max_cycles, extra_features) + + +def raw_env(map_size=default_map_size, max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **reward_args): + return parallel_to_aec_wrapper(parallel_env(map_size, max_cycles, minimap_mode, extra_features, **reward_args)) + + +env = make_env(raw_env) + + +def load_config(map_size, minimap_mode, step_reward, dead_penalty, attack_penalty, attack_opponent_reward): + gw = magent.gridworld + cfg = gw.Config() + + cfg.set({"map_width": map_size, "map_height": map_size}) + cfg.set({"minimap_mode": minimap_mode}) + + cfg.set({"embedding_size": 10}) + + options = { + 'width': 1, 'length': 1, 'hp': 10, 'speed': 1, + 'view_range': gw.CircleRange(6), 'attack_range': gw.CircleRange(1), + 'damage': 2, 'step_recover': 0.1, 'attack_in_group': True, + 'step_reward': step_reward, 'dead_penalty': dead_penalty, 'attack_penalty': attack_penalty, + } + + melee = cfg.register_agent_type( + "melee", + options + ) + + options = { + 'width': 1, 'length': 1, 'hp': 3, 'speed': 2, + 'view_range': gw.CircleRange(6), 'attack_range': gw.CircleRange(2), + 'damage': 2, 'step_recover': 0.1, 'attack_in_group': True, + 'step_reward': step_reward, 'dead_penalty': dead_penalty, 'attack_penalty': attack_penalty, + } + + ranged = cfg.register_agent_type( + "ranged", + options + ) + + g0 = cfg.add_group(melee) + g1 = cfg.add_group(ranged) + g2 = cfg.add_group(melee) + g3 = cfg.add_group(ranged) + + arm0_0 = gw.AgentSymbol(g0, index='any') + arm0_1 = gw.AgentSymbol(g1, index='any') + arm1_0 = gw.AgentSymbol(g2, index='any') + arm1_1 = gw.AgentSymbol(g3, index='any') + + # reward shaping + cfg.add_reward_rule(gw.Event(arm0_0, 'attack', arm1_0), receiver=arm0_0, value=attack_opponent_reward) + cfg.add_reward_rule(gw.Event(arm0_0, 'attack', arm1_1), receiver=arm0_0, value=attack_opponent_reward) + cfg.add_reward_rule(gw.Event(arm0_1, 'attack', arm1_0), receiver=arm0_1, value=attack_opponent_reward) + cfg.add_reward_rule(gw.Event(arm0_1, 'attack', arm1_1), receiver=arm0_1, value=attack_opponent_reward) + + cfg.add_reward_rule(gw.Event(arm1_0, 'attack', arm0_0), receiver=arm1_0, value=attack_opponent_reward) + cfg.add_reward_rule(gw.Event(arm1_0, 'attack', arm0_1), receiver=arm1_0, value=attack_opponent_reward) + cfg.add_reward_rule(gw.Event(arm1_1, 'attack', arm0_0), receiver=arm1_1, value=attack_opponent_reward) + cfg.add_reward_rule(gw.Event(arm1_1, 'attack', arm0_1), receiver=arm1_1, value=attack_opponent_reward) + + # kill reward + cfg.add_reward_rule(gw.Event(arm0_0, 'kill', arm1_0), receiver=arm0_0, value=KILL_REWARD) + cfg.add_reward_rule(gw.Event(arm0_0, 'kill', arm1_1), receiver=arm0_0, value=KILL_REWARD) + cfg.add_reward_rule(gw.Event(arm0_1, 'kill', arm1_0), receiver=arm0_1, value=KILL_REWARD) + cfg.add_reward_rule(gw.Event(arm0_1, 'kill', arm1_1), receiver=arm0_1, 
value=KILL_REWARD) + + cfg.add_reward_rule(gw.Event(arm1_0, 'kill', arm0_0), receiver=arm1_0, value=KILL_REWARD) + cfg.add_reward_rule(gw.Event(arm1_0, 'kill', arm0_1), receiver=arm1_0, value=KILL_REWARD) + cfg.add_reward_rule(gw.Event(arm1_1, 'kill', arm0_0), receiver=arm1_1, value=KILL_REWARD) + cfg.add_reward_rule(gw.Event(arm1_1, 'kill', arm0_1), receiver=arm1_1, value=KILL_REWARD) + + return cfg + + +def generate_map(env, map_size, handles): + width = map_size + height = map_size + + init_num = map_size * map_size * 0.04 + + gap = 3 + # left + n = init_num + side = int(math.sqrt(n)) * 2 + pos = [[], []] + ct = 0 + for x in range(max(width // 2 - gap - side, 1), width // 2 - gap - side + side, 2): + for y in range((height - side) // 2, (height - side) // 2 + side, 2): + pos[ct % 2].append([x, y]) + ct += 1 + + xct1 = ct + for x, y in pos[0] + pos[1]: + if not (0 < x < width - 1 and 0 < y < height - 1): + assert False + env.add_agents(handles[0], method="custom", pos=pos[0]) + env.add_agents(handles[1], method="custom", pos=pos[1]) + + # right + n = init_num + side = int(math.sqrt(n)) * 2 + pos = [[], []] + ct = 0 + for x in range(width // 2 + gap, min(width // 2 + gap + side, height - 1), 2): + for y in range((height - side) // 2, min((height - side) // 2 + side, height - 1), 2): + pos[ct % 2].append([x, y]) + ct += 1 + if xct1 <= ct: + break + + for x, y in pos[0] + pos[1]: + if not (0 < x < width - 1 and 0 < y < height - 1): + assert False + env.add_agents(handles[2], method="custom", pos=pos[0]) + env.add_agents(handles[3], method="custom", pos=pos[1]) + + +class _parallel_env(magent_parallel_env, EzPickle): + metadata = { + "render.modes": ["human", "rgb_array"], + 'name': "combined_arms_v5", + "video.frames_per_second": 5, + } + + def __init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features): + EzPickle.__init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features) + assert map_size >= 16, "size of map must be at least 16" + env = magent.GridWorld(load_config(map_size, minimap_mode, **reward_args)) + reward_vals = np.array([KILL_REWARD] + list(reward_args.values())) + reward_range = [np.minimum(reward_vals, 0).sum(), np.maximum(reward_vals, 0).sum()] + names = ["redmelee", "redranged", "bluemele", "blueranged"] + super().__init__(env, env.get_handles(), names, map_size, max_cycles, reward_range, minimap_mode, extra_features) + + def generate_map(self): + generate_map(self.env, self.map_size, self.handles) diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/gather_v4.py b/MLPY/Lib/site-packages/pettingzoo/magent/gather_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..6e8e78afebc165aa72d1840dd058f3ae5a62216c --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/magent/gather_v4.py @@ -0,0 +1,144 @@ +import math +import warnings + +import magent +import numpy as np +from gym.spaces import Box, Discrete +from gym.utils import EzPickle + +from pettingzoo import AECEnv +from pettingzoo.magent.render import Renderer +from pettingzoo.utils import agent_selector +from pettingzoo.utils.conversions import parallel_to_aec_wrapper, parallel_wrapper_fn + +from .magent_env import magent_parallel_env, make_env + +map_size = 200 +max_cycles_default = 500 +KILL_REWARD = 5 +minimap_mode_default = False +default_reward_args = dict(step_reward=-0.01, attack_penalty=-0.1, dead_penalty=-1, attack_food_reward=0.5) + + +def parallel_env(max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **reward_args): 
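+ # Editorial note (comment not in upstream PettingZoo): unlike the other MAgent
+ # environments in this diff, gather takes no map_size argument; it always uses the
+ # fixed module-level map_size of 200 defined above.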
+ env_reward_args = dict(**default_reward_args) + env_reward_args.update(reward_args) + return _parallel_env(map_size, minimap_mode, env_reward_args, max_cycles, extra_features) + + +def raw_env(max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **reward_args): + return parallel_to_aec_wrapper(parallel_env(max_cycles, minimap_mode, extra_features, **reward_args)) + + +env = make_env(raw_env) + + +def load_config(size, minimap_mode, step_reward, attack_penalty, dead_penalty, attack_food_reward): + gw = magent.gridworld + cfg = gw.Config() + + cfg.set({"map_width": size, "map_height": size}) + cfg.set({"minimap_mode": minimap_mode}) + + options = { + 'width': 1, 'length': 1, 'hp': 3, 'speed': 3, + 'view_range': gw.CircleRange(7), 'attack_range': gw.CircleRange(1), + 'damage': 6, 'step_recover': 0, 'attack_in_group': 1, + 'step_reward': step_reward, 'attack_penalty': attack_penalty, 'dead_penalty': dead_penalty + } + + agent = cfg.register_agent_type( + name="agent", + attr=options) + + options = { + 'width': 1, 'length': 1, 'hp': 25, 'speed': 0, + 'view_range': gw.CircleRange(1), 'attack_range': gw.CircleRange(0), + 'kill_reward': KILL_REWARD} + food = cfg.register_agent_type( + name='food', + attr=options) + + g_f = cfg.add_group(food) + g_s = cfg.add_group(agent) + + a = gw.AgentSymbol(g_s, index='any') + b = gw.AgentSymbol(g_f, index='any') + + cfg.add_reward_rule(gw.Event(a, 'attack', b), receiver=a, value=attack_food_reward) + + return cfg + + +class _parallel_env(magent_parallel_env, EzPickle): + metadata = { + 'render.modes': ['human','rgb_array'], + 'name': "gather_v4", + 'video.frames_per_second': 5, + } + + def __init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features): + EzPickle.__init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features) + env = magent.GridWorld(load_config(map_size, minimap_mode, **reward_args)) + handles = env.get_handles() + reward_vals = np.array([5] + list(reward_args.values())) + reward_range = [np.minimum(reward_vals, 0).sum(), np.maximum(reward_vals, 0).sum()] + names = ["omnivore"] + super().__init__(env, handles[1:], names, map_size, max_cycles, reward_range, minimap_mode, extra_features) + + def generate_map(self): + env, map_size = self.env, self.map_size + handles = env.get_handles()[1:] + food_handle = env.get_handles()[0] + center_x, center_y = map_size // 2, map_size // 2 + + def add_square(pos, side, gap): + side = int(side) + for x in range(center_x - side // 2, center_x + side // 2 + 1, gap): + pos.append([x, center_y - side // 2]) + pos.append([x, center_y + side // 2]) + for y in range(center_y - side // 2, center_y + side // 2 + 1, gap): + pos.append([center_x - side // 2, y]) + pos.append([center_x + side // 2, y]) + + # agent + pos = [] + add_square(pos, map_size * 0.9, 3) + add_square(pos, map_size * 0.8, 4) + add_square(pos, map_size * 0.7, 6) + env.add_agents(handles[0], method="custom", pos=pos) + + # food + pos = [] + add_square(pos, map_size * 0.65, 10) + add_square(pos, map_size * 0.6, 10) + add_square(pos, map_size * 0.55, 10) + add_square(pos, map_size * 0.5, 4) + add_square(pos, map_size * 0.45, 3) + add_square(pos, map_size * 0.4, 1) + add_square(pos, map_size * 0.3, 1) + add_square(pos, map_size * 0.3 - 2, 1) + add_square(pos, map_size * 0.3 - 4, 1) + add_square(pos, map_size * 0.3 - 6, 1) + env.add_agents(food_handle, method="custom", pos=pos) + + # pattern + pattern = ( [[int(not((i % 4 == 0 or i % 4 == 1) or (j % 4 == 0 or j % 4 == 1)) ) for j in 
range(53)] for i in range(53)]) + + def draw(base_x, base_y, data): + w, h = len(data), len(data[0]) + pos = [] + for i in range(w): + for j in range(h): + if data[i][j] == 1: + start_x = i + base_x + start_y = j + base_y + for x in range(start_x, start_x + 1): + for y in range(start_y, start_y + 1): + pos.append([y, x]) + + env.add_agents(food_handle, method="custom", pos=pos) + + w, h = len(pattern), len(pattern[0]) + + draw(map_size // 2 - w // 2, map_size // 2 - h // 2, pattern) diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/magent_env.py b/MLPY/Lib/site-packages/pettingzoo/magent/magent_env.py new file mode 100644 index 0000000000000000000000000000000000000000..040ab7d71bfc1889652843fcc9406cbd1a5c89b2 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/magent/magent_env.py @@ -0,0 +1,216 @@ +import math +import warnings + +import magent +import numpy as np +from gym.spaces import Box, Discrete +from gym.utils import seeding + +from pettingzoo import AECEnv +from pettingzoo.magent.render import Renderer +from pettingzoo.utils import agent_selector, wrappers +from pettingzoo.utils.env import ParallelEnv + + +def make_env(raw_env): + def env_fn(**kwargs): + env = raw_env(**kwargs) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + return env_fn + + +class magent_parallel_env(ParallelEnv): + def __init__(self, env, active_handles, names, map_size, max_cycles, reward_range, minimap_mode, extra_features): + self.map_size = map_size + self.max_cycles = max_cycles + self.minimap_mode = minimap_mode + self.extra_features = extra_features + self.env = env + self.handles = active_handles + self._all_handles = self.env.get_handles() + env.reset() + self.generate_map() + self.team_sizes = team_sizes = [env.get_num(handle) for handle in self.handles] + self.agents = [f"{names[j]}_{i}" for j in range(len(team_sizes)) for i in range(team_sizes[j])] + self.possible_agents = self.agents[:] + + num_actions = [env.get_action_space(handle)[0] for handle in self.handles] + action_spaces_list = [Discrete(num_actions[j]) for j in range(len(team_sizes)) for i in range(team_sizes[j])] + # may change depending on environment config? Not sure. 
+ team_obs_shapes = self._calc_obs_shapes() + state_shape = self._calc_state_shape() + observation_space_list = [Box(low=0., high=2., shape=team_obs_shapes[j], dtype=np.float32) for j in range(len(team_sizes)) for i in range(team_sizes[j])] + + self.state_space = Box(low=0., high=2., shape=state_shape, dtype=np.float32) + reward_low, reward_high = reward_range + + if extra_features: + for space in observation_space_list: + idx = space.shape[2] - 3 if minimap_mode else space.shape[2] - 1 + space.low[:, :, idx] = reward_low + space.high[:, :, idx] = reward_high + idx_state = self.state_space.shape[2] - 3 if minimap_mode else self.state_space.shape[2] - 1 + self.state_space.low[:, :, idx_state] = reward_low + self.state_space.high[:, :, idx_state] = reward_high + + self.action_spaces = {agent: space for agent, space in zip(self.agents, action_spaces_list)} + self.observation_spaces = {agent: space for agent, space in zip(self.agents, observation_space_list)} + + self._zero_obs = {agent: np.zeros_like(space.low) for agent, space in self.observation_spaces.items()} + self.base_state = np.zeros(self.state_space.shape, dtype='float32') + walls = self.env._get_walls_info() + wall_x, wall_y = zip(*walls) + self.base_state[wall_x, wall_y, 0] = 1 + self._renderer = None + self.frames = 0 + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def seed(self, seed=None): + if seed is None: + seed = seeding.create_seed(seed, max_bytes=4) + self.env.set_seed(seed) + + def _calc_obs_shapes(self): + view_spaces = [self.env.get_view_space(handle) for handle in self.handles] + feature_spaces = [self.env.get_feature_space(handle) for handle in self.handles] + assert all(len(tup) == 3 for tup in view_spaces) + assert all(len(tup) == 1 for tup in feature_spaces) + feat_size = [[fs[0]] for fs in feature_spaces] + for feature_space in feat_size: + if not self.extra_features: + feature_space[0] = 2 if self.minimap_mode else 0 + obs_spaces = [(view_space[:2] + (view_space[2] + feature_space[0],)) for view_space, feature_space in zip(view_spaces, feat_size)] + return obs_spaces + + def _calc_state_shape(self): + feature_spaces = [ + self.env.get_feature_space(handle) for handle in self._all_handles + ] + self._minimap_features = 2 if self.minimap_mode else 0 + # map channel and agent pair channel. 
Remove global agent position when minimap mode and extra features + state_depth = ( + (max(feature_spaces)[0] - self._minimap_features) * self.extra_features + + 1 + + len(self._all_handles) * 2 + ) + + return (self.map_size, self.map_size, state_depth) + + def render(self, mode="human"): + if self._renderer is None: + self._renderer = Renderer(self.env, self.map_size, mode) + assert mode == self._renderer.mode, "mode must be consistent across render calls" + return self._renderer.render(mode) + + def close(self): + if self._renderer is not None: + self._renderer.close() + self._renderer = None + + def reset(self): + self.agents = self.possible_agents[:] + self.env.reset() + self.frames = 0 + self.all_dones = {agent: False for agent in self.possible_agents} + self.generate_map() + return self._observe_all() + + def _observe_all(self): + observes = [None] * self.max_num_agents + for handle in self.handles: + ids = self.env.get_agent_id(handle) + view, features = self.env.get_observation(handle) + + if self.minimap_mode and not self.extra_features: + features = features[:, -2:] + if self.minimap_mode or self.extra_features: + feat_reshape = np.expand_dims(np.expand_dims(features, 1), 1) + feat_img = np.tile(feat_reshape, (1, view.shape[1], view.shape[2], 1)) + fin_obs = np.concatenate([view, feat_img], axis=-1) + else: + fin_obs = np.copy(view) + for id, obs in zip(ids, fin_obs): + observes[id] = obs + + ret_agents = set(self.agents) + return {agent: obs if obs is not None else self._zero_obs[agent] for agent, obs in zip(self.possible_agents, observes) if agent in ret_agents} + + def _all_rewards(self): + rewards = np.zeros(self.max_num_agents) + for handle in self.handles: + ids = self.env.get_agent_id(handle) + rewards[ids] = self.env.get_reward(handle) + ret_agents = set(self.agents) + return {agent: float(rew) for agent, rew in zip(self.possible_agents, rewards) if agent in ret_agents} + + def _all_dones(self, step_done=False): + dones = np.ones(self.max_num_agents, dtype=bool) + if not step_done: + for handle in self.handles: + ids = self.env.get_agent_id(handle) + dones[ids] = ~self.env.get_alive(handle) + ret_agents = set(self.agents) + return {agent: bool(done) for agent, done in zip(self.possible_agents, dones) if agent in ret_agents} + + def state(self): + ''' + Returns an observation of the global environment + ''' + state = np.copy(self.base_state) + + for handle in self._all_handles: + view, features = self.env.get_observation(handle) + + pos = self.env.get_pos(handle) + pos_x, pos_y = zip(*pos) + state[pos_x, pos_y, 1 + handle.value * 2] = 1 + state[pos_x, pos_y, 2 + handle.value * 2] = view[:, view.shape[1] // 2, view.shape[2] // 2, 2] + + if self.extra_features: + add_zeros = np.zeros( + ( + features.shape[0], + state.shape[2] + - (1 + len(self.team_sizes) * 2 + features.shape[1] - self._minimap_features), + ) + ) + + rewards = features[:, -1 - self._minimap_features] + actions = features[:, :-1 - self._minimap_features] + actions = np.concatenate((actions, add_zeros), axis=1) + rewards = rewards.reshape(len(rewards), 1) + state_features = np.hstack((actions, rewards)) + + state[pos_x, pos_y, 1 + len(self.team_sizes) * 2:] = state_features + return state + + def step(self, all_actions): + action_list = [0] * self.max_num_agents + for i, agent in enumerate(self.possible_agents): + if agent in all_actions: + action_list[i] = all_actions[agent] + all_actions = np.asarray(action_list, dtype=np.int32) + start_point = 0 + for i in range(len(self.handles)): + size = 
self.team_sizes[i] + self.env.set_action(self.handles[i], all_actions[start_point:(start_point + size)]) + start_point += size + + self.frames += 1 + done = self.env.step() or self.frames >= self.max_cycles + + all_infos = {agent: {} for agent in self.agents} + all_dones = self._all_dones(done) + all_rewards = self._all_rewards() + all_observes = self._observe_all() + self.all_dones = all_dones + self.env.clear_dead() + self.agents = [agent for agent in self.agents if not self.all_dones[agent]] + return all_observes, all_rewards, all_dones, all_infos diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/render.py b/MLPY/Lib/site-packages/pettingzoo/magent/render.py new file mode 100644 index 0000000000000000000000000000000000000000..b4632134bad4944a425a194617e351993c2b2cd5 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/magent/render.py @@ -0,0 +1,258 @@ +import math + +import magent +import numpy as np + +animation_total = 2 +animation_stop = 0 +background_rgb = (255, 255, 255) +attack_line_rgb = (0, 0, 0) +attack_dot_rgb = (0, 0, 0) +attack_dot_size = 0.3 +text_rgb = (0, 0, 0) +text_size = 16 +text_spacing = 3 +banner_size = 20 +banner_spacing = 3 +bigscreen_size = 72 +bigscreen_spacing = 0 +grid_rgba = ((0, 0, 0), 30) +grid_size = 8 + + +def draw_line(surface, color, a, b): + import pygame + pygame.draw.line( + surface, color, + (int(round(a[0])), int(round(a[1]))), + (int(round(b[0])), int(round(b[1]))) + ) + + +def draw_rect(surface, color, a, w, h): + import pygame + pygame.draw.rect(surface, color, pygame.Rect(*map(int, ( + round(a[0]), round(a[1]), + round(w + a[0] - round(a[0])), + round(h + a[1] - round(a[1])))))) + + +def draw_rect_matrix(matrix, color, a, w, h, resolution): + x, y, w, h = map(int, (round(a[0]), round(a[1]), round(w + a[0] - round(a[0])), round(h + a[1] - round(a[1])))) + matrix[max(x, 0):min(x + w, resolution[0]), max(y, 0):min(h + y, resolution[1]), :] = color + + +def draw_line_matrix(matrix, color, a, b, resolution): + a = (min(max(0, a[0]), resolution[0] - 1), min(max(0, a[1]), resolution[1] - 1)) + b = (min(max(0, b[0]), resolution[0] - 1), min(max(0, b[1]), resolution[1] - 1)) + a = map(int, (round(a[0]), round(a[1]))) + b = map(int, (round(b[0]), round(b[1]))) + if a[0] == b[0]: + if a[1] > b[1]: + matrix[a[0], b[1]:a[1] + 1] = color + else: + matrix[a[0], a[1]:b[1] + 1] = color + elif a[1] == b[1]: + if a[0] > b[0]: + matrix[b[0]:a[0] + 1, a[1]] = color + else: + matrix[a[0]:b[0] + 1, a[1]] = color + else: + raise NotImplementedError + + +class Renderer: + def __init__(self, env, map_size, mode): + import pygame + self.env = env + self.mode = mode + self.handles = self.env.get_handles() + base_resolution = (map_size * 8, map_size * 8 + 15) + if mode == "human": + pygame.init() + pygame.display.init() + infoObject = pygame.display.Info() + screen_size = (infoObject.current_w - 50, infoObject.current_h - 50) + self.resolution = resolution = np.min([screen_size, base_resolution], axis=0) + self.display = pygame.display.set_mode(resolution, pygame.DOUBLEBUF, 0) + canvas_resolution = (resolution[0], resolution[1]) + self.canvas = pygame.Surface(canvas_resolution) + pygame.display.set_caption('MAgent Renderer Window') + elif mode == "rgb_array": + pygame.font.init() + self.resolution = base_resolution + self.display = pygame.Surface(base_resolution) + canvas_resolution = (base_resolution[0], base_resolution[1]) + self.canvas = pygame.Surface(canvas_resolution) + + self.text_formatter = pygame.font.SysFont(None, text_size, True) + self.banner_formatter = 
pygame.font.SysFont(None, banner_size, True) + self.bigscreen_formatter = pygame.font.SysFont(None, bigscreen_size, True) + + self.map_size = (map_size, map_size) + + self.frame_id = 0 + + self.old_data = None + self.new_data = None + + self.need_static_update = True + self.animation_progress = 0 + + def get_banners(self, frame_id, resolution): + groups = self.env._get_groups_info() + + def form_txt(index): + handle = self.handles[index] + color = tuple(int(a) for a in groups[index][2:]) + return f'{np.sum(self.env.get_alive(handle).astype(np.int32))}', color + if len(self.handles) == 1: + result = [(form_txt(0), )] + if len(self.handles) == 2: + vs = ' vs ', (0, 0, 0) + result = [(form_txt(0), vs, form_txt(1))] + elif len(self.handles) == 4: + vs = ' vs ', (0, 0, 0) + comma = ', ', (0, 0, 0) + result = [(form_txt(0), comma, form_txt(1), vs, form_txt(2), comma, form_txt(3))] + else: + raise RuntimeError("bad number of handles") + + return result + + def close(self): + import pygame + pygame.display.quit() + pygame.quit() + + def render(self, mode): + import os + + import pygame + + env = self.env + self.groups = env._get_groups_info() + resolution = self.resolution + + grid_map = np.zeros((resolution[0], resolution[1], 3), dtype=np.int16) + view_position = [self.map_size[0] / 2 * grid_size - resolution[0] / 2, + self.map_size[1] / 2 * grid_size - resolution[1] / 2] + + groups = self.groups + banner_formatter = self.banner_formatter + status = True + triggered = False + # x_range: which vertical gridlines should be shown on the display + # y_range: which horizontal gridlines should be shown on the display + x_range = ( + max(0, int(math.floor(max(0, view_position[0]) / grid_size))), + min(self.map_size[0], int(math.ceil(max(0, view_position[0] + resolution[0]) / grid_size))) + ) + + y_range = ( + max(0, int(math.floor(max(0, view_position[1]) / grid_size))), + min(self.map_size[1], int(math.ceil(max(0, view_position[1] + resolution[1]) / grid_size))) + ) + + self.canvas.fill(background_rgb) + self.display.fill(background_rgb) + + if self.need_static_update or True: + grids = pygame.Surface(resolution) + grids.fill(background_rgb) + + if self.new_data is None or self.animation_progress > animation_total + animation_stop: + pos, event = env._get_render_info(x_range, y_range) + buffered_new_data = pos, event + + if buffered_new_data is None: + buffered_new_data = self.new_data + self.old_data = self.new_data + self.new_data = buffered_new_data + self.animation_progress = 0 + + if self.new_data is not None: + if self.old_data is None and self.animation_progress == 0: + self.animation_progress = animation_total + + if self.need_static_update or True: + pygame.pixelcopy.surface_to_array(grid_map, self.canvas) + for wall in env._get_walls_info(): + x, y = wall[0], wall[1] + if x >= x_range[0] and x <= x_range[1] and y >= y_range[0] and y <= y_range[1]: + draw_rect_matrix(grid_map, (127, 127, 127), + (x * grid_size - view_position[0], y * grid_size - view_position[1]), + grid_size, grid_size, resolution) + pygame.pixelcopy.array_to_surface(self.canvas, grid_map) + + for key in self.new_data[0]: + new_prop = self.new_data[0][key] + new_group = groups[new_prop[2]] + now_prop = new_prop + now_group = new_group + draw_rect( + self.canvas, (int(now_group[2]), int(now_group[3]), int(now_group[4])), + ( + now_prop[0] * grid_size - view_position[0], + now_prop[1] * grid_size - view_position[1] + ), + now_group[0] * grid_size, + now_group[1] * grid_size + ) + + for key, event_x, event_y in self.new_data[1]: 
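+            # Editor's note (descriptive comment, not part of the upstream PettingZoo source):
+            # self.new_data is the (positions, events) pair returned by
+            # env._get_render_info(x_range, y_range). Each entry of new_data[1]
+            # appears to be an attack event of the form (attacker_key, target_x, target_y)
+            # in grid coordinates; the loop below looks the attacker up in new_data[0],
+            # draws a line from the attacker's centre to the attacked cell, and marks
+            # that cell with a small square dot (attack_line_rgb / attack_dot_rgb / attack_dot_size).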
+ if key not in self.new_data[0]: + continue + new_prop = self.new_data[0][key] + new_group = groups[new_prop[2]] + now_prop = new_prop + now_group = new_group + draw_line( + self.canvas, attack_line_rgb, + ( + now_prop[0] * grid_size - view_position[0] + now_group[0] / 2 * grid_size, + now_prop[1] * grid_size - view_position[1] + now_group[1] / 2 * grid_size + ), + ( + event_x * grid_size - view_position[0] + grid_size / 2, + event_y * grid_size - view_position[1] + grid_size / 2 + ) + ) + draw_rect( + self.canvas, attack_dot_rgb, + ( + event_x * grid_size - view_position[0] + grid_size / 2 - attack_dot_size * grid_size / 2, + event_y * grid_size - view_position[1] + grid_size / 2 - attack_dot_size * grid_size / 2, + ), + attack_dot_size * grid_size, + attack_dot_size * grid_size + ) + + if status or triggered or self.animation_progress < animation_total + animation_stop: + self.animation_progress += 1 + + self.display.blit(self.canvas, (0, 7)) + + height_now = 0 + for texts in self.get_banners(self.frame_id, resolution): + content = [] + width, height = 0, 0 + for text in texts: + text = banner_formatter.render(text[0], True, pygame.Color(*text[1])) + content.append((text, width)) + width += text.get_width() + height = max(height, text.get_height()) + start = (resolution[0] - width) / 2.0 + for b in content: + self.display.blit(b[0], (start + b[1], height_now)) + height_now += height + banner_spacing + + if self.need_static_update: + self.need_static_update = False + + observation = pygame.surfarray.pixels3d(self.display) + new_observation = np.copy(observation) + del observation + if self.mode == 'human': + pygame.display.flip() + return np.transpose(new_observation, axes=(1, 0, 2)) if mode == "rgb_array" else None diff --git a/MLPY/Lib/site-packages/pettingzoo/magent/tiger_deer_v3.py b/MLPY/Lib/site-packages/pettingzoo/magent/tiger_deer_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..237d2f762bddbece60cee7dc4b7b96bdd8b5b635 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/magent/tiger_deer_v3.py @@ -0,0 +1,105 @@ +import math +import warnings + +import magent +import numpy as np +from gym.spaces import Box, Discrete +from gym.utils import EzPickle + +from pettingzoo import AECEnv +from pettingzoo.magent.render import Renderer +from pettingzoo.utils import agent_selector +from pettingzoo.utils.conversions import parallel_to_aec_wrapper, parallel_wrapper_fn + +from .magent_env import magent_parallel_env, make_env + +default_map_size = 45 +max_cycles_default = 300 +minimap_mode_default = False +default_env_args = dict(tiger_step_recover=-0.1, deer_attacked=-0.1) + + +def parallel_env(map_size=default_map_size, max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **env_args): + env_env_args = dict(**default_env_args) + env_env_args.update(env_args) + return _parallel_env(map_size, minimap_mode, env_env_args, max_cycles, extra_features) + + +def raw_env(map_size=default_map_size, max_cycles=max_cycles_default, minimap_mode=minimap_mode_default, extra_features=False, **env_args): + return parallel_to_aec_wrapper(parallel_env(map_size, max_cycles, minimap_mode, extra_features, **env_args)) + + +env = make_env(raw_env) + + +def get_config(map_size, minimap_mode, tiger_step_recover, deer_attacked): + gw = magent.gridworld + cfg = gw.Config() + + cfg.set({"map_width": map_size, "map_height": map_size}) + cfg.set({"embedding_size": 10}) + cfg.set({"minimap_mode": minimap_mode}) + + options = { + 'width': 1, 'length': 1, 'hp': 5, 
'speed': 1, + 'view_range': gw.CircleRange(1), 'attack_range': gw.CircleRange(0), + 'step_recover': 0.2, + 'kill_supply': 8, 'dead_penalty': -1., + } + + deer = cfg.register_agent_type( + "deer", + options) + + options = { + 'width': 1, 'length': 1, 'hp': 10, 'speed': 1, + 'view_range': gw.CircleRange(4), 'attack_range': gw.CircleRange(1), + 'damage': 1, 'step_recover': tiger_step_recover + } + tiger = cfg.register_agent_type( + "tiger", + options) + + deer_group = cfg.add_group(deer) + tiger_group = cfg.add_group(tiger) + + a = gw.AgentSymbol(tiger_group, index='any') + b = gw.AgentSymbol(tiger_group, index='any') + c = gw.AgentSymbol(deer_group, index='any') + + # tigers get reward when they attack a deer simultaneously + e1 = gw.Event(a, 'attack', c) + e2 = gw.Event(b, 'attack', c) + tiger_attack_rew = 1 + # reward is halved because the reward is double counted + cfg.add_reward_rule(e1 & e2, receiver=[a, b], value=[tiger_attack_rew / 2, tiger_attack_rew / 2]) + cfg.add_reward_rule(e1, receiver=[c], value=[deer_attacked]) + + return cfg + + +class _parallel_env(magent_parallel_env, EzPickle): + metadata = { + "render.modes": ["human", "rgb_array"], + 'name': "tiger_deer_v3", + "video.frames_per_second": 5, + } + def __init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features): + EzPickle.__init__(self, map_size, minimap_mode, reward_args, max_cycles, extra_features) + assert map_size >= 10, "size of map must be at least 10" + env = magent.GridWorld(get_config(map_size, minimap_mode, **reward_args), map_size=map_size) + + handles = env.get_handles() + reward_vals = np.array([1, -1] + list(reward_args.values())) + reward_range = [np.minimum(reward_vals, 0).sum(), np.maximum(reward_vals, 0).sum()] + + names = ["deer", "tiger"] + super().__init__(env, handles, names, map_size, max_cycles, reward_range, minimap_mode, extra_features) + + def generate_map(self): + env, map_size = self.env, self.map_size + handles = env.get_handles() + + env.add_walls(method="random", n=map_size * map_size * 0.04) + env.add_agents(handles[0], method="random", n=map_size * map_size * 0.05) + env.add_agents(handles[1], method="random", n=map_size * map_size * 0.01) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/__init__.py b/MLPY/Lib/site-packages/pettingzoo/mpe/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ded2c453b63215d5c2e1c2eb55d820238962a0fb --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/__init__.py @@ -0,0 +1,5 @@ +from pettingzoo.utils.deprecated_module import depricated_handler + + +def __getattr__(env_name): + return depricated_handler(env_name, __path__, __name__) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6d7aba078235cc99f3507dcf9233fa014c26f120 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_adversary_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_adversary_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d45536b2a4078ffd2388aa66754a764262d9b6d6 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_adversary_v2.cpython-39.pyc differ diff --git 
a/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_crypto_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_crypto_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d8f4600edfa9fd2dd6ca974ac5cc64916616f3af Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_crypto_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_push_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_push_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..78ad7691053f90dca8ed31000c8a760a847e0ca7 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_push_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_reference_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_reference_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4a3bf98d383f47e97443160b0db61630e0efb07f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_reference_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_speaker_listener_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_speaker_listener_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..71390742cbf698838d0825b87fd4a35330442992 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_speaker_listener_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_spread_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_spread_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a8d12aaca4bae96e38615fcd89bd0f51161cd96e Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_spread_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_tag_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_tag_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..298896d9b5afce661f6686df1e10cb67b792b36a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_tag_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e5d3be6e4f317dc97a3a307df75635209c6d23a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_world_comm_v2.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_world_comm_v2.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ec8dbd45c768f62bcd8e2d01181d73e990ff5f80 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/__pycache__/simple_world_comm_v2.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__init__.py b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..75e130a5adfb16aaeab2bd15588c738fea445966 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/core.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/core.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fcf37992d36c028053bae232e2e1b2b68bcda99c Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/core.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/rendering.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/rendering.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f6dba8e8dee9b0b4207291266cebb74f3f55606c Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/rendering.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/scenario.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/scenario.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..73a63aff0eb0dde11eaf6785c8c3a9bf2fab695b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/scenario.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/simple_env.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/simple_env.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cdba95ff8742fc90cebf348466b976f034f02e9f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/__pycache__/simple_env.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/core.py b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/core.py new file mode 100644 index 0000000000000000000000000000000000000000..cf7d821ba6876716e39d4d8934aa5c9b81b2e041 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/core.py @@ -0,0 +1,199 @@ +import numpy as np + + +class EntityState: # physical/external base state of all entities + def __init__(self): + # physical position + self.p_pos = None + # physical velocity + self.p_vel = None + + +class AgentState(EntityState): # state of agents (including communication and internal/mental state) + def __init__(self): + super().__init__() + # communication utterance + self.c = None + + +class Action: # action of the agent + def __init__(self): + # physical action + self.u = None + # communication action + self.c = None + + +class Entity: # properties and state of physical world entity + def __init__(self): + # name + self.name = '' + # properties: + self.size = 0.050 + # entity can move / be pushed + self.movable = False + # entity collides with others + self.collide = True + # material density (affects mass) + self.density = 25.0 + # color + self.color = None + # max speed and accel + self.max_speed = None + self.accel = None + # state + self.state = EntityState() + # mass + self.initial_mass = 1.0 + + @property + def mass(self): + return self.initial_mass + + +class Landmark(Entity): # properties of landmark entities + def 
__init__(self): + super().__init__() + + +class Agent(Entity): # properties of agent entities + def __init__(self): + super().__init__() + # agents are movable by default + self.movable = True + # cannot send communication signals + self.silent = False + # cannot observe the world + self.blind = False + # physical motor noise amount + self.u_noise = None + # communication noise amount + self.c_noise = None + # control range + self.u_range = 1.0 + # state + self.state = AgentState() + # action + self.action = Action() + # script behavior to execute + self.action_callback = None + + +class World: # multi-agent world + def __init__(self): + # list of agents and entities (can change at execution-time!) + self.agents = [] + self.landmarks = [] + # communication channel dimensionality + self.dim_c = 0 + # position dimensionality + self.dim_p = 2 + # color dimensionality + self.dim_color = 3 + # simulation timestep + self.dt = 0.1 + # physical damping + self.damping = 0.25 + # contact response parameters + self.contact_force = 1e+2 + self.contact_margin = 1e-3 + + # return all entities in the world + @property + def entities(self): + return self.agents + self.landmarks + + # return all agents controllable by external policies + @property + def policy_agents(self): + return [agent for agent in self.agents if agent.action_callback is None] + + # return all agents controlled by world scripts + @property + def scripted_agents(self): + return [agent for agent in self.agents if agent.action_callback is not None] + + # update state of the world + def step(self): + # set actions for scripted agents + for agent in self.scripted_agents: + agent.action = agent.action_callback(agent, self) + # gather forces applied to entities + p_force = [None] * len(self.entities) + # apply agent physical controls + p_force = self.apply_action_force(p_force) + # apply environment forces + p_force = self.apply_environment_force(p_force) + # integrate physical state + self.integrate_state(p_force) + # update agent state + for agent in self.agents: + self.update_agent_state(agent) + + # gather agent action forces + def apply_action_force(self, p_force): + # set applied forces + for i, agent in enumerate(self.agents): + if agent.movable: + noise = np.random.randn(*agent.action.u.shape) * agent.u_noise if agent.u_noise else 0.0 + p_force[i] = agent.action.u + noise + return p_force + + # gather physical forces acting on entities + def apply_environment_force(self, p_force): + # simple (but inefficient) collision response + for a, entity_a in enumerate(self.entities): + for b, entity_b in enumerate(self.entities): + if(b <= a): + continue + [f_a, f_b] = self.get_collision_force(entity_a, entity_b) + if(f_a is not None): + if(p_force[a] is None): + p_force[a] = 0.0 + p_force[a] = f_a + p_force[a] + if(f_b is not None): + if(p_force[b] is None): + p_force[b] = 0.0 + p_force[b] = f_b + p_force[b] + return p_force + + # integrate physical state + def integrate_state(self, p_force): + for i, entity in enumerate(self.entities): + if not entity.movable: + continue + entity.state.p_vel = entity.state.p_vel * (1 - self.damping) + if (p_force[i] is not None): + entity.state.p_vel += (p_force[i] / entity.mass) * self.dt + if entity.max_speed is not None: + speed = np.sqrt(np.square(entity.state.p_vel[0]) + np.square(entity.state.p_vel[1])) + if speed > entity.max_speed: + entity.state.p_vel = entity.state.p_vel / np.sqrt(np.square(entity.state.p_vel[0]) + np.square(entity.state.p_vel[1])) * entity.max_speed + entity.state.p_pos += 
entity.state.p_vel * self.dt + + def update_agent_state(self, agent): + # set communication state (directly for now) + if agent.silent: + agent.state.c = np.zeros(self.dim_c) + else: + noise = np.random.randn(*agent.action.c.shape) * agent.c_noise if agent.c_noise else 0.0 + agent.state.c = agent.action.c + noise + + # get collision forces for any contact between two entities + def get_collision_force(self, entity_a, entity_b): + if (not entity_a.collide) or (not entity_b.collide): + return [None, None] # not a collider + if (entity_a is entity_b): + return [None, None] # don't collide against itself + # compute actual distance between entities + delta_pos = entity_a.state.p_pos - entity_b.state.p_pos + dist = np.sqrt(np.sum(np.square(delta_pos))) + # minimum allowable distance + dist_min = entity_a.size + entity_b.size + # softmax penetration + k = self.contact_margin + penetration = np.logaddexp(0, -(dist - dist_min) / k) * k + force = self.contact_force * delta_pos / dist * penetration + force_a = +force if entity_a.movable else None + force_b = -force if entity_b.movable else None + return [force_a, force_b] diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/rendering.py b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/rendering.py new file mode 100644 index 0000000000000000000000000000000000000000..61d37117958daab725942343f0dd922de3c0f4c3 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/rendering.py @@ -0,0 +1,438 @@ +""" +2D rendering framework +""" +import math +import os +import sys + +import numpy as np +from gym import error +from pyglet.gl import (GL_BLEND, GL_LINE_LOOP, GL_LINE_SMOOTH, GL_LINE_SMOOTH_HINT, GL_LINE_STIPPLE, + GL_LINE_STRIP, GL_LINES, GL_NICEST, GL_ONE_MINUS_SRC_ALPHA, GL_POINTS, + GL_POLYGON, GL_QUADS, GL_SRC_ALPHA, GL_TRIANGLES) + +try: + import pyglet +except ImportError: + raise ImportError("HINT: you can install pyglet directly via 'pip install pyglet'. But if you really just want to install all Gym dependencies and not have to think about it, 'pip install -e .[all]' or 'pip install gym[all]' will do it.") + +try: + from pyglet.gl import (glBegin, glBlendFunc, glClearColor, glColor4f, glDisable, glEnable, + glEnd, glHint, glLineStipple, glLineWidth, glPopMatrix, glPushMatrix, + glRotatef, glScalef, glTranslatef, gluOrtho2D, glVertex2f, glVertex3f) +except ImportError: + raise ImportError("""Error occurred while running `from pyglet.gl import ...` + HINT: make sure you have OpenGL install. On Ubuntu, you can run 'apt-get install python-opengl'. If you're running on a server, you may need a virtual frame buffer; something like this should work: 'xvfb-run -s \"-screen 0 1400x900x24\" python '""") + + +if "Apple" in sys.version: + if 'DYLD_FALLBACK_LIBRARY_PATH' in os.environ: + os.environ['DYLD_FALLBACK_LIBRARY_PATH'] += ':/usr/lib' + + +RAD2DEG = 57.29577951308232 + + +def get_display(spec): + """Convert a display specification (such as :0) into an actual Display + object. + + Pyglet only supports multiple Displays on Linux. + """ + if spec is None: + return None + elif isinstance(spec, str): + return pyglet.canvas.Display(spec) + else: + raise error.Error(f'Invalid display specification: {spec}. 
(Must be a string like :0 or None.)') + + +class Viewer: + def __init__(self, width, height, display=None): + display = get_display(display) + + self.width = width + self.height = height + + self.window = pyglet.window.Window(width=width, height=height, display=display) + self.window.on_close = self.window_closed_by_user + self.geoms = [] + self.onetime_geoms = [] + self.text_lines = [] + self.transform = Transform() + self.max_size = 1 + + glEnable(GL_BLEND) + # glEnable(GL_MULTISAMPLE) + glEnable(GL_LINE_SMOOTH) + # glHint(GL_LINE_SMOOTH_HINT, GL_DONT_CARE) + glHint(GL_LINE_SMOOTH_HINT, GL_NICEST) + glLineWidth(2.0) + glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA) + + def close(self): + self.window.close() + + def window_closed_by_user(self): + self.close() + + def set_max_size(self, current_size): + max_size = self.max_size = max(current_size, self.max_size) + left = -max_size + right = max_size + bottom = -max_size + top = max_size + assert right > left and top > bottom + scalex = self.width / (right - left) + scaley = self.height / (top - bottom) + self.transform = Transform( + translation=(-left * scalex, -bottom * scaley), + scale=(scalex, scaley)) + + def add_geom(self, geom): + self.geoms.append(geom) + + def add_onetime(self, geom): + self.onetime_geoms.append(geom) + + def render(self, return_rgb_array=False): + glClearColor(1, 1, 1, 1) + self.window.clear() + self.window.switch_to() + self.window.dispatch_events() + self.transform.enable() + for geom in self.geoms: + geom.render() + for geom in self.onetime_geoms: + geom.render() + self.transform.disable() + + pyglet.gl.glMatrixMode(pyglet.gl.GL_PROJECTION) + pyglet.gl.glLoadIdentity() + gluOrtho2D(0, self.window.width, 0, self.window.height) + for geom in self.text_lines: + geom.render() + + arr = None + if return_rgb_array: + buffer = pyglet.image.get_buffer_manager().get_color_buffer() + image_data = buffer.get_image_data() + arr = np.fromstring(image_data.get_data(), dtype=np.uint8, sep='') + # In https://github.com/openai/gym-http-api/issues/2, we + # discovered that someone using Xmonad on Arch was having + # a window of size 598 x 398, though a 600 x 400 window + # was requested. (Guess Xmonad was preserving a pixel for + # the boundary.) So we use the buffer height/width rather + # than the requested one. 
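+            # Editor's note (descriptive comment, not part of the upstream source):
+            # the colour buffer arrives as raw RGBA bytes with the origin at the
+            # bottom-left of the window, so the code below reshapes it to
+            # (buffer.height, buffer.width, 4), flips it vertically with [::-1]
+            # and drops the alpha channel to yield a top-to-bottom RGB array.
+            # np.fromstring is deprecated for binary data in recent NumPy releases;
+            # np.frombuffer is the usual replacement.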
+ arr = arr.reshape(buffer.height, buffer.width, 4) + arr = arr[::-1, :, 0:3] + self.window.flip() + self.onetime_geoms = [] + return arr + + # Convenience + def draw_circle(self, radius=10, res=30, filled=True, **attrs): + geom = make_circle(radius=radius, res=res, filled=filled) + _add_attrs(geom, attrs) + self.add_onetime(geom) + return geom + + def draw_polygon(self, v, filled=True, **attrs): + geom = make_polygon(v=v, filled=filled) + _add_attrs(geom, attrs) + self.add_onetime(geom) + return geom + + def draw_polyline(self, v, **attrs): + geom = make_polyline(v=v) + _add_attrs(geom, attrs) + self.add_onetime(geom) + return geom + + def draw_line(self, start, end, **attrs): + geom = Line(start, end) + _add_attrs(geom, attrs) + self.add_onetime(geom) + return geom + + def get_array(self): + self.window.flip() + image_data = pyglet.image.get_buffer_manager().get_color_buffer().get_image_data() + self.window.flip() + arr = np.fromstring(image_data.get_data(), dtype=np.uint8, sep='') + arr = arr.reshape(self.height, self.width, 4) + return arr[::-1, :, 0:3] + + +def _add_attrs(geom, attrs): + if "color" in attrs: + geom.set_color(*attrs["color"]) + if "linewidth" in attrs: + geom.set_linewidth(attrs["linewidth"]) + + +class Geom: + def __init__(self): + self._color = Color((0, 0, 0, 1.0)) + self.attrs = [self._color] + + def render(self): + for attr in reversed(self.attrs): + attr.enable() + self.render1() + for attr in self.attrs: + attr.disable() + + def render1(self): + raise NotImplementedError + + def add_attr(self, attr): + self.attrs.append(attr) + + def set_color(self, r, g, b, alpha=1): + self._color.vec4 = (r, g, b, alpha) + + +class Attr: + def enable(self): + raise NotImplementedError + + def disable(self): + pass + + +class Transform(Attr): + def __init__(self, translation=(0.0, 0.0), rotation=0.0, scale=(1, 1)): + self.set_translation(*translation) + self.set_rotation(rotation) + self.set_scale(*scale) + + def enable(self): + glPushMatrix() + glTranslatef(self.translation[0], self.translation[1], 0) # translate to GL loc ppint + glRotatef(RAD2DEG * self.rotation, 0, 0, 1.0) + glScalef(self.scale[0], self.scale[1], 1) + + def disable(self): + glPopMatrix() + + def set_translation(self, newx, newy): + self.translation = (float(newx), float(newy)) + + def set_rotation(self, new): + self.rotation = float(new) + + def set_scale(self, newx, newy): + self.scale = (float(newx), float(newy)) + + +class Color(Attr): + def __init__(self, vec4): + self.vec4 = vec4 + + def enable(self): + glColor4f(*self.vec4) + + +class LineStyle(Attr): + def __init__(self, style): + self.style = style + + def enable(self): + glEnable(GL_LINE_STIPPLE) + glLineStipple(1, self.style) + + def disable(self): + glDisable(GL_LINE_STIPPLE) + + +class LineWidth(Attr): + def __init__(self, stroke): + self.stroke = stroke + + def enable(self): + glLineWidth(self.stroke) + + +class Point(Geom): + def __init__(self): + Geom.__init__(self) + + def render1(self): + (GL_POINTS) # draw point + glVertex3f(0.0, 0.0, 0.0) + glEnd() + + +class TextLine: + def __init__(self, window, idx): + self.idx = idx + self.window = window + pyglet.font.add_file(os.path.join(os.path.dirname(__file__), "secrcode.ttf")) + self.label = None + self.set_text('') + + def render(self): + if self.label is not None: + self.label.draw() + + def set_text(self, text): + if pyglet.font.have_font('Courier'): + font = "Courier" + elif pyglet.font.have_font('Secret Code'): + font = "Secret Code" + else: + return + + self.label = pyglet.text.Label(text, 
+ font_name=font, + color=(0, 0, 0, 255), + font_size=20, + x=0, y=self.idx * 40 + 20, + anchor_x="left", anchor_y="bottom") + + self.label.draw() + + +class FilledPolygon(Geom): + def __init__(self, v): + Geom.__init__(self) + self.v = v + + def render1(self): + if len(self.v) == 4: + glBegin(GL_QUADS) + elif len(self.v) > 4: + glBegin(GL_POLYGON) + else: + glBegin(GL_TRIANGLES) + for p in self.v: + glVertex3f(p[0], p[1], 0) # draw each vertex + glEnd() + + color = (self._color.vec4[0] * 0.5, self._color.vec4[1] * 0.5, self._color.vec4[2] * 0.5, self._color.vec4[3] * 0.5) + glColor4f(*color) + glBegin(GL_LINE_LOOP) + for p in self.v: + glVertex3f(p[0], p[1], 0) # draw each vertex + glEnd() + + +def make_circle(radius=10, res=30, filled=True): + points = [] + for i in range(res): + ang = 2 * math.pi * i / res + points.append((math.cos(ang) * radius, math.sin(ang) * radius)) + if filled: + return FilledPolygon(points) + else: + return PolyLine(points, True) + + +def make_polygon(v, filled=True): + if filled: + return FilledPolygon(v) + else: + return PolyLine(v, True) + + +def make_polyline(v): + return PolyLine(v, False) + + +def make_capsule(length, width): + l, r, t, b = 0, length, width / 2, -width / 2 + box = make_polygon([(l, b), (l, t), (r, t), (r, b)]) + circ0 = make_circle(width / 2) + circ1 = make_circle(width / 2) + circ1.add_attr(Transform(translation=(length, 0))) + geom = Compound([box, circ0, circ1]) + return geom + + +class Compound(Geom): + def __init__(self, gs): + Geom.__init__(self) + self.gs = gs + for g in self.gs: + g.attrs = [a for a in g.attrs if not isinstance(a, Color)] + + def render1(self): + for g in self.gs: + g.render() + + +class PolyLine(Geom): + def __init__(self, v, close): + Geom.__init__(self) + self.v = v + self.close = close + self.linewidth = LineWidth(1) + self.add_attr(self.linewidth) + + def render1(self): + glBegin(GL_LINE_LOOP if self.close else GL_LINE_STRIP) + for p in self.v: + glVertex3f(p[0], p[1], 0) # draw each vertex + glEnd() + + def set_linewidth(self, x): + self.linewidth.stroke = x + + +class Line(Geom): + def __init__(self, start=(0.0, 0.0), end=(0.0, 0.0)): + Geom.__init__(self) + self.start = start + self.end = end + self.linewidth = LineWidth(1) + self.add_attr(self.linewidth) + + def render1(self): + glBegin(GL_LINES) + glVertex2f(*self.start) + glVertex2f(*self.end) + glEnd() + + +class Image(Geom): + def __init__(self, fname, width, height): + Geom.__init__(self) + self.width = width + self.height = height + img = pyglet.image.load(fname) + self.img = img + self.flip = False + + def render1(self): + self.img.blit(-self.width / 2, -self.height / 2, width=self.width, height=self.height) + + +class SimpleImageViewer: + def __init__(self, display=None): + self.window = None + self.isopen = False + self.display = display + + def imshow(self, arr): + if self.window is None: + height, width, channels = arr.shape + self.window = pyglet.window.Window(width=width, height=height, display=self.display) + self.width = width + self.height = height + self.isopen = True + assert arr.shape == (self.height, self.width, 3), "You passed in an image with the wrong number shape" + image = pyglet.image.ImageData(self.width, self.height, 'RGB', arr.tobytes(), pitch=self.width * -3) + self.window.clear() + self.window.switch_to() + self.window.dispatch_events() + image.blit(0, 0) + self.window.flip() + + def close(self): + if self.isopen: + self.window.close() + self.isopen = False + + def __del__(self): + self.close() diff --git 
a/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/scenario.py b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/scenario.py new file mode 100644 index 0000000000000000000000000000000000000000..d468667a31efc988e2b74b2804770a2c8a6c8b14 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/scenario.py @@ -0,0 +1,6 @@ +class BaseScenario: # defines scenario upon which the world is built + def make_world(self): # create elements of the world + raise NotImplementedError() + + def reset_world(self, world, np_random): # create initial conditions of the world + raise NotImplementedError() diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/secrcode.ttf b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/secrcode.ttf new file mode 100644 index 0000000000000000000000000000000000000000..2e077c4347af09d269791dd0931317b973d3b588 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/secrcode.ttf differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/simple_env.py b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/simple_env.py new file mode 100644 index 0000000000000000000000000000000000000000..7c50a75c469e11d636aaa7b5081653d0d0326002 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/_mpe_utils/simple_env.py @@ -0,0 +1,276 @@ +import numpy as np +from gym import spaces +from gym.utils import seeding + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + + +def make_env(raw_env): + def env(**kwargs): + env = raw_env(**kwargs) + if env.continuous_actions: + env = wrappers.ClipOutOfBoundsWrapper(env) + else: + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + return env + + +class SimpleEnv(AECEnv): + def __init__(self, scenario, world, max_cycles, continuous_actions=False, local_ratio=None): + super().__init__() + + self.seed() + + self.metadata = { + 'render.modes': ['human', 'rgb_array'], + 'is_parallelizable': True, + 'video.frames_per_second': 10 + } + + self.max_cycles = max_cycles + self.scenario = scenario + self.world = world + self.continuous_actions = continuous_actions + self.local_ratio = local_ratio + + self.scenario.reset_world(self.world, self.np_random) + + self.agents = [agent.name for agent in self.world.agents] + self.possible_agents = self.agents[:] + self._index_map = {agent.name: idx for idx, agent in enumerate(self.world.agents)} + + self._agent_selector = agent_selector(self.agents) + + # set spaces + self.action_spaces = dict() + self.observation_spaces = dict() + state_dim = 0 + for agent in self.world.agents: + if agent.movable: + space_dim = self.world.dim_p * 2 + 1 + elif self.continuous_actions: + space_dim = 0 + else: + space_dim = 1 + if not agent.silent: + if self.continuous_actions: + space_dim += self.world.dim_c + else: + space_dim *= self.world.dim_c + + obs_dim = len(self.scenario.observation(agent, self.world)) + state_dim += obs_dim + if self.continuous_actions: + self.action_spaces[agent.name] = spaces.Box(low=0, high=1, shape=(space_dim,)) + else: + self.action_spaces[agent.name] = spaces.Discrete(space_dim) + self.observation_spaces[agent.name] = spaces.Box(low=-np.float32(np.inf), high=+np.float32(np.inf), shape=(obs_dim,), dtype=np.float32) + + self.state_space = spaces.Box(low=-np.float32(np.inf), high=+np.float32(np.inf), shape=(state_dim,), dtype=np.float32) + + self.steps = 0 + + self.current_actions = [None] * self.num_agents + + self.viewer = None + + def 
observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def seed(self, seed=None): + self.np_random, seed = seeding.np_random(seed) + + def observe(self, agent): + return self.scenario.observation(self.world.agents[self._index_map[agent]], self.world).astype(np.float32) + + def state(self): + states = tuple(self.scenario.observation(self.world.agents[self._index_map[agent]], self.world).astype(np.float32) for agent in self.possible_agents) + return np.concatenate(states, axis=None) + + def reset(self): + self.scenario.reset_world(self.world, self.np_random) + + self.agents = self.possible_agents[:] + self.rewards = {name: 0. for name in self.agents} + self._cumulative_rewards = {name: 0. for name in self.agents} + self.dones = {name: False for name in self.agents} + self.infos = {name: {} for name in self.agents} + + self._reset_render() + + self.agent_selection = self._agent_selector.reset() + self.steps = 0 + + self.current_actions = [None] * self.num_agents + + def _execute_world_step(self): + # set action for each agent + for i, agent in enumerate(self.world.agents): + action = self.current_actions[i] + scenario_action = [] + if agent.movable: + mdim = self.world.dim_p * 2 + 1 + if self.continuous_actions: + scenario_action.append(action[0:mdim]) + action = action[mdim:] + else: + scenario_action.append(action % mdim) + action //= mdim + if not agent.silent: + scenario_action.append(action) + self._set_action(scenario_action, agent, self.action_spaces[agent.name]) + + self.world.step() + + global_reward = 0. + if self.local_ratio is not None: + global_reward = float(self.scenario.global_reward(self.world)) + + for agent in self.world.agents: + agent_reward = float(self.scenario.reward(agent, self.world)) + if self.local_ratio is not None: + reward = global_reward * (1 - self.local_ratio) + agent_reward * self.local_ratio + else: + reward = agent_reward + + self.rewards[agent.name] = reward + + # set env action for a particular agent + def _set_action(self, action, agent, action_space, time=None): + agent.action.u = np.zeros(self.world.dim_p) + agent.action.c = np.zeros(self.world.dim_c) + + if agent.movable: + # physical action + agent.action.u = np.zeros(self.world.dim_p) + if self.continuous_actions: + # Process continuous action as in OpenAI MPE + agent.action.u[0] += action[0][1] - action[0][2] + agent.action.u[1] += action[0][3] - action[0][4] + else: + # process discrete action + if action[0] == 1: + agent.action.u[0] = -1.0 + if action[0] == 2: + agent.action.u[0] = +1.0 + if action[0] == 3: + agent.action.u[1] = -1.0 + if action[0] == 4: + agent.action.u[1] = +1.0 + sensitivity = 5.0 + if agent.accel is not None: + sensitivity = agent.accel + agent.action.u *= sensitivity + action = action[1:] + if not agent.silent: + # communication action + if self.continuous_actions: + agent.action.c = action[0] + else: + agent.action.c = np.zeros(self.world.dim_c) + agent.action.c[action[0]] = 1.0 + action = action[1:] + # make sure we used all elements of action + assert len(action) == 0 + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + cur_agent = self.agent_selection + current_idx = self._index_map[self.agent_selection] + next_idx = (current_idx + 1) % self.num_agents + self.agent_selection = self._agent_selector.next() + + self.current_actions[current_idx] = action + + if next_idx == 0: + self._execute_world_step() + self.steps += 1 + if 
self.steps >= self.max_cycles: + for a in self.agents: + self.dones[a] = True + else: + self._clear_rewards() + + self._cumulative_rewards[cur_agent] = 0 + self._accumulate_rewards() + + def render(self, mode='human'): + from . import rendering + + if self.viewer is None: + self.viewer = rendering.Viewer(700, 700) + + # create rendering geometry + if self.render_geoms is None: + # import rendering only if we need it (and don't import for headless machines) + # from gym.envs.classic_control import rendering + # from multiagent._mpe_utils import rendering + self.render_geoms = [] + self.render_geoms_xform = [] + for entity in self.world.entities: + geom = rendering.make_circle(entity.size) + xform = rendering.Transform() + if 'agent' in entity.name: + geom.set_color(*entity.color[:3], alpha=0.5) + else: + geom.set_color(*entity.color[:3]) + geom.add_attr(xform) + self.render_geoms.append(geom) + self.render_geoms_xform.append(xform) + + # add geoms to viewer + self.viewer.geoms = [] + for geom in self.render_geoms: + self.viewer.add_geom(geom) + + self.viewer.text_lines = [] + idx = 0 + for agent in self.world.agents: + if not agent.silent: + tline = rendering.TextLine(self.viewer.window, idx) + self.viewer.text_lines.append(tline) + idx += 1 + + alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + for idx, other in enumerate(self.world.agents): + if other.silent: + continue + if np.all(other.state.c == 0): + word = '_' + elif self.continuous_actions: + word = '[' + ",".join([f"{comm:.2f}" for comm in other.state.c]) + "]" + else: + word = alphabet[np.argmax(other.state.c)] + + message = (other.name + ' sends ' + word + ' ') + + self.viewer.text_lines[idx].set_text(message) + + # update bounds to center around agent + all_poses = [entity.state.p_pos for entity in self.world.entities] + cam_range = np.max(np.abs(np.array(all_poses))) + 1 + self.viewer.set_max_size(cam_range) + # update geometry positions + for e, entity in enumerate(self.world.entities): + self.render_geoms_xform[e].set_translation(*entity.state.p_pos) + # render to display or array + return self.viewer.render(return_rgb_array=mode == 'rgb_array') + + # reset rendering assets + def _reset_render(self): + self.render_geoms = None + self.render_geoms_xform = None + + def close(self): + if self.viewer is not None: + self.viewer.close() + self.viewer = None + self._reset_render() diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__init__.py b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7ca91c2ae731d52b0dd02121cee8d233604729ec Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2a0cda08cc7cb061edf514d5fc5c32e2639fcce3 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_adversary.cpython-39.pyc 
b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_adversary.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b781f68e54cc54523e96216907d06b03ced685da Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_adversary.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_crypto.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_crypto.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..828e8701ac9a10c5492d2886f4b56850d0545315 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_crypto.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_push.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_push.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ffd4e678108bf8e1f59c6404f4fc276ef748edeb Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_push.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_reference.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_reference.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5d8f1accfd8e4a5360e4a8b504bbbbb8fb47c60f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_reference.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_speaker_listener.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_speaker_listener.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7f97a63954629d84310e84489088b3ba73947b55 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_speaker_listener.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_spread.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_spread.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aea9d446795cd37c466a6521583a856e2e150a04 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_spread.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_tag.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_tag.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1704ab697f88adab4a24abf4557da5f026fd434d Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_tag.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_world_comm.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_world_comm.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4406904d81a729c177a81c4ad827bd68e085ed44 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/__pycache__/simple_world_comm.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple.py b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple.py new file mode 100644 index 
0000000000000000000000000000000000000000..5c8697acdb3127a4645d3675030467d34ef533ef --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple.py @@ -0,0 +1,50 @@ +import numpy as np + +from .._mpe_utils.core import Agent, Landmark, World +from .._mpe_utils.scenario import BaseScenario + + +class Scenario(BaseScenario): + def make_world(self): + world = World() + # add agents + world.agents = [Agent() for i in range(1)] + for i, agent in enumerate(world.agents): + agent.name = f'agent_{i}' + agent.collide = False + agent.silent = True + # add landmarks + world.landmarks = [Landmark() for i in range(1)] + for i, landmark in enumerate(world.landmarks): + landmark.name = 'landmark %d' % i + landmark.collide = False + landmark.movable = False + return world + + def reset_world(self, world, np_random): + # random properties for agents + for i, agent in enumerate(world.agents): + agent.color = np.array([0.25, 0.25, 0.25]) + # random properties for landmarks + for i, landmark in enumerate(world.landmarks): + landmark.color = np.array([0.75, 0.75, 0.75]) + world.landmarks[0].color = np.array([0.75, 0.25, 0.25]) + # set random initial states + for agent in world.agents: + agent.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + agent.state.p_vel = np.zeros(world.dim_p) + agent.state.c = np.zeros(world.dim_c) + for i, landmark in enumerate(world.landmarks): + landmark.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + landmark.state.p_vel = np.zeros(world.dim_p) + + def reward(self, agent, world): + dist2 = np.sum(np.square(agent.state.p_pos - world.landmarks[0].state.p_pos)) + return -dist2 + + def observation(self, agent, world): + # get positions of all entities in this agent's reference frame + entity_pos = [] + for entity in world.landmarks: + entity_pos.append(entity.state.p_pos - agent.state.p_pos) + return np.concatenate([agent.state.p_vel] + entity_pos) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_adversary.py b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_adversary.py new file mode 100644 index 0000000000000000000000000000000000000000..81a20193e1fdbc582d63192dc99a7a18ff612542 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_adversary.py @@ -0,0 +1,140 @@ +import numpy as np + +from .._mpe_utils.core import Agent, Landmark, World +from .._mpe_utils.scenario import BaseScenario + + +class Scenario(BaseScenario): + + def make_world(self, N=2): + world = World() + # set any world properties first + world.dim_c = 2 + num_agents = N + 1 + world.num_agents = num_agents + num_adversaries = 1 + num_landmarks = num_agents - 1 + # add agents + world.agents = [Agent() for i in range(num_agents)] + for i, agent in enumerate(world.agents): + agent.adversary = True if i < num_adversaries else False + base_name = "adversary" if agent.adversary else "agent" + base_index = i if i < num_adversaries else i - num_adversaries + agent.name = f'{base_name}_{base_index}' + agent.collide = False + agent.silent = True + agent.size = 0.15 + # add landmarks + world.landmarks = [Landmark() for i in range(num_landmarks)] + for i, landmark in enumerate(world.landmarks): + landmark.name = 'landmark %d' % i + landmark.collide = False + landmark.movable = False + landmark.size = 0.08 + return world + + def reset_world(self, world, np_random): + # random properties for agents + world.agents[0].color = np.array([0.85, 0.35, 0.35]) + for i in range(1, world.num_agents): + world.agents[i].color = np.array([0.35, 0.35, 0.85]) + # random properties 
for landmarks + for i, landmark in enumerate(world.landmarks): + landmark.color = np.array([0.15, 0.15, 0.15]) + # set goal landmark + goal = np_random.choice(world.landmarks) + goal.color = np.array([0.15, 0.65, 0.15]) + for agent in world.agents: + agent.goal_a = goal + # set random initial states + for agent in world.agents: + agent.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + agent.state.p_vel = np.zeros(world.dim_p) + agent.state.c = np.zeros(world.dim_c) + for i, landmark in enumerate(world.landmarks): + landmark.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + landmark.state.p_vel = np.zeros(world.dim_p) + + def benchmark_data(self, agent, world): + # returns data for benchmarking purposes + if agent.adversary: + return np.sum(np.square(agent.state.p_pos - agent.goal_a.state.p_pos)) + else: + dists = [] + for l in world.landmarks: + dists.append(np.sum(np.square(agent.state.p_pos - l.state.p_pos))) + dists.append(np.sum(np.square(agent.state.p_pos - agent.goal_a.state.p_pos))) + return tuple(dists) + + # return all agents that are not adversaries + def good_agents(self, world): + return [agent for agent in world.agents if not agent.adversary] + + # return all adversarial agents + def adversaries(self, world): + return [agent for agent in world.agents if agent.adversary] + + def reward(self, agent, world): + # Agents are rewarded based on minimum agent distance to each landmark + return self.adversary_reward(agent, world) if agent.adversary else self.agent_reward(agent, world) + + def agent_reward(self, agent, world): + # Rewarded based on how close any good agent is to the goal landmark, and how far the adversary is from it + shaped_reward = True + shaped_adv_reward = True + + # Calculate negative reward for adversary + adversary_agents = self.adversaries(world) + if shaped_adv_reward: # distance-based adversary reward + adv_rew = sum(np.sqrt(np.sum(np.square(a.state.p_pos - a.goal_a.state.p_pos))) for a in adversary_agents) + else: # proximity-based adversary reward (binary) + adv_rew = 0 + for a in adversary_agents: + if np.sqrt(np.sum(np.square(a.state.p_pos - a.goal_a.state.p_pos))) < 2 * a.goal_a.size: + adv_rew -= 5 + + # Calculate positive reward for agents + good_agents = self.good_agents(world) + if shaped_reward: # distance-based agent reward + pos_rew = -min( + np.sqrt(np.sum(np.square(a.state.p_pos - a.goal_a.state.p_pos))) for a in good_agents) + else: # proximity-based agent reward (binary) + pos_rew = 0 + if min(np.sqrt(np.sum(np.square(a.state.p_pos - a.goal_a.state.p_pos))) for a in good_agents) \ + < 2 * agent.goal_a.size: + pos_rew += 5 + pos_rew -= min( + np.sqrt(np.sum(np.square(a.state.p_pos - a.goal_a.state.p_pos))) for a in good_agents) + return pos_rew + adv_rew + + def adversary_reward(self, agent, world): + # Rewarded based on proximity to the goal landmark + shaped_reward = True + if shaped_reward: # distance-based reward + return -np.sqrt(np.sum(np.square(agent.state.p_pos - agent.goal_a.state.p_pos))) + else: # proximity-based reward (binary) + adv_rew = 0 + if np.sqrt(np.sum(np.square(agent.state.p_pos - agent.goal_a.state.p_pos))) < 2 * agent.goal_a.size: + adv_rew += 5 + return adv_rew + + def observation(self, agent, world): + # get positions of all entities in this agent's reference frame + entity_pos = [] + for entity in world.landmarks: + entity_pos.append(entity.state.p_pos - agent.state.p_pos) + # entity colors + entity_color = [] + for entity in world.landmarks: + entity_color.append(entity.color) + # communication of all other 
agents + other_pos = [] + for other in world.agents: + if other is agent: + continue + other_pos.append(other.state.p_pos - agent.state.p_pos) + + if not agent.adversary: + return np.concatenate([agent.goal_a.state.p_pos - agent.state.p_pos] + entity_pos + other_pos) + else: + return np.concatenate(entity_pos + other_pos) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_crypto.py b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_crypto.py new file mode 100644 index 0000000000000000000000000000000000000000..4aa959c6d88801fa54b9a0a7936270fba84c9cf9 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_crypto.py @@ -0,0 +1,161 @@ +""" +Scenario: +1 speaker, 2 listeners (one of which is an adversary). Good agents rewarded for proximity to goal, and distance from +adversary to goal. Adversary is rewarded for its distance to the goal. +""" + + +import numpy as np + +from .._mpe_utils.core import Agent, Landmark, World +from .._mpe_utils.scenario import BaseScenario + + +class CryptoAgent(Agent): + def __init__(self): + super().__init__() + self.key = None + + +class Scenario(BaseScenario): + + def make_world(self): + world = World() + # set any world properties first + num_agents = 3 + num_adversaries = 1 + num_landmarks = 2 + world.dim_c = 4 + # add agents + world.agents = [CryptoAgent() for i in range(num_agents)] + for i, agent in enumerate(world.agents): + agent.adversary = True if i < num_adversaries else False + agent.collide = False + agent.speaker = True if i == 2 else False + agent.movable = False + base_name = "eve" if agent.adversary else ("alice" if agent.speaker else "bob") + agent.name = f'{base_name}_0' + # add landmarks + world.landmarks = [Landmark() for i in range(num_landmarks)] + for i, landmark in enumerate(world.landmarks): + landmark.name = 'landmark %d' % i + landmark.collide = False + landmark.movable = False + return world + + def reset_world(self, world, np_random): + # random properties for agents + for i, agent in enumerate(world.agents): + agent.color = np.array([0.25, 0.25, 0.25]) + if agent.adversary: + agent.color = np.array([0.75, 0.25, 0.25]) + agent.key = None + # random properties for landmarks + color_list = [np.zeros(world.dim_c) for i in world.landmarks] + for i, color in enumerate(color_list): + color[i] += 1 + for color, landmark in zip(color_list, world.landmarks): + landmark.color = color + # set goal landmark + goal = np_random.choice(world.landmarks) + + world.agents[1].color = goal.color + world.agents[2].key = np_random.choice(world.landmarks).color + + for agent in world.agents: + agent.goal_a = goal + + # set random initial states + for agent in world.agents: + agent.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + agent.state.p_vel = np.zeros(world.dim_p) + agent.state.c = np.zeros(world.dim_c) + for i, landmark in enumerate(world.landmarks): + landmark.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + landmark.state.p_vel = np.zeros(world.dim_p) + + def benchmark_data(self, agent, world): + # returns data for benchmarking purposes + return (agent.state.c, agent.goal_a.color) + + # return all agents that are not adversaries + def good_listeners(self, world): + return [agent for agent in world.agents if not agent.adversary and not agent.speaker] + + # return all agents that are not adversaries + def good_agents(self, world): + return [agent for agent in world.agents if not agent.adversary] + + # return all adversarial agents + def adversaries(self, world): + return [agent for agent in 
world.agents if agent.adversary] + + def reward(self, agent, world): + return self.adversary_reward(agent, world) if agent.adversary else self.agent_reward(agent, world) + + def agent_reward(self, agent, world): + # Agents rewarded if Bob can reconstruct message, but adversary (Eve) cannot + good_listeners = self.good_listeners(world) + adversaries = self.adversaries(world) + good_rew = 0 + adv_rew = 0 + for a in good_listeners: + if (a.state.c == np.zeros(world.dim_c)).all(): + continue + else: + good_rew -= np.sum(np.square(a.state.c - agent.goal_a.color)) + for a in adversaries: + if (a.state.c == np.zeros(world.dim_c)).all(): + continue + else: + adv_l1 = np.sum(np.square(a.state.c - agent.goal_a.color)) + adv_rew += adv_l1 + return adv_rew + good_rew + + def adversary_reward(self, agent, world): + # Adversary (Eve) is rewarded if it can reconstruct original goal + rew = 0 + if not (agent.state.c == np.zeros(world.dim_c)).all(): + rew -= np.sum(np.square(agent.state.c - agent.goal_a.color)) + return rew + + def observation(self, agent, world): + # goal color + goal_color = np.zeros(world.dim_color) + if agent.goal_a is not None: + goal_color = agent.goal_a.color + + # get positions of all entities in this agent's reference frame + entity_pos = [] + for entity in world.landmarks: + entity_pos.append(entity.state.p_pos - agent.state.p_pos) + # communication of all other agents + comm = [] + for other in world.agents: + if other is agent or (other.state.c is None) or not other.speaker: + continue + comm.append(other.state.c) + + key = world.agents[2].key + + # prnt = False + # speaker + if agent.speaker: + # if prnt: + # print('speaker') + # print(agent.state.c) + # print(np.concatenate([goal_color] + [key])) + return np.concatenate([goal_color] + [key]) + # listener + if not agent.speaker and not agent.adversary: + # if prnt: + # print('listener') + # print(agent.state.c) + # print(np.concatenate([key] + comm)) + return np.concatenate([key] + comm) + if not agent.speaker and agent.adversary: + # if prnt: + # print('adversary') + # print(agent.state.c) + # print(np.concatenate(comm)) + return np.concatenate(comm) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_push.py b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_push.py new file mode 100644 index 0000000000000000000000000000000000000000..d21d1826a7de629844e8ad9716a5212059037567 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_push.py @@ -0,0 +1,95 @@ +import numpy as np + +from .._mpe_utils.core import Agent, Landmark, World +from .._mpe_utils.scenario import BaseScenario + + +class Scenario(BaseScenario): + def make_world(self): + world = World() + # set any world properties first + world.dim_c = 2 + num_agents = 2 + num_adversaries = 1 + num_landmarks = 2 + # add agents + world.agents = [Agent() for i in range(num_agents)] + for i, agent in enumerate(world.agents): + agent.adversary = True if i < num_adversaries else False + base_name = "adversary" if agent.adversary else "agent" + base_index = i if i < num_adversaries else i - num_adversaries + agent.name = f'{base_name}_{base_index}' + agent.collide = True + agent.silent = True + # add landmarks + world.landmarks = [Landmark() for i in range(num_landmarks)] + for i, landmark in enumerate(world.landmarks): + landmark.name = 'landmark %d' % i + landmark.collide = False + landmark.movable = False + return world + + def reset_world(self, world, np_random): + # random properties for landmarks + for i, landmark in 
enumerate(world.landmarks): + landmark.color = np.array([0.1, 0.1, 0.1]) + landmark.color[i + 1] += 0.8 + landmark.index = i + # set goal landmark + goal = np_random.choice(world.landmarks) + for i, agent in enumerate(world.agents): + agent.goal_a = goal + agent.color = np.array([0.25, 0.25, 0.25]) + if agent.adversary: + agent.color = np.array([0.75, 0.25, 0.25]) + else: + j = goal.index + agent.color[j + 1] += 0.5 + # set random initial states + for agent in world.agents: + agent.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + agent.state.p_vel = np.zeros(world.dim_p) + agent.state.c = np.zeros(world.dim_c) + for i, landmark in enumerate(world.landmarks): + landmark.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + landmark.state.p_vel = np.zeros(world.dim_p) + + def reward(self, agent, world): + # Agents are rewarded based on minimum agent distance to each landmark + return self.adversary_reward(agent, world) if agent.adversary else self.agent_reward(agent, world) + + def agent_reward(self, agent, world): + # the distance to the goal + return -np.sqrt(np.sum(np.square(agent.state.p_pos - agent.goal_a.state.p_pos))) + + def adversary_reward(self, agent, world): + # keep the nearest good agents away from the goal + agent_dist = [np.sqrt(np.sum(np.square(a.state.p_pos - a.goal_a.state.p_pos))) for a in world.agents if not a.adversary] + pos_rew = min(agent_dist) + # nearest_agent = world.good_agents[np.argmin(agent_dist)] + # neg_rew = np.sqrt(np.sum(np.square(nearest_agent.state.p_pos - agent.state.p_pos))) + neg_rew = np.sqrt(np.sum(np.square(agent.goal_a.state.p_pos - agent.state.p_pos))) + # neg_rew = sum([np.sqrt(np.sum(np.square(a.state.p_pos - agent.state.p_pos))) for a in world.good_agents]) + return pos_rew - neg_rew + + def observation(self, agent, world): + # get positions of all entities in this agent's reference frame + entity_pos = [] + for entity in world.landmarks: # world.entities: + entity_pos.append(entity.state.p_pos - agent.state.p_pos) + # entity colors + entity_color = [] + for entity in world.landmarks: # world.entities: + entity_color.append(entity.color) + # communication of all other agents + comm = [] + other_pos = [] + for other in world.agents: + if other is agent: + continue + comm.append(other.state.c) + other_pos.append(other.state.p_pos - agent.state.p_pos) + if not agent.adversary: + return np.concatenate([agent.state.p_vel] + [agent.goal_a.state.p_pos - agent.state.p_pos] + [agent.color] + entity_pos + entity_color + other_pos) + else: + return np.concatenate([agent.state.p_vel] + entity_pos + other_pos) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_reference.py b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_reference.py new file mode 100644 index 0000000000000000000000000000000000000000..67fdaeb9abffd9c86c6524fe454d6ede84513217 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_reference.py @@ -0,0 +1,86 @@ +import numpy as np + +from .._mpe_utils.core import Agent, Landmark, World +from .._mpe_utils.scenario import BaseScenario + + +class Scenario(BaseScenario): + def make_world(self): + world = World() + # set any world properties first + world.dim_c = 10 + world.collaborative = True # whether agents share rewards + # add agents + world.agents = [Agent() for i in range(2)] + for i, agent in enumerate(world.agents): + agent.name = f'agent_{i}' + agent.collide = False + # add landmarks + world.landmarks = [Landmark() for i in range(3)] + for i, landmark in enumerate(world.landmarks): + 
landmark.name = 'landmark %d' % i + landmark.collide = False + landmark.movable = False + return world + + def reset_world(self, world, np_random): + # assign goals to agents + for agent in world.agents: + agent.goal_a = None + agent.goal_b = None + # want other agent to go to the goal landmark + world.agents[0].goal_a = world.agents[1] + world.agents[0].goal_b = np_random.choice(world.landmarks) + world.agents[1].goal_a = world.agents[0] + world.agents[1].goal_b = np_random.choice(world.landmarks) + # random properties for agents + for i, agent in enumerate(world.agents): + agent.color = np.array([0.25, 0.25, 0.25]) + # random properties for landmarks + world.landmarks[0].color = np.array([0.75, 0.25, 0.25]) + world.landmarks[1].color = np.array([0.25, 0.75, 0.25]) + world.landmarks[2].color = np.array([0.25, 0.25, 0.75]) + # special colors for goals + world.agents[0].goal_a.color = world.agents[0].goal_b.color + world.agents[1].goal_a.color = world.agents[1].goal_b.color + # set random initial states + for agent in world.agents: + agent.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + agent.state.p_vel = np.zeros(world.dim_p) + agent.state.c = np.zeros(world.dim_c) + for i, landmark in enumerate(world.landmarks): + landmark.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + landmark.state.p_vel = np.zeros(world.dim_p) + + def reward(self, agent, world): + if agent.goal_a is None or agent.goal_b is None: + agent_reward = 0.0 + else: + agent_reward = np.sqrt(np.sum(np.square(agent.goal_a.state.p_pos - agent.goal_b.state.p_pos))) + return -agent_reward + + def global_reward(self, world): + all_rewards = sum(self.reward(agent, world) for agent in world.agents) + return all_rewards / len(world.agents) + + def observation(self, agent, world): + # goal color + goal_color = [np.zeros(world.dim_color), np.zeros(world.dim_color)] + if agent.goal_b is not None: + goal_color[1] = agent.goal_b.color + + # get positions of all entities in this agent's reference frame + entity_pos = [] + for entity in world.landmarks: + entity_pos.append(entity.state.p_pos - agent.state.p_pos) + # entity colors + entity_color = [] + for entity in world.landmarks: + entity_color.append(entity.color) + # communication of all other agents + comm = [] + for other in world.agents: + if other is agent: + continue + comm.append(other.state.c) + return np.concatenate([agent.state.p_vel] + entity_pos + [goal_color[1]] + comm) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_speaker_listener.py b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_speaker_listener.py new file mode 100644 index 0000000000000000000000000000000000000000..7987dc7cbc046daac71e9c35dbb8efade4ca5fd1 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_speaker_listener.py @@ -0,0 +1,92 @@ +import numpy as np + +from .._mpe_utils.core import Agent, Landmark, World +from .._mpe_utils.scenario import BaseScenario + + +class Scenario(BaseScenario): + def make_world(self): + world = World() + # set any world properties first + world.dim_c = 3 + num_landmarks = 3 + world.collaborative = True + # add agents + world.agents = [Agent() for i in range(2)] + for i, agent in enumerate(world.agents): + agent.name = "speaker_0" if i == 0 else "listener_0" + agent.collide = False + agent.size = 0.075 + # speaker + world.agents[0].movable = False + # listener + world.agents[1].silent = True + # add landmarks + world.landmarks = [Landmark() for i in range(num_landmarks)] + for i, landmark in enumerate(world.landmarks): 
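+ # each landmark is a small, static, non-colliding target; reset_world later picks one of them as the listener's goal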
+ landmark.name = 'landmark %d' % i + landmark.collide = False + landmark.movable = False + landmark.size = 0.04 + return world + + def reset_world(self, world, np_random): + # assign goals to agents + for agent in world.agents: + agent.goal_a = None + agent.goal_b = None + # want listener to go to the goal landmark + world.agents[0].goal_a = world.agents[1] + world.agents[0].goal_b = np_random.choice(world.landmarks) + # random properties for agents + for i, agent in enumerate(world.agents): + agent.color = np.array([0.25, 0.25, 0.25]) + # random properties for landmarks + world.landmarks[0].color = np.array([0.65, 0.15, 0.15]) + world.landmarks[1].color = np.array([0.15, 0.65, 0.15]) + world.landmarks[2].color = np.array([0.15, 0.15, 0.65]) + # special colors for goals + world.agents[0].goal_a.color = world.agents[0].goal_b.color + np.array([0.45, 0.45, 0.45]) + # set random initial states + for agent in world.agents: + agent.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + agent.state.p_vel = np.zeros(world.dim_p) + agent.state.c = np.zeros(world.dim_c) + for i, landmark in enumerate(world.landmarks): + landmark.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + landmark.state.p_vel = np.zeros(world.dim_p) + + def benchmark_data(self, agent, world): + # returns data for benchmarking purposes + return self.reward(agent, world) + + def reward(self, agent, world): + # squared distance from listener to landmark + a = world.agents[0] + dist2 = np.sum(np.square(a.goal_a.state.p_pos - a.goal_b.state.p_pos)) + return -dist2 + + def observation(self, agent, world): + # goal color + goal_color = np.zeros(world.dim_color) + if agent.goal_b is not None: + goal_color = agent.goal_b.color + + # get positions of all entities in this agent's reference frame + entity_pos = [] + for entity in world.landmarks: + entity_pos.append(entity.state.p_pos - agent.state.p_pos) + + # communication of all other agents + comm = [] + for other in world.agents: + if other is agent or (other.state.c is None): + continue + comm.append(other.state.c) + + # speaker + if not agent.movable: + return np.concatenate([goal_color]) + # listener + if agent.silent: + return np.concatenate([agent.state.p_vel] + entity_pos + comm) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_spread.py b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_spread.py new file mode 100644 index 0000000000000000000000000000000000000000..b298d1d0e8ced47a0d014fc8ff8e9029be5bc6b1 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_spread.py @@ -0,0 +1,103 @@ +import numpy as np + +from .._mpe_utils.core import Agent, Landmark, World +from .._mpe_utils.scenario import BaseScenario + + +class Scenario(BaseScenario): + def make_world(self, N=3): + world = World() + # set any world properties first + world.dim_c = 2 + num_agents = N + num_landmarks = N + world.collaborative = True + # add agents + world.agents = [Agent() for i in range(num_agents)] + for i, agent in enumerate(world.agents): + agent.name = f'agent_{i}' + agent.collide = True + agent.silent = True + agent.size = 0.15 + # add landmarks + world.landmarks = [Landmark() for i in range(num_landmarks)] + for i, landmark in enumerate(world.landmarks): + landmark.name = 'landmark %d' % i + landmark.collide = False + landmark.movable = False + return world + + def reset_world(self, world, np_random): + # random properties for agents + for i, agent in enumerate(world.agents): + agent.color = np.array([0.35, 0.35, 0.85]) + # random properties for 
landmarks + for i, landmark in enumerate(world.landmarks): + landmark.color = np.array([0.25, 0.25, 0.25]) + # set random initial states + for agent in world.agents: + agent.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + agent.state.p_vel = np.zeros(world.dim_p) + agent.state.c = np.zeros(world.dim_c) + for i, landmark in enumerate(world.landmarks): + landmark.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + landmark.state.p_vel = np.zeros(world.dim_p) + + def benchmark_data(self, agent, world): + rew = 0 + collisions = 0 + occupied_landmarks = 0 + min_dists = 0 + for l in world.landmarks: + dists = [np.sqrt(np.sum(np.square(a.state.p_pos - l.state.p_pos))) for a in world.agents] + min_dists += min(dists) + rew -= min(dists) + if min(dists) < 0.1: + occupied_landmarks += 1 + if agent.collide: + for a in world.agents: + if self.is_collision(a, agent): + rew -= 1 + collisions += 1 + return (rew, collisions, min_dists, occupied_landmarks) + + def is_collision(self, agent1, agent2): + delta_pos = agent1.state.p_pos - agent2.state.p_pos + dist = np.sqrt(np.sum(np.square(delta_pos))) + dist_min = agent1.size + agent2.size + return True if dist < dist_min else False + + def reward(self, agent, world): + # Agents are rewarded based on minimum agent distance to each landmark, penalized for collisions + rew = 0 + if agent.collide: + for a in world.agents: + if self.is_collision(a, agent): + rew -= 1 + return rew + + def global_reward(self, world): + rew = 0 + for l in world.landmarks: + dists = [np.sqrt(np.sum(np.square(a.state.p_pos - l.state.p_pos))) for a in world.agents] + rew -= min(dists) + return rew + + def observation(self, agent, world): + # get positions of all entities in this agent's reference frame + entity_pos = [] + for entity in world.landmarks: # world.entities: + entity_pos.append(entity.state.p_pos - agent.state.p_pos) + # entity colors + entity_color = [] + for entity in world.landmarks: # world.entities: + entity_color.append(entity.color) + # communication of all other agents + comm = [] + other_pos = [] + for other in world.agents: + if other is agent: + continue + comm.append(other.state.c) + other_pos.append(other.state.p_pos - agent.state.p_pos) + return np.concatenate([agent.state.p_vel] + [agent.state.p_pos] + entity_pos + other_pos + comm) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_tag.py b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_tag.py new file mode 100644 index 0000000000000000000000000000000000000000..6e26b52b97d9c5c2b7a769258852bc8f51a3e5eb --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_tag.py @@ -0,0 +1,144 @@ +import numpy as np + +from .._mpe_utils.core import Agent, Landmark, World +from .._mpe_utils.scenario import BaseScenario + + +class Scenario(BaseScenario): + def make_world(self, num_good=1, num_adversaries=3, num_obstacles=2): + world = World() + # set any world properties first + world.dim_c = 2 + num_good_agents = num_good + num_adversaries = num_adversaries + num_agents = num_adversaries + num_good_agents + num_landmarks = num_obstacles + # add agents + world.agents = [Agent() for i in range(num_agents)] + for i, agent in enumerate(world.agents): + agent.adversary = True if i < num_adversaries else False + base_name = "adversary" if agent.adversary else "agent" + base_index = i if i < num_adversaries else i - num_adversaries + agent.name = f'{base_name}_{base_index}' + agent.collide = True + agent.silent = True + agent.size = 0.075 if agent.adversary else 0.05 + 
agent.accel = 3.0 if agent.adversary else 4.0 + agent.max_speed = 1.0 if agent.adversary else 1.3 + # add landmarks + world.landmarks = [Landmark() for i in range(num_landmarks)] + for i, landmark in enumerate(world.landmarks): + landmark.name = 'landmark %d' % i + landmark.collide = True + landmark.movable = False + landmark.size = 0.2 + landmark.boundary = False + return world + + def reset_world(self, world, np_random): + # random properties for agents + for i, agent in enumerate(world.agents): + agent.color = np.array([0.35, 0.85, 0.35]) if not agent.adversary else np.array([0.85, 0.35, 0.35]) + # random properties for landmarks + for i, landmark in enumerate(world.landmarks): + landmark.color = np.array([0.25, 0.25, 0.25]) + # set random initial states + for agent in world.agents: + agent.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + agent.state.p_vel = np.zeros(world.dim_p) + agent.state.c = np.zeros(world.dim_c) + for i, landmark in enumerate(world.landmarks): + if not landmark.boundary: + landmark.state.p_pos = np_random.uniform(-0.9, +0.9, world.dim_p) + landmark.state.p_vel = np.zeros(world.dim_p) + + def benchmark_data(self, agent, world): + # returns data for benchmarking purposes + if agent.adversary: + collisions = 0 + for a in self.good_agents(world): + if self.is_collision(a, agent): + collisions += 1 + return collisions + else: + return 0 + + def is_collision(self, agent1, agent2): + delta_pos = agent1.state.p_pos - agent2.state.p_pos + dist = np.sqrt(np.sum(np.square(delta_pos))) + dist_min = agent1.size + agent2.size + return True if dist < dist_min else False + + # return all agents that are not adversaries + def good_agents(self, world): + return [agent for agent in world.agents if not agent.adversary] + + # return all adversarial agents + def adversaries(self, world): + return [agent for agent in world.agents if agent.adversary] + + def reward(self, agent, world): + # Agents are rewarded based on minimum agent distance to each landmark + main_reward = self.adversary_reward(agent, world) if agent.adversary else self.agent_reward(agent, world) + return main_reward + + def agent_reward(self, agent, world): + # Agents are negatively rewarded if caught by adversaries + rew = 0 + shape = False + adversaries = self.adversaries(world) + if shape: # reward can optionally be shaped (increased reward for increased distance from adversary) + for adv in adversaries: + rew += 0.1 * np.sqrt(np.sum(np.square(agent.state.p_pos - adv.state.p_pos))) + if agent.collide: + for a in adversaries: + if self.is_collision(a, agent): + rew -= 10 + + # agents are penalized for exiting the screen, so that they can be caught by the adversaries + def bound(x): + if x < 0.9: + return 0 + if x < 1.0: + return (x - 0.9) * 10 + return min(np.exp(2 * x - 2), 10) + for p in range(world.dim_p): + x = abs(agent.state.p_pos[p]) + rew -= bound(x) + + return rew + + def adversary_reward(self, agent, world): + # Adversaries are rewarded for collisions with agents + rew = 0 + shape = False + agents = self.good_agents(world) + adversaries = self.adversaries(world) + if shape: # reward can optionally be shaped (decreased reward for increased distance from agents) + for adv in adversaries: + rew -= 0.1 * min(np.sqrt(np.sum(np.square(a.state.p_pos - adv.state.p_pos))) for a in agents) + if agent.collide: + for ag in agents: + for adv in adversaries: + if self.is_collision(ag, adv): + rew += 10 + return rew + + def observation(self, agent, world): + # get positions of all entities in this agent's reference 
frame + entity_pos = [] + for entity in world.landmarks: + if not entity.boundary: + entity_pos.append(entity.state.p_pos - agent.state.p_pos) + # communication of all other agents + comm = [] + other_pos = [] + other_vel = [] + for other in world.agents: + if other is agent: + continue + comm.append(other.state.c) + other_pos.append(other.state.p_pos - agent.state.p_pos) + if not other.adversary: + other_vel.append(other.state.p_vel) + return np.concatenate([agent.state.p_vel] + [agent.state.p_pos] + entity_pos + other_pos + other_vel) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_world_comm.py b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_world_comm.py new file mode 100644 index 0000000000000000000000000000000000000000..92cc580e05ce908d5c4cf9dff3848404524722a8 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/scenarios/simple_world_comm.py @@ -0,0 +1,295 @@ +import numpy as np + +from .._mpe_utils.core import Agent, Landmark, World +from .._mpe_utils.scenario import BaseScenario + + +class Scenario(BaseScenario): + def make_world(self, num_good_agents=2, num_adversaries=4, num_landmarks=1, num_food=2, num_forests=2): + world = World() + # set any world properties first + world.dim_c = 4 + # world.damping = 1 + num_good_agents = num_good_agents + num_adversaries = num_adversaries + num_agents = num_adversaries + num_good_agents + num_landmarks = num_landmarks + num_food = num_food + num_forests = num_forests + # add agents + world.agents = [Agent() for i in range(num_agents)] + for i, agent in enumerate(world.agents): + agent.adversary = True if i < num_adversaries else False + base_index = i - 1 if i < num_adversaries else i - num_adversaries + base_index = 0 if base_index < 0 else base_index + base_name = "adversary" if agent.adversary else "agent" + base_name = "leadadversary" if i == 0 else base_name + agent.name = f'{base_name}_{base_index}' + agent.collide = True + agent.leader = True if i == 0 else False + agent.silent = True if i > 0 else False + agent.size = 0.075 if agent.adversary else 0.045 + agent.accel = 3.0 if agent.adversary else 4.0 + # agent.accel = 20.0 if agent.adversary else 25.0 + agent.max_speed = 1.0 if agent.adversary else 1.3 + # add landmarks + world.landmarks = [Landmark() for i in range(num_landmarks)] + for i, landmark in enumerate(world.landmarks): + landmark.name = 'landmark %d' % i + landmark.collide = True + landmark.movable = False + landmark.size = 0.2 + landmark.boundary = False + world.food = [Landmark() for i in range(num_food)] + for i, landmark in enumerate(world.food): + landmark.name = 'food %d' % i + landmark.collide = False + landmark.movable = False + landmark.size = 0.03 + landmark.boundary = False + world.forests = [Landmark() for i in range(num_forests)] + for i, landmark in enumerate(world.forests): + landmark.name = 'forest %d' % i + landmark.collide = False + landmark.movable = False + landmark.size = 0.3 + landmark.boundary = False + world.landmarks += world.food + world.landmarks += world.forests + # world.landmarks += self.set_boundaries(world) + # world boundaries now penalized with negative reward + return world + + def set_boundaries(self, world): + boundary_list = [] + landmark_size = 1 + edge = 1 + landmark_size + num_landmarks = int(edge * 2 / landmark_size) + for x_pos in [-edge, edge]: + for i in range(num_landmarks): + l = Landmark() + l.state.p_pos = np.array([x_pos, -1 + i * landmark_size]) + boundary_list.append(l) + + for y_pos in [-edge, edge]: + for i in range(num_landmarks): + l 
= Landmark() + l.state.p_pos = np.array([-1 + i * landmark_size, y_pos]) + boundary_list.append(l) + + for i, l in enumerate(boundary_list): + l.name = 'boundary %d' % i + l.collide = True + l.movable = False + l.boundary = True + l.color = np.array([0.75, 0.75, 0.75]) + l.size = landmark_size + l.state.p_vel = np.zeros(world.dim_p) + + return boundary_list + + def reset_world(self, world, np_random): + # random properties for agents + for i, agent in enumerate(world.agents): + agent.color = np.array([0.45, 0.95, 0.45]) if not agent.adversary else np.array([0.95, 0.45, 0.45]) + agent.color -= np.array([0.3, 0.3, 0.3]) if agent.leader else np.array([0, 0, 0]) + # random properties for landmarks + for i, landmark in enumerate(world.landmarks): + landmark.color = np.array([0.25, 0.25, 0.25]) + for i, landmark in enumerate(world.food): + landmark.color = np.array([0.15, 0.15, 0.65]) + for i, landmark in enumerate(world.forests): + landmark.color = np.array([0.6, 0.9, 0.6]) + # set random initial states + for agent in world.agents: + agent.state.p_pos = np_random.uniform(-1, +1, world.dim_p) + agent.state.p_vel = np.zeros(world.dim_p) + agent.state.c = np.zeros(world.dim_c) + for i, landmark in enumerate(world.landmarks): + landmark.state.p_pos = np_random.uniform(-0.9, +0.9, world.dim_p) + landmark.state.p_vel = np.zeros(world.dim_p) + for i, landmark in enumerate(world.food): + landmark.state.p_pos = np_random.uniform(-0.9, +0.9, world.dim_p) + landmark.state.p_vel = np.zeros(world.dim_p) + for i, landmark in enumerate(world.forests): + landmark.state.p_pos = np_random.uniform(-0.9, +0.9, world.dim_p) + landmark.state.p_vel = np.zeros(world.dim_p) + + def benchmark_data(self, agent, world): + if agent.adversary: + collisions = 0 + for a in self.good_agents(world): + if self.is_collision(a, agent): + collisions += 1 + return collisions + else: + return 0 + + def is_collision(self, agent1, agent2): + delta_pos = agent1.state.p_pos - agent2.state.p_pos + dist = np.sqrt(np.sum(np.square(delta_pos))) + dist_min = agent1.size + agent2.size + return True if dist < dist_min else False + + # return all agents that are not adversaries + def good_agents(self, world): + return [agent for agent in world.agents if not agent.adversary] + + # return all adversarial agents + def adversaries(self, world): + return [agent for agent in world.agents if agent.adversary] + + def reward(self, agent, world): + # Agents are rewarded based on minimum agent distance to each landmark + # boundary_reward = -10 if self.outside_boundary(agent) else 0 + main_reward = self.adversary_reward(agent, world) if agent.adversary else self.agent_reward(agent, world) + return main_reward + + def outside_boundary(self, agent): + if agent.state.p_pos[0] > 1 or agent.state.p_pos[0] < -1 or agent.state.p_pos[1] > 1 or agent.state.p_pos[1] < -1: + return True + else: + return False + + def agent_reward(self, agent, world): + # Agents are rewarded based on minimum agent distance to each landmark + rew = 0 + shape = False + adversaries = self.adversaries(world) + if shape: + for adv in adversaries: + rew += 0.1 * np.sqrt(np.sum(np.square(agent.state.p_pos - adv.state.p_pos))) + if agent.collide: + for a in adversaries: + if self.is_collision(a, agent): + rew -= 5 + + def bound(x): + if x < 0.9: + return 0 + if x < 1.0: + return (x - 0.9) * 10 + return min(np.exp(2 * x - 2), 10) # 1 + (x - 1) * (x - 1) + + for p in range(world.dim_p): + x = abs(agent.state.p_pos[p]) + rew -= 2 * bound(x) + + for food in world.food: + if 
self.is_collision(agent, food): + rew += 2 + rew -= 0.05 * min(np.sqrt(np.sum(np.square(food.state.p_pos - agent.state.p_pos))) for food in world.food) + + return rew + + def adversary_reward(self, agent, world): + # Agents are rewarded based on minimum agent distance to each landmark + rew = 0 + shape = True + agents = self.good_agents(world) + adversaries = self.adversaries(world) + if shape: + rew -= 0.1 * min(np.sqrt(np.sum(np.square(a.state.p_pos - agent.state.p_pos))) for a in agents) + if agent.collide: + for ag in agents: + for adv in adversaries: + if self.is_collision(ag, adv): + rew += 5 + return rew + + def observation2(self, agent, world): + # get positions of all entities in this agent's reference frame + entity_pos = [] + for entity in world.landmarks: + if not entity.boundary: + entity_pos.append(entity.state.p_pos - agent.state.p_pos) + + food_pos = [] + for entity in world.food: + if not entity.boundary: + food_pos.append(entity.state.p_pos - agent.state.p_pos) + # communication of all other agents + comm = [] + other_pos = [] + other_vel = [] + for other in world.agents: + if other is agent: + continue + comm.append(other.state.c) + other_pos.append(other.state.p_pos - agent.state.p_pos) + if not other.adversary: + other_vel.append(other.state.p_vel) + return np.concatenate([agent.state.p_vel] + [agent.state.p_pos] + entity_pos + other_pos + other_vel) + + def observation(self, agent, world): + # get positions of all entities in this agent's reference frame + entity_pos = [] + for entity in world.landmarks: + if not entity.boundary: + entity_pos.append(entity.state.p_pos - agent.state.p_pos) + + in_forest = [np.array([-1]) for _ in range(len(world.forests))] + inf = [False for _ in range(len(world.forests))] + + for i in range(len(world.forests)): + if self.is_collision(agent, world.forests[i]): + in_forest[i] = np.array([1]) + inf[i] = True + + food_pos = [] + for entity in world.food: + if not entity.boundary: + food_pos.append(entity.state.p_pos - agent.state.p_pos) + # communication of all other agents + comm = [] + other_pos = [] + other_vel = [] + for other in world.agents: + if other is agent: + continue + comm.append(other.state.c) + + oth_f = [self.is_collision(other, world.forests[i]) for i in range(len(world.forests))] + + # without forest vis + for i in range(len(world.forests)): + if inf[i] and oth_f[i]: + other_pos.append(other.state.p_pos - agent.state.p_pos) + if not other.adversary: + other_vel.append(other.state.p_vel) + break + else: + if ((not any(inf)) and (not any(oth_f))) or agent.leader: + other_pos.append(other.state.p_pos - agent.state.p_pos) + if not other.adversary: + other_vel.append(other.state.p_vel) + else: + other_pos.append([0, 0]) + if not other.adversary: + other_vel.append([0, 0]) + + # to tell the pred when the prey are in the forest + prey_forest = [] + ga = self.good_agents(world) + for a in ga: + if any([self.is_collision(a, f) for f in world.forests]): + prey_forest.append(np.array([1])) + else: + prey_forest.append(np.array([-1])) + # to tell leader when pred are in forest + prey_forest_lead = [] + for f in world.forests: + if any([self.is_collision(a, f) for a in ga]): + prey_forest_lead.append(np.array([1])) + else: + prey_forest_lead.append(np.array([-1])) + + comm = [world.agents[0].state.c] + + if agent.adversary and not agent.leader: + return np.concatenate([agent.state.p_vel] + [agent.state.p_pos] + entity_pos + other_pos + other_vel + in_forest + comm) + if agent.leader: + return np.concatenate( + [agent.state.p_vel] + 
[agent.state.p_pos] + entity_pos + other_pos + other_vel + in_forest + comm) + else: + return np.concatenate([agent.state.p_vel] + [agent.state.p_pos] + entity_pos + other_pos + in_forest + other_vel) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/simple_adversary_v2.py b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_adversary_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..9092f67e3e0ee855c6ff5175474e2e624902c218 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_adversary_v2.py @@ -0,0 +1,16 @@ +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from ._mpe_utils.simple_env import SimpleEnv, make_env +from .scenarios.simple_adversary import Scenario + + +class raw_env(SimpleEnv): + def __init__(self, N=2, max_cycles=25, continuous_actions=False): + scenario = Scenario() + world = scenario.make_world(N) + super().__init__(scenario, world, max_cycles, continuous_actions) + self.metadata['name'] = "simple_adversary_v2" + + +env = make_env(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/simple_crypto_v2.py b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_crypto_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..5a342e224f89f064dba73b93301a50fa029c71a9 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_crypto_v2.py @@ -0,0 +1,16 @@ +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from ._mpe_utils.simple_env import SimpleEnv, make_env +from .scenarios.simple_crypto import Scenario + + +class raw_env(SimpleEnv): + def __init__(self, max_cycles=25, continuous_actions=False): + scenario = Scenario() + world = scenario.make_world() + super().__init__(scenario, world, max_cycles, continuous_actions) + self.metadata['name'] = "simple_crypto_v2" + + +env = make_env(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/simple_push_v2.py b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_push_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..b1c7126a9e74b7e3afa6878adb11c537aec8a99e --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_push_v2.py @@ -0,0 +1,16 @@ +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from ._mpe_utils.simple_env import SimpleEnv, make_env +from .scenarios.simple_push import Scenario + + +class raw_env(SimpleEnv): + def __init__(self, max_cycles=25, continuous_actions=False): + scenario = Scenario() + world = scenario.make_world() + super().__init__(scenario, world, max_cycles, continuous_actions) + self.metadata['name'] = "simple_push_v2" + + +env = make_env(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/simple_reference_v2.py b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_reference_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..54723d9e77b1225edb266986f7ecaa3d104f4864 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_reference_v2.py @@ -0,0 +1,17 @@ +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from ._mpe_utils.simple_env import SimpleEnv, make_env +from .scenarios.simple_reference import Scenario + + +class raw_env(SimpleEnv): + def __init__(self, local_ratio=0.5, max_cycles=25, continuous_actions=False): + assert 0. <= local_ratio <= 1., "local_ratio is a proportion. Must be between 0 and 1." 
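+ # local_ratio is forwarded to SimpleEnv below together with the scenario world; it weights each agent's own reward against the scenario's shared global_reward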
+ scenario = Scenario() + world = scenario.make_world() + super().__init__(scenario, world, max_cycles, continuous_actions, local_ratio) + self.metadata['name'] = "simple_reference_v2" + + +env = make_env(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/simple_speaker_listener_v3.py b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_speaker_listener_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..6921d5d994a877a2c1712d9a5143a88a955707f6 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_speaker_listener_v3.py @@ -0,0 +1,16 @@ +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from ._mpe_utils.simple_env import SimpleEnv, make_env +from .scenarios.simple_speaker_listener import Scenario + + +class raw_env(SimpleEnv): + def __init__(self, max_cycles=25, continuous_actions=False): + scenario = Scenario() + world = scenario.make_world() + super().__init__(scenario, world, max_cycles, continuous_actions) + self.metadata['name'] = "simple_speaker_listener_v3" + + +env = make_env(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/simple_spread_v2.py b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_spread_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..5834d243713b1fa8f5238b300ade7d256de61e81 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_spread_v2.py @@ -0,0 +1,17 @@ +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from ._mpe_utils.simple_env import SimpleEnv, make_env +from .scenarios.simple_spread import Scenario + + +class raw_env(SimpleEnv): + def __init__(self, N=3, local_ratio=0.5, max_cycles=25, continuous_actions=False): + assert 0. <= local_ratio <= 1., "local_ratio is a proportion. Must be between 0 and 1." 
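+ # N sets both the number of agents and the number of landmarks in the simple_spread scenario
+ # Rough usage sketch of the module-level parallel API (illustrative only; exact
+ # reset/step signatures depend on the PettingZoo version vendored here, which
+ # still uses the dones-based API):
+ #   penv = parallel_env(N=3, local_ratio=0.5)
+ #   obs = penv.reset()
+ #   while penv.agents:
+ #       actions = {a: penv.action_space(a).sample() for a in penv.agents}
+ #       obs, rewards, dones, infos = penv.step(actions)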
+ scenario = Scenario() + world = scenario.make_world(N) + super().__init__(scenario, world, max_cycles, continuous_actions, local_ratio) + self.metadata['name'] = "simple_spread_v2" + + +env = make_env(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/simple_tag_v2.py b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_tag_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..282344385531bbceedb4b73dc90a4050c3b0c270 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_tag_v2.py @@ -0,0 +1,16 @@ +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from ._mpe_utils.simple_env import SimpleEnv, make_env +from .scenarios.simple_tag import Scenario + + +class raw_env(SimpleEnv): + def __init__(self, num_good=1, num_adversaries=3, num_obstacles=2, max_cycles=25, continuous_actions=False): + scenario = Scenario() + world = scenario.make_world(num_good, num_adversaries, num_obstacles) + super().__init__(scenario, world, max_cycles, continuous_actions) + self.metadata['name'] = "simple_tag_v2" + + +env = make_env(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/simple_v2.py b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..a49403db8ed526f15311db6091ea2be7264686f5 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_v2.py @@ -0,0 +1,16 @@ +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from ._mpe_utils.simple_env import SimpleEnv, make_env +from .scenarios.simple import Scenario + + +class raw_env(SimpleEnv): + def __init__(self, max_cycles=25, continuous_actions=False): + scenario = Scenario() + world = scenario.make_world() + super().__init__(scenario, world, max_cycles, continuous_actions) + self.metadata['name'] = "simple_v2" + + +env = make_env(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/mpe/simple_world_comm_v2.py b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_world_comm_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..fe942de105c0213a67f431e70c0ac7f33e162685 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/mpe/simple_world_comm_v2.py @@ -0,0 +1,16 @@ +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from ._mpe_utils.simple_env import SimpleEnv, make_env +from .scenarios.simple_world_comm import Scenario + + +class raw_env(SimpleEnv): + def __init__(self, num_good=2, num_adversaries=4, num_obstacles=1, num_food=2, max_cycles=25, num_forests=2, continuous_actions=False): + scenario = Scenario() + world = scenario.make_world(num_good, num_adversaries, num_obstacles, num_food, num_forests) + super().__init__(scenario, world, max_cycles, continuous_actions) + self.metadata['name'] = "simple_world_comm_v2" + + +env = make_env(raw_env) +parallel_env = parallel_wrapper_fn(env) diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/__init__.py b/MLPY/Lib/site-packages/pettingzoo/sisl/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ded2c453b63215d5c2e1c2eb55d820238962a0fb --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/__init__.py @@ -0,0 +1,5 @@ +from pettingzoo.utils.deprecated_module import depricated_handler + + +def __getattr__(env_name): + return depricated_handler(env_name, __path__, __name__) diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/__init__.cpython-39.pyc 
b/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..93bc608180c48b32fb2dc5327155474947334f87 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/_utils.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/_utils.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..850f557ba99cdc9b07b2fddeadc432e663c06a12 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/_utils.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/multiwalker_v8.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/multiwalker_v8.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..54c17cdd0121e55afd5de384bfd222f0caed6711 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/multiwalker_v8.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/pursuit_v4.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/pursuit_v4.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4988452cd4270d4d7dbabe113d856cb14e4f5de0 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/pursuit_v4.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/waterworld_v3.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/waterworld_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..10e7621be3e9650373e6ebcff88fcb2be18eeba3 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/__pycache__/waterworld_v3.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/_utils.py b/MLPY/Lib/site-packages/pettingzoo/sisl/_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..df3489793a1c1ea0b3e39aeb198237fa3d8cd656 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/_utils.py @@ -0,0 +1,15 @@ +class Agent: + def __new__(cls, *args, **kwargs): + agent = super().__new__(cls) + return agent + + @property + def observation_space(self): + raise NotImplementedError() + + @property + def action_space(self): + raise NotImplementedError() + + def __str__(self): + return f"<{type(self).__name__} instance>" diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/__init__.py b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..877403e2d77e6b83e4ea6e4700ad6e9218e07736 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/__pycache__/multiwalker.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/__pycache__/multiwalker.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..776b365384cf887e70769e7413f5118094c67c24 Binary files /dev/null and 
b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/__pycache__/multiwalker.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/__pycache__/multiwalker_base.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/__pycache__/multiwalker_base.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5f2b5e9a02f8285764b1ae17fcc526ee1eec7c99 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/__pycache__/multiwalker_base.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/multiwalker.py b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/multiwalker.py new file mode 100644 index 0000000000000000000000000000000000000000..5ab1a0ecbbe1640cb8469e173bc81be1b4214b14 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/multiwalker.py @@ -0,0 +1,115 @@ +import numpy as np +from gym.utils import EzPickle + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from .multiwalker_base import FPS +from .multiwalker_base import MultiWalkerEnv as _env + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.ClipOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +parallel_env = parallel_wrapper_fn(env) + + +class raw_env(AECEnv, EzPickle): + + metadata = { + 'render.modes': ['human', "rgb_array"], + 'name': 'multiwalker_v8', + 'is_parallelizable': True, + 'video.frames_per_second': FPS, + } + + def __init__(self, *args, **kwargs): + EzPickle.__init__(self, *args, **kwargs) + self.env = _env(*args, **kwargs) + + self.agents = ["walker_" + str(r) for r in range(self.env.num_agents)] + self.possible_agents = self.agents[:] + self.agent_name_mapping = dict(zip(self.agents, list(range(self.num_agents)))) + self._agent_selector = agent_selector(self.agents) + # spaces + self.action_spaces = dict(zip(self.agents, self.env.action_space)) + self.observation_spaces = dict( + zip(self.agents, self.env.observation_space)) + self.steps = 0 + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def seed(self, seed=None): + self.env.seed(seed) + + def convert_to_dict(self, list_of_list): + return dict(zip(self.agents, list_of_list)) + + def reset(self): + self.env.reset() + self.steps = 0 + self.agents = self.possible_agents[:] + self._agent_selector.reinit(self.agents) + self.agent_selection = self._agent_selector.next() + self._cumulative_rewards = dict(zip(self.agents, [(0) for _ in self.agents])) + self.rewards = dict(zip(self.agents, [(0) for _ in self.agents])) + self.dones = dict(zip(self.agents, [False for _ in self.agents])) + self.infos = dict(zip(self.agents, [{} for _ in self.agents])) + + def close(self): + self.env.close() + + def render(self, mode="human"): + self.env.render(mode) + + import pyglet + buffer = pyglet.image.get_buffer_manager().get_color_buffer() + image_data = buffer.get_image_data() + arr = np.fromstring(image_data.get_data(), dtype=np.uint8, sep='') + arr = arr.reshape(buffer.height, buffer.width, 4) + arr = arr[::-1, :, 0:3] + return arr if mode == "rgb_array" else None + + def observe(self, agent): + return self.env.observe(self.agent_name_mapping[agent]) + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + agent = self.agent_selection + action = 
np.array(action, dtype=np.float32) + is_last = self._agent_selector.is_last() + self.env.step(action, self.agent_name_mapping[agent], is_last) + if is_last: + last_rewards = self.env.get_last_rewards() + for r in self.rewards: + self.rewards[r] = last_rewards[self.agent_name_mapping[r]] + for d in self.dones: + self.dones[d] = self.env.get_last_dones()[self.agent_name_mapping[d]] + self.agent_name_mapping = {agent: i for i, (agent, done) in enumerate(zip(self.possible_agents, self.env.get_last_dones()))} + iter_agents = self.agents[:] + for a, d in self.dones.items(): + if d: + iter_agents.remove(a) + self._agent_selector.reinit(iter_agents) + else: + self._clear_rewards() + if self._agent_selector.agent_order: + self.agent_selection = self._agent_selector.next() + + if self.env.frames >= self.env.max_cycles: + self.dones = dict(zip(self.agents, [True for _ in self.agents])) + + self._cumulative_rewards[agent] = 0 + self._accumulate_rewards() + self._dones_step_first() + self.steps += 1 diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/multiwalker_base.py b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/multiwalker_base.py new file mode 100644 index 0000000000000000000000000000000000000000..a6e74447c46e5f719d074a9e870315aff99e0185 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker/multiwalker_base.py @@ -0,0 +1,765 @@ +import copy +import math + +import Box2D +import numpy as np +import pygame +from Box2D.b2 import (circleShape, contactListener, edgeShape, fixtureDef, polygonShape, + revoluteJointDef) +from gym import spaces +from gym.utils import seeding +from pygame import gfxdraw + +from .._utils import Agent + +MAX_AGENTS = 40 + +FPS = 50 +SCALE = 30.0 # affects how fast-paced the game is, forces should be adjusted as well + +MOTORS_TORQUE = 80 +SPEED_HIP = 4 +SPEED_KNEE = 6 +LIDAR_RANGE = 160 / SCALE + +INITIAL_RANDOM = 5 + +HULL_POLY = [(-30, +9), (+6, +9), (+34, +1), (+34, -8), (-30, -8)] +LEG_DOWN = -8 / SCALE +LEG_W, LEG_H = 8 / SCALE, 34 / SCALE + +PACKAGE_POLY = [(-120, 5), (120, 5), (120, -5), (-120, -5)] + +PACKAGE_LENGTH = 240 + +VIEWPORT_W = 600 +VIEWPORT_H = 400 + +TERRAIN_STEP = 14 / SCALE +TERRAIN_LENGTH = 200 # in steps +TERRAIN_HEIGHT = VIEWPORT_H / SCALE / 4 +TERRAIN_GRASS = 10 # low long are grass spots, in steps +TERRAIN_STARTPAD = 20 # in steps +FRICTION = 2.5 + +WALKER_SEPERATION = 10 # in steps + + +class ContactDetector(contactListener): + + def __init__(self, env): + contactListener.__init__(self) + self.env = env + + def BeginContact(self, contact): + # if walkers fall on ground + for i, walker in enumerate(self.env.walkers): + if walker.hull is not None: + if walker.hull == contact.fixtureA.body: + if self.env.package != contact.fixtureB.body: + self.env.fallen_walkers[i] = True + if walker.hull == contact.fixtureB.body: + if self.env.package != contact.fixtureA.body: + self.env.fallen_walkers[i] = True + + # if package is on the ground + if self.env.package == contact.fixtureA.body: + if contact.fixtureB.body not in [w.hull for w in self.env.walkers]: + self.env.game_over = True + if self.env.package == contact.fixtureB.body: + if contact.fixtureA.body not in [w.hull for w in self.env.walkers]: + self.env.game_over = True + + # self.env.game_over = True + for walker in self.env.walkers: + if walker.hull is not None: + for leg in [walker.legs[1], walker.legs[3]]: + if leg in [contact.fixtureA.body, contact.fixtureB.body]: + leg.ground_contact = True + + def EndContact(self, contact): + for walker in self.env.walkers: + if 
walker.hull is not None: + for leg in [walker.legs[1], walker.legs[3]]: + if leg in [contact.fixtureA.body, contact.fixtureB.body]: + leg.ground_contact = False + + +class BipedalWalker(Agent): + + def __init__(self, world, init_x=TERRAIN_STEP * TERRAIN_STARTPAD / 2, + init_y=TERRAIN_HEIGHT + 2 * LEG_H, n_walkers=2, seed=None): + self.world = world + self._n_walkers = n_walkers + self.hull = None + self.init_x = init_x + self.init_y = init_y + self.walker_id = -int(self.init_x) + self._seed(seed) + + def _destroy(self): + if not self.hull: + return + self.world.DestroyBody(self.hull) + self.hull = None + for leg in self.legs: + self.world.DestroyBody(leg) + self.legs = [] + self.joints = [] + + def _seed(self, seed=None): + self.np_random, seed = seeding.np_random(seed) + return [seed] + + def _reset(self): + self._destroy() + init_x = self.init_x + init_y = self.init_y + self.hull = self.world.CreateDynamicBody( + position=(init_x, init_y), + fixtures=fixtureDef( + shape=polygonShape( + vertices=[(x / SCALE, y / SCALE) for x, y in HULL_POLY]), + density=5.0, + friction=0.1, + groupIndex=self.walker_id, + restitution=0.0) # 0.99 bouncy + ) + self.hull.color1 = (127, 51, 229) + self.hull.color2 = (76, 76, 127) + self.hull.ApplyForceToCenter((self.np_random.uniform(-INITIAL_RANDOM, INITIAL_RANDOM), 0), + True) + + self.legs = [] + self.joints = [] + for i in [-1, +1]: + leg = self.world.CreateDynamicBody( + position=(init_x, init_y - LEG_H / 2 - LEG_DOWN), + angle=(i * 0.05), + fixtures=fixtureDef( + shape=polygonShape(box=(LEG_W / 2, LEG_H / 2)), + density=1.0, + restitution=0.0, + groupIndex=self.walker_id, + ) # collide with ground only + ) + leg.color1 = (153 - i * 25, 76 - i * 25, 127 - i * 25) + leg.color2 = (102 - i * 25, 51 - i * 25, 76 - i * 25) + rjd = revoluteJointDef( + bodyA=self.hull, + bodyB=leg, + localAnchorA=(0, LEG_DOWN), + localAnchorB=(0, LEG_H / 2), + enableMotor=True, + enableLimit=True, + maxMotorTorque=MOTORS_TORQUE, + motorSpeed=i, + lowerAngle=-0.8, + upperAngle=1.1, + ) + self.legs.append(leg) + self.joints.append(self.world.CreateJoint(rjd)) + + lower = self.world.CreateDynamicBody( + position=(init_x, init_y - LEG_H * 3 / 2 - LEG_DOWN), + angle=(i * 0.05), + fixtures=fixtureDef( + shape=polygonShape(box=(0.8 * LEG_W / 2, LEG_H / 2)), + density=1.0, + restitution=0.0, + groupIndex=self.walker_id, + ) + ) + lower.color1 = (153 - i * 25, 76 - i * 25, 127 - i * 25) + lower.color2 = (102 - i * 25, 51 - i * 25, 76 - i * 25) + rjd = revoluteJointDef( + bodyA=leg, + bodyB=lower, + localAnchorA=(0, -LEG_H / 2), + localAnchorB=(0, LEG_H / 2), + enableMotor=True, + enableLimit=True, + maxMotorTorque=MOTORS_TORQUE, + motorSpeed=1, + lowerAngle=-1.6, + upperAngle=-0.1, + ) + lower.ground_contact = False + self.legs.append(lower) + self.joints.append(self.world.CreateJoint(rjd)) + + self.drawlist = self.legs + [self.hull] + + class LidarCallback(Box2D.b2.rayCastCallback): + + def ReportFixture(self, fixture, point, normal, fraction): + if (fixture.filterData.categoryBits & 1) == 0: + return -1 + self.p2 = point + self.fraction = fraction + return fraction + + self.lidar = [LidarCallback() for _ in range(10)] + + def apply_action(self, action): + + self.joints[0].motorSpeed = float(SPEED_HIP * np.sign(action[0])) + self.joints[0].maxMotorTorque = float( + MOTORS_TORQUE * np.clip(np.abs(action[0]), 0, 1)) + self.joints[1].motorSpeed = float(SPEED_KNEE * np.sign(action[1])) + self.joints[1].maxMotorTorque = float( + MOTORS_TORQUE * np.clip(np.abs(action[1]), 0, 1)) + 
self.joints[2].motorSpeed = float(SPEED_HIP * np.sign(action[2])) + self.joints[2].maxMotorTorque = float( + MOTORS_TORQUE * np.clip(np.abs(action[2]), 0, 1)) + self.joints[3].motorSpeed = float(SPEED_KNEE * np.sign(action[3])) + self.joints[3].maxMotorTorque = float( + MOTORS_TORQUE * np.clip(np.abs(action[3]), 0, 1)) + + def get_observation(self): + pos = self.hull.position + vel = self.hull.linearVelocity + + for i in range(10): + self.lidar[i].fraction = 1.0 + self.lidar[i].p1 = pos + self.lidar[i].p2 = (pos[0] + math.sin(1.5 * i / 10.0) * LIDAR_RANGE, + pos[1] - math.cos(1.5 * i / 10.0) * LIDAR_RANGE) + self.world.RayCast( + self.lidar[i], self.lidar[i].p1, self.lidar[i].p2) + + state = [ + # Normal angles up to 0.5 here, but sure more is possible. + self.hull.angle, + 2.0 * self.hull.angularVelocity / FPS, + # Normalized to get -1..1 range + 0.3 * vel.x * (VIEWPORT_W / SCALE) / FPS, + 0.3 * vel.y * (VIEWPORT_H / SCALE) / FPS, + # This will give 1.1 on high up, but it's still OK (and there should be spikes on hiting the ground, that's normal too) + self.joints[0].angle, + self.joints[0].speed / SPEED_HIP, + self.joints[1].angle + 1.0, + self.joints[1].speed / SPEED_KNEE, + 1.0 if self.legs[1].ground_contact else 0.0, + self.joints[2].angle, + self.joints[2].speed / SPEED_HIP, + self.joints[3].angle + 1.0, + self.joints[3].speed / SPEED_KNEE, + 1.0 if self.legs[3].ground_contact else 0.0 + ] + + state += [l.fraction for l in self.lidar] + assert len(state) == 24 + + return state + + @property + def observation_space(self): + # 24 original obs (joints, etc), 2 displacement obs for each neighboring walker, 3 for package + return spaces.Box(low=np.float32(-np.inf), high=np.float32(np.inf), shape=(24 + 4 + 3,), dtype=np.float32) + + @property + def action_space(self): + return spaces.Box(low=np.float32(-1), high=np.float32(1), shape=(4,), dtype=np.float32) + + +class MultiWalkerEnv(): + + metadata = {'render.modes': [ + 'human', 'rgb_array'], 'video.frames_per_second': FPS} + + hardcore = False + + def __init__(self, n_walkers=3, position_noise=1e-3, angle_noise=1e-3, forward_reward=1.0, + terminate_reward=-100.0, fall_reward=-10.0, shared_reward=True, + terminate_on_fall=True, remove_on_fall=True, terrain_length=TERRAIN_LENGTH, max_cycles=500): + """ + n_walkers: number of bipedal walkers in environment + position_noise: noise applied to agent positional sensor observations + angle_noise: noise applied to agent rotational sensor observations + forward_reward: reward applied for an agent standing, scaled by agent's x coordinate + fall_reward: reward applied when an agent falls down + shared_reward: whether reward is distributed among all agents or allocated locally + terminate_reward: reward applied for each fallen walker in environment + terminate_on_fall: toggles whether agent is done if it falls down + terrain_length: length of terrain in number of steps + max_cycles: after max_cycles steps all agents will return done + """ + + self.n_walkers = n_walkers + self.position_noise = position_noise + self.angle_noise = angle_noise + self.forward_reward = forward_reward + self.fall_reward = fall_reward + self.terminate_reward = terminate_reward + self.terminate_on_fall = terminate_on_fall + self.local_ratio = 1 - shared_reward + self.remove_on_fall = remove_on_fall + self.terrain_length = terrain_length + self.seed_val = None + self.seed() + self.setup() + self.screen = None + self.isopen = True + self.agent_list = list(range(self.n_walkers)) + self.last_rewards = [0 for _ in 
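get_observation() above appends 10 lidar range readings to the 14 proprioceptive values (hull angle and velocities, four joint angles and speeds, two ground-contact flags), giving the 24-entry base state. Each callback's fraction defaults to 1.0 (nothing hit) and is overwritten by the nearest intersection found by the ray cast. A small sketch of just the fan geometry, assuming the same constants:

import math

SCALE = 30.0
LIDAR_RANGE = 160 / SCALE

def lidar_endpoints(x, y, n_rays=10):
    # Ray i is tilted 1.5 * i / n_rays radians forward from straight down,
    # matching the endpoints built in get_observation().
    return [(x + math.sin(1.5 * i / n_rays) * LIDAR_RANGE,
             y - math.cos(1.5 * i / n_rays) * LIDAR_RANGE)
            for i in range(n_rays)]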
range(self.n_walkers)] + self.last_dones = [False for _ in range(self.n_walkers)] + self.last_obs = [None for _ in range(self.n_walkers)] + self.max_cycles = max_cycles + self.frames = 0 + + def get_param_values(self): + return self.__dict__ + + def setup(self): + self.viewer = None + + self.world = Box2D.b2World() + self.terrain = None + + init_x = TERRAIN_STEP * TERRAIN_STARTPAD / 2 + init_y = TERRAIN_HEIGHT + 2 * LEG_H + self.start_x = [ + init_x + WALKER_SEPERATION * i * TERRAIN_STEP for i in range(self.n_walkers) + ] + self.walkers = [ + BipedalWalker(self.world, init_x=sx, + init_y=init_y, seed=self.seed_val) + for sx in self.start_x + ] + self.num_agents = len(self.walkers) + self.observation_space = [ + agent.observation_space for agent in self.walkers] + self.action_space = [agent.action_space for agent in self.walkers] + + self.package_scale = self.n_walkers / 1.75 + self.package_length = PACKAGE_LENGTH / SCALE * self.package_scale + + self.total_agents = self.n_walkers + + self.prev_shaping = np.zeros(self.n_walkers) + self.prev_package_shaping = 0.0 + + @property + def agents(self): + return self.walkers + + def seed(self, seed=None): + self.np_random, seed_ = seeding.np_random(seed) + self.seed_val = seed_ + for walker in getattr(self, "walkers", []): + walker._seed(seed_) + return [seed_] + + def _destroy(self): + if not self.terrain: + return + self.world.contactListener = None + for t in self.terrain: + self.world.DestroyBody(t) + self.terrain = [] + self.world.DestroyBody(self.package) + self.package = None + + for walker in self.walkers: + walker._destroy() + + def close(self): + if self.screen is not None: + pygame.quit() + self.isopen = False + + def reset(self): + self.setup() + self.world.contactListener_bug_workaround = ContactDetector(self) + self.world.contactListener = self.world.contactListener_bug_workaround + self.game_over = False + self.fallen_walkers = np.zeros(self.n_walkers, dtype=bool) + self.prev_shaping = np.zeros(self.n_walkers) + self.prev_package_shaping = 0.0 + self.scroll = 0.0 + self.lidar_render = 0 + + self._generate_package() + self._generate_terrain(self.hardcore) + self._generate_clouds() + + self.drawlist = copy.copy(self.terrain) + + self.drawlist += [self.package] + + for walker in self.walkers: + walker._reset() + self.drawlist += walker.legs + self.drawlist += [walker.hull] + r, d, o = self.scroll_subroutine() + self.last_rewards = [0 for _ in range(self.n_walkers)] + self.last_dones = [False for _ in range(self.n_walkers)] + self.last_obs = o + self.frames = 0 + + return self.observe(0) + + def scroll_subroutine(self): + xpos = np.zeros(self.n_walkers) + obs = [] + done = False + rewards = np.zeros(self.n_walkers) + + for i in range(self.n_walkers): + if self.walkers[i].hull is None: + obs.append(np.zeros_like(self.observation_space[i].low)) + continue + pos = self.walkers[i].hull.position + x, y = pos.x, pos.y + xpos[i] = x + + walker_obs = self.walkers[i].get_observation() + neighbor_obs = [] + for j in [i - 1, i + 1]: + # if no neighbor (for edge walkers) + if j < 0 or j == self.n_walkers or self.walkers[j].hull is None: + neighbor_obs.append(0.0) + neighbor_obs.append(0.0) + else: + xm = (self.walkers[j].hull.position.x - x) / self.package_length + ym = (self.walkers[j].hull.position.y - y) / self.package_length + neighbor_obs.append(self.np_random.normal(xm, self.position_noise)) + neighbor_obs.append(self.np_random.normal(ym, self.position_noise)) + xd = (self.package.position.x - x) / self.package_length + yd = 
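In scroll_subroutine() each walker's own 24-dim state is extended with noisy relative measurements: the (x, y) displacement of its left and right neighbours and of the package, normalised by the package length and corrupted with Gaussian sensor noise (position_noise, angle_noise), giving the 24 + 4 + 3 = 31-dim observation declared above. A stand-alone restatement of that measurement, with rng playing the role of self.np_random:

def noisy_displacement(rng, other_xy, own_xy, package_length, position_noise):
    # Relative position in package-lengths, with Gaussian sensor noise, as used
    # for the neighbour and package terms in scroll_subroutine().
    dx = (other_xy[0] - own_xy[0]) / package_length
    dy = (other_xy[1] - own_xy[1]) / package_length
    return rng.normal(dx, position_noise), rng.normal(dy, position_noise)

Missing neighbours (the outermost walkers, or ones whose hull has been destroyed) contribute a pair of zeros instead.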
(self.package.position.y - y) / self.package_length + neighbor_obs.append(self.np_random.normal(xd, self.position_noise)) + neighbor_obs.append(self.np_random.normal(yd, self.position_noise)) + neighbor_obs.append(self.np_random.normal(self.package.angle, self.angle_noise)) + obs.append(np.array(walker_obs + neighbor_obs)) + + shaping = -5.0 * abs(walker_obs[0]) + rewards[i] = shaping - self.prev_shaping[i] + self.prev_shaping[i] = shaping + + package_shaping = self.forward_reward * 130 * self.package.position.x / SCALE + rewards += (package_shaping - self.prev_package_shaping) + self.prev_package_shaping = package_shaping + + self.scroll = xpos.mean() - VIEWPORT_W / SCALE / 5 - (self.n_walkers - 1) * \ + WALKER_SEPERATION * TERRAIN_STEP + + done = [False] * self.n_walkers + if self.game_over or self.package.position.x < 0: + rewards += self.terminate_reward + done = [True] * self.n_walkers + if self.package.position.x > (self.terrain_length - TERRAIN_GRASS) * TERRAIN_STEP: + done = [True] * self.n_walkers + rewards += self.fall_reward * self.fallen_walkers + if self.terminate_on_fall and np.sum(self.fallen_walkers) > 0: + rewards += self.terminate_reward + done = [True] * self.n_walkers + for i, (fallen, walker) in enumerate(zip(self.fallen_walkers, self.walkers)): + if fallen: + if not done[i]: + rewards[i] += self.terminate_reward + if self.remove_on_fall: + walker._destroy() + done[i] = True + + return rewards, done, obs + + def step(self, action, agent_id, is_last): + # action is array of size 4 + action = action.reshape(4) + assert self.walkers[agent_id].hull is not None, agent_id + self.walkers[agent_id].apply_action(action) + if is_last: + self.world.Step(1.0 / FPS, 6 * 30, 2 * 30) + rewards, done, mod_obs = self.scroll_subroutine() + self.last_obs = mod_obs + global_reward = rewards.mean() + local_reward = rewards * self.local_ratio + self.last_rewards = global_reward * (1. 
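The reward mixing at the end of step() blends the mean reward across walkers with each walker's own reward according to local_ratio = 1 - shared_reward. Because shared_reward is a boolean, local_ratio is either 0 or 1, so the extra multiplication of local_reward by local_ratio in the expression above has no numerical effect. The intended blend, as a stand-alone helper:

import numpy as np

def blend_rewards(rewards, local_ratio):
    # local_ratio = 0 -> every walker receives the mean (fully shared reward);
    # local_ratio = 1 -> every walker keeps its own reward.
    rewards = np.asarray(rewards, dtype=np.float64)
    return rewards.mean() * (1.0 - local_ratio) + rewards * local_ratio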
- self.local_ratio) + local_reward * self.local_ratio + self.last_dones = done + self.frames = self.frames + 1 + + def get_last_rewards(self): + return dict(zip(list(range(self.n_walkers)), map(lambda r: np.float64(r), self.last_rewards))) + + def get_last_dones(self): + return dict(zip(self.agent_list, self.last_dones)) + + def get_last_obs(self): + return dict(zip(list(range(self.n_walkers)), [walker.get_observation() for walker in self.walkers])) + + def observe(self, agent): + o = self.last_obs[agent] + o = np.array(o, dtype=np.float32) + return o + + def render(self, mode='human', close=False): + if close: + self.close() + return + + offset = 200 # compensates for the negative coordinates + render_scale = SCALE / self.package_scale / 0.75 + if self.screen is None: + pygame.init() + self.screen = pygame.display.set_mode((VIEWPORT_W, VIEWPORT_H)) + + self.surf = pygame.Surface((VIEWPORT_W + self.scroll * render_scale + offset, VIEWPORT_H)) + + pygame.draw.polygon( + self.surf, + color=(215, 215, 255), + points=[ + (self.scroll * render_scale + offset, 0), + (self.scroll * render_scale + VIEWPORT_W + offset, 0), + (self.scroll * render_scale + VIEWPORT_W + offset, VIEWPORT_H), + (self.scroll * render_scale + offset, VIEWPORT_H), + ], + ) + + for poly, x1, x2 in self.cloud_poly: + if x2 < self.scroll / 2: + continue + if x1 > self.scroll / 2 + VIEWPORT_W / SCALE * self.package_scale: + continue + gfxdraw.aapolygon( + self.surf, + [(p[0] * render_scale + self.scroll * render_scale / 2 + offset, + p[1] * render_scale) for p in poly], + (255, 255, 255), + ) + gfxdraw.filled_polygon( + self.surf, + [(p[0] * render_scale + self.scroll * render_scale / 2 + offset, + p[1] * render_scale) for p in poly], + (255, 255, 255), + ) + + for poly, color in self.terrain_poly: + if poly[1][0] < self.scroll: + continue + if poly[0][0] > self.scroll + VIEWPORT_W / SCALE * self.package_scale: + continue + scaled_poly = [] + for coord in poly: + scaled_poly.append(([coord[0] * render_scale + offset, coord[1] * render_scale])) + gfxdraw.aapolygon(self.surf, scaled_poly, color) + gfxdraw.filled_polygon(self.surf, scaled_poly, color) + + self.lidar_render = (self.lidar_render + 1) % 100 + i = self.lidar_render + for walker in self.walkers: + if i < 2 * len(walker.lidar): + l = walker.lidar[i] if i < len(walker.lidar) else walker.lidar[len(walker.lidar) - i - 1] + pygame.draw.line( + self.surf, + color=(255, 0, 0), + start_pos=(l.p1[0] * render_scale + offset, l.p1[1] * render_scale), + end_pos=(l.p2[0] * render_scale + offset, l.p2[1] * render_scale), + width=1, + ) + + for obj in self.drawlist: + for f in obj.fixtures: + trans = f.body.transform + if type(f.shape) is circleShape: + pygame.draw.circle( + self.surf, + color=obj.color1, + center=trans * f.shape.pos * render_scale + offset, + radius=f.shape.radius * render_scale, + ) + pygame.draw.circle( + self.surf, + color=obj.color2, + center=trans * f.shape.pos * render_scale + offset, + radius=f.shape.radius * render_scale, + ) + else: + path = [trans * v * render_scale for v in f.shape.vertices] + path = [[c[0] + offset, c[1]] for c in path] + if len(path) > 2: + gfxdraw.aapolygon(self.surf, path, obj.color1) + gfxdraw.filled_polygon(self.surf, path, obj.color1) + path.append(path[0]) + gfxdraw.aapolygon(self.surf, path, obj.color2) + else: + pygame.draw.aaline( + self.surf, + start_pos=path[0], + end_pos=path[1], + color=obj.color2, + ) + + flagy1 = TERRAIN_HEIGHT * render_scale + flagy2 = flagy1 + 50 * render_scale / SCALE + x = TERRAIN_STEP * 3 * 
render_scale + offset + pygame.draw.aaline( + self.surf, color=(0, 0, 0), start_pos=(x, flagy1), end_pos=(x, flagy2) + ) + + f = [ + (x, flagy2), + (x, flagy2 - 10 * render_scale / SCALE), + (x + 25 * render_scale / SCALE, flagy2 - 5 * render_scale / SCALE), + ] + pygame.draw.polygon(self.surf, color=(230, 51, 0), points=f) + pygame.draw.lines( + self.surf, color=(0, 0, 0), points=f + [f[0]], width=1, closed=False + ) + + self.surf = pygame.transform.flip(self.surf, False, True) + self.screen.blit(self.surf, (-self.scroll * render_scale - offset, 0)) + if mode == "human": + pygame.display.flip() + + if mode == "rgb_array": + return np.transpose( + np.array(pygame.surfarray.pixels3d(self.screen)), axes=(1, 0, 2) + ) + else: + return self.isopen + + def _generate_package(self): + init_x = np.mean(self.start_x) + init_y = TERRAIN_HEIGHT + 3 * LEG_H + self.package = self.world.CreateDynamicBody( + position=(init_x, init_y), + fixtures=fixtureDef( + shape=polygonShape(vertices=[(x * self.package_scale / SCALE, y / SCALE) + for x, y in PACKAGE_POLY]), + density=1.0, + friction=0.5, + categoryBits=0x004, + # maskBits=0x001, # collide only with ground + restitution=0.0) # 0.99 bouncy + ) + self.package.color1 = (127, 102, 229) + self.package.color2 = (76, 76, 127) + + def _generate_terrain(self, hardcore): + GRASS, STUMP, STAIRS, PIT, _STATES_ = range(5) + state = GRASS + velocity = 0.0 + y = TERRAIN_HEIGHT + counter = TERRAIN_STARTPAD + oneshot = False + self.terrain = [] + self.terrain_x = [] + self.terrain_y = [] + for i in range(self.terrain_length): + x = i * TERRAIN_STEP + self.terrain_x.append(x) + + if state == GRASS and not oneshot: + velocity = 0.8 * velocity + 0.01 * np.sign(TERRAIN_HEIGHT - y) + if i > TERRAIN_STARTPAD: + velocity += self.np_random.uniform(-1, 1) / SCALE + y += velocity + + elif state == PIT and oneshot: + counter = self.np_random.randint(3, 5) + poly = [ + (x, y), + (x + TERRAIN_STEP, y), + (x + TERRAIN_STEP, y - 4 * TERRAIN_STEP), + (x, y - 4 * TERRAIN_STEP), + ] + t = self.world.CreateStaticBody(fixtures=fixtureDef( + shape=polygonShape(vertices=poly), friction=FRICTION)) + t.color1, t.color2 = (255, 255, 255), (153, 153, 153) + self.terrain.append(t) + t = self.world.CreateStaticBody(fixtures=fixtureDef(shape=polygonShape( + vertices=[(p[0] + TERRAIN_STEP * counter, p[1]) for p in poly]), + friction=FRICTION)) + t.color1, t.color2 = (255, 255, 255), (153, 153, 153) + self.terrain.append(t) + counter += 2 + original_y = y + + elif state == PIT and not oneshot: + y = original_y + if counter > 1: + y -= 4 * TERRAIN_STEP + + elif state == STUMP and oneshot: + counter = self.np_random.randint(1, 3) + poly = [ + (x, y), + (x + counter * TERRAIN_STEP, y), + (x + counter * TERRAIN_STEP, y + counter * TERRAIN_STEP), + (x, y + counter * TERRAIN_STEP), + ] + t = self.world.CreateStaticBody(fixtures=fixtureDef( + shape=polygonShape(vertices=poly), friction=FRICTION)) + t.color1, t.color2 = (255, 255, 255), (153, 153, 153) + self.terrain.append(t) + + elif state == STAIRS and oneshot: + stair_height = +1 if self.np_random.rand() > 0.5 else -1 + stair_width = self.np_random.randint(4, 5) + stair_steps = self.np_random.randint(3, 5) + original_y = y + for s in range(stair_steps): + poly = [ + (x + (s * stair_width) * TERRAIN_STEP, + y + (s * stair_height) * TERRAIN_STEP), + (x + ((1 + s) * stair_width) * TERRAIN_STEP, + y + (s * stair_height) * TERRAIN_STEP), + (x + ((1 + s) * stair_width) * TERRAIN_STEP, + y + (-1 + s * stair_height) * TERRAIN_STEP), + (x + (s * stair_width) * 
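In the GRASS state of _generate_terrain() the ground height follows a damped random walk: the step-to-step velocity decays by a factor of 0.8, is nudged back toward TERRAIN_HEIGHT, and (once past the flat start pad) is jittered with uniform noise. A sketch of just that filter under the same constants:

import numpy as np

def grass_heights(rng, n_steps, start_pad, target_height, scale=30.0):
    # Mirrors the GRASS branch above: low-pass filtered noise that keeps the
    # ground meandering around target_height.
    y, velocity, heights = target_height, 0.0, []
    for i in range(n_steps):
        velocity = 0.8 * velocity + 0.01 * np.sign(target_height - y)
        if i > start_pad:
            velocity += rng.uniform(-1, 1) / scale
        y += velocity
        heights.append(y)
    return heights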
TERRAIN_STEP, + y + (-1 + s * stair_height) * TERRAIN_STEP), + ] + t = self.world.CreateStaticBody(fixtures=fixtureDef( + shape=polygonShape(vertices=poly), friction=FRICTION)) + t.color1, t.color2 = (255, 255, 255), (153, 153, 153) + self.terrain.append(t) + counter = stair_steps * stair_width + + elif state == STAIRS and not oneshot: + s = stair_steps * stair_width - counter - stair_height + n = s / stair_width + y = original_y + (n * stair_height) * TERRAIN_STEP + + oneshot = False + self.terrain_y.append(y) + counter -= 1 + if counter == 0: + counter = self.np_random.randint( + TERRAIN_GRASS / 2, TERRAIN_GRASS) + if state == GRASS and hardcore: + state = self.np_random.randint(1, _STATES_) + oneshot = True + else: + state = GRASS + oneshot = True + + self.terrain_poly = [] + for i in range(self.terrain_length - 1): + poly = [(self.terrain_x[i], self.terrain_y[i]), + (self.terrain_x[i + 1], self.terrain_y[i + 1])] + t = self.world.CreateStaticBody(fixtures=fixtureDef( + shape=edgeShape(vertices=poly), + friction=FRICTION + )) + color = (76, 255 if i % 2 == 0 else 204, 76) + t.color1 = color + t.color2 = color + self.terrain.append(t) + color = (102, 153, 76) + poly += [(poly[1][0], 0), (poly[0][0], 0)] + self.terrain_poly.append((poly, color)) + self.terrain.reverse() + + def _generate_clouds(self): + # Sorry for the clouds, couldn't resist + self.cloud_poly = [] + for i in range(self.terrain_length // 20): + x = self.np_random.uniform(0, self.terrain_length) * TERRAIN_STEP + y = VIEWPORT_H / SCALE * 3 / 4 + poly = [(x + 15 * TERRAIN_STEP * math.sin(3.14 * 2 * a / 5) + self.np_random.uniform( + 0, 5 * TERRAIN_STEP), y + 5 * TERRAIN_STEP * math.cos(3.14 * 2 * a / 5) + self.np_random.uniform(0, 5 * TERRAIN_STEP)) for a in range(5)] + x1 = min(p[0] for p in poly) + x2 = max(p[0] for p in poly) + self.cloud_poly.append((poly, x1, x2)) diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker_v8.py b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker_v8.py new file mode 100644 index 0000000000000000000000000000000000000000..93225ea93f202387c24585c04a17358fb6d6a67e --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/multiwalker_v8.py @@ -0,0 +1 @@ +from .multiwalker.multiwalker import env, parallel_env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__init__.py b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bc102f7f8ef57dab6f3a4da9992f8985cafa8401 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/manual_control.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/manual_control.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..52c44ea8e7867f812ec7658b68f8e1b47f8e509f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/manual_control.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/pursuit.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/pursuit.cpython-39.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..14c3cc221ba6f7e759dce93fbfad94089646151b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/pursuit.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/pursuit_base.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/pursuit_base.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f9454b57843517fac648774587dcd8bd97ff40b5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/__pycache__/pursuit_base.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/manual_control.py b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/manual_control.py new file mode 100644 index 0000000000000000000000000000000000000000..4c72f46387dc7bf434ae0b11664933c1bcf49130 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/manual_control.py @@ -0,0 +1,83 @@ +import os +import time + +import numpy as np +import pygame + + +def manual_control(**kwargs): + from .pursuit import env as _env + + x_size = 5 + y_size = 5 + obs_range = 3 + n_evaders = 1 + n_pursuers = 2 + + clock = pygame.time.Clock() + + # obs_range should be odd 3, 5, 7, etc + env = _env( + n_pursuers=n_pursuers, n_evaders=n_evaders, x_size=x_size, y_size=y_size, obs_range=obs_range + ) + + env.reset() + + done = False + + global _quit_loop, _actions, _agent_id + _quit_loop = np.array([0]) + _actions = np.array([4] * env.num_agents) + _agent_id = np.array([0]) + + done = False + num_frames = 0 + total_reward = 0 + + while not done: + clock.tick(15) + num_frames += 1 + env.render() + if _quit_loop[0]: + break + for event in pygame.event.get(): + if event.type == pygame.KEYDOWN: + if event.key == pygame.K_j: + # pressing 'j' moves the focus of control to the next agent + # control rolls over to the first agent + _agent_id[0] = (_agent_id[0] + 1) % env.num_agents + elif event.key == pygame.K_k: + # pressing 'k' moves the focus of control to the previous agent + # control rolls over to the lastagent + _agent_id[0] = (_agent_id[0] - 1) % env.num_agents + elif event.key == pygame.K_UP: + # p1: up + _actions[_agent_id[0]] = 3 + elif event.key == pygame.K_DOWN: + # p1: down + _actions[_agent_id[0]] = 2 + elif event.key == pygame.K_LEFT: + # p1: left + _actions[_agent_id[0]] = 0 + elif event.key == pygame.K_RIGHT: + # p1: right + _actions[_agent_id[0]] = 1 + elif event.key == pygame.K_BACKSPACE: + env.reset() + elif event.key == pygame.K_ESCAPE: + _quit_loop[0] = 1 + # break + # actions should be a dict of numpy arrays + for a in _actions: + obs, reward, d, info = env.last() + env.step(a) + pygame.event.pump() + if d: + done = True + total_reward += reward + + _actions = np.array([4] * env.num_agents) + + env.render() + time.sleep(2) + env.close() diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/pursuit.py b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/pursuit.py new file mode 100644 index 0000000000000000000000000000000000000000..c632097d9f3a62dbc01b387a4eb48c040e3019ef --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/pursuit.py @@ -0,0 +1,100 @@ +import os + +import numpy as np +import pygame +from gym.utils import EzPickle + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from .manual_control import manual_control +from .pursuit_base import Pursuit as _env + + +def env(**kwargs): + env = 
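manual_control() above drives one pursuer at a time from the keyboard: the arrow keys write an action index for the focused agent, j/k cycle the focus through the pursuers, and every agent that is not being driven keeps the default action 4 ("stay", reset each frame). The mapping, restated compactly (the dict name is illustrative):

import pygame

KEY_TO_ACTION = {          # action indices exactly as wired in the event handler above
    pygame.K_LEFT: 0,
    pygame.K_RIGHT: 1,
    pygame.K_DOWN: 2,
    pygame.K_UP: 3,
}
STAY = 4                   # default applied to every agent each frame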
raw_env(**kwargs) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +parallel_env = parallel_wrapper_fn(env) + + +class raw_env(AECEnv, EzPickle): + + metadata = { + "render.modes": ["human", "rgb_array"], + "name": "pursuit_v4", + 'is_parallelizable': True, + 'video.frames_per_second': 5, + } + + def __init__(self, *args, **kwargs): + EzPickle.__init__(self, *args, **kwargs) + self.env = _env(*args, **kwargs) + pygame.init() + self.agents = ["pursuer_" + str(a) for a in range(self.env.num_agents)] + self.possible_agents = self.agents[:] + self.agent_name_mapping = dict(zip(self.agents, list(range(self.num_agents)))) + self._agent_selector = agent_selector(self.agents) + # spaces + self.n_act_agents = self.env.act_dims[0] + self.action_spaces = dict(zip(self.agents, self.env.action_space)) + self.observation_spaces = dict(zip(self.agents, self.env.observation_space)) + self.steps = 0 + self.closed = False + + def seed(self, seed=None): + self.env.seed(seed) + + def reset(self): + self.steps = 0 + self.agents = self.possible_agents[:] + self.rewards = dict(zip(self.agents, [(0) for _ in self.agents])) + self._cumulative_rewards = dict(zip(self.agents, [(0) for _ in self.agents])) + self.dones = dict(zip(self.agents, [False for _ in self.agents])) + self.infos = dict(zip(self.agents, [{} for _ in self.agents])) + self._agent_selector.reinit(self.agents) + self.agent_selection = self._agent_selector.next() + self.env.reset() + + def close(self): + if not self.closed: + self.closed = True + self.env.close() + + def render(self, mode="human"): + if not self.closed: + return self.env.render(mode) + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + agent = self.agent_selection + self.env.step( + action, self.agent_name_mapping[agent], self._agent_selector.is_last() + ) + for k in self.dones: + if self.env.frames >= self.env.max_cycles: + self.dones[k] = True + else: + self.dones[k] = self.env.is_terminal + for k in self.agents: + self.rewards[k] = self.env.latest_reward_state[self.agent_name_mapping[k]] + self.steps += 1 + + self._cumulative_rewards[self.agent_selection] = 0 + self.agent_selection = self._agent_selector.next() + self._accumulate_rewards() + + def observe(self, agent): + o = self.env.safely_observe(self.agent_name_mapping[agent]) + return np.swapaxes(o, 2, 0) + + def observation_space(self, agent: str): + return self.observation_spaces[agent] + + def action_space(self, agent: str): + return self.action_spaces[agent] diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/pursuit_base.py b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/pursuit_base.py new file mode 100644 index 0000000000000000000000000000000000000000..cf017a39d3f98b7758908b1d0f3a438bb5942702 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/pursuit_base.py @@ -0,0 +1,506 @@ +import glob +import os +from collections import defaultdict +from os.path import join +from subprocess import call +from typing import Optional + +import numpy as np +import pygame +from gym import spaces +from gym.utils import seeding + +from .utils import agent_utils, two_d_maps +from .utils.agent_layer import AgentLayer +from .utils.controllers import PursuitPolicy, RandomPolicy, SingleActionPolicy + + +class Pursuit: + + def __init__(self, x_size: int = 16, + y_size: int = 16, + max_cycles: int = 500, + shared_reward: bool = True, + n_evaders: int = 30, + n_pursuers: int = 8, + obs_range: int = 7, + n_catch: 
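raw_env.observe() above returns the patch from safely_observe() with its axes swapped: the base env builds observations channel-first as (3, obs_range, obs_range), while the declared observation space is channel-last (obs_range, obs_range, 3). A two-line check of that reshaping:

import numpy as np

channel_first = np.zeros((3, 7, 7), dtype=np.float32)   # layout produced by safely_observe()
channel_last = np.swapaxes(channel_first, 2, 0)          # layout handed to the agent
assert channel_last.shape == (7, 7, 3)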
int = 2, + freeze_evaders: bool = False, + evader_controller: Optional[PursuitPolicy] = None, + pursuer_controller: Optional[PursuitPolicy] = None, + tag_reward: float = 0.01, + catch_reward: float = 5.0, + urgency_reward: float = -0.1, + surround: bool = True, + constraint_window: float = 1.0 + ): + """ + In evade pursuit a set of pursuers must 'tag' a set of evaders + Required arguments: + x_size, y_size: World size + shared_reward: whether the rewards should be shared between all agents + n_evaders + n_pursuers + obs_range: how far each agent can see + Optional arguments: + pursuer controller: stationary policy of ally pursuers + evader controller: stationary policy of opponent evaders + + tag_reward: reward for 'tagging' a single evader + + max_cycles: after how many frames should the game end + n_catch: how surrounded evader needs to be, before removal + freeze_evaders: toggle evaders move or not + catch_reward: reward for pursuer who catches an evader + urgency_reward: reward added in each step + surround: toggles surround condition for evader removal + constraint_window: window in which agents can randomly spawn + """ + + self.x_size = x_size + self.y_size = y_size + self.map_matrix = two_d_maps.rectangle_map(self.x_size, self.y_size) + self.max_cycles = max_cycles + self.seed() + + self.shared_reward = shared_reward + self.local_ratio = 1.0 - float(self.shared_reward) + + self.n_evaders = n_evaders + self.n_pursuers = n_pursuers + self.num_agents = self.n_pursuers + + self.latest_reward_state = [0 for _ in range(self.num_agents)] + self.latest_done_state = [False for _ in range(self.num_agents)] + self.latest_obs = [None for _ in range(self.num_agents)] + + # can see 7 grids around them by default + self.obs_range = obs_range + # assert self.obs_range % 2 != 0, "obs_range should be odd" + self.obs_offset = int((self.obs_range - 1) / 2) + self.pursuers = agent_utils.create_agents( + self.n_pursuers, self.map_matrix, self.obs_range, self.np_random) + self.evaders = agent_utils.create_agents( + self.n_evaders, self.map_matrix, self.obs_range, self.np_random) + + self.pursuer_layer = AgentLayer(x_size, y_size, self.pursuers) + self.evader_layer = AgentLayer(x_size, y_size, self.evaders) + + self.n_catch = n_catch + + n_act_purs = self.pursuer_layer.get_nactions(0) + n_act_ev = self.evader_layer.get_nactions(0) + + self.freeze_evaders = freeze_evaders + + if self.freeze_evaders: + self.evader_controller = SingleActionPolicy(4) if evader_controller is None else evader_controller + self.pursuer_controller = SingleActionPolicy(4) if pursuer_controller is None else pursuer_controller + else: + self.evader_controller = RandomPolicy(n_act_purs, self.np_random) if evader_controller is None else evader_controller + self.pursuer_controller = RandomPolicy(n_act_ev, self.np_random) if pursuer_controller is None else pursuer_controller + + self.current_agent_layer = np.zeros((x_size, y_size), dtype=np.int32) + + self.tag_reward = tag_reward + + self.catch_reward = catch_reward + + self.urgency_reward = urgency_reward + + self.ally_actions = np.zeros(n_act_purs, dtype=np.int32) + self.opponent_actions = np.zeros(n_act_ev, dtype=np.int32) + + max_agents_overlap = max(self.n_pursuers, self.n_evaders) + obs_space = spaces.Box(low=0, high=max_agents_overlap, shape=( + self.obs_range, self.obs_range, 3), dtype=np.float32) + act_space = spaces.Discrete(n_act_purs) + self.action_space = [act_space for _ in range(self.n_pursuers)] + + self.observation_space = [obs_space for _ in range(self.n_pursuers)] + 
self.act_dims = [n_act_purs for i in range(self.n_pursuers)] + + self.evaders_gone = np.array([False for i in range(self.n_evaders)]) + + self.surround = surround + + self.constraint_window = constraint_window + + self.surround_mask = np.array([[-1, 0], [1, 0], [0, 1], [0, -1]]) + + self.model_state = np.zeros( + (4,) + self.map_matrix.shape, dtype=np.float32) + self.renderOn = False + self.pixel_scale = 30 + + self.frames = 0 + self.reset() + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def close(self): + if self.renderOn: + pygame.event.pump() + pygame.display.quit() + pygame.quit() + self.renderOn = False + + ################################################################# + # The functions below are the interface with MultiAgentSiulator # + ################################################################# + + @property + def agents(self): + return self.pursuers + + def seed(self, seed=None): + self.np_random, seed_ = seeding.np_random(seed) + try: + policies = [self.evader_controller, self.pursuer_controller] + for policy in policies: + try: + policy.set_rng(self.np_random) + except AttributeError: + pass + except AttributeError: + pass + + return [seed_] + + def get_param_values(self): + return self.__dict__ + + def reset(self): + self.evaders_gone.fill(False) + + x_window_start = self.np_random.uniform(0.0, 1.0 - self.constraint_window) + y_window_start = self.np_random.uniform(0.0, 1.0 - self.constraint_window) + xlb, xub = int(self.x_size * x_window_start), int(self.x_size * (x_window_start + self.constraint_window)) + ylb, yub = int(self.y_size * y_window_start), int(self.y_size * (y_window_start + self.constraint_window)) + constraints = [[xlb, xub], [ylb, yub]] + + self.pursuers = agent_utils.create_agents(self.n_pursuers, self.map_matrix, self.obs_range, self.np_random, + randinit=True, constraints=constraints) + self.pursuer_layer = AgentLayer(self.x_size, self.y_size, self.pursuers) + + self.evaders = agent_utils.create_agents(self.n_evaders, self.map_matrix, self.obs_range, self.np_random, + randinit=True, constraints=constraints) + self.evader_layer = AgentLayer(self.x_size, self.y_size, self.evaders) + + self.latest_reward_state = [0 for _ in range(self.num_agents)] + self.latest_done_state = [False for _ in range(self.num_agents)] + self.latest_obs = [None for _ in range(self.num_agents)] + + self.model_state[0] = self.map_matrix + self.model_state[1] = self.pursuer_layer.get_state_matrix() + self.model_state[2] = self.evader_layer.get_state_matrix() + + self.frames = 0 + self.renderOn = False + + return self.safely_observe(0) + + def step(self, action, agent_id, is_last): + agent_layer = self.pursuer_layer + opponent_layer = self.evader_layer + opponent_controller = self.evader_controller + + # actual action application, change the pursuer layer + agent_layer.move_agent(agent_id, action) + + # Update only the pursuer layer + self.model_state[1] = self.pursuer_layer.get_state_matrix() + + self.latest_reward_state = self.reward() / self.num_agents + + if is_last: + # Possibly change the evader layer + ev_remove, pr_remove, pursuers_who_remove = self.remove_agents() + + for i in range(opponent_layer.n_agents()): + # controller input should be an observation, but doesn't matter right now + a = opponent_controller.act(self.model_state) + opponent_layer.move_agent(i, a) + + self.latest_reward_state += self.catch_reward * pursuers_who_remove + self.latest_reward_state += 
self.urgency_reward + self.frames = self.frames + 1 + + # Update the remaining layers + self.model_state[0] = self.map_matrix + self.model_state[2] = self.evader_layer.get_state_matrix() + + global_val = self.latest_reward_state.mean() + local_val = self.latest_reward_state + self.latest_reward_state = self.local_ratio * local_val + (1 - self.local_ratio) * global_val + + def draw_model_state(self): + # -1 is building pixel flag + x_len, y_len = self.model_state[0].shape + for x in range(x_len): + for y in range(y_len): + pos = pygame.Rect( + self.pixel_scale * x, self.pixel_scale * y, self.pixel_scale, self.pixel_scale) + col = (0, 0, 0) + if self.model_state[0][x][y] == -1: + col = (255, 255, 255) + pygame.draw.rect(self.screen, col, pos) + + def draw_pursuers_observations(self): + for i in range(self.pursuer_layer.n_agents()): + x, y = self.pursuer_layer.get_position(i) + patch = pygame.Surface( + (self.pixel_scale * self.obs_range, self.pixel_scale * self.obs_range)) + patch.set_alpha(128) + patch.fill((255, 152, 72)) + ofst = self.obs_range / 2.0 + self.screen.blit( + patch, (self.pixel_scale * (x - ofst + 1 / 2), self.pixel_scale * (y - ofst + 1 / 2))) + + def draw_pursuers(self): + for i in range(self.pursuer_layer.n_agents()): + x, y = self.pursuer_layer.get_position(i) + center = (int(self.pixel_scale * x + self.pixel_scale / 2), + int(self.pixel_scale * y + self.pixel_scale / 2)) + col = (255, 0, 0) + pygame.draw.circle(self.screen, col, center, int(self.pixel_scale / 3)) + + def draw_evaders(self): + for i in range(self.evader_layer.n_agents()): + x, y = self.evader_layer.get_position(i) + center = (int(self.pixel_scale * x + self.pixel_scale / 2), + int(self.pixel_scale * y + self.pixel_scale / 2)) + col = (0, 0, 255) + + pygame.draw.circle(self.screen, col, center, int(self.pixel_scale / 3)) + + def draw_agent_counts(self): + font = pygame.font.SysFont('Comic Sans MS', self.pixel_scale * 2 // 3) + + agent_positions = defaultdict(int) + evader_positions = defaultdict(int) + + for i in range(self.evader_layer.n_agents()): + x, y = self.evader_layer.get_position(i) + evader_positions[(x, y)] += 1 + + for i in range(self.pursuer_layer.n_agents()): + x, y = self.pursuer_layer.get_position(i) + agent_positions[(x, y)] += 1 + + for (x, y) in evader_positions: + (pos_x, pos_y) = (self.pixel_scale * x + self.pixel_scale // 2, + self.pixel_scale * y + self.pixel_scale // 2) + + agent_count = evader_positions[(x, y)] + count_text: str + if agent_count < 1: + count_text = "" + elif agent_count < 10: + count_text = str(agent_count) + else: + count_text = "+" + + text = font.render(count_text, False, (0, 255, 255)) + + self.screen.blit(text, (pos_x, pos_y)) + + for (x, y) in agent_positions: + (pos_x, pos_y) = (self.pixel_scale * x + self.pixel_scale // 2, + self.pixel_scale * y + self.pixel_scale // 2) + + agent_count = agent_positions[(x, y)] + count_text: str + if agent_count < 1: + count_text = "" + elif agent_count < 10: + count_text = str(agent_count) + else: + count_text = "+" + + text = font.render(count_text, False, (255, 255, 0)) + + self.screen.blit(text, (pos_x, pos_y - self.pixel_scale // 2)) + + def render(self, mode="human"): + if not self.renderOn: + if mode == "human": + pygame.display.init() + self.screen = pygame.display.set_mode( + (self.pixel_scale * self.x_size, self.pixel_scale * self.y_size)) + else: + self.screen = pygame.Surface((self.pixel_scale * self.x_size, self.pixel_scale * self.y_size)) + + self.renderOn = True + self.draw_model_state() + + 
self.draw_pursuers_observations() + + self.draw_evaders() + self.draw_pursuers() + self.draw_agent_counts() + + observation = pygame.surfarray.pixels3d(self.screen) + new_observation = np.copy(observation) + del observation + if mode == "human": + pygame.display.flip() + return np.transpose(new_observation, axes=(1, 0, 2)) if mode == "rgb_array" else None + + def save_image(self, file_name): + self.render() + capture = pygame.surfarray.array3d(self.screen) + + xl, xh = -self.obs_offset - 1, self.x_size + self.obs_offset + 1 + yl, yh = -self.obs_offset - 1, self.y_size + self.obs_offset + 1 + + window = pygame.Rect(xl, yl, xh, yh) + subcapture = capture.subsurface(window) + + pygame.image.save(subcapture, file_name) + + def reward(self): + es = self.evader_layer.get_state_matrix() # evader positions + rewards = [ + self.tag_reward * np.sum(es[np.clip( + self.pursuer_layer.get_position( + i)[0] + self.surround_mask[:, 0], 0, self.x_size - 1 + ), np.clip( + self.pursuer_layer.get_position(i)[1] + self.surround_mask[:, 1], 0, self.y_size - 1)]) + for i in range(self.n_pursuers) + ] + return np.array(rewards) + + @property + def is_terminal(self): + # ev = self.evader_layer.get_state_matrix() # evader positions + # if np.sum(ev) == 0.0: + if self.evader_layer.n_agents() == 0: + return True + return False + + def update_ally_controller(self, controller): + self.ally_controller = controller + + def update_opponent_controller(self, controller): + self.opponent_controller = controller + + def n_agents(self): + return self.pursuer_layer.n_agents() + + def safely_observe(self, i): + agent_layer = self.pursuer_layer + obs = self.collect_obs(agent_layer, i) + return obs + + def collect_obs(self, agent_layer, i): + for j in range(self.n_agents()): + if i == j: + return self.collect_obs_by_idx(agent_layer, i) + assert False, "bad index" + + def collect_obs_by_idx(self, agent_layer, agent_idx): + # returns a flattened array of all the observations + obs = np.zeros((3, self.obs_range, self.obs_range), dtype=np.float32) + obs[0].fill(1.0) # border walls set to -0.1? + xp, yp = agent_layer.get_position(agent_idx) + + xlo, xhi, ylo, yhi, xolo, xohi, yolo, yohi = self.obs_clip(xp, yp) + + obs[0:3, xolo:xohi, yolo:yohi] = np.abs(self.model_state[0:3, xlo:xhi, ylo:yhi]) + return obs + + def obs_clip(self, x, y): + xld = x - self.obs_offset + xhd = x + self.obs_offset + yld = y - self.obs_offset + yhd = y + self.obs_offset + xlo, xhi, ylo, yhi = (np.clip(xld, 0, self.x_size - 1), np.clip(xhd, 0, self.x_size - 1), + np.clip(yld, 0, self.y_size - 1), np.clip(yhd, 0, self.y_size - 1)) + xolo, yolo = abs(np.clip(xld, -self.obs_offset, 0) + ), abs(np.clip(yld, -self.obs_offset, 0)) + xohi, yohi = xolo + (xhi - xlo), yolo + (yhi - ylo) + return xlo, xhi + 1, ylo, yhi + 1, xolo, xohi + 1, yolo, yohi + 1 + + def remove_agents(self): + """ + Remove agents that are caught. 
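obs_clip() above trims the obs_range x obs_range window around a pursuer at the map borders: it returns both the slice of the map that is actually visible and the offsets at which that slice should be written into the fixed-size observation patch, whose remaining cells keep the border-fill value set in collect_obs_by_idx(). A mirror of the arithmetic plus one worked example:

import numpy as np

def obs_clip(x, y, x_size, y_size, obs_offset):
    # Same arithmetic as Pursuit.obs_clip(): map-slice bounds, then offsets into
    # the local (obs_range x obs_range) patch.
    xld, xhd = x - obs_offset, x + obs_offset
    yld, yhd = y - obs_offset, y + obs_offset
    xlo, xhi = np.clip(xld, 0, x_size - 1), np.clip(xhd, 0, x_size - 1)
    ylo, yhi = np.clip(yld, 0, y_size - 1), np.clip(yhd, 0, y_size - 1)
    xolo, yolo = abs(np.clip(xld, -obs_offset, 0)), abs(np.clip(yld, -obs_offset, 0))
    xohi, yohi = xolo + (xhi - xlo), yolo + (yhi - ylo)
    return xlo, xhi + 1, ylo, yhi + 1, xolo, xohi + 1, yolo, yohi + 1

# A pursuer at (0, 8) on a 16x16 map with obs_range 7 (offset 3) sees only a
# 4x7 map slice [0:4, 5:12]; it is written into patch[3:7, 0:7], leaving the
# first three rows of the patch at the border value.
print(obs_clip(0, 8, 16, 16, 3))   # values: (0, 4, 5, 12, 3, 7, 0, 7)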
Return tuple (n_evader_removed, n_pursuer_removed, purs_sur) + purs_sur: bool array, which pursuers surrounded an evader + """ + n_pursuer_removed = 0 + n_evader_removed = 0 + removed_evade = [] + removed_pursuit = [] + + ai = 0 + rems = 0 + xpur, ypur = np.nonzero(self.model_state[1]) + purs_sur = np.zeros(self.n_pursuers, dtype=bool) + for i in range(self.n_evaders): + if self.evaders_gone[i]: + continue + x, y = self.evader_layer.get_position(ai) + if self.surround: + pos_that_catch = self.surround_mask + \ + self.evader_layer.get_position(ai) + truths = np.array( + [np.equal([xi, yi], pos_that_catch).all(axis=1) for xi, yi in zip(xpur, ypur)]) + if np.sum(truths.any(axis=0)) == self.need_to_surround(x, y): + removed_evade.append(ai - rems) + self.evaders_gone[i] = True + rems += 1 + tt = truths.any(axis=1) + for j in range(self.n_pursuers): + xpp, ypp = self.pursuer_layer.get_position(j) + tes = np.concatenate( + (xpur[tt], ypur[tt])).reshape(2, len(xpur[tt])) + tem = tes.T == np.array([xpp, ypp]) + if np.any(np.all(tem, axis=1)): + purs_sur[j] = True + ai += 1 + else: + if self.model_state[1, x, y] >= self.n_catch: + # add prob remove? + removed_evade.append(ai - rems) + self.evaders_gone[i] = True + rems += 1 + for j in range(self.n_pursuers): + xpp, ypp = self.pursuer_layer.get_position(j) + if xpp == x and ypp == y: + purs_sur[j] = True + ai += 1 + + ai = 0 + for i in range(self.pursuer_layer.n_agents()): + x, y = self.pursuer_layer.get_position(i) + # can remove pursuers probabilitcally here? + for ridx in removed_evade: + self.evader_layer.remove_agent(ridx) + n_evader_removed += 1 + for ridx in removed_pursuit: + self.pursuer_layer.remove_agent(ridx) + n_pursuer_removed += 1 + return n_evader_removed, n_pursuer_removed, purs_sur + + def need_to_surround(self, x, y): + """ + Compute the number of surrounding grid cells in x,y position that are open + (no wall or obstacle) + """ + tosur = 4 + if x == 0 or x == (self.x_size - 1): + tosur -= 1 + if y == 0 or y == (self.y_size - 1): + tosur -= 1 + neighbors = self.surround_mask + np.array([x, y]) + for n in neighbors: + xn, yn = n + if not 0 < xn < self.x_size or not 0 < yn < self.y_size: + continue + if self.model_state[0][xn, yn] == -1: + tosur -= 1 + return tosur diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__init__.py b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..85e0ae0e4892e7be84d034ff52b99c401a863700 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__init__.py @@ -0,0 +1,6 @@ +from .agent_layer import AgentLayer +from .agent_utils import create_agents, feasible_position_exp, set_agents +from .controllers import RandomPolicy, SingleActionPolicy +from .discrete_agent import DiscreteAgent +from .two_d_maps import (add_rectangle, complex_map, cross_map, gen_map, multi_scale_map, + rectangle_map, resize, simple_soccer_map) diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1d0f71c0550a527806b6111ba6e6424b7c6378da Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/agent_layer.cpython-39.pyc 
b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/agent_layer.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5c9d89639ec3b11dd9d89bb6bbd58db39a62f471 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/agent_layer.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/agent_utils.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/agent_utils.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d73e8ec85a8d9966e585c543ef2cc4c9f0a26cd0 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/agent_utils.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/controllers.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/controllers.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ea916d58b518918f72873592d424b7a5ebeb2184 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/controllers.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/discrete_agent.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/discrete_agent.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..346c277bb7d451ca629a6639f24a8958e2fe1362 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/discrete_agent.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/two_d_maps.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/two_d_maps.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30737169fafaf1d454437b71e4247fc5187a5adc Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/__pycache__/two_d_maps.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/agent_layer.py b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/agent_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..98dbec54153c2cd0eb81bfca45217c612acc8f7a --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/agent_layer.py @@ -0,0 +1,72 @@ +import numpy as np + +################################################################# +# Implements a Cooperating Agent Layer for 2D problems +################################################################# + + +class AgentLayer: + def __init__(self, xs, ys, allies, seed=1): + """ + xs: x size of map + ys: y size of map + allies: list of ally agents + seed: seed + + Each ally agent must support: + - move(action) + - current_position() + - nactions() + - set_position(x, y) + """ + + self.allies = allies + self.nagents = len(allies) + self.global_state = np.zeros((xs, ys), dtype=np.int32) + + def n_agents(self): + return self.nagents + + def move_agent(self, agent_idx, action): + return self.allies[agent_idx].step(action) + + def set_position(self, agent_idx, x, y): + self.allies[agent_idx].set_position(x, y) + + def get_position(self, agent_idx): + """ + Returns the position of the given agent + """ + return self.allies[agent_idx].current_position() + + def get_nactions(self, agent_idx): + return self.allies[agent_idx].nactions() + + def remove_agent(self, agent_idx): + # idx is 
between zero and nagents + self.allies.pop(agent_idx) + self.nagents -= 1 + + def get_state_matrix(self): + """ + Returns a matrix representing the positions of all allies + Example: matrix contains the number of allies at give (x,y) position + 0 0 0 1 0 0 0 + 0 2 0 2 0 0 0 + 0 0 0 0 0 0 1 + 1 0 0 0 0 0 5 + """ + gs = self.global_state + gs.fill(0) + for ally in self.allies: + x, y = ally.current_position() + gs[x, y] += 1 + return gs + + def get_state(self): + pos = np.zeros(2 * len(self.allies)) + idx = 0 + for ally in self.allies: + pos[idx: (idx + 2)] = ally.get_state() + idx += 2 + return pos diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/agent_utils.py b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/agent_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..284c2dbec92af90b76a9222af60996dd4439ff79 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/agent_utils.py @@ -0,0 +1,85 @@ +import numpy as np + +from .discrete_agent import DiscreteAgent + +################################################################# +# Implements utility functions for multi-agent DRL +################################################################# + + +def create_agents( + nagents, + map_matrix, + obs_range, + randomizer, + flatten=False, + randinit=False, + constraints=None, +): + """ + Initializes the agents on a map (map_matrix) + -nagents: the number of agents to put on the map + -randinit: if True will place agents in random, feasible locations + if False will place all agents at 0 + expanded_mat: This matrix is used to spawn non-adjacent agents + """ + xs, ys = map_matrix.shape + agents = [] + expanded_mat = np.zeros((xs + 2, ys + 2)) + for i in range(nagents): + xinit, yinit = (0, 0) + if randinit: + xinit, yinit = feasible_position_exp( + randomizer, map_matrix, expanded_mat, constraints=constraints + ) + # fill expanded_mat + expanded_mat[xinit + 1, yinit + 1] = -1 + expanded_mat[xinit + 2, yinit + 1] = -1 + expanded_mat[xinit, yinit + 1] = -1 + expanded_mat[xinit + 1, yinit + 2] = -1 + expanded_mat[xinit + 1, yinit] = -1 + agent = DiscreteAgent( + xs, ys, map_matrix, randomizer, obs_range=obs_range, flatten=flatten + ) + agent.set_position(xinit, yinit) + agents.append(agent) + return agents + + +def feasible_position_exp(randomizer, map_matrix, expanded_mat, constraints=None): + """ + Returns a feasible position on map (map_matrix) + """ + xs, ys = map_matrix.shape + while True: + if constraints is None: + x = randomizer.randint(0, xs) + y = randomizer.randint(0, ys) + else: + xl, xu = constraints[0] + yl, yu = constraints[1] + x = randomizer.randint(xl, xu) + y = randomizer.randint(yl, yu) + if map_matrix[x, y] != -1 and expanded_mat[x + 1, y + 1] != -1: + return (x, y) + + +def set_agents(agent_matrix, map_matrix): + # check input sizes + if agent_matrix.shape != map_matrix.shape: + raise ValueError("Agent configuration and map matrix have mis-matched sizes") + + agents = [] + xs, ys = agent_matrix.shape + for i in range(xs): + for j in range(ys): + n_agents = agent_matrix[i, j] + if n_agents > 0: + if map_matrix[i, j] == -1: + raise ValueError( + "Trying to place an agent into a building: check map matrix and agent configuration" + ) + agent = DiscreteAgent(xs, ys, map_matrix) + agent.set_position(i, j) + agents.append(agent) + return agents diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/controllers.py b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/controllers.py new file mode 100644 index 
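create_agents() above avoids spawning agents on buildings or directly next to each other: every placement stamps a plus-shaped mark into a padded expanded_mat, and feasible_position_exp() keeps rejection-sampling grid cells until it finds one that is clear in both the map and that mask. A condensed sketch of the same idea:

import numpy as np

def spawn_positions(rng, map_matrix, n_agents):
    # Sample n_agents cells that are neither buildings (-1) nor orthogonally
    # adjacent to an earlier spawn, mirroring create_agents()/feasible_position_exp().
    xs, ys = map_matrix.shape
    blocked = np.zeros((xs + 2, ys + 2))   # padded so the neighbour marks never go out of bounds
    spots = []
    while len(spots) < n_agents:
        x, y = rng.randint(0, xs), rng.randint(0, ys)
        if map_matrix[x, y] == -1 or blocked[x + 1, y + 1] == -1:
            continue
        spots.append((x, y))
        for dx, dy in [(0, 0), (1, 0), (-1, 0), (0, 1), (0, -1)]:
            blocked[x + 1 + dx, y + 1 + dy] = -1
    return spots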
0000000000000000000000000000000000000000..58a4adf83e3a1a1ac77b89defb08a814af0113e4 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/controllers.py @@ -0,0 +1,35 @@ +import abc + +import numpy as np + +################################################################# +# Implements multi-agent controllers +################################################################# + + +class PursuitPolicy(abc.ABC): + @abc.abstractmethod + def act(self, state: np.ndarray) -> int: + raise NotImplementedError + + +class RandomPolicy(PursuitPolicy): + + # constructor + def __init__(self, n_actions, rng): + self.rng = rng + self.n_actions = n_actions + + def set_rng(self, rng): + self.rng = rng + + def act(self, state): + return self.rng.randint(self.n_actions) + + +class SingleActionPolicy(PursuitPolicy): + def __init__(self, a): + self.action = a + + def act(self, state): + return self.action diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/discrete_agent.py b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/discrete_agent.py new file mode 100644 index 0000000000000000000000000000000000000000..a7f1b94043d3e34b725b7be07ecc8c1b2131a704 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/discrete_agent.py @@ -0,0 +1,124 @@ +import numpy as np +from gym import spaces + +from ..._utils import Agent + +################################################################# +# Implements the Single 2D Agent Dynamics +################################################################# + + +class DiscreteAgent(Agent): + + # constructor + def __init__( + self, + xs, + ys, + map_matrix, + randomizer, + obs_range=3, + n_channels=3, + seed=1, + flatten=False, + ): + # map_matrix is the may of the environment (-1 are buildings) + # n channels is the number of observation channels + + self.random_state = randomizer + + self.xs = xs + self.ys = ys + + self.eactions = [ + 0, # move left + 1, # move right + 2, # move up + 3, # move down + 4, + ] # stay + + self.motion_range = [[-1, 0], [1, 0], [0, 1], [0, -1], [0, 0]] + + self.current_pos = np.zeros(2, dtype=np.int32) # x and y position + self.last_pos = np.zeros(2, dtype=np.int32) + self.temp_pos = np.zeros(2, dtype=np.int32) + + self.map_matrix = map_matrix + + self.terminal = False + + self._obs_range = obs_range + + if flatten: + self._obs_shape = (n_channels * obs_range ** 2 + 1,) + else: + self._obs_shape = (obs_range, obs_range, 4) + # self._obs_shape = (4, obs_range, obs_range) + + @property + def observation_space(self): + return spaces.Box(low=-np.inf, high=np.inf, shape=self._obs_shape) + + @property + def action_space(self): + return spaces.Discrete(5) + + # Dynamics Functions + def step(self, a): + cpos = self.current_pos + lpos = self.last_pos + # if dead or reached goal dont move + if self.terminal: + return cpos + # if in building, dead, and stay there + if self.inbuilding(cpos[0], cpos[1]): + self.terminal = True + return cpos + tpos = self.temp_pos + tpos[0] = cpos[0] + tpos[1] = cpos[1] + + # transition is deterministic + tpos += self.motion_range[a] + x = tpos[0] + y = tpos[1] + # check bounds + if not self.inbounds(x, y): + return cpos + # if bumped into building, then stay + if self.inbuilding(x, y): + return cpos + else: + lpos[0] = cpos[0] + lpos[1] = cpos[1] + cpos[0] = x + cpos[1] = y + return cpos + + def get_state(self): + return self.current_pos + + # Helper Functions + def inbounds(self, x, y): + if 0 <= x < self.xs and 0 <= y < self.ys: + return True + return False + + def 
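controllers.py defines the PursuitPolicy interface (a single act(state) -> int method) that the pursuit env accepts for evader_controller and pursuer_controller; set_rng() is optional and is picked up by Pursuit.seed() when present. A hypothetical custom policy, assuming PursuitPolicy is importable from this module (the class name and its stay-bias are illustrative only):

import numpy as np

class MostlyStayPolicy(PursuitPolicy):
    # Hypothetical controller: stays put with probability p_stay, otherwise acts randomly.
    def __init__(self, n_actions, rng, p_stay=0.8):
        self.n_actions, self.rng, self.p_stay = n_actions, rng, p_stay

    def set_rng(self, rng):        # optional hook used by Pursuit.seed()
        self.rng = rng

    def act(self, state):
        if self.rng.uniform() < self.p_stay:
            return 4               # index 4 is the "stay" action in DiscreteAgent
        return self.rng.randint(self.n_actions)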
inbuilding(self, x, y): + if self.map_matrix[x, y] == -1: + return True + return False + + def nactions(self): + return len(self.eactions) + + def set_position(self, xs, ys): + self.current_pos[0] = xs + self.current_pos[1] = ys + + def current_position(self): + return self.current_pos + + def last_position(self): + return self.last_pos diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/two_d_maps.py b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/two_d_maps.py new file mode 100644 index 0000000000000000000000000000000000000000..f629810315d4cde27b4358cb659a66f892b58df1 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit/utils/two_d_maps.py @@ -0,0 +1,122 @@ +import numpy as np +from scipy.ndimage import zoom + + +def rectangle_map(xs, ys, xb=0.3, yb=0.2): + """ + Returns a 2D 'map' with a rectangle building centered in the middle + Map is a 2D numpy array + xb and yb are buffers for each dim representing the raio of the map to leave open on each side + """ + + rmap = np.zeros((xs, ys), dtype=np.int32) + for i in range(xs): + for j in range(ys): + # are we in the rectnagle in x dim? + if (float(i) / xs) > xb and (float(i) / xs) < (1.0 - xb): + # are we in the rectangle in y dim? + if (float(j) / ys) > yb and (float(j) / ys) < (1.0 - yb): + rmap[i, j] = -1 # -1 is building pixel flag + return rmap + + +def complex_map(xs, ys): + """ + Returns a 2D 'map' with a four different obstacles + Map is a 2D numpy array + """ + cmap = np.zeros((xs, ys), dtype=np.int32) + cmap = add_rectangle(cmap, xc=0.8, yc=0.5, xl=0.1, yl=0.8) + cmap = add_rectangle(cmap, xc=0.4, yc=0.8, xl=0.5, yl=0.2) + cmap = add_rectangle(cmap, xc=0.5, yc=0.5, xl=0.4, yl=0.2) + cmap = add_rectangle(cmap, xc=0.3, yc=0.1, xl=0.5, yl=0.1) + cmap = add_rectangle(cmap, xc=0.1, yc=0.3, xl=0.1, yl=0.5) + return cmap + + +def gen_map( + xs, + ys, + n_obs, + randomizer, + center_bounds=[0.0, 1.0], + length_bounds=[0.1, 0.5], + gmap=None, +): + cl, cu = center_bounds + ll, lu = length_bounds + if gmap is None: + gmap = np.zeros((xs, ys), dtype=np.int32) + for _ in range(n_obs): + xc = randomizer.uniform(cl, cu) + yc = randomizer.uniform(cl, cu) + xl = randomizer.uniform(ll, lu) + yl = randomizer.uniform(ll, lu) + gmap = add_rectangle(gmap, xc=xc, yc=yc, xl=xl, yl=yl) + return gmap + + +def multi_scale_map( + xs, + ys, + randomizer, + scales=[(3, [0.2, 0.3]), (10, [0.1, 0.2]), (30, [0.05, 0.1]), (150, [0.01, 0.05])], +): + gmap = np.zeros((xs, ys), dtype=np.int32) + for scale in scales: + n, lb = scale + gmap = gen_map(xs, ys, n, randomizer, length_bounds=lb, gmap=gmap) + return gmap + + +def add_rectangle(input_map, xc, yc, xl, yl): + """ + Add a rectangle to the input map + centered a xc, yc with dimensions xl, yl. + Input specs are normalized wrt the map. 
+ """ + assert len(input_map.shape) == 2, "input_map must be a numpy matrix" + + xs, ys = input_map.shape + xcc, ycc = int(round(xs * xc)), int(round(ys * yc)) + xll, yll = int(round(xs * xl)), int(round(ys * yl)) + if xll <= 1: + x_lbound, x_upbound = xcc, xcc + 1 + else: + x_lbound, x_upbound = xcc - xll / 2, xcc + xll / 2 + if yll <= 1: + y_lbound, y_upbound = ycc, ycc + 1 + else: + y_lbound, y_upbound = ycc - yll / 2, ycc + yll / 2 + + # assert x_lbound >= 0 and x_upbound < xs, "Invalid rectangel config, x out of bounds" + # assert y_lbound >= 0 and y_upbound < ys, "Invalid rectangel config, y out of bounds" + + x_lbound, x_upbound = np.clip([x_lbound, x_upbound], 0, xs) + y_lbound, y_upbound = np.clip([y_lbound, y_upbound], 0, ys) + + for i in range(x_lbound, x_upbound): + for j in range(y_lbound, y_upbound): + input_map[j, i] = -1 + return input_map + + +def resize(scale, old_mats): + new_mats = [] + for mat in old_mats: + new_mats.append(zoom(mat, scale, order=0)) + return np.array(new_mats) + + +def simple_soccer_map(xs=6, ys=9): + assert xs % 2 == 0, "xs must be even" + smap = np.zeros((xs, ys), dtype=np.int32) + smap[0: xs / 2 - 1, 0] = -1 + smap[xs / 2 + 1: xs, 0] = -1 + smap[0: xs / 2 - 1, ys - 1] = -1 + smap[xs / 2 + 1: xs, ys - 1] = -1 + return smap + + +def cross_map(xs, ys): + pass diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit_v4.py b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit_v4.py new file mode 100644 index 0000000000000000000000000000000000000000..5fbe2f29e83f04738779957ad1d30b125d2821a9 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/pursuit_v4.py @@ -0,0 +1 @@ +from .pursuit.pursuit import env, manual_control, parallel_env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/__init__.py b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..97fa0671b26f9c8864f14cd804a60ee894c33201 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/__pycache__/waterworld.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/__pycache__/waterworld.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9ed62be4efff1e0cc94067c683567b5ee72b6bf Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/__pycache__/waterworld.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/__pycache__/waterworld_base.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/__pycache__/waterworld_base.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4e4ff7972b00e5b355d27d2d9a42fb53d3bfea37 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/__pycache__/waterworld_base.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/waterworld.py b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/waterworld.py new file mode 100644 index 0000000000000000000000000000000000000000..a2df9cfd2ccd4587ecb18372dcf0e41ab206c003 --- /dev/null +++ 
b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/waterworld.py @@ -0,0 +1,97 @@ +import numpy as np + +from pettingzoo import AECEnv +from pettingzoo.utils import agent_selector, wrappers +from pettingzoo.utils.conversions import parallel_wrapper_fn + +from .waterworld_base import FPS +from .waterworld_base import MAWaterWorld as _env + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.ClipOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +parallel_env = parallel_wrapper_fn(env) + + +class raw_env(AECEnv): + + metadata = { + 'render.modes': ['human', "rgb_array"], + 'name': 'waterworld_v3', + 'is_parallelizable': True, + 'video.frames_per_second': FPS, + } + + def __init__(self, *args, **kwargs): + super().__init__() + self.env = _env(*args, **kwargs) + + self.agents = ["pursuer_" + str(r) for r in range(self.env.num_agents)] + self.possible_agents = self.agents[:] + self.agent_name_mapping = dict(zip(self.agents, list(range(self.num_agents)))) + self._agent_selector = agent_selector(self.agents) + # spaces + self.action_spaces = dict(zip(self.agents, self.env.action_space)) + self.observation_spaces = dict( + zip(self.agents, self.env.observation_space)) + self.has_reset = False + + def observation_space(self, agent): + return self.observation_spaces[agent] + + def action_space(self, agent): + return self.action_spaces[agent] + + def seed(self, seed=None): + self.env.seed(seed) + + def convert_to_dict(self, list_of_list): + return dict(zip(self.agents, list_of_list)) + + def reset(self): + self.has_reset = True + self.env.reset() + self.agents = self.possible_agents[:] + self._agent_selector.reinit(self.agents) + self.agent_selection = self._agent_selector.next() + self.rewards = dict(zip(self.agents, [(0) for _ in self.agents])) + self._cumulative_rewards = dict(zip(self.agents, [(0) for _ in self.agents])) + self.dones = dict(zip(self.agents, [False for _ in self.agents])) + self.infos = dict(zip(self.agents, [{} for _ in self.agents])) + + def close(self): + if self.has_reset: + self.env.close() + + def render(self, mode="human"): + return self.env.render(mode) + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + agent = self.agent_selection + + is_last = self._agent_selector.is_last() + self.env.step(action, self.agent_name_mapping[agent], is_last) + + for r in self.rewards: + self.rewards[r] = self.env.control_rewards[self.agent_name_mapping[r]] + if is_last: + for r in self.rewards: + self.rewards[r] += self.env.last_rewards[self.agent_name_mapping[r]] + + if self.env.frames >= self.env.max_cycles: + self.dones = dict(zip(self.agents, [True for _ in self.agents])) + else: + self.dones = dict(zip(self.agents, self.env.last_dones)) + self._cumulative_rewards[self.agent_selection] = 0 + self.agent_selection = self._agent_selector.next() + self._accumulate_rewards() + + def observe(self, agent): + return self.env.observe(self.agent_name_mapping[agent]) diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/waterworld_base.py b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/waterworld_base.py new file mode 100644 index 0000000000000000000000000000000000000000..0baea6fe88fcf0c9935056435df656fa0a1136a1 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld/waterworld_base.py @@ -0,0 +1,611 @@ +import math +import os + +import numpy as np +import pygame +from gym import spaces +from gym.utils import seeding +from scipy.spatial import distance as ssd + +from 
.._utils import Agent + +FPS = 15 + +class Archea(Agent): + + def __init__(self, idx, radius, n_sensors, sensor_range, max_accel, speed_features=True): + self._idx = idx + self._radius = radius + self._n_sensors = n_sensors + self._sensor_range = sensor_range + self._max_accel = max_accel + # Number of observation coordinates from each sensor + self._sensor_obscoord = 5 + if speed_features: + self._sensor_obscoord += 3 + self._sensor_obs_coord = self._n_sensors * self._sensor_obscoord + self._obs_dim = self._sensor_obs_coord + 2 # +1 for is_colliding_evader, +1 for is_colliding_poison + + self._position = None + self._velocity = None + + # Generate self._n_sensors angles, evenly spaced from 0 to 2pi + # We generate 1 extra angle and remove it because linspace[0] = 0 = 2pi = linspace[-1] + angles = np.linspace(0., 2. * np.pi, self._n_sensors + 1)[:-1] + # Convert angles to x-y coordinates + sensor_vectors = np.c_[np.cos(angles), np.sin(angles)] + self._sensors = sensor_vectors + + @property + def observation_space(self): + return spaces.Box(low=np.float32(-np.sqrt(2)), high=np.float32(2 * np.sqrt(2)), shape=(self._obs_dim,), dtype=np.float32) + + @property + def action_space(self): + return spaces.Box(low=np.float32(-self._max_accel), high=np.float32(self._max_accel), shape=(2,), dtype=np.float32) + + @property + def position(self): + assert self._position is not None + return self._position + + @property + def velocity(self): + assert self._velocity is not None + return self._velocity + + def set_position(self, pos): + assert pos.shape == (2,) + self._position = pos + + def set_velocity(self, velocity): + assert velocity.shape == (2,) + self._velocity = velocity + + @property + def sensors(self): + assert self._sensors is not None + return self._sensors + + def sensed(self, object_coord, object_radius, same=False): + """Whether object would be sensed by the pursuers""" + relative_coord = object_coord - np.expand_dims(self.position, 0) + # Projection of object coordinate in direction of sensor + sensorvals = self.sensors.dot(relative_coord.T) + # Set sensorvals to np.inf when object should not be seen by sensor + distance_squared = (relative_coord**2).sum(axis=1)[None, :] + sensorvals[ + (sensorvals < 0) # Wrong direction (by more than 90 degrees in both directions) + | (sensorvals - object_radius > self._sensor_range) # Outside sensor range + | (distance_squared - sensorvals**2 > object_radius**2) # Sensor does not intersect object + ] = np.inf + if same: + # Set sensors values for sensing the current object to np.inf + sensorvals[:, self._idx - 1] = np.inf + return sensorvals + + def sense_barriers(self, min_pos=0, max_pos=1): + sensor_vectors = self.sensors * self._sensor_range + sensor_endpoints = sensor_vectors + self.position + + # Clip sensor lines on the environment's barriers. 
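For intuition, a minimal standalone sketch of the projection-and-mask test that sensed() performs above, using made-up positions and four sensors; it is illustrative only and does not use the Archea class itself.

import numpy as np

# Four hypothetical sensor directions, evenly spaced around the circle (unit vectors).
angles = np.linspace(0.0, 2.0 * np.pi, 4 + 1)[:-1]
sensors = np.c_[np.cos(angles), np.sin(angles)]          # shape (4, 2)

pursuer_pos = np.array([0.5, 0.5])
object_pos = np.array([[0.6, 0.5]])                      # one object, shape (1, 2)
object_radius, sensor_range = 0.03, 0.2                  # made-up sizes

relative = object_pos - pursuer_pos[None, :]             # object position relative to the pursuer
proj = sensors.dot(relative.T)                           # distance to the object along each sensor ray
dist_sq = (relative ** 2).sum(axis=1)[None, :]           # squared distance to the object

blocked = (
    (proj < 0)                                           # object is behind this sensor
    | (proj - object_radius > sensor_range)              # object is beyond the sensor's reach
    | (dist_sq - proj ** 2 > object_radius ** 2)         # the sensor ray misses the object's circle
)
proj[blocked] = np.inf
print(proj)                                              # finite entries are sensors that see the object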
+ # Note that any clipped vectors may not be at the same angle as the original sensors + clipped_endpoints = np.clip(sensor_endpoints, min_pos, max_pos) + + # Extract just the sensor vectors after clipping + clipped_vectors = clipped_endpoints - self.position + + # Find the ratio of the clipped sensor vector to the original sensor vector + # Scaling the vector by this ratio will limit the end of the vector to the barriers + ratios = np.divide(clipped_vectors, sensor_vectors, out=np.ones_like(clipped_vectors), + where=np.abs(sensor_vectors) > 0.00000001) + + # Find the minimum ratio (x or y) of clipped endpoints to original endpoints + minimum_ratios = np.amin(ratios, axis=1) + + # Convert to 2d array of size (n_sensors, 1) + sensor_values = np.expand_dims(minimum_ratios, 0) + + # Set values beyond sensor range to infinity + does_sense = minimum_ratios < (1.0 - 1e-4) + does_sense = np.expand_dims(does_sense, 0) + sensor_values[np.logical_not(does_sense)] = np.inf + + # Convert -0 to 0 + sensor_values[sensor_values == -0] = 0 + + return sensor_values.T + + +class MAWaterWorld(): + + def __init__(self, n_pursuers=5, n_evaders=5, n_poison=10, n_coop=2, n_sensors=30, sensor_range=0.2, + radius=0.015, obstacle_radius=0.2, obstacle_coord=(0.5, 0.5), + pursuer_max_accel=0.01, evader_speed=0.01, poison_speed=0.01, poison_reward=-1.0, + food_reward=10.0, encounter_reward=0.01, thrust_penalty=-0.5, local_ratio=1.0, + speed_features=True, max_cycles=500): + """ + n_pursuers: number of pursuing archea (agents) + n_evaders: number of evader archea + n_poison: number of poison archea + n_coop: number of pursuing archea (agents) that must be touching food at the same time to consume it + n_sensors: number of sensors on all pursuing archea (agents) + sensor_range: length of sensor dendrite on all pursuing archea (agents) + radius: archea base radius. Pursuer: radius, evader: 2 x radius, poison: 3/4 x radius + obstacle_radius: radius of obstacle object + obstacle_coord: coordinate of obstacle object. 
Can be set to `None` to use a random location + pursuer_max_accel: pursuer archea maximum acceleration (maximum action size) + evader_speed: evading archea speed + poison_speed: poison archea speed + poison_reward: reward for pursuer consuming a poison object (typically negative) + food_reward:reward for pursuers consuming an evading archea + encounter_reward: reward for a pursuer colliding with an evading archea + thrust_penalty: scaling factor for the negative reward used to penalize large actions + local_ratio: Proportion of reward allocated locally vs distributed globally among all agents + speed_features: toggles whether pursuing archea (agent) sensors detect speed of other archea + max_cycles: After max_cycles steps all agents will return done + """ + self.n_pursuers = n_pursuers + self.n_evaders = n_evaders + self.n_coop = n_coop + self.n_poison = n_poison + self.obstacle_radius = obstacle_radius + obstacle_coord = np.array(obstacle_coord) + self.initial_obstacle_coord = np.random.uniform(0, 1, 2) if obstacle_coord is None else obstacle_coord + self.pursuer_max_accel = pursuer_max_accel + self.evader_speed = evader_speed + self.poison_speed = poison_speed + self.radius = radius + self.n_sensors = n_sensors + self.sensor_range = np.ones(self.n_pursuers) * min(sensor_range, (math.ceil(math.sqrt(2) * 100) / 100.0)) + self.poison_reward = poison_reward + self.food_reward = food_reward + self.thrust_penalty = thrust_penalty + self.encounter_reward = encounter_reward + self.last_rewards = [np.float64(0) for _ in range(self.n_pursuers)] + self.control_rewards = [0 for _ in range(self.n_pursuers)] + self.last_dones = [False for _ in range(self.n_pursuers)] + self.last_obs = [None for _ in range(self.n_pursuers)] + + self.n_obstacles = 1 + self.local_ratio = local_ratio + self._speed_features = speed_features + self.max_cycles = max_cycles + self.seed() + # TODO: Look into changing hardcoded radius ratios + self._pursuers = [ + Archea(pursuer_idx + 1, self.radius, self.n_sensors, sensor_range, self.pursuer_max_accel, + speed_features=self._speed_features) + for pursuer_idx in range(self.n_pursuers) + ] + self._evaders = [ + Archea(evader_idx + 1, self.radius * 2, self.n_pursuers, 0, self.evader_speed) + for evader_idx in range(self.n_evaders) + ] + self._poisons = [ + Archea(poison_idx + 1, self.radius * 3 / 4, self.n_poison, 0, self.poison_speed) + for poison_idx in range(self.n_poison) + ] + + self.num_agents = self.n_pursuers + self.action_space = [agent.action_space for agent in self._pursuers] + self.observation_space = [ + agent.observation_space for agent in self._pursuers] + + self.renderOn = False + self.pixel_scale = 30 * 25 + + self.cycle_time = 1.0 * 15. 
/ FPS + self.frames = 0 + self.reset() + + def close(self): + if self.renderOn: + # pygame.event.pump() + pygame.display.quit() + pygame.quit() + + @property + def agents(self): + return self._pursuers + + def get_param_values(self): + return self.__dict__ + + def seed(self, seed=None): + self.np_random, seed = seeding.np_random(seed) + return [seed] + + def _generate_coord(self, radius): + coord = self.np_random.rand(2) + # Create random coordinate that avoids obstacles + while ssd.cdist(coord[None, :], self.obstacle_coords) <= radius * 2 + self.obstacle_radius: + coord = self.np_random.rand(2) + return coord + + def reset(self): + self.frames = 0 + # Initialize obstacles + if self.initial_obstacle_coord is None: + # Generate obstacle positions in range [0, 1) + self.obstacle_coords = self.np_random.rand(self.n_obstacles, 2) + else: + self.obstacle_coords = self.initial_obstacle_coord[None, :] + # Set each obstacle's velocity to 0 + # TODO: remove if obstacles should never move + self.obstacle_speeds = np.zeros((self.n_obstacles, 2)) + + # Initialize pursuers + for pursuer in self._pursuers: + pursuer.set_position(self._generate_coord(pursuer._radius)) + pursuer.set_velocity(np.zeros(2)) + + # Initialize evaders + for evader in self._evaders: + evader.set_position(self._generate_coord(evader._radius)) + # Generate velocity such that speed <= self.evader_speed + velocity = self.np_random.rand(2) - 0.5 + speed = np.linalg.norm(velocity) + if speed > self.evader_speed: + # Limit speed to self.evader_speed + velocity = velocity / speed * self.evader_speed + evader.set_velocity(velocity) + + # Initialize poisons + for poison in self._poisons: + poison.set_position(self._generate_coord(poison._radius)) + # Generate both velocity components from range [-self.poison_speed, self.poison_speed) + # Generate velocity such that speed <= self.poison_speed + velocity = self.np_random.rand(2) - 0.5 + speed = np.linalg.norm(velocity) + if speed > self.poison_speed: + # Limit speed to self.poison_speed + velocity = velocity / speed * self.poison_speed + poison.set_velocity(velocity) + + rewards = np.zeros(self.n_pursuers) + sensor_features, collided_pursuer_evader, collided_pursuer_poison, rewards \ + = self.collision_handling_subroutine(rewards, True) + obs_list = self.observe_list( + sensor_features, collided_pursuer_evader, collided_pursuer_poison) + self.last_rewards = [np.float64(0) for _ in range(self.n_pursuers)] + self.control_rewards = [0 for _ in range(self.n_pursuers)] + self.last_dones = [False for _ in range(self.n_pursuers)] + self.last_obs = obs_list + + return obs_list[0] + + def _caught(self, is_colliding_x_y, n_coop): + """ Check whether collision results in catching the object + + This is because you need `n_coop` agents to collide with the object to actually catch it + """ + # Number of collisions for each y + n_collisions = is_colliding_x_y.sum(axis=0) + # List of y that have been caught + caught_y = np.where(n_collisions >= n_coop)[0] + + # Boolean array indicating which x caught any y in caught_y + did_x_catch_y = is_colliding_x_y[:, caught_y] + # List of x that caught corresponding y in caught_y + x_caught_y = np.where(did_x_catch_y >= 1)[0] + + return caught_y, x_caught_y + + def _closest_dist(self, closest_object_idx, input_sensorvals): + """Closest distances according to `idx`""" + sensorvals = [] + + for pursuer_idx in range(self.n_pursuers): + sensors = np.arange(self.n_sensors) # sensor indices + objects = closest_object_idx[pursuer_idx, ...] 
# object indices + sensorvals.append(input_sensorvals[pursuer_idx, ..., sensors, objects]) + + return np.c_[sensorvals] + + def _extract_speed_features(self, object_velocities, object_sensorvals, sensed_mask): + # sensed_mask is a boolean mask of which sensor values detected an object + sensorvals = [] + for pursuer in self._pursuers: + relative_speed = object_velocities - np.expand_dims(pursuer.velocity, 0) + sensorvals.append(pursuer.sensors.dot(relative_speed.T)) + sensed_speed = np.c_[sensorvals] # Speeds in direction of each sensor + + speed_features = np.zeros((self.n_pursuers, self.n_sensors)) + + sensorvals = [] + for pursuer_idx in range(self.n_pursuers): + sensorvals.append( + sensed_speed[pursuer_idx, :, :][np.arange(self.n_sensors), object_sensorvals[pursuer_idx, :]] + ) + # Set sensed values, all others remain 0 + speed_features[sensed_mask] = np.c_[sensorvals][sensed_mask] + + return speed_features + + def collision_handling_subroutine(self, rewards, is_last): + # Stop pursuers upon hitting a wall + for pursuer in self._pursuers: + clipped_coord = np.clip(pursuer.position, 0, 1) + clipped_velocity = pursuer.velocity + # If x or y position gets clipped, set x or y velocity to 0 respectively + clipped_velocity[pursuer.position != clipped_coord] = 0 + # Save clipped velocity and position + pursuer.set_velocity(clipped_velocity) + pursuer.set_position(clipped_coord) + + def rebound_particles(particles, n): + collisions_particle_obstacle = np.zeros(n) + # Particles rebound on hitting an obstacle + for idx, particle in enumerate(particles): + obstacle_distance = ssd.cdist(np.expand_dims( + particle.position, 0), self.obstacle_coords) + is_colliding = obstacle_distance <= particle._radius + self.obstacle_radius + collisions_particle_obstacle[idx] = is_colliding.sum() + if collisions_particle_obstacle[idx] > 0: + # Rebound the particle that collided with an obstacle + velocity_scale = particle._radius + self.obstacle_radius - \ + ssd.euclidean(particle.position, self.obstacle_coords) + pos_diff = particle.position - self.obstacle_coords[0] + new_pos = particle.position + velocity_scale * pos_diff + particle.set_position(new_pos) + + collision_normal = particle.position - self.obstacle_coords[0] + # project current velocity onto collision normal + current_vel = particle.velocity + proj_numer = np.dot(current_vel, collision_normal) + cllsn_mag = np.dot(collision_normal, collision_normal) + proj_vel = (proj_numer / cllsn_mag) * collision_normal + perp_vel = current_vel - proj_vel + total_vel = perp_vel - proj_vel + particle.set_velocity(total_vel) + + rebound_particles(self._pursuers, self.n_pursuers) + + if is_last: + rebound_particles(self._evaders, self.n_evaders) + rebound_particles(self._poisons, self.n_poison) + + positions_pursuer = np.array([pursuer.position for pursuer in self._pursuers]) + positions_evader = np.array([evader.position for evader in self._evaders]) + positions_poison = np.array([poison.position for poison in self._poisons]) + + # Find evader collisions + distances_pursuer_evader = ssd.cdist(positions_pursuer, positions_evader) + # Generate n_evaders x n_pursuers matrix of boolean values for collisions + collisions_pursuer_evader = distances_pursuer_evader <= np.asarray([ + pursuer._radius + evader._radius for pursuer in self._pursuers + for evader in self._evaders + ]).reshape(self.n_pursuers, self.n_evaders) + + # Number of collisions depends on n_coop, how many are needed to catch an evader + caught_evaders, pursuer_evader_catches = self._caught( + 
collisions_pursuer_evader, self.n_coop) + + # Find poison collisions + distances_pursuer_poison = ssd.cdist(positions_pursuer, positions_poison) + collisions_pursuer_poison = distances_pursuer_poison <= np.asarray([ + pursuer._radius + poison._radius for pursuer in self._pursuers + for poison in self._poisons + ]).reshape(self.n_pursuers, self.n_poison) + + caught_poisons, pursuer_poison_collisions = self._caught( + collisions_pursuer_poison, 1) + + # Find sensed obstacles + sensorvals_pursuer_obstacle = np.array( + [pursuer.sensed(self.obstacle_coords, self.obstacle_radius) for pursuer in self._pursuers]) + + # Find sensed barriers + sensorvals_pursuer_barrier = np.array( + [pursuer.sense_barriers() for pursuer in self._pursuers]) + + # Find sensed evaders + sensorvals_pursuer_evader = np.array( + [pursuer.sensed(positions_evader, self.radius * 2) for pursuer in self._pursuers]) + + # Find sensed poisons + sensorvals_pursuer_poison = np.array( + [pursuer.sensed(positions_poison, self.radius * 3 / 4) for pursuer in self._pursuers]) + + # Find sensed pursuers + sensorvals_pursuer_pursuer = np.array( + [pursuer.sensed(positions_pursuer, self.radius, same=True) for pursuer in self._pursuers]) + + # Collect distance features + def sensor_features(sensorvals): + closest_idx_array = np.argmin(sensorvals, axis=2) + closest_distances = self._closest_dist(closest_idx_array, sensorvals) + finite_mask = np.isfinite(closest_distances) + sensed_distances = np.ones((self.n_pursuers, self.n_sensors)) + sensed_distances[finite_mask] = closest_distances[finite_mask] + return sensed_distances, closest_idx_array, finite_mask + + obstacle_distance_features, _, _ = sensor_features(sensorvals_pursuer_obstacle) + barrier_distance_features, _, _ = sensor_features(sensorvals_pursuer_barrier) + evader_distance_features, closest_evader_idx, evader_mask = sensor_features(sensorvals_pursuer_evader) + poison_distance_features, closest_poison_idx, poison_mask = sensor_features(sensorvals_pursuer_poison) + pursuer_distance_features, closest_pursuer_idx, pursuer_mask = sensor_features(sensorvals_pursuer_pursuer) + + # Collect speed features + pursuers_speed = np.array([pursuer.velocity for pursuer in self._pursuers]) + evaders_speed = np.array([evader.velocity for evader in self._evaders]) + poisons_speed = np.array([poison.velocity for poison in self._poisons]) + + evader_speed_features = self._extract_speed_features(evaders_speed, closest_evader_idx, evader_mask) + poison_speed_features = self._extract_speed_features(poisons_speed, closest_poison_idx, poison_mask) + pursuer_speed_features = self._extract_speed_features(pursuers_speed, closest_pursuer_idx, pursuer_mask) + + # Process collisions + # If object collided with required number of players, reset its position and velocity + # Effectively the same as removing it and adding it back + def reset_caught_objects(caught_objects, objects, speed): + if caught_objects.size: + for object_idx in caught_objects: + objects[object_idx].set_position( + self._generate_coord(objects[object_idx]._radius)) + # Generate both velocity components from range [-self.evader_speed, self.evader_speed) + objects[object_idx].set_velocity( + (self.np_random.rand(2,) - 0.5) * 2 * speed) + + reset_caught_objects(caught_evaders, self._evaders, self.evader_speed) + reset_caught_objects(caught_poisons, self._poisons, self.poison_speed) + + pursuer_evader_encounters, pursuer_evader_encounter_matrix = self._caught( + collisions_pursuer_evader, 1) + + # Update reward based on these collisions + 
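A small worked example of the cooperative-capture rule that _caught() implements and the code above relies on: with n_coop = 2, an evader counts as caught only when at least two pursuers collide with it in the same step. The collision matrix below is made up for illustration.

import numpy as np

# Rows are pursuers, columns are evaders; True means "colliding this step".
is_colliding = np.array([
    [True,  False, True ],
    [True,  False, False],
    [False, False, False],
])
n_coop = 2

n_collisions = is_colliding.sum(axis=0)                 # [2, 0, 1] collisions per evader
caught = np.where(n_collisions >= n_coop)[0]            # only evader 0 reaches the n_coop threshold
catchers = np.where(is_colliding[:, caught] >= 1)[0]    # pursuers 0 and 1 are credited with the catch
print(caught, catchers)                                 # [0] [0 1]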
rewards[pursuer_evader_catches] += self.food_reward + rewards[pursuer_poison_collisions] += self.poison_reward + rewards[pursuer_evader_encounter_matrix] += self.encounter_reward + + # Add features together + if self._speed_features: + sensorfeatures = np.c_[ + obstacle_distance_features, barrier_distance_features, + evader_distance_features, evader_speed_features, + poison_distance_features, poison_speed_features, + pursuer_distance_features, pursuer_speed_features + ] + else: + sensorfeatures = np.c_[ + obstacle_distance_features, + barrier_distance_features, + evader_distance_features, + poison_distance_features, + pursuer_distance_features + ] + + return sensorfeatures, collisions_pursuer_evader, collisions_pursuer_poison, rewards + + def observe_list(self, sensor_feature, is_colliding_evader, is_colliding_poison): + obslist = [] + for pursuer_idx in range(self.n_pursuers): + obslist.append( + np.concatenate([ + sensor_feature[pursuer_idx, ...].ravel(), [ + float((is_colliding_evader[pursuer_idx, :]).sum() > 0), float(( + is_colliding_poison[pursuer_idx, :]).sum() > 0) + ] + ])) + return obslist + + def step(self, action, agent_id, is_last): + action = np.asarray(action) + action = action.reshape(2) + speed = np.linalg.norm(action) + if speed > self.pursuer_max_accel: + # Limit added thrust to self.pursuer_max_accel + action = action / speed * self.pursuer_max_accel + + p = self._pursuers[agent_id] + p.set_velocity(p.velocity + action) + p.set_position(p.position + self.cycle_time * p.velocity) + + # Penalize large thrusts + accel_penalty = self.thrust_penalty * math.sqrt((action ** 2).sum()) + # Average thrust penalty among all agents, and assign each agent global portion designated by (1 - local_ratio) + self.control_rewards = (accel_penalty / self.n_pursuers) * np.ones(self.n_pursuers) * (1 - self.local_ratio) + # Assign the current agent the local portion designated by local_ratio + self.control_rewards[agent_id] += accel_penalty * self.local_ratio + + if is_last: + def move_objects(objects): + for obj in objects: + # Move objects + obj.set_position(obj.position + self.cycle_time * obj.velocity) + # Bounce object if it hits a wall + for i in range(len(obj.position)): + if obj.position[i] >= 1 or obj.position[i] <= 0: + obj.position[i] = np.clip(obj.position[i], 0, 1) + obj.velocity[i] = -1 * obj.velocity[i] + + move_objects(self._evaders) + move_objects(self._poisons) + + rewards = np.zeros(self.n_pursuers) + sensorfeatures, collisions_pursuer_evader, collisions_pursuer_poison, rewards = self.collision_handling_subroutine(rewards, is_last) + obs_list = self.observe_list( + sensorfeatures, collisions_pursuer_evader, collisions_pursuer_poison) + self.last_obs = obs_list + + local_reward = rewards + global_reward = local_reward.mean() + # Distribute local and global rewards according to local_ratio + self.last_rewards = local_reward * self.local_ratio + global_reward * (1 - self.local_ratio) + + self.frames += 1 + + return self.observe(agent_id) + + def observe(self, agent): + return np.array(self.last_obs[agent], dtype=np.float32) + + def draw_obstacles(self): + for obstacle in self.obstacle_coords: + assert obstacle.shape == (2,) + x, y = obstacle + center = (int(self.pixel_scale * x), + int(self.pixel_scale * y)) + color = (120, 176, 178) + pygame.draw.circle(self.screen, color, center, self.pixel_scale * self.obstacle_radius) + + def draw_background(self): + # -1 is building pixel flag + color = (255, 255, 255) + rect = pygame.Rect(0, 0, self.pixel_scale, self.pixel_scale) + 
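The reward mixing at the end of step() above blends each pursuer's own reward with the mean reward of all pursuers, weighted by local_ratio. A tiny numeric sketch with hypothetical values:

import numpy as np

local_reward = np.array([10.0, 0.0, -1.0])              # hypothetical per-pursuer rewards this step
local_ratio = 0.6                                       # 60% kept local, 40% shared globally

global_reward = local_reward.mean()                     # 3.0, identical for every pursuer
mixed = local_reward * local_ratio + global_reward * (1 - local_ratio)
print(mixed)                                            # [7.2 1.2 0.6]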
pygame.draw.rect(self.screen, color, rect) + + def draw_pursuers(self): + for pursuer in self._pursuers: + x, y = pursuer.position + center = (int(self.pixel_scale * x), + int(self.pixel_scale * y)) + for sensor in pursuer._sensors: + start = center + end = center + self.pixel_scale * (pursuer._sensor_range * sensor) + color = (0, 0, 0) + pygame.draw.line(self.screen, color, start, end, 1) + color = (101, 104, 249) + pygame.draw.circle(self.screen, color, center, self.pixel_scale * self.radius) + + def draw_evaders(self): + for evader in self._evaders: + x, y = evader.position + center = (int(self.pixel_scale * x), + int(self.pixel_scale * y)) + color = (238, 116, 106) + + pygame.draw.circle(self.screen, color, center, self.pixel_scale * self.radius * 2) + + def draw_poisons(self): + for poison in self._poisons: + x, y = poison.position + center = (int(self.pixel_scale * x), + int(self.pixel_scale * y)) + color = (145, 250, 116) + + pygame.draw.circle(self.screen, color, center, self.pixel_scale * self.radius * 3 / 4) + + def render(self, mode="human"): + if not self.renderOn: + if mode == "human": + pygame.display.init() + self.screen = pygame.display.set_mode( + (self.pixel_scale, self.pixel_scale)) + else: + self.screen = pygame.Surface((self.pixel_scale, self.pixel_scale)) + self.renderOn = True + + self.draw_background() + self.draw_obstacles() + self.draw_pursuers() + self.draw_evaders() + self.draw_poisons() + + observation = pygame.surfarray.pixels3d(self.screen) + new_observation = np.copy(observation) + del observation + if mode == "human": + pygame.display.flip() + return np.transpose(new_observation, axes=(1, 0, 2)) if mode == "rgb_array" else None diff --git a/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld_v3.py b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..9789dc61247fc47b926165ef48f1be11901bf3e4 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/sisl/waterworld_v3.py @@ -0,0 +1 @@ +from .waterworld.waterworld import env, parallel_env, raw_env diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__init__.py b/MLPY/Lib/site-packages/pettingzoo/test/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ce1af63242e7437e76c45c9cec24c0bc4ded3353 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/__init__.py @@ -0,0 +1,10 @@ +from .api_test import api_test +from .bombardment_test import bombardment_test +from .manual_control_test import manual_control_test +from .max_cycles_test import max_cycles_test +from .parallel_test import parallel_api_test +from .performance_benchmark import performance_benchmark +from .render_test import collect_render_results, render_test +from .save_obs_test import test_save_obs +from .seed_test import seed_test +from .state_test import state_test diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..861b1580e0d3bd2628662e91877dbdca7ffcada8 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/api_test.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/api_test.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..24714b62fe5fb07211977e9563d4c1c83617d57d Binary files /dev/null and 
b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/api_test.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/bombardment_test.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/bombardment_test.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0cd97fe6f283d4a852aea41ac6bac9200628eb4d Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/bombardment_test.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/manual_control_test.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/manual_control_test.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c964c55b35483bcfc00adafb662e7c113b79ae37 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/manual_control_test.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/max_cycles_test.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/max_cycles_test.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da78981c089dd018ec457013492acd3110bc08e8 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/max_cycles_test.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/parallel_test.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/parallel_test.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9a7195989909e4290e911d07ea1f286e3b79037 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/parallel_test.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/performance_benchmark.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/performance_benchmark.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7cb604b0603ab82c61add011c75aa11b6d87a050 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/performance_benchmark.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/render_test.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/render_test.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e5de4d52a422460f4853b684532dd1042bcec094 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/render_test.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/save_obs_test.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/save_obs_test.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..108ca4a6f0fca917177569f31dee22b1cc423e33 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/save_obs_test.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/seed_test.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/seed_test.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..37422f24bec9d2373905be70afb30ac194bdc269 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/seed_test.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/state_test.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/state_test.cpython-39.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..74579af2090a1414d10427f9f518f63e847e529f Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/__pycache__/state_test.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/api_test.py b/MLPY/Lib/site-packages/pettingzoo/test/api_test.py new file mode 100644 index 0000000000000000000000000000000000000000..9a16ff24d93330df3b0f27b0de8c728ec5b326e4 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/api_test.py @@ -0,0 +1,296 @@ +import random +import re +import warnings +from collections import defaultdict + +import gym +import numpy as np + +import pettingzoo +from pettingzoo.utils.conversions import aec_to_parallel_wrapper, parallel_to_aec_wrapper +from pettingzoo.utils.wrappers import BaseWrapper + +missing_attr_warning = '''This environment does not have {name} defined. +This is not a required part 'of the API as environments with procedurally +generated agents cannot always have this property defined. However, this is +very uncommon and these features should be included whenever possible as all +standard learning code requires these properties. Also not that if you do not +have {name} it should also not be possible for you to expose the possible_agents +list and observation_spaces, action_spaces dictionaries.''' + + +def test_observation(observation, observation_0): + if isinstance(observation, np.ndarray): + if np.isinf(observation).any(): + warnings.warn("Observation contains infinity (np.inf) or negative infinity (-np.inf)") + if np.isnan(observation).any(): + warnings.warn("Observation contains NaNs") + if len(observation.shape) > 3: + warnings.warn("Observation has more than 3 dimensions") + if observation.shape == (0,): + assert False, "Observation can not be an empty array" + if observation.shape == (1,): + warnings.warn("Observation is a single number") + if not isinstance(observation, observation_0.__class__): + warnings.warn("Observations between agents are different classes") + if (observation.shape != observation_0.shape) and (len(observation.shape) == len(observation_0.shape)): + warnings.warn("Observations are different shapes") + if len(observation.shape) != len(observation_0.shape): + warnings.warn("Observations have different number of dimensions") + if not np.can_cast(observation.dtype, np.dtype("float64")): + warnings.warn("Observation numpy array is not a numeric dtype") + if np.array_equal(observation, np.zeros(observation.shape)): + warnings.warn("Observation numpy array is all zeros.") + if not np.all(observation >= 0) and ((len(observation.shape) == 2) or (len(observation.shape) == 3 and observation.shape[2] == 1) or (len(observation.shape) == 3 and observation.shape[2] == 3)): + warnings.warn("The observation contains negative numbers and is in the shape of a graphical observation. This might be a bad thing.") + else: + warnings.warn("Observation is not NumPy array") + + +def test_observation_action_spaces(env, agent_0): + for agent in env.agents: + assert isinstance(env.observation_space(agent), gym.spaces.Space), "Observation space for each agent must extend gym.spaces.Space" + assert isinstance(env.action_space(agent), gym.spaces.Space), "Agent space for each agent must extend gym.spaces.Space" + assert env.observation_space(agent) is env.observation_space(agent), "observation_space should return the exact same space object (not a copy) for an agent. 
Consider decorating your observation_space(self, agent) method with @functools.lru_cache(maxsize=None)" + assert env.action_space(agent) is env.action_space(agent), "action_space should return the exact same space object (not a copy) for an agent (ensures that action space seeding works as expected). Consider decorating your action_space(self, agent) method with @functools.lru_cache(maxsize=None)" + if not (isinstance(env.observation_space(agent), gym.spaces.Box) or isinstance(env.observation_space(agent), gym.spaces.Discrete)): + warnings.warn("Observation space for each agent probably should be gym.spaces.box or gym.spaces.discrete") + if not (isinstance(env.action_space(agent), gym.spaces.Box) or isinstance(env.action_space(agent), gym.spaces.Discrete)): + warnings.warn("Action space for each agent probably should be gym.spaces.box or gym.spaces.discrete") + if (not isinstance(agent, str)) and agent != 'env': + warnings.warn("Agent's are recommended to have numbered string names, like player_0") + if not isinstance(agent, str) or not re.match("[a-z]+_[0-9]+", agent): # regex for ending in _ + warnings.warn("We recommend agents to be named in the format _, like \"player_0\"") + if not isinstance(env.observation_space(agent), env.observation_space(agent_0).__class__): + warnings.warn("The class of observation spaces is different between two agents") + if not isinstance(env.action_space(agent), env.action_space(agent).__class__): + warnings.warn("The class of action spaces is different between two agents") + if env.observation_space(agent) != env.observation_space(agent_0): + warnings.warn("Agents have different observation space sizes") + if env.action_space(agent) != env.action_space(agent): + warnings.warn("Agents have different action space sizes") + + if isinstance(env.action_space(agent), gym.spaces.Box): + if np.any(np.equal(env.action_space(agent).low, -np.inf)): + warnings.warn("Agent's minimum action space value is -infinity. This is probably too low.") + if np.any(np.equal(env.action_space(agent).high, np.inf)): + warnings.warn("Agent's maxmimum action space value is infinity. This is probably too high") + if np.any(np.equal(env.action_space(agent).low, env.action_space(agent).high)): + warnings.warn("Agent's maximum and minimum action space values are equal") + if np.any(np.greater(env.action_space(agent).low, env.action_space(agent).high)): + assert False, "Agent's minimum action space value is greater than it's maximum" + if env.action_space(agent).low.shape != env.action_space(agent).shape: + assert False, "Agent's action_space.low and action_space have different shapes" + if env.action_space(agent).high.shape != env.action_space(agent).shape: + assert False, "Agent's action_space.high and action_space have different shapes" + + if isinstance(env.observation_space(agent), gym.spaces.Box): + if np.any(np.equal(env.observation_space(agent).low, -np.inf)): + warnings.warn("Agent's minimum observation space value is -infinity. This is probably too low.") + if np.any(np.equal(env.observation_space(agent).high, np.inf)): + warnings.warn("Agent's maxmimum observation space value is infinity. 
This is probably too high") + if np.any(np.equal(env.observation_space(agent).low, env.observation_space(agent).high)): + warnings.warn("Agent's maximum and minimum observation space values are equal") + if np.any(np.greater(env.observation_space(agent).low, env.observation_space(agent).high)): + assert False, "Agent's minimum observation space value is greater than it's maximum" + if env.observation_space(agent).low.shape != env.observation_space(agent).shape: + assert False, "Agent's observation_space.low and observation_space have different shapes" + if env.observation_space(agent).high.shape != env.observation_space(agent).shape: + assert False, "Agent's observation_space.high and observation_space have different shapes" + + +def test_reward(reward): + if not (isinstance(reward, int) or isinstance(reward, float)) and not isinstance(np.dtype(reward), np.dtype) and not isinstance(reward, np.ndarray): + warnings.warn("Reward should be int, float, NumPy dtype or NumPy array") + if isinstance(reward, np.ndarray): + if isinstance(reward, np.ndarray) and not reward.shape == (1,): + assert False, "Rewards can only be one number" + if np.isinf(reward): + assert False, "Reward must be finite" + if np.isnan(reward): + assert False, "Rewards cannot be NaN" + if not np.can_cast(reward.dtype, np.dtype("float64")): + assert False, "Reward NumPy array is not a numeric dtype" + + +def test_rewards_dones(env, agent_0): + for agent in env.agents: + assert isinstance(env.dones[agent], bool), "Agent's values in dones must be True or False" + float(env.rewards[agent]) # "Rewards for each agent must be convertible to float + test_reward(env.rewards[agent]) + + +def play_test(env, observation_0, num_cycles): + ''' + plays through environment and does dynamic checks to make + sure the state returned by the environment is + consistent. In particular it checks: + + * Whether the reward returned by last is the accumulated reward + * Whether the agents list shrinks when agents are done + * Whether the keys of the rewards, dones, infos are equal to the agents list + * tests that the observation is in bounds. + ''' + env.reset() + + done = {agent: False for agent in env.agents} + live_agents = set(env.agents[:]) + has_finished = set() + generated_agents = set() + accumulated_rewards = defaultdict(int) + for agent in env.agent_iter(env.num_agents * num_cycles): + generated_agents.add(agent) + assert agent not in has_finished, "agents cannot resurect! Generate a new agent with a new name." 
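play_test() above drives the environment through the standard AEC pattern of agent_iter(), last(), and step(). A condensed sketch of that same pattern, shown against the waterworld_v3 module added earlier in this diff:

from pettingzoo.sisl import waterworld_v3

env = waterworld_v3.env()
env.reset()
for agent in env.agent_iter(max_iter=1000):
    obs, reward, done, info = env.last()                # reward accumulated since this agent's last turn
    action = None if done else env.action_space(agent).sample()
    env.step(action)                                    # agents that are done must be stepped with a None action
env.close()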
+ assert isinstance(env.infos[agent], dict), "an environment agent's info must be a dictionary" + prev_observe, reward, done, info = env.last() + if done: + action = None + elif isinstance(prev_observe, dict) and 'action_mask' in prev_observe: + action = random.choice(np.flatnonzero(prev_observe['action_mask'])) + else: + action = env.action_space(agent).sample() + + if agent not in live_agents: + live_agents.add(agent) + + assert live_agents.issubset(set(env.agents)), "environment must delete agents as the game continues" + + if done: + live_agents.remove(agent) + has_finished.add(agent) + + assert accumulated_rewards[agent] == reward, "reward returned by last is not the accumulated rewards in its rewards dict" + accumulated_rewards[agent] = 0 + + env.step(action) + + for a, rew in env.rewards.items(): + accumulated_rewards[a] += rew + + assert env.num_agents == len(env.agents), "env.num_agents is not equal to len(env.agents)" + assert set(env.rewards.keys()) == (set(env.agents)), "agents should not be given a reward if they were done last turn" + assert set(env.dones.keys()) == (set(env.agents)), "agents should not be given a done if they were done last turn" + assert set(env.infos.keys()) == (set(env.agents)), "agents should not be given an info if they were done last turn" + if hasattr(env, 'possible_agents'): + assert set(env.agents).issubset(set(env.possible_agents)), "possible agents should always include all agents, if it exists" + + if not env.agents: + break + + if isinstance(env.observation_space(agent), gym.spaces.Box): + assert env.observation_space(agent).dtype == prev_observe.dtype + assert env.observation_space(agent).contains(prev_observe), \ + ("Out of bounds observation: " + str(prev_observe)) + + assert env.observation_space(agent).contains(prev_observe), "Agent's observation is outside of it's observation space" + test_observation(prev_observe, observation_0) + if not isinstance(env.infos[env.agent_selection], dict): + warnings.warn("The info of each agent should be a dict, use {} if you aren't using info") + + if not env.agents: + assert has_finished == generated_agents, "not all agents finished, some were skipped over" + + env.reset() + for agent in env.agent_iter(env.num_agents * 2): + obs, reward, done, info = env.last() + if done: + action = None + elif isinstance(obs, dict) and 'action_mask' in obs: + action = random.choice(np.flatnonzero(obs['action_mask'])) + else: + action = env.action_space(agent).sample() + assert isinstance(done, bool), "Done from last is not True or False" + assert done == env.dones[agent], "Done from last() and dones[agent] do not match" + assert info == env.infos[agent], "Info from last() and infos[agent] do not match" + float(env.rewards[agent]) # "Rewards for each agent must be convertible to float + test_reward(reward) + observation = env.step(action) + assert observation is None, "step() must not return anything" + + +def test_action_flexibility(env): + env.reset() + agent = env.agent_selection + action_space = env.action_space(agent) + if isinstance(action_space, gym.spaces.Discrete): + obs, reward, done, info = env.last() + if done: + action = None + elif isinstance(obs, dict) and 'action_mask' in obs: + action = random.choice(np.flatnonzero(obs['action_mask'])) + else: + action = 0 + env.step(action) + env.reset() + env.step(np.int32(action)) + elif isinstance(action_space, gym.spaces.Box): + env.step(np.zeros_like(action_space.low)) + env.reset() + env.step(np.zeros_like(action_space.low)) + + +def api_test(env, num_cycles=10, 
verbose_progress=False): + def progress_report(msg): + if verbose_progress: + print(msg) + + print("Starting API test") + if not hasattr(env, 'possible_agents'): + warnings.warn(missing_attr_warning.format(name='possible_agents')) + + env.reset() + + assert isinstance(env, pettingzoo.AECEnv), "Env must be an instance of pettingzoo.AECEnv" + + env.reset() + assert not any(env.dones.values()), "dones must all be False after reset" + + assert isinstance(env.num_agents, int), "num_agents must be an integer" + assert env.num_agents != 0, "An environment should have a nonzero number of agents" + assert env.num_agents > 0, "An environment should have a positive number of agents" + + env.reset() + observation_0, _, _, _ = env.last() + test_observation(observation_0, observation_0) + + non_observe, _, _, _ = env.last(observe=False) + assert non_observe is None, "last must return a None when observe=False" + + progress_report("Finished test_observation") + + agent_0 = env.agent_selection + + test_observation_action_spaces(env, agent_0) + + progress_report("Finished test_observation_action_spaces") + + play_test(env, observation_0, num_cycles) + + progress_report("Finished play test") + + assert isinstance(env.rewards, dict), "rewards must be a dict" + assert isinstance(env.dones, dict), "dones must be a dict" + assert isinstance(env.infos, dict), "infos must be a dict" + + assert len(env.rewards) == len(env.dones) == len(env.infos) == len(env.agents), "rewards, dones, infos and agents must have the same length" + + test_rewards_dones(env, agent_0) + + test_action_flexibility(env) + + progress_report("Finished test_rewards_dones") + + # checks unwrapped attribute + assert not isinstance(env.unwrapped, aec_to_parallel_wrapper) + assert not isinstance(env.unwrapped, parallel_to_aec_wrapper) + assert not isinstance(env.unwrapped, BaseWrapper) + + # Test that if env has overridden render(), they must have overridden close() as well + base_render = pettingzoo.utils.env.AECEnv.render + base_close = pettingzoo.utils.env.AECEnv.close + if base_render != env.__class__.render: + assert (base_close != env.__class__.close), "If render method defined, then close method required" + else: + warnings.warn("Environment has not defined a render() method") + + print("Passed API test") diff --git a/MLPY/Lib/site-packages/pettingzoo/test/bombardment_test.py b/MLPY/Lib/site-packages/pettingzoo/test/bombardment_test.py new file mode 100644 index 0000000000000000000000000000000000000000..bdbdae9f07af3d20483c6c5bf2d43d2542c81f67 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/bombardment_test.py @@ -0,0 +1,32 @@ +import random +from copy import copy + +import numpy as np + +from .api_test import test_observation + + +def bombardment_test(env, cycles=10000): + print("Starting bombardment test") + + env.reset() + prev_observe, _, _, _ = env.last() + observation_0 = copy(prev_observe) + for i in range(cycles): + if i == cycles / 2: + print("\t50% through bombardment test") + for agent in env.agent_iter(env.num_agents): # step through every agent once with observe=True + obs, reward, done, info = env.last() + if done: + action = None + elif isinstance(obs, dict) and 'action_mask' in obs: + action = random.choice(np.flatnonzero(obs['action_mask'])) + else: + action = env.action_space(agent).sample() + next_observe = env.step(action) + assert env.observation_space(agent).contains(prev_observe), "Agent's observation is outside of its observation space" + test_observation(prev_observe, observation_0) + prev_observe = 
next_observe + env.reset() + prev_observe, _, _, _ = env.last() + print("Passed bombardment test") diff --git a/MLPY/Lib/site-packages/pettingzoo/test/example_envs/__init__.py b/MLPY/Lib/site-packages/pettingzoo/test/example_envs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/MLPY/Lib/site-packages/pettingzoo/test/example_envs/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/example_envs/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e1092d2441c3f36d81dd172e4b5dc478c4cbc4fc Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/example_envs/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/example_envs/__pycache__/generated_agents_env_v0.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/example_envs/__pycache__/generated_agents_env_v0.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2f8f40295a7bc4a9946e686ea5b9f199a8cfec3a Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/example_envs/__pycache__/generated_agents_env_v0.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/example_envs/__pycache__/generated_agents_parallel_v0.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/test/example_envs/__pycache__/generated_agents_parallel_v0.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3f899b8f92ea84c4138614101b105a872d850ac1 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/test/example_envs/__pycache__/generated_agents_parallel_v0.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/test/example_envs/generated_agents_env_v0.py b/MLPY/Lib/site-packages/pettingzoo/test/example_envs/generated_agents_env_v0.py new file mode 100644 index 0000000000000000000000000000000000000000..47a26ce57fa5e9bd78e8a4ff943dd30f1cb2012c --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/example_envs/generated_agents_env_v0.py @@ -0,0 +1,123 @@ +import copy +import itertools +import warnings + +import gym +import numpy as np + +from pettingzoo import AECEnv +from pettingzoo.utils import wrappers +from pettingzoo.utils.agent_selector import agent_selector + + +def env(): + env = raw_env() + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +def get_type(agent): + return agent[: agent.rfind("_")] + + +class raw_env(AECEnv): + + metadata = {"render.modes": ["human"], "name": "generated_agents_env_v0"} + + def __init__(self, max_cycles=100): + super().__init__() + self._obs_spaces = {} + self._act_spaces = {} + self.types = [] + self._agent_counters = {} + self.max_cycles = max_cycles + self.seed() + for i in range(3): + self.add_type() + + def observation_space(self, agent): + return self._obs_spaces[get_type(agent)] + + def action_space(self, agent): + return self._act_spaces[get_type(agent)] + + def observe(self, agent): + return self.observation_space(agent).sample() + + def add_type(self): + type_id = len(self.types) + num_actions = self.np_random.randint(3, 10) + obs_size = self.np_random.randint(10, 50) + obs_space = gym.spaces.Box(low=0, high=1, shape=(obs_size,)) + act_space = gym.spaces.Discrete(num_actions) + new_type = f"type{type_id}" + self.types.append(new_type) + self._obs_spaces[new_type] = obs_space + self._act_spaces[new_type] = act_space + 
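For reference, a hedged sketch of how the test entry points collected in pettingzoo.test above are typically invoked, here against the waterworld environment added earlier in this diff:

from pettingzoo.test import api_test, parallel_api_test
from pettingzoo.sisl import waterworld_v3

api_test(waterworld_v3.env(), num_cycles=10, verbose_progress=True)    # AEC checks defined above
parallel_api_test(waterworld_v3.parallel_env(), num_cycles=10)         # parallel API checks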
self._agent_counters[new_type] = 0 + return new_type + + def add_agent(self, type): + agent_id = self._agent_counters[type] + self._agent_counters[type] += 1 + agent = f"{type}_{agent_id}" + self.agents.append(agent) + self.dones[agent] = False + self.rewards[agent] = 0 + self._cumulative_rewards[agent] = 0 + self.infos[agent] = {} + return agent + + def reset(self): + self.agents = [] + self.rewards = {} + self._cumulative_rewards = {} + self.dones = {} + self.infos = {} + self.num_steps = 0 + for i in range(5): + self.add_agent(self.np_random.choice(self.types)) + + self._agent_selector = agent_selector(self.agents) + self.agent_selection = self._agent_selector.reset() + + def seed(self, seed=None): + self.np_random, _ = gym.utils.seeding.np_random(seed) + + def step(self, action): + if self.dones[self.agent_selection]: + return self._was_done_step(action) + + self._clear_rewards() + self._cumulative_rewards[self.agent_selection] = 0 + + if self._agent_selector.is_last(): + for i in range(5): + if self.np_random.random() < 0.1: + if self.np_random.random() < 0.1: + type = self.add_type() + else: + type = self.np_random.choice(self.types) + + agent = self.add_agent(type) + if len(self.agents) >= 20: + self.dones[self.np_random.choice(self.agents)] = True + + if self._agent_selector.is_last(): + self.num_steps += 1 + + if self.num_steps > self.max_cycles: + for agent in self.agents: + self.dones[agent] = True + + self.rewards[self.np_random.choice(self.agents)] = 1 + + self._accumulate_rewards() + self._dones_step_first() + + def render(self, mode="human"): + print(self.agents) + + def close(self): + pass diff --git a/MLPY/Lib/site-packages/pettingzoo/test/example_envs/generated_agents_parallel_v0.py b/MLPY/Lib/site-packages/pettingzoo/test/example_envs/generated_agents_parallel_v0.py new file mode 100644 index 0000000000000000000000000000000000000000..1f09675bb7f8caa3cda9b0d407122a1b5b565042 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/example_envs/generated_agents_parallel_v0.py @@ -0,0 +1,114 @@ +import copy +import itertools +import warnings + +import gym +import numpy as np + +from pettingzoo import AECEnv, ParallelEnv +from pettingzoo.utils import conversions, wrappers +from pettingzoo.utils.agent_selector import agent_selector + + +def env(**kwargs): + env = raw_env(**kwargs) + env = wrappers.AssertOutOfBoundsWrapper(env) + env = wrappers.OrderEnforcingWrapper(env) + return env + + +def raw_env(**kwargs): + return conversions.parallel_to_aec(parallel_env(**kwargs)) + + +def get_type(agent): + return agent[: agent.rfind("_")] + + +class parallel_env(ParallelEnv): + + metadata = {"render.modes": ["human"], "name": "generated_agents_parallel_v0"} + + def __init__(self, max_cycles=100): + super().__init__() + self._obs_spaces = {} + self._act_spaces = {} + self.types = [] + self._agent_counters = {} + self.max_cycles = max_cycles + self.seed() + for i in range(3): + self.add_type() + + def observation_space(self, agent): + return self._obs_spaces[get_type(agent)] + + def action_space(self, agent): + return self._act_spaces[get_type(agent)] + + def observe(self, agent): + return self.observation_space(agent).sample() + + def add_type(self): + type_id = len(self.types) + num_actions = self.np_random.randint(3, 10) + obs_size = self.np_random.randint(10, 50) + obs_space = gym.spaces.Box(low=0, high=1, shape=(obs_size,)) + act_space = gym.spaces.Discrete(num_actions) + new_type = f"type{type_id}" + self.types.append(new_type) + self._obs_spaces[new_type] = obs_space + 
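The generated_agents example environments above exist to exercise these tests against agents that are created and retired mid-episode. A brief sketch of driving the AEC variant directly, assuming the module path used in this diff:

from pettingzoo.test.example_envs import generated_agents_env_v0

env = generated_agents_env_v0.env()
env.reset()
seen = set(env.agents)
for agent in env.agent_iter(max_iter=300):
    _, _, done, _ = env.last()
    env.step(None if done else env.action_space(agent).sample())
    seen.update(env.agents)                             # new agents (and new agent types) can appear while stepping
print(len(seen), "distinct agents appeared during the episode")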
self._act_spaces[new_type] = act_space + self._agent_counters[new_type] = 0 + return new_type + + def add_agent(self, type): + agent_id = self._agent_counters[type] + self._agent_counters[type] += 1 + agent_name = f"{type}_{agent_id}" + self.agents.append(agent_name) + return agent_name + + def reset(self): + self.all_dones = {} + self.agents = [] + self.num_steps = 0 + for i in range(5): + self.add_agent(self.np_random.choice(self.types)) + return {agent: self.observe(agent) for agent in self.agents} + + def seed(self, seed=None): + self.np_random, _ = gym.utils.seeding.np_random(seed) + + def step(self, actions): + done = self.num_steps >= self.max_cycles + for agent in self.agents: + assert agent in actions + all_dones = {agent: done for agent in self.agents} + if not done: + for i in range(6): + if self.np_random.random() < 0.1 and len(self.agents) >= 10: + all_dones[self.np_random.choice(self.agents)] = True + + for i in range(3): + if self.np_random.random() < 0.1: + if self.np_random.random() < 0.1: + type = self.add_type() + else: + type = self.np_random.choice(self.types) + + new_agent = self.add_agent(type) + all_dones[new_agent] = False + + all_infos = {agent: {} for agent in self.agents} + all_rewards = {agent: 0 for agent in self.agents} + all_rewards[self.np_random.choice(self.agents)] = 1 + all_observes = {agent: self.observe(agent) for agent in self.agents} + self.agents = [agent for agent in self.agents if not all_dones[agent]] + return all_observes, all_rewards, all_dones, all_infos + + def render(self, mode="human"): + print(self.agents) + + def close(self): + pass diff --git a/MLPY/Lib/site-packages/pettingzoo/test/manual_control_test.py b/MLPY/Lib/site-packages/pettingzoo/test/manual_control_test.py new file mode 100644 index 0000000000000000000000000000000000000000..a85dfe09e31f10d24d1a3ae62f6c2c3a3fc5c524 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/manual_control_test.py @@ -0,0 +1,31 @@ +import random +import threading +import time + + +def inp_handler(name): + from pynput.keyboard import Controller as KeyboardController + from pynput.keyboard import Key + + keyboard = KeyboardController() + time.sleep(0.1) + choices = ['w', 'a', 's', 'd', 'j', 'k', Key.left, Key.right, Key.up, Key.down] + NUM_TESTS = 50 + for x in range(NUM_TESTS): + i = random.choice(choices) if x != NUM_TESTS - 1 else Key.esc + keyboard.press(i) + time.sleep(0.1) + keyboard.release(i) + + +def manual_control_test(manual_control): + manual_in_thread = threading.Thread(target=inp_handler, args=(1,)) + + manual_in_thread.start() + + try: + manual_control() + except Exception: + raise Exception("manual_control() has crashed. 
Please fix it.") + + manual_in_thread.join() diff --git a/MLPY/Lib/site-packages/pettingzoo/test/max_cycles_test.py b/MLPY/Lib/site-packages/pettingzoo/test/max_cycles_test.py new file mode 100644 index 0000000000000000000000000000000000000000..18865f902dc6b7cb2f38ac721fff0266a60bf991 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/max_cycles_test.py @@ -0,0 +1,35 @@ +import numpy as np + + +def max_cycles_test(mod): + max_cycles = 4 + parallel_env = mod.parallel_env(max_cycles=max_cycles) + + observations = parallel_env.reset() + dones = {agent: False for agent in parallel_env.agents} + test_cycles = max_cycles + 10 # allows environment to do more than max_cycles if it so wishes + for step in range(test_cycles): + actions = {agent: parallel_env.action_space(agent).sample() for agent in parallel_env.agents if not dones[agent]} + observations, rewards, dones, infos = parallel_env.step(actions) + if all(dones.values()): + break + + pstep = step + 1 + + env = mod.env(max_cycles=max_cycles) + env.reset() + agent_counts = np.zeros(len(env.possible_agents)) + for a in env.agent_iter(): + # counts agent index + aidx = env.possible_agents.index(a) + agent_counts[aidx] += 1 + + action = env.action_space(a).sample() if not env.dones[a] else None + env.step(action) + + assert max_cycles == pstep + # does not check the minimum value because some agents might be killed before + # all the steps are complete. However, most agents should still be alive + # given a short number of cycles + assert max_cycles == np.max(agent_counts) - 1 + assert max_cycles == np.median(agent_counts) - 1 diff --git a/MLPY/Lib/site-packages/pettingzoo/test/parallel_test.py b/MLPY/Lib/site-packages/pettingzoo/test/parallel_test.py new file mode 100644 index 0000000000000000000000000000000000000000..a0afc3630320c3641213e1c8949dbd1d5bfa5d88 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/parallel_test.py @@ -0,0 +1,63 @@ +import warnings + +from pettingzoo.utils.conversions import aec_to_parallel_wrapper, parallel_to_aec_wrapper +from pettingzoo.utils.wrappers import BaseWrapper + +from .api_test import missing_attr_warning + + +def parallel_api_test(par_env, num_cycles=10): + if not hasattr(par_env, 'possible_agents'): + warnings.warn(missing_attr_warning.format(name='possible_agents')) + + assert not isinstance(par_env.unwrapped, aec_to_parallel_wrapper) + assert not isinstance(par_env.unwrapped, parallel_to_aec_wrapper) + assert not isinstance(par_env.unwrapped, BaseWrapper) + MAX_RESETS = 2 + for n_resets in range(MAX_RESETS): + obs = par_env.reset() + assert isinstance(obs, dict) + assert set(obs.keys()) == (set(par_env.agents)) + done = {agent: False for agent in par_env.agents} + live_agents = set(par_env.agents[:]) + has_finished = set() + for i in range(num_cycles): + actions = {agent: par_env.action_space(agent).sample() for agent in par_env.agents if agent in done and not done[agent]} + obs, rew, done, info = par_env.step(actions) + for agent in par_env.agents: + assert agent not in has_finished, "agent cannot be revived once done" + + if agent not in live_agents: + live_agents.add(agent) + + assert isinstance(obs, dict) + assert isinstance(rew, dict) + assert isinstance(done, dict) + assert isinstance(info, dict) + + agents_set = set(live_agents) + keys = 'observation reward done info'.split() + vals = [obs, rew, done, info] + for k, v in zip(keys, vals): + if set(v.keys()) == agents_set: + continue + warnings.warn('Agent was given: {} but was done last turn'.format(k)) + + if hasattr(par_env, 
'possible_agents'): + assert set(par_env.agents).issubset(set(par_env.possible_agents)), "possible_agents defined but does not contain all agents" + + has_finished |= {agent for agent, d in done.items() if d} + if not par_env.agents and has_finished != set(par_env.possible_agents): + warnings.warn('No agents present but not all possible_agents are done') + elif not par_env.agents: + warnings.warn('No agents present') + + for agent in par_env.agents: + assert par_env.observation_space(agent) is par_env.observation_space(agent), "observation_space should return the exact same space object (not a copy) for an agent. Consider decorating your observation_space(self, agent) method with @functools.lru_cache(maxsize=None)" + assert par_env.action_space(agent) is par_env.action_space(agent), "action_space should return the exact same space object (not a copy) for an agent (ensures that action space seeding works as expected). Consider decorating your action_space(self, agent) method with @functools.lru_cache(maxsize=None)" + + for agent, d in done.items(): + if d: + live_agents.remove(agent) + + assert set(par_env.agents) == live_agents diff --git a/MLPY/Lib/site-packages/pettingzoo/test/performance_benchmark.py b/MLPY/Lib/site-packages/pettingzoo/test/performance_benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..ce6607a38f2aec128923defef438ab1a9a9b2eb3 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/performance_benchmark.py @@ -0,0 +1,41 @@ +import random +import time + +import numpy as np + + +def performance_benchmark(env): + print("Starting performance benchmark") + cycles = 0 + turn = 0 + env.reset() + start = time.time() + end = 0 + + while True: + cycles += 1 + for agent in env.agent_iter(env.num_agents): # step through every agent once with observe=True + obs, reward, done, info = env.last() + if done: + action = None + elif isinstance(obs, dict) and 'action_mask' in obs: + action = random.choice(np.flatnonzero(obs['action_mask'])) + else: + action = env.action_space(agent).sample() + env.step(action) + turn += 1 + + if all(env.dones.values()): + env.reset() + + if time.time() - start > 5: + end = time.time() + break + + length = end - start + + turns_per_time = turn / length + cycles_per_time = cycles / length + print(str(turns_per_time) + " turns per second") + print(str(cycles_per_time) + " cycles per second") + print("Finished performance benchmark") diff --git a/MLPY/Lib/site-packages/pettingzoo/test/render_test.py b/MLPY/Lib/site-packages/pettingzoo/test/render_test.py new file mode 100644 index 0000000000000000000000000000000000000000..3ce271c9409be5e58c525e80b3676be2bd23999b --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/render_test.py @@ -0,0 +1,43 @@ +import random + +import numpy as np + + +def collect_render_results(env, mode): + results = [] + + env.reset() + for i in range(5): + if i > 0: + for agent in env.agent_iter(env.num_agents // 2 + 1): + obs, reward, done, info = env.last() + if done: + action = None + elif isinstance(obs, dict) and 'action_mask' in obs: + action = random.choice(np.flatnonzero(obs['action_mask'])) + else: + action = env.action_space(agent).sample() + env.step(action) + render_result = env.render(mode=mode) + results.append(render_result) + + return results + + +def render_test(env_fn, custom_tests={}): + env = env_fn() + render_modes = env.metadata.get('render.modes')[:] + assert render_modes is not None, "Environment's that support rendering must define render modes in metadata" + for mode in 
render_modes: + render_results = collect_render_results(env, mode) + for res in render_results: + if mode in custom_tests.keys(): + assert custom_tests[mode](res) + if mode == 'rgb_array': + assert isinstance(res, np.ndarray) and len(res.shape) == 3 and res.shape[2] == 3 and res.dtype == np.uint8, f"rgb_array mode must return a valid image array, is {res}" + if mode == 'ansi': + assert isinstance(res, str) # and len(res.shape) == 3 and res.shape[2] == 3 and res.dtype == np.uint8, "rgb_array mode must have shit in it" + if mode == "human": + assert res is None + env.close() + env = env_fn() diff --git a/MLPY/Lib/site-packages/pettingzoo/test/save_obs_test.py b/MLPY/Lib/site-packages/pettingzoo/test/save_obs_test.py new file mode 100644 index 0000000000000000000000000000000000000000..7f81657023725a4c70e4efffb441aa279f311ffe --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/save_obs_test.py @@ -0,0 +1,24 @@ +import gym +import numpy as np + +from pettingzoo.utils import save_observation + + +def check_save_obs(env): + for agent in env.agents: + assert isinstance(env.observation_space(agent), gym.spaces.Box), "Observations must be Box to save observations as image" + assert np.all(np.equal(env.observation_space(agent).low, 0)) and np.all(np.equal(env.observation_space(agent).high, 255)), "Observations must be 0 to 255 to save as image" + assert len(env.observation_space(agent).shape) == 3 or len(env.observation_space(agent).shape) == 2, "Observations must be 2D or 3D to save as image" + if len(env.observation_space(agent).shape) == 3: + assert env.observation_space(agent).shape[2] == 1 or env.observation_space(agent).shape[2] == 3, "3D observations can only have 1 or 3 channels to save as an image" + + +def test_save_obs(env): + env.reset() + try: + check_save_obs(env) + for agent in env.agents: + save_observation(env=env, agent=agent, save_dir="saved_observations") + + except AssertionError as ae: + print("did not save the observations: ", ae) diff --git a/MLPY/Lib/site-packages/pettingzoo/test/seed_test.py b/MLPY/Lib/site-packages/pettingzoo/test/seed_test.py new file mode 100644 index 0000000000000000000000000000000000000000..c90a4941719566ea3eda0d5836e47092b80d0f1f --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/seed_test.py @@ -0,0 +1,115 @@ +import hashlib +import pickle +import random +import warnings + +import numpy as np + +from pettingzoo.utils import parallel_to_aec + + +def hash(val): + val = pickle.dumps(val) + hasher = hashlib.md5() + hasher.update(val) + return hasher.hexdigest() + + +def calc_hash(new_env, rand_issue, max_env_iters): + cur_hashes = [] + sampler = random.Random(42) + for i in range(3): + new_env.reset() + for j in range(rand_issue + 1): + random.randint(0, 1000) + np.random.normal(size=100) + for agent in new_env.agent_iter(max_env_iters): + obs, rew, done, info = new_env.last() + if done: + action = None + elif isinstance(obs, dict) and 'action_mask' in obs: + action = sampler.choice(np.flatnonzero(obs['action_mask'])) + else: + action = new_env.action_space(agent).sample() + new_env.step(action) + cur_hashes.append(agent) + cur_hashes.append(hash_obsevation(obs)) + cur_hashes.append(float(rew)) + + return hash(tuple(cur_hashes)) + + +def seed_action_spaces(env): + if hasattr(env, 'possible_agents'): + for i, agent in enumerate(env.possible_agents): + env.action_space(agent).seed(42 + i) + + +def check_environment_deterministic(env1, env2, num_cycles): + ''' + env1 and env2 should be seeded environments + + returns a bool: true if env1 and 
env2 execute the same way + ''' + + # seeds action space so that actions are deterministic + seed_action_spaces(env1) + seed_action_spaces(env2) + + num_agents = max(1, len(getattr(env1, 'possible_agents', []))) + + # checks deterministic behavior if seed is set + hashes = [] + num_seeds = 2 + max_env_iters = num_cycles * num_agents + envs = [env1, env2] + for x in range(num_seeds): + hashes.append(calc_hash(envs[x], x, max_env_iters)) + + return all(hashes[0] == h for h in hashes) + + +def hash_obsevation(obs): + try: + val = hash(obs.tobytes()) + return val + except AttributeError: + try: + return hash(obs) + except TypeError: + warnings.warn("Observation not an int or an Numpy array") + return 0 + + +def test_environment_reset_deterministic(env1, num_cycles): + seed_action_spaces(env1) + env1.seed(42) + env1.reset() + hash1 = calc_hash(env1, 1, num_cycles) + seed_action_spaces(env1) + env1.seed(42) + env1.reset() + hash2 = calc_hash(env1, 2, num_cycles) + assert hash1 == hash2, "environments kept state after seed(42) and reset()" + + +def seed_test(env_constructor, num_cycles=10, test_kept_state=True): + env1 = env_constructor() + if test_kept_state: + test_environment_reset_deterministic(env1, num_cycles) + env2 = env_constructor() + base_seed = 42 + env1.seed(base_seed) + env2.seed(base_seed) + + assert check_environment_deterministic(env1, env2, num_cycles), \ + ("The environment gives different results on multiple runs when initialized with the same seed. This is usually a sign that you are using np.random or random modules directly, which uses a global random state.") + + +def parallel_seed_test(parallel_env_fn, num_cycles=10, test_kept_state=True): + def aec_env_fn(): + parallel_env = parallel_env_fn() + env = parallel_to_aec(parallel_env) + return env + + seed_test(aec_env_fn, num_cycles, test_kept_state) diff --git a/MLPY/Lib/site-packages/pettingzoo/test/state_test.py b/MLPY/Lib/site-packages/pettingzoo/test/state_test.py new file mode 100644 index 0000000000000000000000000000000000000000..f614d92cdf4a6b81f94950281fc370fd6cff0252 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/test/state_test.py @@ -0,0 +1,84 @@ +import random +import re +import warnings + +import gym +import numpy as np + +import pettingzoo + + +def test_state_space(env): + assert isinstance(env.state_space, gym.spaces.Space), "State space for each environment must extend gym.spaces.Space" + if not (isinstance(env.state_space, gym.spaces.Box) or isinstance(env.state_space, gym.spaces.Discrete)): + warnings.warn("State space for each environment probably should be gym.spaces.box or gym.spaces.discrete") + + if isinstance(env.state_space, gym.spaces.Box): + if np.any(np.equal(env.state_space.low, -np.inf)): + warnings.warn("Environment's minimum state space value is -infinity. This is probably too low.") + if np.any(np.equal(env.state_space.high, np.inf)): + warnings.warn("Environment's maxmimum state space value is infinity. 
This is probably too high") + if np.any(np.equal(env.state_space.low, env.state_space.high)): + warnings.warn("Environment's maximum and minimum state space values are equal") + if np.any(np.greater(env.state_space.low, env.state_space.high)): + assert False, "Environment's minimum state space value is greater than it's maximum" + if env.state_space.low.shape != env.state_space.shape: + assert False, "Environment's state_space.low and state_space have different shapes" + if env.state_space.high.shape != env.state_space.shape: + assert False, "Environment's state_space.high and state_space have different shapes" + + +def test_state(env, num_cycles): + env.reset() + state_0 = env.state() + for agent in env.agent_iter(env.num_agents * num_cycles): + observation, reward, done, info = env.last(observe=False) + if done: + action = None + else: + action = env.action_space(agent).sample() + + env.step(action) + new_state = env.state() + assert env.state_space.contains(new_state), "Environment's state is outside of it's state space" + if isinstance(new_state, np.ndarray): + if np.isinf(new_state).any(): + warnings.warn("State contains infinity (np.inf) or negative infinity (-np.inf)") + if np.isnan(new_state).any(): + warnings.warn("State contains NaNs") + if len(new_state.shape) > 3: + warnings.warn("State has more than 3 dimensions") + if new_state.shape == (0,): + assert False, "State can not be an empty array" + if new_state.shape == (1,): + warnings.warn("State is a single number") + if not isinstance(new_state, state_0.__class__): + warnings.warn("State between Observations are different classes") + if (new_state.shape != state_0.shape) and (len(new_state.shape) == len(state_0.shape)): + warnings.warn("States are different shapes") + if len(new_state.shape) != len(state_0.shape): + warnings.warn("States have different number of dimensions") + if not np.can_cast(new_state.dtype, np.dtype("float64")): + warnings.warn("State numpy array is not a numeric dtype") + if np.array_equal(new_state, np.zeros(new_state.shape)): + warnings.warn("State numpy array is all zeros.") + if not np.all(new_state >= 0) and ((len(new_state.shape) == 2) or (len(new_state.shape) == 3 and new_state.shape[2] == 1) or (len(new_state.shape) == 3 and new_state.shape[2] == 3)): + warnings.warn("The state contains negative numbers and is in the shape of a graphical observation. 
This might be a bad thing.") + else: + warnings.warn("State is not NumPy array") + + +def test_parallel_env(parallel_env): + + parallel_env.reset() + + assert isinstance(parallel_env.state_space, gym.spaces.Space), "State space for each parallel environment must extend gym.spaces.Space" + + state_0 = parallel_env.state() + assert parallel_env.state_space.contains(state_0), "ParallelEnvironment's state is outside of it's state space" + + +def state_test(env, parallel_env, num_cycles=10): + test_state_space(env) + test_state(env, num_cycles) + test_parallel_env(parallel_env) diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/__init__.py b/MLPY/Lib/site-packages/pettingzoo/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8dde07b720ca90dc7a627c8668978fb6005cad43 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/__init__.py @@ -0,0 +1,9 @@ +from .agent_selector import agent_selector +from .average_total_reward import average_total_reward +from .conversions import aec_to_parallel, parallel_to_aec +from .env import AECEnv, ParallelEnv +from .random_demo import random_demo +from .save_observation import save_observation +from .wrappers import (AssertOutOfBoundsWrapper, BaseParallelWraper, BaseWrapper, + CaptureStdoutWrapper, ClipOutOfBoundsWrapper, OrderEnforcingWrapper, + TerminateIllegalWrapper) diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c56da5c4c783f1ae709092ee775ee2f0bb8c76ef Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/__init__.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/agent_selector.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/agent_selector.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30753812524fb710815ace7b0345ca7d767f9539 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/agent_selector.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/average_total_reward.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/average_total_reward.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a62ad664bb190cf4408a7b8e22accf97bed48d54 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/average_total_reward.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/capture_stdout.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/capture_stdout.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9ae8c1de8ee8870f10a298d16e6189b61e3beb36 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/capture_stdout.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/conversions.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/conversions.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a5b53d228e17634679ed389560a69d7318ba03b1 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/conversions.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/deprecated_module.cpython-39.pyc 
b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/deprecated_module.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4406c94250419189f8e736731df7058a5fb8c5b1 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/deprecated_module.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/env.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/env.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2283f34f6b0b1710ea1dca23d4d1c819e48c200c Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/env.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/env_logger.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/env_logger.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d709994ca9e2881e6d0f80de0030fc9647afce87 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/env_logger.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/random_demo.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/random_demo.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5ffb39fcbccc4099b0dc4c1cc6f28ccf09e30a92 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/random_demo.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/save_observation.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/save_observation.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6957299307ce637f4e60b6ea2467943f8197561d Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/__pycache__/save_observation.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/agent_selector.py b/MLPY/Lib/site-packages/pettingzoo/utils/agent_selector.py new file mode 100644 index 0000000000000000000000000000000000000000..828f276dba688d761e47563353f8bb27605f4578 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/agent_selector.py @@ -0,0 +1,35 @@ +class agent_selector(): + ''' + Outputs an agent in the given order whenever agent_select is called. 
Can reinitialize to a new order + ''' + def __init__(self, agent_order): + self.reinit(agent_order) + + def reinit(self, agent_order): + self.agent_order = agent_order + self._current_agent = 0 + self.selected_agent = 0 + + def reset(self): + self.reinit(self.agent_order) + return self.next() + + def next(self): + self._current_agent = (self._current_agent + 1) % len(self.agent_order) + self.selected_agent = self.agent_order[self._current_agent - 1] + return self.selected_agent + + def is_last(self): + ''' + Does not work as expected if you change the order + ''' + return self.selected_agent == self.agent_order[-1] + + def is_first(self): + return self.selected_agent == self.agent_order[0] + + def __eq__(self, other): + if not isinstance(other, agent_selector): + return NotImplemented + + return self.agent_order == other.agent_order and self._current_agent == other._current_agent and self.selected_agent == other.selected_agent diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/average_total_reward.py b/MLPY/Lib/site-packages/pettingzoo/utils/average_total_reward.py new file mode 100644 index 0000000000000000000000000000000000000000..0b89e0efc634c4c96d20dfcbd1034c10e6fc9665 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/average_total_reward.py @@ -0,0 +1,39 @@ +import random + +import numpy as np + + +def average_total_reward(env, max_episodes=100, max_steps=10000000000): + ''' + Runs an env object with random actions until either max_episodes or + max_steps is reached. Calculates the average total reward over the + episodes. + + Reward is summed across all agents, making it unsuited for use in zero-sum + games. + ''' + total_reward = 0 + total_steps = 0 + done = False + + for episode in range(max_episodes): + if total_steps >= max_steps: + break + + env.reset() + for agent in env.agent_iter(): + obs, reward, done, _ = env.last(observe=False) + total_reward += reward + total_steps += 1 + if done: + action = None + elif isinstance(obs, dict) and 'action_mask' in obs: + action = random.choice(np.flatnonzero(obs['action_mask'])) + else: + action = env.action_space(agent).sample() + env.step(action) + + num_episodes = episode + 1 + print("Average total reward", total_reward / num_episodes) + + return total_reward / num_episodes diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/capture_stdout.py b/MLPY/Lib/site-packages/pettingzoo/utils/capture_stdout.py new file mode 100644 index 0000000000000000000000000000000000000000..39f7dcd45988afb56ef075f850b5e049b232d04d --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/capture_stdout.py @@ -0,0 +1,28 @@ +import io +import sys + +import numpy as np + + +class capture_stdout: + ''' + usage: + + with capture_stdout() as var: + print("hithere") + + # value of var will be "hithere" + data = var.getvalue() + ''' + def __init__(self): + self.old_stdout = None + + def __enter__(self): + self.old_stdout = sys.stdout + self.buff = io.StringIO() + sys.stdout = self.buff + return self.buff + + def __exit__(self, type, value, traceback): + sys.stdout = self.old_stdout + self.buff.close() diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/conversions.py b/MLPY/Lib/site-packages/pettingzoo/utils/conversions.py new file mode 100644 index 0000000000000000000000000000000000000000..6b7624133058422d7c8bf19083e60b222a4e5344 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/conversions.py @@ -0,0 +1,258 @@ +import copy +import warnings +from collections import defaultdict + +from pettingzoo.utils import agent_selector +from 
pettingzoo.utils.env import AECEnv, ParallelEnv +from pettingzoo.utils.wrappers import OrderEnforcingWrapper + + +def parallel_wrapper_fn(env_fn): + def par_fn(**kwargs): + env = env_fn(**kwargs) + env = aec_to_parallel_wrapper(env) + return env + return par_fn + + +def aec_to_parallel(aec_env): + if isinstance(aec_env, parallel_to_aec_wrapper): + return aec_env.env + else: + par_env = aec_to_parallel_wrapper(aec_env) + return par_env + + +def parallel_to_aec(par_env): + if isinstance(par_env, aec_to_parallel_wrapper): + return par_env.aec_env + else: + aec_env = parallel_to_aec_wrapper(par_env) + ordered_env = OrderEnforcingWrapper(aec_env) + return ordered_env + + +def to_parallel(aec_env): + warnings.warn("The `to_parallel` function is deprecated. Use the `aec_to_parallel` function instead.") + return aec_to_parallel(aec_env) + + +def from_parallel(par_env): + warnings.warn("The `from_parallel` function is deprecated. Use the `parallel_to_aec` function instead.") + return parallel_to_aec(par_env) + + +class aec_to_parallel_wrapper(ParallelEnv): + def __init__(self, aec_env): + assert aec_env.metadata.get('is_parallelizable', False), \ + "Converting from an AEC environment to a parallel environment " \ + "with the to_parallel wrapper is not generally safe " \ + "(the AEC environment should only update once at the end " \ + "of each cycle). If you have confirmed that your AEC environment " \ + "can be converted in this way, then please set the `is_parallelizable` "\ + "key in your metadata to True" + + self.aec_env = aec_env + + try: + self.possible_agents = aec_env.possible_agents + except AttributeError: + pass + + self.metadata = aec_env.metadata + + # Not every environment has the .state_space attribute implemented + try: + self.state_space = self.aec_env.state_space + except AttributeError: + pass + + @property + def observation_spaces(self): + warnings.warn("The `observation_spaces` dictionary is deprecated. Use the `observation_space` function instead.") + try: + return {agent: self.observation_space(agent) for agent in self.possible_agents} + except AttributeError: + raise AttributeError("The base environment does not have an `observation_spaces` dict attribute. Use the environments `observation_space` method instead") + + @property + def action_spaces(self): + warnings.warn("The `action_spaces` dictionary is deprecated. Use the `action_space` function instead.") + try: + return {agent: self.action_space(agent) for agent in self.possible_agents} + except AttributeError: + raise AttributeError("The base environment does not have an action_spaces dict attribute. Use the environments `action_space` method instead") + + def observation_space(self, agent): + return self.aec_env.observation_space(agent) + + def action_space(self, agent): + return self.aec_env.action_space(agent) + + @property + def unwrapped(self): + return self.aec_env.unwrapped + + def seed(self, seed=None): + return self.aec_env.seed(seed) + + def reset(self): + self.aec_env.reset() + self.agents = self.aec_env.agents[:] + observations = {agent: self.aec_env.observe(agent) for agent in self.aec_env.agents if not self.aec_env.dones[agent]} + return observations + + def step(self, actions): + rewards = defaultdict(int) + dones = {} + infos = {} + observations = {} + for agent in self.aec_env.agents: + if agent != self.aec_env.agent_selection: + if self.aec_env.dones[agent]: + raise AssertionError(f"expected agent {agent} got done agent {self.aec_env.agent_selection}. 
Parallel environment wrapper expects all agent termination (setting an agent's self.dones entry to True) to happen only at the end of a cycle.") + else: + raise AssertionError(f"expected agent {agent} got agent {self.aec_env.agent_selection}, Parallel environment wrapper expects agents to step in a cycle.") + obs, rew, done, info = self.aec_env.last() + self.aec_env.step(actions[agent]) + for agent in self.aec_env.agents: + rewards[agent] += self.aec_env.rewards[agent] + + dones = dict(**self.aec_env.dones) + infos = dict(**self.aec_env.infos) + observations = {agent: self.aec_env.observe(agent) for agent in self.aec_env.agents} + while self.aec_env.agents and self.aec_env.dones[self.aec_env.agent_selection]: + self.aec_env.step(None) + + self.agents = self.aec_env.agents + return observations, rewards, dones, infos + + def render(self, mode="human"): + return self.aec_env.render(mode) + + def state(self): + return self.aec_env.state() + + def close(self): + return self.aec_env.close() + + +class parallel_to_aec_wrapper(AECEnv): + def __init__(self, parallel_env): + self.env = parallel_env + + self.metadata = {**parallel_env.metadata} + self.metadata['is_parallelizable'] = True + + try: + self.possible_agents = parallel_env.possible_agents + except AttributeError: + pass + + # Not every environment has the .state_space attribute implemented + try: + self.state_space = self.env.state_space + except AttributeError: + pass + + @property + def unwrapped(self): + return self.env.unwrapped + + @property + def observation_spaces(self): + warnings.warn("The `observation_spaces` dictionary is deprecated. Use the `observation_space` function instead.") + try: + return {agent: self.observation_space(agent) for agent in self.possible_agents} + except AttributeError: + raise AttributeError("The base environment does not have an `observation_spaces` dict attribute. Use the environments `observation_space` method instead") + + @property + def action_spaces(self): + warnings.warn("The `action_spaces` dictionary is deprecated. Use the `action_space` function instead.") + try: + return {agent: self.action_space(agent) for agent in self.possible_agents} + except AttributeError: + raise AttributeError("The base environment does not have an action_spaces dict attribute. 
Use the environments `action_space` method instead") + + def observation_space(self, agent): + return self.env.observation_space(agent) + + def action_space(self, agent): + return self.env.action_space(agent) + + def seed(self, seed=None): + self.env.seed(seed) + + def reset(self): + self._observations = self.env.reset() + self.agents = self.env.agents[:] + self._live_agents = self.agents[:] + self._actions = {agent: None for agent in self.agents} + self._agent_selector = agent_selector(self._live_agents) + self.agent_selection = self._agent_selector.reset() + self.dones = {agent: False for agent in self.agents} + self.infos = {agent: {} for agent in self.agents} + self.rewards = {agent: 0 for agent in self.agents} + self._cumulative_rewards = {agent: 0 for agent in self.agents} + self.new_agents = [] + self.new_values = {} + + def observe(self, agent): + return self._observations[agent] + + def state(self): + return self.env.state() + + def add_new_agent(self, new_agent): + self._agent_selector._current_agent = len(self._agent_selector.agent_order) + self._agent_selector.agent_order.append(new_agent) + self.agent_selection = self._agent_selector.next() + self.agents.append(new_agent) + self.dones[new_agent] = False + self.infos[new_agent] = {} + self.rewards[new_agent] = 0 + self._cumulative_rewards[new_agent] = 0 + + def step(self, action): + if self.dones[self.agent_selection]: + del self._actions[self.agent_selection] + return self._was_done_step(action) + self._actions[self.agent_selection] = action + if self._agent_selector.is_last(): + obss, rews, dones, infos = self.env.step(self._actions) + + self._observations = copy.copy(obss) + self.dones = copy.copy(dones) + self.infos = copy.copy(infos) + self.rewards = copy.copy(rews) + self._cumulative_rewards = copy.copy(rews) + + env_agent_set = set(self.env.agents) + + self.agents = self.env.agents + [agent for agent in sorted(self._observations.keys()) if agent not in env_agent_set] + + if len(self.env.agents): + self._agent_selector = agent_selector(self.env.agents) + self.agent_selection = self._agent_selector.reset() + + self._dones_step_first() + else: + if self._agent_selector.is_first(): + self._clear_rewards() + + self.agent_selection = self._agent_selector.next() + + def last(self, observe=True): + agent = self.agent_selection + observation = self.observe(agent) if observe else None + return observation, self._cumulative_rewards[agent], self.dones[agent], self.infos[agent] + + def render(self, mode="human"): + return self.env.render(mode) + + def close(self): + self.env.close() + + def __str__(self): + return str(self.env) diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/deprecated_module.py b/MLPY/Lib/site-packages/pettingzoo/utils/deprecated_module.py new file mode 100644 index 0000000000000000000000000000000000000000..422053f617fab488689bdd89398f91415c1223cc --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/deprecated_module.py @@ -0,0 +1,42 @@ +import importlib +import pkgutil +import re + + +class DeprecatedEnv(ImportError): + pass + + +class DeprecatedModule: + def __init__(self, name, old_version, new_version): + def env(*args, **kwargs): + raise DeprecatedEnv(f"{name}_v{old_version} is now deprecated, use {name}_v{new_version} instead") + + self.env = env + self.raw_env = env + self.parallel_env = env + self.manual_control = env + + +def is_env(env_name): + return bool(re.fullmatch("[a-zA-Z_]+_v[0-9]+", env_name)) + + +def depricated_handler(env_name, module_path, module_name): + try: + return 
importlib.import_module(f"{module_name}.{env_name}") + except ImportError: + pass + + if not is_env(env_name): + raise ImportError(f"cannot import name '{env_name}' from '{module_name}'") + name, version = env_name.rsplit("_v") + + for loader, alt_env_name, is_pkg in pkgutil.iter_modules(module_path): + if is_env(alt_env_name): + alt_name, alt_version = alt_env_name.rsplit("_v") + if alt_name == name: + if int(alt_version) > int(version): + return DeprecatedModule(name, version, alt_version) + else: + raise ImportError(f"cannot import name '{env_name}' from '{module_name}'") diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/env.py b/MLPY/Lib/site-packages/pettingzoo/utils/env.py new file mode 100644 index 0000000000000000000000000000000000000000..90eb77c736541742ee09b95971614faec4bff94c --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/env.py @@ -0,0 +1,308 @@ +import warnings + +''' +Base environment definitions + +See docs/api.md for api documentation +See docs/dev_docs.md for additional documentation and an example environment. +''' + + +class AECEnv: + ''' + The AECEnv steps agents one at a time. If you are unsure if you + have implemented a AECEnv correctly, try running the `api_test` documented in + the Developer documentation on the website. + ''' + def __init__(self): + pass + + def step(self, action): + ''' + Receives a dictionary of actions keyed by the agent name. + Returns the observation dictionary, reward dictionary, done dictionary, and info dictionary, + where each dictionary is keyed by the agent. + ''' + raise NotImplementedError + + def reset(self): + ''' + Resets the environment to a starting state. + ''' + raise NotImplementedError + + def seed(self, seed=None): + ''' + Reseeds the environment (making the resulting environment deterministic). + `reset()` must be called after `seed()`, and before `step()`. + ''' + pass + + def observe(self, agent): + ''' + Returns the observation an agent currently can make. `last()` calls this function. + ''' + raise NotImplementedError + + def render(self, mode='human'): + ''' + Displays a rendered frame from the environment, if supported. + Alternate render modes in the default environments are `'rgb_array'` + which returns a numpy array and is supported by all environments outside of classic, + and `'ansi'` which returns the strings printed (specific to classic environments). + ''' + raise NotImplementedError + + def state(self): + ''' + State returns a global view of the environment appropriate for + centralized training decentralized execution methods like QMIX + ''' + raise NotImplementedError('state() method has not been implemented in the environment {}.'.format(self.metadata.get('name', self.__class__.__name__))) + + def close(self): + ''' + Closes the rendering window, subprocesses, network connections, or any other resources + that should be released. + ''' + pass + + def observation_space(self, agent): + ''' + Takes in agent and returns the observation space for that agent. + + MUST return the same value for the same agent name + + Default implementation is to return the observation_spaces dict + ''' + warnings.warn("Your environment should override the observation_space function. Attempting to use the observation_spaces dict attribute.") + return self.observation_spaces[agent] + + def action_space(self, agent): + ''' + Takes in agent and returns the action space for that agent. 
+ + MUST return the same value for the same agent name + + Default implementation is to return the action_spaces dict + ''' + warnings.warn("Your environment should override the action_space function. Attempting to use the action_spaces dict attribute.") + return self.action_spaces[agent] + + @property + def num_agents(self): + return len(self.agents) + + @property + def max_num_agents(self): + return len(self.possible_agents) + + def _dones_step_first(self): + ''' + Makes .agent_selection point to first done agent. Stores old value of agent_selection + so that _was_done_step can restore the variable after the done agent steps. + ''' + _dones_order = [agent for agent in self.agents if self.dones[agent]] + if _dones_order: + self._skip_agent_selection = self.agent_selection + self.agent_selection = _dones_order[0] + return self.agent_selection + + def _clear_rewards(self): + ''' + clears all items in .rewards + ''' + for agent in self.rewards: + self.rewards[agent] = 0 + + def _accumulate_rewards(self): + ''' + adds .rewards dictionary to ._cumulative_rewards dictionary. Typically + called near the end of a step() method + ''' + for agent, reward in self.rewards.items(): + self._cumulative_rewards[agent] += reward + + def agent_iter(self, max_iter=2**63): + ''' + yields the current agent (self.agent_selection) when used in a loop where you step() each iteration. + ''' + return AECIterable(self, max_iter) + + def last(self, observe=True): + ''' + returns observation, cumulative reward, done, info for the current agent (specified by self.agent_selection) + ''' + agent = self.agent_selection + observation = self.observe(agent) if observe else None + return observation, self._cumulative_rewards[agent], self.dones[agent], self.infos[agent] + + def _was_done_step(self, action): + ''' + Helper function that performs step() for done agents. + + Does the following: + + 1. Removes done agent from .agents, .dones, .rewards, ._cumulative_rewards, and .infos + 2. Loads next agent into .agent_selection: if another agent is done, loads that one, otherwise load next live agent + 3. 
Clear the rewards dict + + Highly recommended to use at the beginning of step as follows: + + def step(self, action): + if self.dones[self.agent_selection]: + self._was_done_step() + return + # main contents of step + ''' + if action is not None: + raise ValueError("when an agent is done, the only valid action is None") + + # removes done agent + agent = self.agent_selection + assert self.dones[agent], "an agent that was not done as attempted to be removed" + del self.dones[agent] + del self.rewards[agent] + del self._cumulative_rewards[agent] + del self.infos[agent] + self.agents.remove(agent) + + # finds next done agent or loads next live agent (Stored in _skip_agent_selection) + _dones_order = [agent for agent in self.agents if self.dones[agent]] + if _dones_order: + if getattr(self, '_skip_agent_selection', None) is None: + self._skip_agent_selection = self.agent_selection + self.agent_selection = _dones_order[0] + else: + if getattr(self, '_skip_agent_selection', None) is not None: + self.agent_selection = self._skip_agent_selection + self._skip_agent_selection = None + self._clear_rewards() + + def __str__(self): + ''' + returns a name which looks like: "space_invaders_v1" + ''' + if hasattr(self, 'metadata'): + return self.metadata.get('name', self.__class__.__name__) + else: + return self.__class__.__name__ + + @property + def unwrapped(self): + return self + + +class AECIterable: + def __init__(self, env, max_iter): + self.env = env + self.max_iter = max_iter + + def __iter__(self): + return AECIterator(self.env, self.max_iter) + + +class AECIterator: + def __init__(self, env, max_iter): + self.env = env + self.iters_til_term = max_iter + + def __next__(self): + if not self.env.agents or self.iters_til_term <= 0: + raise StopIteration + self.iters_til_term -= 1 + return self.env.agent_selection + + +class ParallelEnv: + ''' + The Parallel environment steps every live agent at once. If you are unsure if you + have implemented a ParallelEnv correctly, try running the `parallel_api_test` in + the Developer documentation on the website. + ''' + def reset(self): + ''' + resets the environment and returns a dictionary of observations (keyed by the agent name) + ''' + raise NotImplementedError + + def seed(self, seed=None): + ''' + Reseeds the environment (making it deterministic). + `reset()` must be called after `seed()`, and before `step()`. + ''' + pass + + def step(self, actions): + ''' + receives a dictionary of actions keyed by the agent name. + Returns the observation dictionary, reward dictionary, done dictionary, + and info dictionary, where each dictionary is keyed by the agent. + ''' + raise NotImplementedError + + def render(self, mode="human"): + ''' + Displays a rendered frame from the environment, if supported. + Alternate render modes in the default environments are `'rgb_array'` + which returns a numpy array and is supported by all environments outside + of classic, and `'ansi'` which returns the strings printed + (specific to classic environments). + ''' + raise NotImplementedError + + def close(self): + ''' + Closes the rendering window. 
+ ''' + pass + + def state(self): + ''' + State returns a global view of the environment appropriate for + centralized training decentralized execution methods like QMIX + ''' + raise NotImplementedError('state() method has not been implemented in the environment {}.'.format(self.metadata.get('name', self.__class__.__name__))) + + def observation_space(self, agent): + ''' + Takes in agent and returns the observation space for that agent. + + MUST return the same value for the same agent name + + Default implementation is to return the observation_spaces dict + ''' + warnings.warn("Your environment should override the observation_space function. Attempting to use the observation_spaces dict attribute.") + return self.observation_spaces[agent] + + def action_space(self, agent): + ''' + Takes in agent and returns the action space for that agent. + + MUST return the same value for the same agent name + + Default implementation is to return the action_spaces dict + ''' + warnings.warn("Your environment should override the action_space function. Attempting to use the action_spaces dict attribute.") + return self.action_spaces[agent] + + @property + def num_agents(self): + return len(self.agents) + + @property + def max_num_agents(self): + return len(self.possible_agents) + + def __str__(self): + ''' + returns a name which looks like: "space_invaders_v1" by default + ''' + if hasattr(self, 'metadata'): + return self.metadata.get('name', self.__class__.__name__) + else: + return self.__class__.__name__ + + @property + def unwrapped(self): + return self diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/env_logger.py b/MLPY/Lib/site-packages/pettingzoo/utils/env_logger.py new file mode 100644 index 0000000000000000000000000000000000000000..d1d91f6c0352d3fde89a1972e40af75bedb76350 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/env_logger.py @@ -0,0 +1,93 @@ +import logging + + +class EnvLogger(): + mqueue = [] + _output = True + + @staticmethod + def get_logger(): + logger = logging.getLogger(__name__) + return logger + + @staticmethod + def _generic_warning(msg): + logger = EnvLogger.get_logger() + if not logger.hasHandlers(): + handler = EnvWarningHandler(mqueue=EnvLogger.mqueue) + logger.addHandler(handler) + logger.warning(msg) + # needed to get the pytest runner to work correctly, and doesn't seem to have serious issues + EnvLogger.mqueue.append(msg) + + @staticmethod + def flush(): + EnvLogger.mqueue.clear() + + @staticmethod + def suppress_output(): + EnvLogger._output = False + + @staticmethod + def unsuppress_output(): + EnvLogger._output = True + + @staticmethod + def error_possible_agents_attribute_missing(name): + raise AttributeError(f'[ERROR]: This environment does not support {name}. This means that either the environment has procedurally generated agents such that this property cannot be well defined (which requires special learning code to handle) or the environment was improperly configured by the developer.') + + @staticmethod + def warn_action_out_of_bound(action, action_space, backup_policy): + EnvLogger._generic_warning(f"[WARNING]: Received an action {action} that was outside action space {action_space}. 
Environment is {backup_policy}") + + @staticmethod + def warn_close_unrendered_env(): + EnvLogger._generic_warning("[WARNING]: Called close on an unrendered environment.") + + @staticmethod + def warn_close_before_reset(): + EnvLogger._generic_warning("[WARNING]: reset() needs to be called before close.") + + @staticmethod + def warn_on_illegal_move(): + EnvLogger._generic_warning("[WARNING]: Illegal move made, game terminating with current player losing. \nobs['action_mask'] contains a mask of all legal moves that can be chosen.") + + @staticmethod + def error_observe_before_reset(): + assert False, "reset() needs to be called before observe" + + @staticmethod + def error_step_before_reset(): + assert False, "reset() needs to be called before step" + + @staticmethod + def warn_step_after_done(): + EnvLogger._generic_warning("[WARNING]: step() called after all agents are done. Should reset() first.") + + @staticmethod + def error_render_before_reset(): + assert False, "reset() needs to be called before render" + + @staticmethod + def error_agent_iter_before_reset(): + assert False, "reset() needs to be called before agent_iter" + + @staticmethod + def error_nan_action(): + assert False, "step() cannot take in a nan action" + + @staticmethod + def error_state_before_reset(): + assert False, "reset() needs to be called before state" + + +class EnvWarningHandler(logging.Handler): + def __init__(self, *args, mqueue, **kwargs): + logging.Handler.__init__(self, *args, **kwargs) + self.mqueue = mqueue + + def emit(self, record): + m = self.format(record).rstrip("\n") + self.mqueue.append(m) + if EnvLogger._output: + print(m) diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/random_demo.py b/MLPY/Lib/site-packages/pettingzoo/utils/random_demo.py new file mode 100644 index 0000000000000000000000000000000000000000..aeaa6ca19d3f119547a0046dc3ef3fd4ca0de8b5 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/random_demo.py @@ -0,0 +1,38 @@ +import random + +import numpy as np + + +def random_demo(env, render=True, episodes=1): + ''' + Runs an env object with random actions. 
+ ''' + + total_reward = 0 + done = False + completed_episodes = 0 + + while completed_episodes < episodes: + env.reset() + for agent in env.agent_iter(): + if render: + env.render() + + obs, reward, done, _ = env.last() + total_reward += reward + if done: + action = None + elif isinstance(obs, dict) and 'action_mask' in obs: + action = random.choice(np.flatnonzero(obs['action_mask'])) + else: + action = env.action_space(agent).sample() + env.step(action) + + completed_episodes += 1 + + if render: + env.close() + + print("Average total reward", total_reward / episodes) + + return total_reward diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/save_observation.py b/MLPY/Lib/site-packages/pettingzoo/utils/save_observation.py new file mode 100644 index 0000000000000000000000000000000000000000..90011710295a862923e4635ec22c0fe7bdf10872 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/save_observation.py @@ -0,0 +1,34 @@ +import os + +import gym +import numpy as np + + +def _check_observation_saveable(env, agent): + obs_space = env.observation_space(agent) + assert isinstance(obs_space, gym.spaces.Box), "Observations must be Box to save observations as image" + assert np.all(np.equal(obs_space.low, 0)) and np.all(np.equal(obs_space.high, 255)), "Observations must be 0 to 255 to save as image" + assert len(obs_space.shape) == 3 or len(obs_space.shape) == 2, "Observations must be 2D or 3D to save as image" + if len(obs_space.shape) == 3: + assert obs_space.shape[2] == 1 or obs_space.shape[2] == 3, "3D observations can only have 1 or 3 channels to save as an image" + + +# save the observation of an agent. If agent not specified uses env selected agent. If all_agents +# then all agents in environment observation recorded. +def save_observation(env, agent=None, all_agents=False, save_dir=os.getcwd()): + from PIL import Image + if agent is None: + agent = env.agent_selection + agent_list = [agent] + if all_agents: + agent_list = env.agents[:] + for a in agent_list: + _check_observation_saveable(env, a) + save_folder = "{}/{}".format(save_dir, str(env).replace("<", "_").replace(">", "_")) + os.makedirs(save_folder, exist_ok=True) + + observation = env.observe(a) + rescaled = observation.astype(np.uint8) + im = Image.fromarray(rescaled) + fname = os.path.join(save_folder, str(a) + ".png") + im.save(fname) diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__init__.py b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0f8396b7a7023948f0997d322b3ac9d1c02c94af --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__init__.py @@ -0,0 +1,7 @@ +from .assert_out_of_bounds import AssertOutOfBoundsWrapper +from .base import BaseWrapper +from .base_parallel import BaseParallelWraper +from .capture_stdout import CaptureStdoutWrapper +from .clip_out_of_bounds import ClipOutOfBoundsWrapper +from .order_enforcing import OrderEnforcingWrapper +from .terminate_illegal import TerminateIllegalWrapper diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/__init__.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e76b7997122eb1a4a10638de1a05e205b14b9c8 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/__init__.cpython-39.pyc differ diff --git 
a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/assert_out_of_bounds.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/assert_out_of_bounds.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0d7cb79d50802c5116169027fcc801599cb5c856 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/assert_out_of_bounds.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/base.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/base.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..acc7f2ff0de59b0f69146019073a409e24b90027 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/base.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/base_parallel.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/base_parallel.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..518e901e5ebfdaa95faff33131b8ad1ad11d15eb Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/base_parallel.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/capture_stdout.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/capture_stdout.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4c292cd737028c58a295cc9522fb5f32221859e5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/capture_stdout.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/clip_out_of_bounds.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/clip_out_of_bounds.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5e8ede860dca51e4925e91477bca1364694e910b Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/clip_out_of_bounds.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/order_enforcing.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/order_enforcing.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed0d32390bde1747fec9e70f59fc8ced16d261e5 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/order_enforcing.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/terminate_illegal.cpython-39.pyc b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/terminate_illegal.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0fda8a7135f1e88265f84361dc7c45ee8d995d64 Binary files /dev/null and b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/__pycache__/terminate_illegal.cpython-39.pyc differ diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/assert_out_of_bounds.py b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/assert_out_of_bounds.py new file mode 100644 index 0000000000000000000000000000000000000000..b959ca7554311b0bc2608d586751d0b479317364 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/assert_out_of_bounds.py @@ -0,0 +1,20 @@ +from gym.spaces import Discrete + +from .base import BaseWrapper + + +class AssertOutOfBoundsWrapper(BaseWrapper): + ''' + this wrapper 
crashes for out of bounds actions + Should be used for Discrete spaces + ''' + def __init__(self, env): + super().__init__(env) + assert all(isinstance(self.action_space(agent), Discrete) for agent in getattr(self, 'possible_agents', [])), "should only use AssertOutOfBoundsWrapper for Discrete spaces" + + def step(self, action): + assert (action is None and self.dones[self.agent_selection]) or self.action_space(self.agent_selection).contains(action), "action is not in action space" + super().step(action) + + def __str__(self): + return str(self.env) diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/base.py b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/base.py new file mode 100644 index 0000000000000000000000000000000000000000..eb6fbb7c58dc59fc94054493aaa48d04b05695c7 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/base.py @@ -0,0 +1,107 @@ +import warnings + +from pettingzoo.utils.env import AECEnv + + +class BaseWrapper(AECEnv): + ''' + Creates a wrapper around `env` parameter. Extend this class + to create a useful wrapper. + ''' + + def __init__(self, env): + super().__init__() + self.env = env + + try: + self.possible_agents = self.env.possible_agents + except AttributeError: + pass + + self.metadata = self.env.metadata + + # we don't want these defined as we don't want them used before they are gotten + + # self.agent_selection = self.env.agent_selection + + # self.rewards = self.env.rewards + # self.dones = self.env.dones + + # we don't want to care one way or the other whether environments have an infos or not before reset + try: + self.infos = self.env.infos + except AttributeError: + pass + + # Not every environment has the .state_space attribute implemented + try: + self.state_space = self.env.state_space + except AttributeError: + pass + + @property + def observation_spaces(self): + warnings.warn("The `observation_spaces` dictionary is deprecated. Use the `observation_space` function instead.") + try: + return {agent: self.observation_space(agent) for agent in self.possible_agents} + except AttributeError: + raise AttributeError("The base environment does not have an `observation_spaces` dict attribute. Use the environment's `observation_space` method instead") + + @property + def action_spaces(self): + warnings.warn("The `action_spaces` dictionary is deprecated. Use the `action_space` function instead.") + try: + return {agent: self.action_space(agent) for agent in self.possible_agents} + except AttributeError: + raise AttributeError("The base environment does not have an action_spaces dict attribute. 
Use the environment's `action_space` method instead") + + def observation_space(self, agent): + return self.env.observation_space(agent) + + def action_space(self, agent): + return self.env.action_space(agent) + + @property + def unwrapped(self): + return self.env.unwrapped + + def seed(self, seed=None): + self.env.seed(seed) + + def close(self): + self.env.close() + + def render(self, mode='human'): + return self.env.render(mode) + + def reset(self): + self.env.reset() + + self.agent_selection = self.env.agent_selection + self.rewards = self.env.rewards + self.dones = self.env.dones + self.infos = self.env.infos + self.agents = self.env.agents + self._cumulative_rewards = self.env._cumulative_rewards + + def observe(self, agent): + return self.env.observe(agent) + + def state(self): + return self.env.state() + + def step(self, action): + self.env.step(action) + + self.agent_selection = self.env.agent_selection + self.rewards = self.env.rewards + self.dones = self.env.dones + self.infos = self.env.infos + self.agents = self.env.agents + self._cumulative_rewards = self.env._cumulative_rewards + + def __str__(self): + ''' + returns a name which looks like: "max_observation" + ''' + return f'{type(self).__name__}<{str(self.env)}>' diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/base_parallel.py b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/base_parallel.py new file mode 100644 index 0000000000000000000000000000000000000000..76b01a5759159bf2daf670eef8b03fb3343692ca --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/base_parallel.py @@ -0,0 +1,73 @@ +import warnings + +from ..env import ParallelEnv + + +class BaseParallelWraper(ParallelEnv): + def __init__(self, env): + self.env = env + + self.metadata = env.metadata + try: + self.possible_agents = env.possible_agents + except AttributeError: + pass + + # Not every environment has the .state_space attribute implemented + try: + self.state_space = self.env.state_space + except AttributeError: + pass + + def reset(self): + res = self.env.reset() + self.agents = self.env.agents + return res + + def step(self, actions): + res = self.env.step(actions) + self.agents = self.env.agents + return res + + def render(self, mode="human"): + return self.env.render(mode) + + def close(self): + return self.env.close() + + @property + def unwrapped(self): + return self.env.unwrapped + + def state(self): + return self.env.state() + + @property + def observation_spaces(self): + warnings.warn( + "The `observation_spaces` dictionary is deprecated. Use the `observation_space` function instead." + ) + try: + return {agent: self.observation_space(agent) for agent in self.possible_agents} + except AttributeError: + raise AttributeError( + "The base environment does not have an `observation_spaces` dict attribute. Use the environments `observation_space` method instead" + ) + + @property + def action_spaces(self): + warnings.warn( + "The `action_spaces` dictionary is deprecated. Use the `action_space` function instead." + ) + try: + return {agent: self.action_space(agent) for agent in self.possible_agents} + except AttributeError: + raise AttributeError( + "The base environment does not have an action_spaces dict attribute. 
Use the environments `action_space` method instead" + ) + + def observation_space(self, agent): + return self.env.observation_space(agent) + + def action_space(self, agent): + return self.env.action_space(agent) diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/capture_stdout.py b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/capture_stdout.py new file mode 100644 index 0000000000000000000000000000000000000000..f88798ec239597dff355b7f38643bf6b9907a18f --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/capture_stdout.py @@ -0,0 +1,22 @@ +from ..capture_stdout import capture_stdout +from .base import BaseWrapper + + +class CaptureStdoutWrapper(BaseWrapper): + def __init__(self, env): + super().__init__(env) + self.metadata['render.modes'].append("ansi") + + def render(self, mode="human"): + if mode == "ansi": + with capture_stdout() as stdout: + + super().render("human") + + val = stdout.getvalue() + return val + else: + return super().render(mode) + + def __str__(self): + return str(self.env) diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/clip_out_of_bounds.py b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/clip_out_of_bounds.py new file mode 100644 index 0000000000000000000000000000000000000000..f41299e38c1c1874f621c2538a7b1a40826a1008 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/clip_out_of_bounds.py @@ -0,0 +1,29 @@ +import numpy as np +from gym.spaces import Box + +from ..env_logger import EnvLogger +from .base import BaseWrapper + + +class ClipOutOfBoundsWrapper(BaseWrapper): + ''' + this wrapper crops out of bounds actions for Box spaces + ''' + def __init__(self, env): + super().__init__(env) + assert all(isinstance(self.action_space(agent), Box) for agent in getattr(self, 'possible_agents', [])), "should only use ClipOutOfBoundsWrapper for Box spaces" + + def step(self, action): + space = self.action_space(self.agent_selection) + if not (action is None and self.dones[self.agent_selection]) and not space.contains(action): + assert space.shape == action.shape, f"action should have shape {space.shape}, has shape {action.shape}" + if np.isnan(action).any(): + EnvLogger.error_nan_action() + + EnvLogger.warn_action_out_of_bound(action=action, action_space=space, backup_policy="clipping to space") + action = np.clip(action, space.low, space.high) + + super().step(action) + + def __str__(self): + return str(self.env) diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/order_enforcing.py b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/order_enforcing.py new file mode 100644 index 0000000000000000000000000000000000000000..507b09af4521e4e48cf8725912ee1ce71d6ce6d1 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/order_enforcing.py @@ -0,0 +1,101 @@ +from ..env import AECIterable, AECIterator +from ..env_logger import EnvLogger +from .base import BaseWrapper + + +class OrderEnforcingWrapper(BaseWrapper): + ''' + check all call orders: + + * error on getting rewards, dones, infos, agent_selection before reset + * error on calling step, observe before reset + * error on iterating without stepping or resetting environment. 
+ * warn on calling close before render or reset + * warn on calling step after environment is done + ''' + def __init__(self, env): + self._has_reset = False + self._has_rendered = False + self._has_updated = False + super().__init__(env) + + def __getattr__(self, value): + ''' + raises an error message when data is gotten from the env + which should only be gotten after reset + ''' + if value == "unwrapped": + return self.env.unwrapped + elif value == 'possible_agents': + EnvLogger.error_possible_agents_attribute_missing('possible_agents') + elif value == 'observation_spaces': + raise AttributeError("The base environment does not have an possible_agents attribute. Use the environments `observation_space` method instead") + elif value == 'action_spaces': + raise AttributeError("The base environment does not have an possible_agents attribute. Use the environments `action_space` method instead") + elif value == "agent_order": + raise AttributeError("agent_order has been removed from the API. Please consider using agent_iter instead.") + elif value in {"rewards", "dones", "infos", "agent_selection", "num_agents", "agents"}: + raise AttributeError(f"{value} cannot be accessed before reset") + else: + raise AttributeError(f"'{type(self).__name__}' object has no attribute '{value}'") + + def seed(self, seed=None): + self._has_reset = False + super().seed(seed) + + def render(self, mode='human'): + if not self._has_reset: + EnvLogger.error_render_before_reset() + assert mode in self.metadata['render.modes'] + self._has_rendered = True + return super().render(mode) + + def step(self, action): + if not self._has_reset: + EnvLogger.error_step_before_reset() + elif not self.agents: + self._has_updated = True + EnvLogger.warn_step_after_done() + return None + else: + self._has_updated = True + super().step(action) + + def observe(self, agent): + if not self._has_reset: + EnvLogger.error_observe_before_reset() + return super().observe(agent) + + def state(self): + if not self._has_reset: + EnvLogger.error_state_before_reset() + return super().state() + + def agent_iter(self, max_iter=2**63): + if not self._has_reset: + EnvLogger.error_agent_iter_before_reset() + return AECOrderEnforcingIterable(self, max_iter) + + def reset(self): + self._has_reset = True + self._has_updated = True + super().reset() + + def __str__(self): + if hasattr(self, 'metadata'): + return str(self.env) if self.__class__ is OrderEnforcingWrapper else f'{type(self).__name__}<{str(self.env)}>' + else: + return repr(self) + + +class AECOrderEnforcingIterable(AECIterable): + def __iter__(self): + return AECOrderEnforcingIterator(self.env, self.max_iter) + + +class AECOrderEnforcingIterator(AECIterator): + def __next__(self): + agent = super().__next__() + assert self.env._has_updated, "need to call step() or reset() in a loop over `agent_iter`" + self.env._has_updated = False + return agent diff --git a/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/terminate_illegal.py b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/terminate_illegal.py new file mode 100644 index 0000000000000000000000000000000000000000..489ea73a4b0c09b0d33046ad97ebb5c097fa2875 --- /dev/null +++ b/MLPY/Lib/site-packages/pettingzoo/utils/wrappers/terminate_illegal.py @@ -0,0 +1,52 @@ +from ..env_logger import EnvLogger +from .base import BaseWrapper + + +class TerminateIllegalWrapper(BaseWrapper): + ''' + this wrapper terminates the game with the current player losing + in case of illegal values + + parameters: + - illegal_reward: number that is the value of 
the player making an illegal move. + ''' + def __init__(self, env, illegal_reward): + super().__init__(env) + self._illegal_value = illegal_reward + self._prev_obs = None + + def reset(self): + self._terminated = False + self._prev_obs = None + super().reset() + + def observe(self, agent): + obs = super().observe(agent) + if agent == self.agent_selection: + self._prev_obs = obs + return obs + + def step(self, action): + current_agent = self.agent_selection + if self._prev_obs is None: + self.observe(self.agent_selection) + assert 'action_mask' in self._prev_obs, "action_mask must always be part of environment observation as an element in a dictionary observation to use the TerminateIllegalWrapper" + _prev_action_mask = self._prev_obs['action_mask'] + self._prev_obs = None + if self._terminated and self.dones[self.agent_selection]: + self._was_done_step(action) + elif not self.dones[self.agent_selection] and not _prev_action_mask[action]: + EnvLogger.warn_on_illegal_move() + self._cumulative_rewards[self.agent_selection] = 0 + self.dones = {d: True for d in self.dones} + self._prev_obs = None + self.rewards = {d: 0 for d in self.dones} + self.rewards[current_agent] = float(self._illegal_value) + self._accumulate_rewards() + self._dones_step_first() + self._terminated = True + else: + super().step(action) + + def __str__(self): + return str(self.env) diff --git a/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/INSTALLER b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/LICENSE b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..5de826a41b69e1c402a19bd02c21d831e9eb3071 --- /dev/null +++ b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/LICENSE @@ -0,0 +1,1219 @@ +The Python Imaging Library (PIL) is + + Copyright © 1997-2011 by Secret Labs AB + Copyright © 1995-2011 by Fredrik Lundh and contributors + +Pillow is the friendly PIL fork. It is + + Copyright © 2010-2024 by Jeffrey A. Clark and contributors + +Like PIL, Pillow is licensed under the open source HPND License: + +By obtaining, using, and/or copying this software and/or its associated +documentation, you agree that you have read, understood, and will comply +with the following terms and conditions: + +Permission to use, copy, modify and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appears in all copies, and that +both that copyright notice and this permission notice appear in supporting +documentation, and that the name of Secret Labs AB or the author not be +used in advertising or publicity pertaining to distribution of the software +without specific, written prior permission. + +SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. +IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL, +INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
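Illustrative usage sketch (not part of the vendored sources above): the pettingzoo wrapper modules added in this diff are normally composed as a stack around a raw AEC environment. The snippet below assumes a PettingZoo "classic" environment (tictactoe_v3) is installed and follows the pre-1.18 API used by these files (env.last() returning a done flag, step(None) for done agents); imports point directly at the module files added above.

from pettingzoo.classic import tictactoe_v3  # assumption: pettingzoo[classic] is available
from pettingzoo.utils.wrappers.assert_out_of_bounds import AssertOutOfBoundsWrapper
from pettingzoo.utils.wrappers.order_enforcing import OrderEnforcingWrapper
from pettingzoo.utils.wrappers.terminate_illegal import TerminateIllegalWrapper

# Layer the wrappers from the inside out around the raw environment.
env = tictactoe_v3.raw_env()
env = TerminateIllegalWrapper(env, illegal_reward=-1)  # illegal move ends the game, mover receives -1
env = AssertOutOfBoundsWrapper(env)                    # assert actions lie in the Discrete space
env = OrderEnforcingWrapper(env)                       # error on step()/observe() before reset()

env.reset()
for agent in env.agent_iter():
    obs, reward, done, info = env.last()      # this API version returns a done flag, not term/trunc
    if done:
        action = None                         # done agents must be stepped with None
    else:
        action = int(obs["action_mask"].argmax())  # first legal action from the classic env's mask
    env.step(action)
env.close()

As base.py and base_parallel.py note, the observation_spaces/action_spaces dict properties are deprecated; the per-agent observation_space(agent) and action_space(agent) methods used above are the supported accessors. Custom wrappers subclass BaseWrapper and override only the methods they need, forwarding everything else to the wrapped env.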
+ +===== brotli-1.1.0 ===== + +Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +===== freetype-2.13.2 ===== + +FREETYPE LICENSES +----------------- + +The FreeType 2 font engine is copyrighted work and cannot be used +legally without a software license. In order to make this project +usable to a vast majority of developers, we distribute it under two +mutually exclusive open-source licenses. + +This means that *you* must choose *one* of the two licenses described +below, then obey all its terms and conditions when using FreeType 2 in +any of your projects or products. + + - The FreeType License, found in the file `docs/FTL.TXT`, which is + similar to the original BSD license *with* an advertising clause + that forces you to explicitly cite the FreeType project in your + product's documentation. All details are in the license file. + This license is suited to products which don't use the GNU General + Public License. + + Note that this license is compatible to the GNU General Public + License version 3, but not version 2. + + - The GNU General Public License version 2, found in + `docs/GPLv2.TXT` (any later version can be used also), for + programs which already use the GPL. Note that the FTL is + incompatible with GPLv2 due to its advertisement clause. + +The contributed BDF and PCF drivers come with a license similar to +that of the X Window System. It is compatible to the above two +licenses (see files `src/bdf/README` and `src/pcf/README`). The same +holds for the source code files `src/base/fthash.c` and +`include/freetype/internal/fthash.h`; they were part of the BDF driver +in earlier FreeType versions. + +The gzip module uses the zlib license (see `src/gzip/zlib.h`) which +too is compatible to the above two licenses. + +The files `src/autofit/ft-hb.c` and `src/autofit/ft-hb.h` contain code +taken almost verbatim from the HarfBuzz file `hb-ft.cc`, which uses +the 'Old MIT' license, compatible to the above two licenses. + +The MD5 checksum support (only used for debugging in development +builds) is in the public domain. 
+ + +--- end of LICENSE.TXT --- + The FreeType Project LICENSE + ---------------------------- + + 2006-Jan-27 + + Copyright 1996-2002, 2006 by + David Turner, Robert Wilhelm, and Werner Lemberg + + + +Introduction +============ + + The FreeType Project is distributed in several archive packages; + some of them may contain, in addition to the FreeType font engine, + various tools and contributions which rely on, or relate to, the + FreeType Project. + + This license applies to all files found in such packages, and + which do not fall under their own explicit license. The license + affects thus the FreeType font engine, the test programs, + documentation and makefiles, at the very least. + + This license was inspired by the BSD, Artistic, and IJG + (Independent JPEG Group) licenses, which all encourage inclusion + and use of free software in commercial and freeware products + alike. As a consequence, its main points are that: + + o We don't promise that this software works. However, we will be + interested in any kind of bug reports. (`as is' distribution) + + o You can use this software for whatever you want, in parts or + full form, without having to pay us. (`royalty-free' usage) + + o You may not pretend that you wrote this software. If you use + it, or only parts of it, in a program, you must acknowledge + somewhere in your documentation that you have used the + FreeType code. (`credits') + + We specifically permit and encourage the inclusion of this + software, with or without modifications, in commercial products. + We disclaim all warranties covering The FreeType Project and + assume no liability related to The FreeType Project. + + + Finally, many people asked us for a preferred form for a + credit/disclaimer to use in compliance with this license. We thus + encourage you to use the following text: + + """ + Portions of this software are copyright © The FreeType + Project (www.freetype.org). All rights reserved. + """ + + Please replace with the value from the FreeType version you + actually use. + + +Legal Terms +=========== + +0. Definitions +-------------- + + Throughout this license, the terms `package', `FreeType Project', + and `FreeType archive' refer to the set of files originally + distributed by the authors (David Turner, Robert Wilhelm, and + Werner Lemberg) as the `FreeType Project', be they named as alpha, + beta or final release. + + `You' refers to the licensee, or person using the project, where + `using' is a generic term including compiling the project's source + code as well as linking it to form a `program' or `executable'. + This program is referred to as `a program using the FreeType + engine'. + + This license applies to all files distributed in the original + FreeType Project, including all source code, binaries and + documentation, unless otherwise stated in the file in its + original, unmodified form as distributed in the original archive. + If you are unsure whether or not a particular file is covered by + this license, you must contact us to verify this. + + The FreeType Project is copyright (C) 1996-2000 by David Turner, + Robert Wilhelm, and Werner Lemberg. All rights reserved except as + specified below. + +1. No Warranty +-------------- + + THE FREETYPE PROJECT IS PROVIDED `AS IS' WITHOUT WARRANTY OF ANY + KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. 
IN NO EVENT WILL ANY OF THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY DAMAGES CAUSED BY THE USE OR THE INABILITY TO + USE, OF THE FREETYPE PROJECT. + +2. Redistribution +----------------- + + This license grants a worldwide, royalty-free, perpetual and + irrevocable right and license to use, execute, perform, compile, + display, copy, create derivative works of, distribute and + sublicense the FreeType Project (in both source and object code + forms) and derivative works thereof for any purpose; and to + authorize others to exercise some or all of the rights granted + herein, subject to the following conditions: + + o Redistribution of source code must retain this license file + (`FTL.TXT') unaltered; any additions, deletions or changes to + the original files must be clearly indicated in accompanying + documentation. The copyright notices of the unaltered, + original files must be preserved in all copies of source + files. + + o Redistribution in binary form must provide a disclaimer that + states that the software is based in part of the work of the + FreeType Team, in the distribution documentation. We also + encourage you to put an URL to the FreeType web page in your + documentation, though this isn't mandatory. + + These conditions apply to any software derived from or based on + the FreeType Project, not just the unmodified files. If you use + our work, you must acknowledge us. However, no fee need be paid + to us. + +3. Advertising +-------------- + + Neither the FreeType authors and contributors nor you shall use + the name of the other for commercial, advertising, or promotional + purposes without specific prior written permission. + + We suggest, but do not require, that you use one or more of the + following phrases to refer to this software in your documentation + or advertising materials: `FreeType Project', `FreeType Engine', + `FreeType library', or `FreeType Distribution'. + + As you have not signed this license, you are not required to + accept it. However, as the FreeType Project is copyrighted + material, only this license, or another one contracted with the + authors, grants you the right to use, distribute, and modify it. + Therefore, by using, distributing, or modifying the FreeType + Project, you indicate that you understand and accept all the terms + of this license. + +4. Contacts +----------- + + There are two mailing lists related to FreeType: + + o freetype@nongnu.org + + Discusses general use and applications of FreeType, as well as + future and wanted additions to the library and distribution. + If you are looking for support, start in this list if you + haven't found anything to help you in the documentation. + + o freetype-devel@nongnu.org + + Discusses bugs, as well as engine internals, design issues, + specific licenses, porting, etc. + + Our home page can be found at + + https://www.freetype.org + + +--- end of FTL.TXT --- + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc. + 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. 
This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Library General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. 
You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. 
You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. 
If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. 
Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Library General +Public License instead of this License. + +===== harfbuzz-8.5.0 ===== + +HarfBuzz is licensed under the so-called "Old MIT" license. Details follow. +For parts of HarfBuzz that are licensed under different licenses see individual +files names COPYING in subdirectories where applicable. + +Copyright © 2010-2022 Google, Inc. +Copyright © 2015-2020 Ebrahim Byagowi +Copyright © 2019,2020 Facebook, Inc. +Copyright © 2012,2015 Mozilla Foundation +Copyright © 2011 Codethink Limited +Copyright © 2008,2010 Nokia Corporation and/or its subsidiary(-ies) +Copyright © 2009 Keith Stribley +Copyright © 2011 Martin Hosken and SIL International +Copyright © 2007 Chris Wilson +Copyright © 2005,2006,2020,2021,2022,2023 Behdad Esfahbod +Copyright © 2004,2007,2008,2009,2010,2013,2021,2022,2023 Red Hat, Inc. +Copyright © 1998-2005 David Turner and Werner Lemberg +Copyright © 2016 Igalia S.L. +Copyright © 2022 Matthias Clasen +Copyright © 2018,2021 Khaled Hosny +Copyright © 2018,2019,2020 Adobe, Inc +Copyright © 2013-2015 Alexei Podtelezhnikov + +For full copyright notices consult the individual files in the package. + + +Permission is hereby granted, without written agreement and without +license or royalty fees, to use, copy, modify, and distribute this +software and its documentation for any purpose, provided that the +above copyright notice and the following two paragraphs appear in +all copies of this software. + +IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR +DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES +ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN +IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. + +THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS +ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO +PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + +===== lcms2-2.16 ===== + +MIT License + +Copyright (c) 2023 Marti Maria Saguer + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject +to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +===== libjpeg-turbo-3.0.3 ===== + +LEGAL ISSUES +============ + +In plain English: + +1. We don't promise that this software works. (But if you find any bugs, + please let us know!) +2. You can use this software for whatever you want. You don't have to pay us. +3. You may not pretend that you wrote this software. If you use it in a + program, you must acknowledge somewhere in your documentation that + you've used the IJG code. + +In legalese: + +The authors make NO WARRANTY or representation, either express or implied, +with respect to this software, its quality, accuracy, merchantability, or +fitness for a particular purpose. This software is provided "AS IS", and you, +its user, assume the entire risk as to its quality and accuracy. + +This software is copyright (C) 1991-2020, Thomas G. Lane, Guido Vollbeding. +All Rights Reserved except as specified below. + +Permission is hereby granted to use, copy, modify, and distribute this +software (or portions thereof) for any purpose, without fee, subject to these +conditions: +(1) If any part of the source code for this software is distributed, then this +README file must be included, with this copyright and no-warranty notice +unaltered; and any additions, deletions, or changes to the original files +must be clearly indicated in accompanying documentation. +(2) If only executable code is distributed, then the accompanying +documentation must state that "this software is based in part on the work of +the Independent JPEG Group". +(3) Permission for use of this software is granted only if the user accepts +full responsibility for any undesirable consequences; the authors accept +NO LIABILITY for damages of any kind. + +These conditions apply to any software derived from or based on the IJG code, +not just to the unmodified library. If you use our work, you ought to +acknowledge us. + +Permission is NOT granted for the use of any IJG author's name or company name +in advertising or publicity relating to this software or products derived from +it. This software may be referred to only as "the Independent JPEG Group's +software". + +We specifically permit and encourage the use of this software as the basis of +commercial products, provided that all warranty or liability claims are +assumed by the product vendor. + +libjpeg-turbo Licenses +====================== + +libjpeg-turbo is covered by two compatible BSD-style open source licenses: + +- The IJG (Independent JPEG Group) License, which is listed in + [README.ijg](README.ijg) + + This license applies to the libjpeg API library and associated programs, + including any code inherited from libjpeg and any modifications to that + code. Note that the libjpeg-turbo SIMD source code bears the + [zlib License](https://opensource.org/licenses/Zlib), but in the context of + the overall libjpeg API library, the terms of the zlib License are subsumed + by the terms of the IJG License. + +- The Modified (3-clause) BSD License, which is listed below + + This license applies to the TurboJPEG API library and associated programs, as + well as the build system. 
Note that the TurboJPEG API library wraps the + libjpeg API library, so in the context of the overall TurboJPEG API library, + both the terms of the IJG License and the terms of the Modified (3-clause) + BSD License apply. + + +Complying with the libjpeg-turbo Licenses +========================================= + +This section provides a roll-up of the libjpeg-turbo licensing terms, to the +best of our understanding. This is not a license in and of itself. It is +intended solely for clarification. + +1. If you are distributing a modified version of the libjpeg-turbo source, + then: + + 1. You cannot alter or remove any existing copyright or license notices + from the source. + + **Origin** + - Clause 1 of the IJG License + - Clause 1 of the Modified BSD License + - Clauses 1 and 3 of the zlib License + + 2. You must add your own copyright notice to the header of each source + file you modified, so others can tell that you modified that file. (If + there is not an existing copyright header in that file, then you can + simply add a notice stating that you modified the file.) + + **Origin** + - Clause 1 of the IJG License + - Clause 2 of the zlib License + + 3. You must include the IJG README file, and you must not alter any of the + copyright or license text in that file. + + **Origin** + - Clause 1 of the IJG License + +2. If you are distributing only libjpeg-turbo binaries without the source, or + if you are distributing an application that statically links with + libjpeg-turbo, then: + + 1. Your product documentation must include a message stating: + + This software is based in part on the work of the Independent JPEG + Group. + + **Origin** + - Clause 2 of the IJG license + + 2. If your binary distribution includes or uses the TurboJPEG API, then + your product documentation must include the text of the Modified BSD + License (see below.) + + **Origin** + - Clause 2 of the Modified BSD License + +3. You cannot use the name of the IJG or The libjpeg-turbo Project or the + contributors thereof in advertising, publicity, etc. + + **Origin** + - IJG License + - Clause 3 of the Modified BSD License + +4. The IJG and The libjpeg-turbo Project do not warrant libjpeg-turbo to be + free of defects, nor do we accept any liability for undesirable + consequences resulting from your use of the software. + + **Origin** + - IJG License + - Modified BSD License + - zlib License + + +The Modified (3-clause) BSD License +=================================== + +Copyright (C)2009-2023 D. R. Commander. All Rights Reserved.
+Copyright (C)2015 Viktor Szathmáry. All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +- Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +- Neither the name of the libjpeg-turbo Project nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS", +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + + +Why Two Licenses? +================= + +The zlib License could have been used instead of the Modified (3-clause) BSD +License, and since the IJG License effectively subsumes the distribution +conditions of the zlib License, this would have effectively placed +libjpeg-turbo binary distributions under the IJG License. However, the IJG +License specifically refers to the Independent JPEG Group and does not extend +attribution and endorsement protections to other entities. Thus, it was +desirable to choose a license that granted us the same protections for new code +that were granted to the IJG for code derived from their software. + +===== libwebp-1.4.0 ===== + +Copyright (c) 2010, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + * Neither the name of Google nor the names of its contributors may + be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +===== lpng1643 ===== + +COPYRIGHT NOTICE, DISCLAIMER, and LICENSE +========================================= + +PNG Reference Library License version 2 +--------------------------------------- + + * Copyright (c) 1995-2024 The PNG Reference Library Authors. + * Copyright (c) 2018-2024 Cosmin Truta. + * Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson. + * Copyright (c) 1996-1997 Andreas Dilger. + * Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc. + +The software is supplied "as is", without warranty of any kind, +express or implied, including, without limitation, the warranties +of merchantability, fitness for a particular purpose, title, and +non-infringement. In no event shall the Copyright owners, or +anyone distributing the software, be liable for any damages or +other liability, whether in contract, tort or otherwise, arising +from, out of, or in connection with the software, or the use or +other dealings in the software, even if advised of the possibility +of such damage. + +Permission is hereby granted to use, copy, modify, and distribute +this software, or portions hereof, for any purpose, without fee, +subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you + must not claim that you wrote the original software. If you + use this software in a product, an acknowledgment in the product + documentation would be appreciated, but is not required. + + 2. Altered source versions must be plainly marked as such, and must + not be misrepresented as being the original software. + + 3. This Copyright notice may not be removed or altered from any + source or altered source distribution. + + +PNG Reference Library License version 1 (for libpng 0.5 through 1.6.35) +----------------------------------------------------------------------- + +libpng versions 1.0.7, July 1, 2000, through 1.6.35, July 15, 2018 are +Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson, are +derived from libpng-1.0.6, and are distributed according to the same +disclaimer and license as libpng-1.0.6 with the following individuals +added to the list of Contributing Authors: + + Simon-Pierre Cadieux + Eric S. Raymond + Mans Rullgard + Cosmin Truta + Gilles Vollant + James Yu + Mandar Sahastrabuddhe + Google Inc. + Vadim Barkov + +and with the following additions to the disclaimer: + + There is no warranty against interference with your enjoyment of + the library or against infringement. There is no warranty that our + efforts or the library will fulfill any of your particular purposes + or needs. This library is provided with all faults, and the entire + risk of satisfactory quality, performance, accuracy, and effort is + with the user. + +Some files in the "contrib" directory and some configure-generated +files that are distributed with libpng have other copyright owners, and +are released under other open source licenses. 
+ +libpng versions 0.97, January 1998, through 1.0.6, March 20, 2000, are +Copyright (c) 1998-2000 Glenn Randers-Pehrson, are derived from +libpng-0.96, and are distributed according to the same disclaimer and +license as libpng-0.96, with the following individuals added to the +list of Contributing Authors: + + Tom Lane + Glenn Randers-Pehrson + Willem van Schaik + +libpng versions 0.89, June 1996, through 0.96, May 1997, are +Copyright (c) 1996-1997 Andreas Dilger, are derived from libpng-0.88, +and are distributed according to the same disclaimer and license as +libpng-0.88, with the following individuals added to the list of +Contributing Authors: + + John Bowler + Kevin Bracey + Sam Bushell + Magnus Holmgren + Greg Roelofs + Tom Tanner + +Some files in the "scripts" directory have other copyright owners, +but are released under this license. + +libpng versions 0.5, May 1995, through 0.88, January 1996, are +Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc. + +For the purposes of this copyright and license, "Contributing Authors" +is defined as the following set of individuals: + + Andreas Dilger + Dave Martindale + Guy Eric Schalnat + Paul Schmidt + Tim Wegner + +The PNG Reference Library is supplied "AS IS". The Contributing +Authors and Group 42, Inc. disclaim all warranties, expressed or +implied, including, without limitation, the warranties of +merchantability and of fitness for any purpose. The Contributing +Authors and Group 42, Inc. assume no liability for direct, indirect, +incidental, special, exemplary, or consequential damages, which may +result from the use of the PNG Reference Library, even if advised of +the possibility of such damage. + +Permission is hereby granted to use, copy, modify, and distribute this +source code, or portions hereof, for any purpose, without fee, subject +to the following restrictions: + + 1. The origin of this source code must not be misrepresented. + + 2. Altered versions must be plainly marked as such and must not + be misrepresented as being the original source. + + 3. This Copyright notice may not be removed or altered from any + source or altered source distribution. + +The Contributing Authors and Group 42, Inc. specifically permit, +without fee, and encourage the use of this source code as a component +to supporting the PNG file format in commercial products. If you use +this source code in a product, acknowledgment is not required but would +be appreciated. + +===== openjpeg-2.5.2 ===== + +/* + * The copyright in this software is being made available under the 2-clauses + * BSD License, included below. This software may be subject to other third + * party and contributor rights, including patent rights, and no such rights + * are granted under this license. + * + * Copyright (c) 2002-2014, Universite catholique de Louvain (UCL), Belgium + * Copyright (c) 2002-2014, Professor Benoit Macq + * Copyright (c) 2003-2014, Antonin Descampe + * Copyright (c) 2003-2009, Francois-Olivier Devaux + * Copyright (c) 2005, Herve Drolon, FreeImage Team + * Copyright (c) 2002-2003, Yannick Verschueren + * Copyright (c) 2001-2003, David Janssens + * Copyright (c) 2011-2012, Centre National d'Etudes Spatiales (CNES), France + * Copyright (c) 2012, CS Systemes d'Information, France + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. 
Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS `AS IS' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +===== tiff-4.6.0 ===== + +# LibTIFF license + +Copyright © 1988-1997 Sam Leffler\ +Copyright © 1991-1997 Silicon Graphics, Inc. + +Permission to use, copy, modify, distribute, and sell this software and +its documentation for any purpose is hereby granted without fee, provided +that (i) the above copyright notices and this permission notice appear in +all copies of the software and related documentation, and (ii) the names of +Sam Leffler and Silicon Graphics may not be used in any advertising or +publicity relating to the software without the specific, prior written +permission of Sam Leffler and Silicon Graphics. + +THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, +EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY +WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. + +IN NO EVENT SHALL SAM LEFFLER OR SILICON GRAPHICS BE LIABLE FOR +ANY SPECIAL, INCIDENTAL, INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER OR NOT ADVISED OF THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF +LIABILITY, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +OF THIS SOFTWARE. + +===== xz-5.4.5 ===== + + +XZ Utils Licensing +================== + + Different licenses apply to different files in this package. Here + is a rough summary of which licenses apply to which parts of this + package (but check the individual files to be sure!): + + - liblzma is in the public domain. + + - xz, xzdec, and lzmadec command line tools are in the public + domain unless GNU getopt_long had to be compiled and linked + in from the lib directory. The getopt_long code is under + GNU LGPLv2.1+. + + - The scripts to grep, diff, and view compressed files have been + adapted from gzip. These scripts and their documentation are + under GNU GPLv2+. + + - All the documentation in the doc directory and most of the + XZ Utils specific documentation files in other directories + are in the public domain. + + Note: The JavaScript files (under the MIT license) have + been removed from the Doxygen-generated HTML version of the + liblzma API documentation. 
Doxygen itself is under the GNU GPL + but the remaining files generated by Doxygen are not affected + by the licenses used in Doxygen because Doxygen licensing has + the following exception: + + "Documents produced by doxygen are derivative works + derived from the input used in their production; + they are not affected by this license." + + - Translated messages are in the public domain. + + - The build system contains public domain files, and files that + are under GNU GPLv2+ or GNU GPLv3+. None of these files end up + in the binaries being built. + + - Test files and test code in the tests directory, and debugging + utilities in the debug directory are in the public domain. + + - The extra directory may contain public domain files, and files + that are under various free software licenses. + + You can do whatever you want with the files that have been put into + the public domain. If you find public domain legally problematic, + take the previous sentence as a license grant. If you still find + the lack of copyright legally problematic, you have too many + lawyers. + + As usual, this software is provided "as is", without any warranty. + + If you copy significant amounts of public domain code from XZ Utils + into your project, acknowledging this somewhere in your software is + polite (especially if it is proprietary, non-free software), but + naturally it is not legally required. Here is an example of a good + notice to put into "about box" or into documentation: + + This software includes code from XZ Utils . + + The following license texts are included in the following files: + - COPYING.LGPLv2.1: GNU Lesser General Public License version 2.1 + - COPYING.GPLv2: GNU General Public License version 2 + - COPYING.GPLv3: GNU General Public License version 3 + + Note that the toolchain (compiler, linker etc.) may add some code + pieces that are copyrighted. Thus, it is possible that e.g. liblzma + binary wouldn't actually be in the public domain in its entirety + even though it contains no copyrighted code from the XZ Utils source + package. + + If you have questions, don't hesitate to ask the author(s) for more + information. + + +===== zlib-1.3.1 ===== + + (C) 1995-2024 Jean-loup Gailly and Mark Adler + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. + + Jean-loup Gailly Mark Adler + jloup@gzip.org madler@alumni.caltech.edu + +If you use the zlib library in a product, we would appreciate *not* receiving +lengthy legal documents to sign. The sources are provided for free but without +warranty of any kind. The library has been entirely written by Jean-loup +Gailly and Mark Adler; it does not include third-party code. 
We make all +contributions to and distributions of this project solely in our personal +capacity, and are not conveying any rights to any intellectual property of +any third parties. + +If you redistribute modified sources, we would appreciate that you include in +the file ChangeLog history information documenting your changes. Please read +the FAQ for more information on the distribution of modified source versions. diff --git a/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/METADATA b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..9a09fb34161a18b61f80e009942e7dd9fcca8011 --- /dev/null +++ b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/METADATA @@ -0,0 +1,175 @@ +Metadata-Version: 2.1 +Name: pillow +Version: 10.4.0 +Summary: Python Imaging Library (Fork) +Author-email: "Jeffrey A. Clark" +License: HPND +Project-URL: Changelog, https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst +Project-URL: Documentation, https://pillow.readthedocs.io +Project-URL: Funding, https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=pypi +Project-URL: Homepage, https://python-pillow.org +Project-URL: Mastodon, https://fosstodon.org/@pillow +Project-URL: Release notes, https://pillow.readthedocs.io/en/stable/releasenotes/index.html +Project-URL: Source, https://github.com/python-pillow/Pillow +Keywords: Imaging +Classifier: Development Status :: 6 - Mature +Classifier: License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND) +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Multimedia :: Graphics +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Digital Camera +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Screen Capture +Classifier: Topic :: Multimedia :: Graphics :: Graphics Conversion +Classifier: Topic :: Multimedia :: Graphics :: Viewers +Classifier: Typing :: Typed +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: docs +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: olefile ; extra == 'docs' +Requires-Dist: sphinx >=7.3 ; extra == 'docs' +Requires-Dist: sphinx-copybutton ; extra == 'docs' +Requires-Dist: sphinx-inline-tabs ; extra == 'docs' +Requires-Dist: sphinxext-opengraph ; extra == 'docs' +Provides-Extra: fpx +Requires-Dist: olefile ; extra == 'fpx' +Provides-Extra: mic +Requires-Dist: olefile ; extra == 'mic' +Provides-Extra: tests +Requires-Dist: check-manifest ; extra == 'tests' +Requires-Dist: coverage ; extra == 'tests' +Requires-Dist: defusedxml ; extra == 'tests' +Requires-Dist: markdown2 ; extra == 'tests' +Requires-Dist: olefile ; extra == 'tests' +Requires-Dist: packaging ; extra == 'tests' +Requires-Dist: pyroma ; extra == 'tests' +Requires-Dist: pytest ; extra == 'tests' +Requires-Dist: pytest-cov ; extra == 'tests' +Requires-Dist: pytest-timeout ; extra == 'tests' +Provides-Extra: typing +Requires-Dist: typing-extensions ; (python_version < "3.10") and extra == 'typing' +Provides-Extra: xmp +Requires-Dist: defusedxml ; extra == 'xmp' + +

+ Pillow logo +

+ +# Pillow + +## Python Imaging Library (Fork) + +Pillow is the friendly PIL fork by [Jeffrey A. Clark and +contributors](https://github.com/python-pillow/Pillow/graphs/contributors). +PIL is the Python Imaging Library by Fredrik Lundh and contributors. +As of 2019, Pillow development is +[supported by Tidelift](https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=readme&utm_campaign=enterprise). + + + + + + + + + + + + + + + + + + +
+ [Badge table: docs: Documentation Status; tests: GitHub Actions build status (Lint, Test Linux and macOS, Test Windows, Test MinGW, Test Cygwin, Test Docker, Wheels), AppVeyor CI build status (Windows), Code coverage, Fuzzing Status; package: Zenodo, Tidelift, Newest PyPI version, Number of PyPI downloads, OpenSSF Best Practices; social: Join the chat at https://gitter.im/python-pillow/Pillow, Follow on https://fosstodon.org/@pillow]
+ +## Overview + +The Python Imaging Library adds image processing capabilities to your Python interpreter. + +This library provides extensive file format support, an efficient internal representation, and fairly powerful image processing capabilities. + +The core image library is designed for fast access to data stored in a few basic pixel formats. It should provide a solid foundation for a general image processing tool. + +## More Information + +- [Documentation](https://pillow.readthedocs.io/) + - [Installation](https://pillow.readthedocs.io/en/latest/installation/basic-installation.html) + - [Handbook](https://pillow.readthedocs.io/en/latest/handbook/index.html) +- [Contribute](https://github.com/python-pillow/Pillow/blob/main/.github/CONTRIBUTING.md) + - [Issues](https://github.com/python-pillow/Pillow/issues) + - [Pull requests](https://github.com/python-pillow/Pillow/pulls) +- [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html) +- [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) + - [Pre-fork](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst#pre-fork) + +## Report a Vulnerability + +To report a security vulnerability, please follow the procedure described in the [Tidelift security policy](https://tidelift.com/docs/security). diff --git a/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/RECORD b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..29ccdac3c80c126794ca22ea8cf77ca506fda2f8 --- /dev/null +++ b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/RECORD @@ -0,0 +1,213 @@ +PIL/BdfFontFile.py,sha256=JJLBb0JZwTmSIIkqQoe2vzus-XTczN_O47DQneXKM1o,3610 +PIL/BlpImagePlugin.py,sha256=n7Eghktvwb6WVWouvrLMGzB66PiP6aPZaKwOzx_5ux0,16854 +PIL/BmpImagePlugin.py,sha256=CbsPOXskQNx7ZoIlloRNntrmBo0UkYkwlWrvwEhCSks,19054 +PIL/BufrStubImagePlugin.py,sha256=sY28XJU_Fu-UsbPpAoN-fN63FemmhCMi8rW5Kf9JioE,1829 +PIL/ContainerIO.py,sha256=BTz6Qlz-VyDmurXnWpQU-lAevLxgcsOcEGZP0CtvSKc,3302 +PIL/CurImagePlugin.py,sha256=3e6_djFaRvGO2PMP_E0HwbHx4SjqTlmlvPraNUFeLkQ,1839 +PIL/DcxImagePlugin.py,sha256=iaVs9updbtEstQKPLKKIlJVhfxFarbgCPoO8j96BmDA,2114 +PIL/DdsImagePlugin.py,sha256=dwlTfJcpUA9NAcWW6WoRjsexu_5xFTDYJhYuQnSLUJ4,17489 +PIL/EpsImagePlugin.py,sha256=I7DstQ6ZijJMglp6qEi1xljwY2bY4MkwWPWs0WPWM4Y,16614 +PIL/ExifTags.py,sha256=LA3OxhNImajSkIRnCMXDTJw4MprMEeFq_Sqf-sjn20w,10134 +PIL/FitsImagePlugin.py,sha256=ngEk16Ljz2K1f-Jz-KConyALGxVzFcVTEpWW1-VjVgw,4745 +PIL/FliImagePlugin.py,sha256=l8awoi3gN9gWsBonvjwDY-StXLSGnN6h5ZZg1S09Pjs,4785 +PIL/FontFile.py,sha256=iLSV32yQetLnE4SgG8HnHb2FdqkqFBjY9n--E6u5UE0,3711 +PIL/FpxImagePlugin.py,sha256=uecjqTuKyZHle3cg2VYpCthkIlfzVFnvDKBI3T6EdrA,7315 +PIL/FtexImagePlugin.py,sha256=KQwb4dvvnBxE4WOiuAjdougODgm67AsbYGWapChmDwU,3617 +PIL/GbrImagePlugin.py,sha256=KbLlo2oVhIbkzP7zQW0vARTE6aMjX6DpJEkghUny6Gk,3071 +PIL/GdImageFile.py,sha256=5SZA0952NckwJYsproC-ykj_UfVUjiGxNiohzRYf2fE,2897 +PIL/GifImagePlugin.py,sha256=eubRq9j9MVgEbJt0A4ARRBdIIoFjAkh5dheNJ0J27IA,40904 +PIL/GimpGradientFile.py,sha256=AFEEGWtylUg7tIYK0MgBC4hZHq3WxSzIvdq_MAEAUq8,4047 +PIL/GimpPaletteFile.py,sha256=EmKLnuvsHx0GLhWh8YnfidiTEhUm51-ZNKgQbAX1zcU,1485 +PIL/GribStubImagePlugin.py,sha256=Vf_VvZltyP3QqTuz-gUfCT6I2g3F5Rh8BYMGjxwpAoM,1823 +PIL/Hdf5StubImagePlugin.py,sha256=70vlB50QgPYYH2b8kE8U_QN5Q8TlmjmN8vk0FrhLkJ4,1826 +PIL/IcnsImagePlugin.py,sha256=U8sXlLVueqXHOVfzKS9ELsIZeH0cE5miFKMjAagK5fI,12405 +PIL/IcoImagePlugin.py,sha256=4OLIfifidVCQbA7VJpxH-VD-Ar0mzhIj8bFYP5rRO0o,12147 
+PIL/ImImagePlugin.py,sha256=uI_xoDFxGetvSkcgER5IHtmebcQcQXV_1wRu4ztOS-A,11494 +PIL/Image.py,sha256=_J1nF9SELAzg188weTM0ozhnHj2WOPHJqXpyu56w5Ks,146669 +PIL/ImageChops.py,sha256=hZ8EPUPlQIzugsEedV8trkKX0jBCDGb6Cszma6ZeMZQ,8257 +PIL/ImageCms.py,sha256=W4juhbdt9eV_Zvf8WNO0oede2iCvUPZMtuFYy4cx9lA,43151 +PIL/ImageColor.py,sha256=KV-u7HnZWrrL3zuBAOLqerI-7vFcXTxdLeoaYVjsnwI,9761 +PIL/ImageDraw.py,sha256=QESkqodXPEg2Ao5-ZabcMkXLButss7h671RQPHV32ME,42528 +PIL/ImageDraw2.py,sha256=aRmS6VPTFa4vpnHMLb3oOowjYt1W8ij81x5HOi5kotE,6229 +PIL/ImageEnhance.py,sha256=xwj3y_MBWUJICjYc_l_8uIe7ZEtqUDyl9kQ0HQSQMTQ,3490 +PIL/ImageFile.py,sha256=HiDum7mPIxlp_bB80wIw8mhYbmatDhMQYysI44rHrNQ,25847 +PIL/ImageFilter.py,sha256=b-4kwg9GDMep01eRgwLlkWehHDioRHB-V3-pzpOZJ7g,19274 +PIL/ImageFont.py,sha256=5TWH1kzlMp0Ru5hsIEZQXoMOHLyQvvkQD_cFPwObQHc,63357 +PIL/ImageGrab.py,sha256=CJP_aZNA1mXU5dI77eElm4_Au198Uf7yVZ7Xw0BJ53s,6552 +PIL/ImageMath.py,sha256=Ib655EKVOMeVJ2x_yVt0xaM2D9kfpCv2dX9CsCwizCM,11835 +PIL/ImageMode.py,sha256=n4-2kSolyB7v2u6dXyIf3_vDL_LMvSNwhJvd9Do8cc8,2773 +PIL/ImageMorph.py,sha256=5hHeZAmow6sFHRQ9wocxVcS4CmYcOCl5KUkPYxhOb9g,8820 +PIL/ImageOps.py,sha256=RbcKwMwRLAv_UBwkmYfHRyv5aEKlcbTGsWMqcCgM0ek,25740 +PIL/ImagePalette.py,sha256=jpyVO1j7nty9pUiB2f3i0Ds6U9ED6rwTuGWwaYAQRzk,9254 +PIL/ImagePath.py,sha256=ZnnJuvQNtbKRhCmr61TEmEh1vVV5_90WMEPL8Opy5l8,391 +PIL/ImageQt.py,sha256=tAl3NtwgTofzQnUc6F9TUQRD2u2HQOeRDqG-PadVzMg,6111 +PIL/ImageSequence.py,sha256=jyVU7FmpmrvkBASsZIN-fI21g9DUcCzmSmj0FxS4Fng,2278 +PIL/ImageShow.py,sha256=m-XAcuWPFoz8Dv_JfaqrI5ZEvNIxMbDKa4Po5PcAZ9A,10391 +PIL/ImageStat.py,sha256=iA5KJrQeEpbwk-FhczGD8L4TaLWUt4VT_mp4drvMhv8,5485 +PIL/ImageTk.py,sha256=rRGvldzlOvqgiseGvMH-jgiMmY5wri4SJzkvXOb9uJ4,8893 +PIL/ImageTransform.py,sha256=xvHSE9-TtXtm_MilxVVf4dvB2-r7fJHhUFMJHFIs2PY,3994 +PIL/ImageWin.py,sha256=jz_6kBZWKyJsUhYKvdMsQUsCn7ZJZPIYqaZOBE3mD1E,7761 +PIL/ImtImagePlugin.py,sha256=TFLgRU0ko5gAxrsKUqGkK_Y8o233OVKLNJKE4IeOac8,2761 +PIL/IptcImagePlugin.py,sha256=I2H6QXywjOeGpDo-LOEgJ7vYhhjbGEJgFjN6bcY0sSU,6370 +PIL/Jpeg2KImagePlugin.py,sha256=P-2gvXFhjlDlKaWul8MXvQgQ19XzyeFHVh1fR_MgeLM,12809 +PIL/JpegImagePlugin.py,sha256=5wcd77uSne-bEFHqPW6psgXnKw7spmR3gjZh_LYdvbI,30720 +PIL/JpegPresets.py,sha256=0XoRcIdU_U1szfxfQEt_YgarmdB1INfTpbEgCanBbns,12664 +PIL/McIdasImagePlugin.py,sha256=KNmMyMzyaBz_pjUhtxi0Fjtj6MdjzrT-b1P_LgC10gg,1979 +PIL/MicImagePlugin.py,sha256=1mCwO0p5CJi_CXl3jFumdMxwd6vL11y0ckGjLayqPH8,2774 +PIL/MpegImagePlugin.py,sha256=SR-JGne4xNIrHTc1vKdEbuW61oI-TIar2oIP-MeRuiI,2188 +PIL/MpoImagePlugin.py,sha256=8jfsGFWqSx8D_ao9tPavMi45EWN7r080WnlZzyP7_2M,6027 +PIL/MspImagePlugin.py,sha256=widjELAylDoYtS2Dc-JPePA7AiBo0uakagOptWkPD20,6036 +PIL/PSDraw.py,sha256=L7W05HCqBKtZ271aEIPUheBzf0ZWcDgsXCvJUSMw3zc,7220 +PIL/PaletteFile.py,sha256=lNPfuBTHraW6i1v1b9thNoFyIG2MRMMzFHxVTaxcwj8,1265 +PIL/PalmImagePlugin.py,sha256=pxIMXUSRmuKUfbseT1K5dv7IqZTg_z5bphqC5PayF8E,9513 +PIL/PcdImagePlugin.py,sha256=F_8URHMLJxGt_MhONZzako6F3qYcC6RkUZrKgr0FjTM,1689 +PIL/PcfFontFile.py,sha256=RkM5wUp3SgRpQhpsTBEtk8uuFrQPnBSYBryOmcoRphQ,7401 +PIL/PcxImagePlugin.py,sha256=Rq_7JaTH7MqWfC8ZxBExYAPCAeJwOaZwh7Iarg1TGfM,6444 +PIL/PdfImagePlugin.py,sha256=Rk1HmJE0I2exdOuhK8nBe5CXaLkmllAeSnjajdXUq5c,9220 +PIL/PdfParser.py,sha256=89DQp0TI7XA3EXw0mWrJADD_9ySWb8yldRRWN4i2SwY,35872 +PIL/PixarImagePlugin.py,sha256=90zIgbzb8-bACCrJtQD2ubQmp_x5jGBOoWpjsS7Y038,1818 +PIL/PngImagePlugin.py,sha256=4NibeAP8zvqFQzPvNsP6stiHUXo5VnIFq5I0LRt6MCQ,50055 +PIL/PpmImagePlugin.py,sha256=bOOvSHAZHQ35WBSEYOW7YBFuIAkJuK2l5k5Geon_nF4,12587 
+PIL/PsdImagePlugin.py,sha256=bfF5u2gW4shYgSmIhAx8Qyn03YaYOY8KoQtFtSeEWA0,8615 +PIL/PyAccess.py,sha256=vowudnRMgj8F9JvwHUsRzDV_szOtNommpH6SFH_bbzE,11224 +PIL/QoiImagePlugin.py,sha256=vm98aN1PTbNv192gN6onYmbgSgT5f0V_AY4fr3VLySw,4261 +PIL/SgiImagePlugin.py,sha256=RU265Sg7bEZN-TmU0M5E6ODfc4twjORx9oAD2fDsDQI,6683 +PIL/SpiderImagePlugin.py,sha256=aCERP7JRSeUDuzLfm9jnhxVNKEyehM_p-1xSLZLDG5o,10379 +PIL/SunImagePlugin.py,sha256=JT8IrJC7JIDBgdfIBzC-HSzxnoD4-W1aybpBDW0L0aU,4640 +PIL/TarIO.py,sha256=LzQZ1ZRCVxXQREFRodRn9J_d25zxnSklBKdVikjzMGw,1651 +PIL/TgaImagePlugin.py,sha256=Y48o5LSAAnNqthiC8nIBvOxyhPRemQfSZHKJMfiowT0,7204 +PIL/TiffImagePlugin.py,sha256=w1drQBgBaCo3ElhzWesV9TXrZHw9x8KiuKq6XE4d5hA,80428 +PIL/TiffTags.py,sha256=3sFYZ3VTzJcnCguPr_AIPYm7kT_sSdwqq5h10EKy0Ow,17270 +PIL/WalImageFile.py,sha256=emYeZjisXBBE_RhK-hh6XVuC9N94pua3mwVXKwQ3rYw,5687 +PIL/WebPImagePlugin.py,sha256=uHUch0U7vVAuCB9QSCHkPc6fZfdZRVdKe1vd8nbIsYY,11891 +PIL/WmfImagePlugin.py,sha256=Lj7genL51Fu3YXcSce0xmaWNiKwzB3NUsCa-iMrvs0Q,5164 +PIL/XVThumbImagePlugin.py,sha256=nlqdy2bGDbqG9Sl-62YQcfN0x8JAUgUQDo5a3CrNPQU,2162 +PIL/XbmImagePlugin.py,sha256=g7Q5A_2yV4U293ygm_1SoIbYHnie3vajkMbqHrPgR6A,2747 +PIL/XpmImagePlugin.py,sha256=1o0NGFfUTTvR7m8qDx1MAPeL22sJceEo5cdglBJzQYc,3344 +PIL/__init__.py,sha256=98abxVfn8od1jJaTIr65YrYrIb7zMKbOJ5o68ryE2O0,2094 +PIL/__main__.py,sha256=X8eIpGlmHfnp7zazp5mdav228Itcf2lkiMP0tLU6X9c,140 +PIL/__pycache__/BdfFontFile.cpython-39.pyc,, +PIL/__pycache__/BlpImagePlugin.cpython-39.pyc,, +PIL/__pycache__/BmpImagePlugin.cpython-39.pyc,, +PIL/__pycache__/BufrStubImagePlugin.cpython-39.pyc,, +PIL/__pycache__/ContainerIO.cpython-39.pyc,, +PIL/__pycache__/CurImagePlugin.cpython-39.pyc,, +PIL/__pycache__/DcxImagePlugin.cpython-39.pyc,, +PIL/__pycache__/DdsImagePlugin.cpython-39.pyc,, +PIL/__pycache__/EpsImagePlugin.cpython-39.pyc,, +PIL/__pycache__/ExifTags.cpython-39.pyc,, +PIL/__pycache__/FitsImagePlugin.cpython-39.pyc,, +PIL/__pycache__/FliImagePlugin.cpython-39.pyc,, +PIL/__pycache__/FontFile.cpython-39.pyc,, +PIL/__pycache__/FpxImagePlugin.cpython-39.pyc,, +PIL/__pycache__/FtexImagePlugin.cpython-39.pyc,, +PIL/__pycache__/GbrImagePlugin.cpython-39.pyc,, +PIL/__pycache__/GdImageFile.cpython-39.pyc,, +PIL/__pycache__/GifImagePlugin.cpython-39.pyc,, +PIL/__pycache__/GimpGradientFile.cpython-39.pyc,, +PIL/__pycache__/GimpPaletteFile.cpython-39.pyc,, +PIL/__pycache__/GribStubImagePlugin.cpython-39.pyc,, +PIL/__pycache__/Hdf5StubImagePlugin.cpython-39.pyc,, +PIL/__pycache__/IcnsImagePlugin.cpython-39.pyc,, +PIL/__pycache__/IcoImagePlugin.cpython-39.pyc,, +PIL/__pycache__/ImImagePlugin.cpython-39.pyc,, +PIL/__pycache__/Image.cpython-39.pyc,, +PIL/__pycache__/ImageChops.cpython-39.pyc,, +PIL/__pycache__/ImageCms.cpython-39.pyc,, +PIL/__pycache__/ImageColor.cpython-39.pyc,, +PIL/__pycache__/ImageDraw.cpython-39.pyc,, +PIL/__pycache__/ImageDraw2.cpython-39.pyc,, +PIL/__pycache__/ImageEnhance.cpython-39.pyc,, +PIL/__pycache__/ImageFile.cpython-39.pyc,, +PIL/__pycache__/ImageFilter.cpython-39.pyc,, +PIL/__pycache__/ImageFont.cpython-39.pyc,, +PIL/__pycache__/ImageGrab.cpython-39.pyc,, +PIL/__pycache__/ImageMath.cpython-39.pyc,, +PIL/__pycache__/ImageMode.cpython-39.pyc,, +PIL/__pycache__/ImageMorph.cpython-39.pyc,, +PIL/__pycache__/ImageOps.cpython-39.pyc,, +PIL/__pycache__/ImagePalette.cpython-39.pyc,, +PIL/__pycache__/ImagePath.cpython-39.pyc,, +PIL/__pycache__/ImageQt.cpython-39.pyc,, +PIL/__pycache__/ImageSequence.cpython-39.pyc,, +PIL/__pycache__/ImageShow.cpython-39.pyc,, 
+PIL/__pycache__/ImageStat.cpython-39.pyc,, +PIL/__pycache__/ImageTk.cpython-39.pyc,, +PIL/__pycache__/ImageTransform.cpython-39.pyc,, +PIL/__pycache__/ImageWin.cpython-39.pyc,, +PIL/__pycache__/ImtImagePlugin.cpython-39.pyc,, +PIL/__pycache__/IptcImagePlugin.cpython-39.pyc,, +PIL/__pycache__/Jpeg2KImagePlugin.cpython-39.pyc,, +PIL/__pycache__/JpegImagePlugin.cpython-39.pyc,, +PIL/__pycache__/JpegPresets.cpython-39.pyc,, +PIL/__pycache__/McIdasImagePlugin.cpython-39.pyc,, +PIL/__pycache__/MicImagePlugin.cpython-39.pyc,, +PIL/__pycache__/MpegImagePlugin.cpython-39.pyc,, +PIL/__pycache__/MpoImagePlugin.cpython-39.pyc,, +PIL/__pycache__/MspImagePlugin.cpython-39.pyc,, +PIL/__pycache__/PSDraw.cpython-39.pyc,, +PIL/__pycache__/PaletteFile.cpython-39.pyc,, +PIL/__pycache__/PalmImagePlugin.cpython-39.pyc,, +PIL/__pycache__/PcdImagePlugin.cpython-39.pyc,, +PIL/__pycache__/PcfFontFile.cpython-39.pyc,, +PIL/__pycache__/PcxImagePlugin.cpython-39.pyc,, +PIL/__pycache__/PdfImagePlugin.cpython-39.pyc,, +PIL/__pycache__/PdfParser.cpython-39.pyc,, +PIL/__pycache__/PixarImagePlugin.cpython-39.pyc,, +PIL/__pycache__/PngImagePlugin.cpython-39.pyc,, +PIL/__pycache__/PpmImagePlugin.cpython-39.pyc,, +PIL/__pycache__/PsdImagePlugin.cpython-39.pyc,, +PIL/__pycache__/PyAccess.cpython-39.pyc,, +PIL/__pycache__/QoiImagePlugin.cpython-39.pyc,, +PIL/__pycache__/SgiImagePlugin.cpython-39.pyc,, +PIL/__pycache__/SpiderImagePlugin.cpython-39.pyc,, +PIL/__pycache__/SunImagePlugin.cpython-39.pyc,, +PIL/__pycache__/TarIO.cpython-39.pyc,, +PIL/__pycache__/TgaImagePlugin.cpython-39.pyc,, +PIL/__pycache__/TiffImagePlugin.cpython-39.pyc,, +PIL/__pycache__/TiffTags.cpython-39.pyc,, +PIL/__pycache__/WalImageFile.cpython-39.pyc,, +PIL/__pycache__/WebPImagePlugin.cpython-39.pyc,, +PIL/__pycache__/WmfImagePlugin.cpython-39.pyc,, +PIL/__pycache__/XVThumbImagePlugin.cpython-39.pyc,, +PIL/__pycache__/XbmImagePlugin.cpython-39.pyc,, +PIL/__pycache__/XpmImagePlugin.cpython-39.pyc,, +PIL/__pycache__/__init__.cpython-39.pyc,, +PIL/__pycache__/__main__.cpython-39.pyc,, +PIL/__pycache__/_binary.cpython-39.pyc,, +PIL/__pycache__/_deprecate.cpython-39.pyc,, +PIL/__pycache__/_tkinter_finder.cpython-39.pyc,, +PIL/__pycache__/_typing.cpython-39.pyc,, +PIL/__pycache__/_util.cpython-39.pyc,, +PIL/__pycache__/_version.cpython-39.pyc,, +PIL/__pycache__/features.cpython-39.pyc,, +PIL/__pycache__/report.cpython-39.pyc,, +PIL/_binary.py,sha256=cb9p-_mwzBYumlVsWbnoTWsrLo59towA6atLOZvjO3w,2662 +PIL/_deprecate.py,sha256=5WrrZE3Q65nRF3pwwRN9wsmY4lqFOJayT6Uxt-i9tf0,2071 +PIL/_imaging.cp39-win_amd64.pyd,sha256=0_9zIhbMzvcjFqtOcfnUrKKqOkoUsVeofKqoA2muxdE,2341888 +PIL/_imaging.pyi,sha256=wjkxIX-PkUYnjQCdcvR3wcrZG9KEUL5m_L33Zw4W79A,846 +PIL/_imagingcms.cp39-win_amd64.pyd,sha256=wxcBOMariH-ivX9uUSxwxxBJG9njrCRh_PDnqpamhzw,263168 +PIL/_imagingcms.pyi,sha256=-1QgyLqhW56OLsu8Kgn9wc8IifLldpOuCRILf8SBfsE,4480 +PIL/_imagingft.cp39-win_amd64.pyd,sha256=wtOFPUKZ2jg6LM2ELX1PqoJ0apJNnVJy6NUeiIC8xQk,1819136 +PIL/_imagingft.pyi,sha256=SpEugAoNqOCdmR-bAghPf0AWfBpMfziUnXkJ65jY4dc,1748 +PIL/_imagingmath.cp39-win_amd64.pyd,sha256=w_HXdNYr8JN08whIyhd3yqJRqW5pz09doaN7MYSBLI0,24064 +PIL/_imagingmath.pyi,sha256=zD8vAoPC8aEIVjfckLtFskRW5saiVel3-sJUA2pHaGc,66 +PIL/_imagingmorph.cp39-win_amd64.pyd,sha256=E54m6ZRU5cZSMUef-gFwRsaRxI2DwwWf_zMfQP-OVms,13824 +PIL/_imagingmorph.pyi,sha256=zD8vAoPC8aEIVjfckLtFskRW5saiVel3-sJUA2pHaGc,66 +PIL/_imagingtk.cp39-win_amd64.pyd,sha256=ahiwP_A7AKX2DtHKH0lG8ooOpRiddDAUPc4F068O5h0,14848 
+PIL/_tkinter_finder.py,sha256=jKydPAxnrytggsZQHB6kAQep6A9kzRNyx_nToT4ClKY,561 +PIL/_typing.py,sha256=ZEXNlEU-TV_Dl1RPO7Nx74CQQbrI6BGP-cDnRKWIrRQ,890 +PIL/_util.py,sha256=ifUUlojtqTnWOxQFrwNCpqO1gjzkFIWovj7uBnq6DrY,844 +PIL/_version.py,sha256=h4GziQDuM-Mtj--2-J-vf3al0vVtouQhTpAQOKexxWA,91 +PIL/_webp.cp39-win_amd64.pyd,sha256=XGX78PKz5rw3lRvUnIRW_dk5QujeeU6bnFUaKF8p8jc,412672 +PIL/_webp.pyi,sha256=zD8vAoPC8aEIVjfckLtFskRW5saiVel3-sJUA2pHaGc,66 +PIL/features.py,sha256=UD4iRB2Hs4mR0odVOLJ1aLz_5YbGsBkL01-RO3oyrDk,10853 +PIL/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +PIL/report.py,sha256=6m7NOv1a24577ZiJoxX89ip5JeOgf2O1F95f6-1K5aM,105 +pillow-10.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pillow-10.4.0.dist-info/LICENSE,sha256=PFOBLi0mO1BbpKT2CpPZY3wmU-ISYDhlFccrXMtYqJ4,56937 +pillow-10.4.0.dist-info/METADATA,sha256=EfTKW2FZO1afRRUC0aehHiVIUjsDtNAOIqNbPn_0Bac,9342 +pillow-10.4.0.dist-info/RECORD,, +pillow-10.4.0.dist-info/WHEEL,sha256=wiZIM0rcrSicmQ0ERKSRO5jHaj2LT1egLSf9yJsV0fE,99 +pillow-10.4.0.dist-info/top_level.txt,sha256=riZqrk-hyZqh5f1Z0Zwii3dKfxEsByhu9cU9IODF-NY,4 +pillow-10.4.0.dist-info/zip-safe,sha256=frcCV1k9oG9oKj3dpUqdJg1PxRT2RSN_XKdLCPjaYaY,2 diff --git a/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/WHEEL b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..cf0a8cc06bc104dcfe11e8cb14e072fa64021bb3 --- /dev/null +++ b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (70.1.1) +Root-Is-Purelib: false +Tag: cp39-cp39-win_amd64 + diff --git a/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/top_level.txt b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..b338169ce0c740c335bfe82912227ae8637bd492 --- /dev/null +++ b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +PIL diff --git a/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/zip-safe b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/zip-safe new file mode 100644 index 0000000000000000000000000000000000000000..d3f5a12faa99758192ecc4ed3fc22c9249232e86 --- /dev/null +++ b/MLPY/Lib/site-packages/pillow-10.4.0.dist-info/zip-safe @@ -0,0 +1 @@ +
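
Note on the RECORD manifest added above: each row follows the standard wheel convention `path,sha256=<urlsafe base64 digest with '=' padding stripped>,size-in-bytes`, and the hash and size fields are left empty for files generated after install (the `*.pyc` caches and RECORD itself). The sketch below is illustrative only and not part of this patch; it assumes the script is run from the repository root so the `MLPY/Lib/site-packages` path shown in the tree resolves, and it uses only the standard library to re-hash each installed file and compare it against its RECORD entry.

    # Illustrative sketch (not part of the patch): verify installed files against
    # the pillow-10.4.0 RECORD manifest. Assumes the MLPY/Lib/site-packages layout
    # from the tree above and that this is run from the repository root.
    import base64
    import csv
    import hashlib
    from pathlib import Path

    SITE_PACKAGES = Path("MLPY/Lib/site-packages")
    RECORD = SITE_PACKAGES / "pillow-10.4.0.dist-info" / "RECORD"

    def record_digest(path: Path) -> str:
        # RECORD stores sha256 digests as urlsafe base64 with '=' padding stripped.
        digest = hashlib.sha256(path.read_bytes()).digest()
        return base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

    with RECORD.open(newline="") as fh:
        for name, hash_spec, _size in csv.reader(fh):
            if not hash_spec:  # *.pyc entries and RECORD itself carry no hash
                continue
            algo, _, expected = hash_spec.partition("=")
            ok = algo == "sha256" and record_digest(SITE_PACKAGES / name) == expected
            print(("OK      " if ok else "MISMATCH") + " " + name)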